commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
3cb6abf9c079cf17a9c4c0def102338945fabfa1 | Fix fasttext loading | tomekkorbak/treehopper,tomekkorbak/treehopper | utils.py | utils.py | import os
import torch
from gensim.models import KeyedVectors
from gensim.models.wrappers import FastText
from vocab import Vocab
def load_word_vectors(embeddings_path):
if os.path.isfile(embeddings_path+ '.pth') and os.path.isfile(embeddings_path+ '.vocab'):
print('==> File found, loading to memory')
vectors = torch.load(embeddings_path + '.pth')
vocab = Vocab(filename=embeddings_path + '.vocab')
return vocab, vectors
if os.path.isfile(embeddings_path + '.model'):
model = KeyedVectors.load(embeddings_path + ".model")
if os.path.isfile(embeddings_path + '.vec'):
model = FastText.load_word2vec_format(embeddings_path + '.vec')
list_of_tokens = model.vocab.keys()
vectors = torch.zeros(len(list_of_tokens), model.vector_size)
with open(embeddings_path + '.vocab', 'w', encoding='utf-8') as f:
for token in list_of_tokens:
f.write(token+'\n')
vocab = Vocab(filename=embeddings_path + '.vocab')
for index, word in enumerate(list_of_tokens):
vectors[index, :] = torch.from_numpy(model[word])
return vocab, vectors
# write unique words from a set of files to a new file
def build_vocab(filenames, vocabfile):
vocab = set()
for filename in filenames:
with open(filename,'r', encoding='utf-8') as f:
for line in f:
tokens = line.rstrip('\n').split(' ')
vocab |= set(tokens)
with open(vocabfile,'w', encoding='utf-8') as f:
for token in vocab:
f.write(token+'\n')
def map_label_to_target_sentiment(label, num_classes = 3):
# num_classes not use yet
target = torch.LongTensor(1)
target[0] = int(label) # nothing to do here as we preprocess data
return target
| import os
import torch
from gensim.models import KeyedVectors
from gensim.models.wrappers import FastText
from vocab import Vocab
def load_word_vectors(embeddings_path):
if os.path.isfile(embeddings_path+ '.pth') and os.path.isfile(embeddings_path+ '.vocab'):
print('==> File found, loading to memory')
vectors = torch.load(embeddings_path + '.pth')
vocab = Vocab(filename=embeddings_path + '.vocab')
return vocab, vectors
if os.path.isfile(embeddings_path+ '.model'):
model = KeyedVectors.load(embeddings_path + ".model")
if os.path.isfile(embeddings_path + '.vec'):
model = FastText.load_fasttext_format(embeddings_path).wv
list_of_tokens = model.vocab.keys()
vectors = torch.zeros(len(list_of_tokens), model.vector_size)
with open(embeddings_path + '.vocab', 'w', encoding='utf-8') as f:
for token in list_of_tokens:
f.write(token+'\n')
vocab = Vocab(filename=embeddings_path + '.vocab')
for index, word in enumerate(list_of_tokens):
vectors[index, :] = torch.from_numpy(model[word])
return vocab, vectors
# write unique words from a set of files to a new file
def build_vocab(filenames, vocabfile):
vocab = set()
for filename in filenames:
with open(filename,'r', encoding='utf-8') as f:
for line in f:
tokens = line.rstrip('\n').split(' ')
vocab |= set(tokens)
with open(vocabfile,'w', encoding='utf-8') as f:
for token in vocab:
f.write(token+'\n')
def map_label_to_target_sentiment(label, num_classes = 3):
# num_classes not use yet
target = torch.LongTensor(1)
target[0] = int(label) # nothing to do here as we preprocess data
return target
| apache-2.0 | Python |
fe01a92cac476fef87f4a1fc9a862f21eb9aaa4b | Add support for geography option in geodjango | theatlantic/django-south,theatlantic/django-south | south/introspection_plugins/geodjango.py | south/introspection_plugins/geodjango.py | """
GeoDjango introspection rules
"""
import django
from django.conf import settings
from south.modelsinspector import add_introspection_rules
has_gis = "django.contrib.gis" in settings.INSTALLED_APPS
if has_gis:
# Alright,import the field
from django.contrib.gis.db.models.fields import GeometryField
# Make some introspection rules
if django.VERSION[0] == 1 and django.VERSION[1] >= 1:
# Django 1.1's gis module renamed these.
rules = [
(
(GeometryField, ),
[],
{
"srid": ["srid", {"default": 4326}],
"spatial_index": ["spatial_index", {"default": True}],
"dim": ["dim", {"default": 2}],
"geography": ["geography", {"default": False}],
},
),
]
else:
rules = [
(
(GeometryField, ),
[],
{
"srid": ["_srid", {"default": 4326}],
"spatial_index": ["_index", {"default": True}],
"dim": ["_dim", {"default": 2}],
},
),
]
# Install them
add_introspection_rules(rules, ["^django\.contrib\.gis"]) | """
GeoDjango introspection rules
"""
import django
from django.conf import settings
from south.modelsinspector import add_introspection_rules
has_gis = "django.contrib.gis" in settings.INSTALLED_APPS
if has_gis:
# Alright,import the field
from django.contrib.gis.db.models.fields import GeometryField
# Make some introspection rules
if django.VERSION[0] == 1 and django.VERSION[1] >= 1:
# Django 1.1's gis module renamed these.
rules = [
(
(GeometryField, ),
[],
{
"srid": ["srid", {"default": 4326}],
"spatial_index": ["spatial_index", {"default": True}],
"dim": ["dim", {"default": 2}],
},
),
]
else:
rules = [
(
(GeometryField, ),
[],
{
"srid": ["_srid", {"default": 4326}],
"spatial_index": ["_index", {"default": True}],
"dim": ["_dim", {"default": 2}],
},
),
]
# Install them
add_introspection_rules(rules, ["^django\.contrib\.gis"]) | apache-2.0 | Python |
037fd3abf2c9a0942b70b83c0504a082cfeadabb | FIX integer division to double slash | pdebuyl/pyh5md | examples/random_walk_1d_analysis.py | examples/random_walk_1d_analysis.py | # -*- coding: utf-8 -*-
# Copyright 2012, 2013, 2016 Pierre de Buyl
# Copyright 2013 Felix Hoëfling
#
# This file is part of pyh5md
#
# pyh5md is free software and is licensed under the modified BSD license (see
# LICENSE file).
import numpy as np
import matplotlib.pyplot as plt
from pyh5md import File, element
# Open a H5MD file
f = File('walk_1d.h5', 'r')
# Open a trajectory group
part = f.particles_group('particles')
# Open trajectory position data element in the trajectory group
part_pos = element(part, 'position')
# Get data and time
r = part_pos.value
r_time = part_pos.time
# Compute the time-averaged mean-square displacement,
# drop large correlation times due to insufficient statistics
T = r.shape[0]
msd = np.empty((T//4, r.shape[1]))
time = r_time[:T//4]
for n in range(T//4):
# the sum over "axis=2" is over the spatial components of the positions
msd[n] = np.mean(np.sum(pow(r[n:] - r[:T-n], 2), axis=2), axis=0)
# Compute mean and standard error of mean (particle- and component-wise)
msd_mean = msd.mean(axis=1)
msd_err = msd.std(axis=1) / np.sqrt(msd.shape[1] - 1)
# Display the MSD and its standard error
plt.plot(time, msd_mean, 'k-', label=r'$\langle [{\bf r}(t)-{\bf r}(0)]^2\rangle$')
plt.plot(time, msd_mean+msd_err, 'k:', label=r'$\langle [{\bf r}(t)-{\bf r}(0)]^2\rangle \pm \sigma$')
plt.plot(time, msd_mean-msd_err, 'k:')
# display reference line for long-time diffusion with D = <a^2> / (2 d <\tau>),
# here: <a^2> = 1, <\tau> = 0.1, and d=1
plt.plot(time, 2 * (.5 * 1 / 0.1 / 1) * time, 'k--', label=r'$2 D t$')
plt.xlabel(r'$t$')
plt.ylabel(r'$MSD(t)$')
plt.xscale('log')
plt.yscale('log')
plt.legend(loc='upper left')
# Create a new figure
plt.figure()
# Obtain and plot the center_of_mass observable
f.observables = f.require_group('observables')
obs_com = element(f.observables, 'center_of_mass')
plt.plot(obs_com.time, obs_com.value, 'k-')
plt.xlabel(r'$t$')
plt.ylabel(r'center of mass')
# Close the file
f.close()
plt.show()
| # -*- coding: utf-8 -*-
# Copyright 2012, 2013, 2016 Pierre de Buyl
# Copyright 2013 Felix Hoëfling
#
# This file is part of pyh5md
#
# pyh5md is free software and is licensed under the modified BSD license (see
# LICENSE file).
import numpy as np
import matplotlib.pyplot as plt
from pyh5md import File, element
# Open a H5MD file
f = File('walk_1d.h5', 'r')
# Open a trajectory group
part = f.particles_group('particles')
# Open trajectory position data element in the trajectory group
part_pos = element(part, 'position')
# Get data and time
r = part_pos.value
r_time = part_pos.time
# Compute the time-averaged mean-square displacement,
# drop large correlation times due to insufficient statistics
T = r.shape[0]
msd = np.empty((T/4, r.shape[1]))
time = r_time[:T/4]
for n in range(T/4):
# the sum over "axis=2" is over the spatial components of the positions
msd[n] = np.mean(np.sum(pow(r[n:] - r[:T-n], 2), axis=2), axis=0)
# Compute mean and standard error of mean (particle- and component-wise)
msd_mean = msd.mean(axis=1)
msd_err = msd.std(axis=1) / np.sqrt(msd.shape[1] - 1)
# Display the MSD and its standard error
plt.plot(time, msd_mean, 'k-', label=r'$\langle [{\bf r}(t)-{\bf r}(0)]^2\rangle$')
plt.plot(time, msd_mean+msd_err, 'k:', label=r'$\langle [{\bf r}(t)-{\bf r}(0)]^2\rangle \pm \sigma$')
plt.plot(time, msd_mean-msd_err, 'k:')
# display reference line for long-time diffusion with D = <a^2> / (2 d <\tau>),
# here: <a^2> = 1, <\tau> = 0.1, and d=1
plt.plot(time, 2 * (.5 * 1 / 0.1 / 1) * time, 'k--', label=r'$2 D t$')
plt.xlabel(r'$t$')
plt.ylabel(r'$MSD(t)$')
plt.xscale('log')
plt.yscale('log')
plt.legend(loc='upper left')
# Create a new figure
plt.figure()
# Obtain and plot the center_of_mass observable
f.observables = f.require_group('observables')
obs_com = element(f.observables, 'center_of_mass')
plt.plot(obs_com.time, obs_com.value, 'k-')
plt.xlabel(r'$t$')
plt.ylabel(r'center of mass')
# Close the file
f.close()
plt.show()
| bsd-3-clause | Python |
7c2b1dbed09fe08b44fdbbd50ac59af8e5357ea6 | add published to admin | MichalMaM/ella,WhiskeyMedia/ella,petrlosa/ella,petrlosa/ella,whalerock/ella,ella/ella,whalerock/ella,MichalMaM/ella,whalerock/ella,WhiskeyMedia/ella | ella/articles/admin.py | ella/articles/admin.py | from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from ella.core.admin import PublishableAdmin, ListingInlineAdmin
from ella.articles.models import Article
class ArticleAdmin(PublishableAdmin):
ordering = ('-created',)
fieldsets = (
(_("Article heading"), {'fields': ('title', 'upper_title', 'updated', 'slug')}),
(_("Article contents"), {'fields': ('description', 'content')}),
(_("Metadata"), {'fields': ('category', 'authors', 'source', 'photo')}),
(_("Publication"), {'fields': (('publish_from', 'publish_to'), 'published', 'static')}),
)
inlines = [ListingInlineAdmin]
admin.site.register(Article, ArticleAdmin)
| from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from ella.core.admin import PublishableAdmin, ListingInlineAdmin
from ella.articles.models import Article
class ArticleAdmin(PublishableAdmin):
ordering = ('-created',)
fieldsets = (
(_("Article heading"), {'fields': ('title', 'upper_title', 'updated', 'slug')}),
(_("Article contents"), {'fields': ('description', 'content')}),
(_("Metadata"), {'fields': ('category', 'authors', 'source', 'photo')}),
(_("Publication"), {'fields': (('publish_from', 'publish_to'), 'static')}),
)
inlines = [ListingInlineAdmin]
admin.site.register(Article, ArticleAdmin)
| bsd-3-clause | Python |
5b011488b5fcfd17f2029e833b757d24d437908e | Revert to Beta as document_page is Beta | OCA/knowledge,OCA/knowledge,OCA/knowledge | document_page_project/__manifest__.py | document_page_project/__manifest__.py | # Copyright 2019 ForgeFlow S.L. (https://www.forgeflow.com) - Lois Rilo
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
"name": "Document Page Project",
"summary": "This module links document pages to projects",
"version": "13.0.1.0.1",
"development_status": "Beta",
"category": "Project",
"author": "ForgeFlow, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/knowledge",
"license": "AGPL-3",
"depends": ["project", "document_page"],
"data": ["views/document_page_views.xml", "views/project_project_views.xml"],
"installable": True,
}
| # Copyright 2019 ForgeFlow S.L. (https://www.forgeflow.com) - Lois Rilo
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
"name": "Document Page Project",
"summary": "This module links document pages to projects",
"version": "13.0.1.0.1",
"development_status": "Production/Stable",
"category": "Project",
"author": "ForgeFlow, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/knowledge",
"license": "AGPL-3",
"depends": ["project", "document_page"],
"data": ["views/document_page_views.xml", "views/project_project_views.xml"],
"installable": True,
}
| agpl-3.0 | Python |
3277c486d337c87b8c7c0d0fea1e8a5be4c48deb | Add function to print planet as roman numeral. | StephenSwat/eve_lunar_mining_organiser,StephenSwat/eve_lunar_mining_organiser | elmo/eve_sde/models.py | elmo/eve_sde/models.py | from django.db import models
import roman
class Region(models.Model):
id = models.IntegerField(primary_key=True)
name = models.CharField(
db_index=True,
unique=True,
max_length=64
)
def __str__(self):
return self.name
class Constellation(models.Model):
id = models.IntegerField(primary_key=True)
region = models.ForeignKey(
Region,
related_name='constellations',
db_index=True)
name = models.CharField(
db_index=True,
unique=True,
max_length=64
)
def __str__(self):
return self.name
class SolarSystem(models.Model):
id = models.IntegerField(primary_key=True)
constellation = models.ForeignKey(
Constellation,
related_name='systems',
db_index=True
)
name = models.CharField(
db_index=True,
unique=True,
max_length=64
)
security = models.FloatField()
def __str__(self):
return self.name
class Planet(models.Model):
id = models.IntegerField(primary_key=True)
system = models.ForeignKey(
SolarSystem,
related_name='planets',
db_index=True
)
number = models.IntegerField(db_index=True)
def as_roman(self):
return roman.toRoman(self.number)
class Moon(models.Model):
id = models.IntegerField(primary_key=True)
planet = models.ForeignKey(
Planet,
related_name='moons',
db_index=True
)
number = models.IntegerField(db_index=True)
| from django.db import models
class Region(models.Model):
id = models.IntegerField(primary_key=True)
name = models.CharField(
db_index=True,
unique=True,
max_length=64
)
def __str__(self):
return self.name
class Constellation(models.Model):
id = models.IntegerField(primary_key=True)
region = models.ForeignKey(
Region,
related_name='constellations',
db_index=True)
name = models.CharField(
db_index=True,
unique=True,
max_length=64
)
def __str__(self):
return self.name
class SolarSystem(models.Model):
id = models.IntegerField(primary_key=True)
constellation = models.ForeignKey(
Constellation,
related_name='systems',
db_index=True
)
name = models.CharField(
db_index=True,
unique=True,
max_length=64
)
security = models.FloatField()
def __str__(self):
return self.name
class Planet(models.Model):
id = models.IntegerField(primary_key=True)
system = models.ForeignKey(
SolarSystem,
related_name='planets',
db_index=True
)
number = models.IntegerField(db_index=True)
class Moon(models.Model):
id = models.IntegerField(primary_key=True)
planet = models.ForeignKey(
Planet,
related_name='moons',
db_index=True
)
number = models.IntegerField(db_index=True)
| mit | Python |
f40bf1441121c138877e27bd23bcef73cf5c2cef | Move ok response creation to pytest fixture | Vnet-as/cisco-olt-http-client,beezz/cisco-olt-http-client | cisco_olt_http/tests/test_operations.py | cisco_olt_http/tests/test_operations.py |
import os
import pytest
import requests
from cisco_olt_http import operations
from cisco_olt_http.client import Client
@pytest.fixture
def data_dir():
return os.path.abspath(
os.path.join(os.path.dirname(__file__), 'data'))
@pytest.fixture
def ok_response(data_dir, mocker):
response = mocker.Mock(autospec=requests.Response)
with open(os.path.join(data_dir, 'ok_response.xml')) as of:
response.content = of.read()
return response
def test_get_data():
client = Client('http://base-url')
show_equipment_op = operations.ShowEquipmentOp(client)
op_data = show_equipment_op.get_data()
assert op_data
class TestOperationResult:
def test_ok_response(self, ok_response):
operation_result = operations.OperationResult(ok_response)
assert not operation_result.error
assert operation_result.error_str == 'OK'
|
import os
import pytest
import requests
from cisco_olt_http import operations
from cisco_olt_http.client import Client
@pytest.fixture
def data_dir():
return os.path.abspath(
os.path.join(os.path.dirname(__file__), 'data'))
def test_get_data():
client = Client('http://base-url')
show_equipment_op = operations.ShowEquipmentOp(client)
op_data = show_equipment_op.get_data()
assert op_data
class TestOperationResult:
def test_ok_response(self, data_dir, mocker):
response = mocker.Mock(autospec=requests.Response)
with open(os.path.join(data_dir, 'ok_response.xml')) as of:
response.content = of.read()
operation_result = operations.OperationResult(response)
assert not operation_result.error
assert operation_result.error_str == 'OK'
| mit | Python |
9f07865d3a57ec4683d4d7e47e6d1f1568d8dd29 | Fix test_get_replay_file_name for windows | pjbull/cookiecutter,Springerle/cookiecutter,cguardia/cookiecutter,venumech/cookiecutter,hackebrot/cookiecutter,takeflight/cookiecutter,audreyr/cookiecutter,ramiroluz/cookiecutter,michaeljoseph/cookiecutter,terryjbates/cookiecutter,Springerle/cookiecutter,stevepiercy/cookiecutter,takeflight/cookiecutter,terryjbates/cookiecutter,willingc/cookiecutter,hackebrot/cookiecutter,luzfcb/cookiecutter,agconti/cookiecutter,moi65/cookiecutter,moi65/cookiecutter,michaeljoseph/cookiecutter,audreyr/cookiecutter,cguardia/cookiecutter,stevepiercy/cookiecutter,venumech/cookiecutter,ramiroluz/cookiecutter,dajose/cookiecutter,benthomasson/cookiecutter,dajose/cookiecutter,willingc/cookiecutter,pjbull/cookiecutter,christabor/cookiecutter,benthomasson/cookiecutter,luzfcb/cookiecutter,christabor/cookiecutter,agconti/cookiecutter | tests/replay/test_replay.py | tests/replay/test_replay.py | # -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
import os
import pytest
from cookiecutter import replay, main, exceptions
def test_get_replay_file_name():
"""Make sure that replay.get_file_name generates a valid json file path."""
exp_replay_file_name = os.path.join('foo', 'bar.json')
assert replay.get_file_name('foo', 'bar') == exp_replay_file_name
@pytest.fixture(params=[
{'no_input': True},
{'extra_context': {}},
{'no_input': True, 'extra_context': {}},
])
def invalid_kwargs(request):
return request.param
def test_raise_on_invalid_mode(invalid_kwargs):
with pytest.raises(exceptions.InvalidModeException):
main.cookiecutter('foo', replay=True, **invalid_kwargs)
def test_main_does_not_invoke_dump_but_load(mocker):
mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config')
mock_gen_context = mocker.patch('cookiecutter.main.generate_context')
mock_gen_files = mocker.patch('cookiecutter.main.generate_files')
mock_replay_dump = mocker.patch('cookiecutter.main.dump')
mock_replay_load = mocker.patch('cookiecutter.main.load')
main.cookiecutter('foobar', replay=True)
assert not mock_prompt.called
assert not mock_gen_context.called
assert not mock_replay_dump.called
assert mock_replay_load.called
assert mock_gen_files.called
def test_main_does_not_invoke_load_but_dump(mocker):
mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config')
mock_gen_context = mocker.patch('cookiecutter.main.generate_context')
mock_gen_files = mocker.patch('cookiecutter.main.generate_files')
mock_replay_dump = mocker.patch('cookiecutter.main.dump')
mock_replay_load = mocker.patch('cookiecutter.main.load')
main.cookiecutter('foobar', replay=False)
assert mock_prompt.called
assert mock_gen_context.called
assert mock_replay_dump.called
assert not mock_replay_load.called
assert mock_gen_files.called
| # -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
import pytest
from cookiecutter import replay, main, exceptions
def test_get_replay_file_name():
"""Make sure that replay.get_file_name generates a valid json file path."""
assert replay.get_file_name('foo', 'bar') == 'foo/bar.json'
@pytest.fixture(params=[
{'no_input': True},
{'extra_context': {}},
{'no_input': True, 'extra_context': {}},
])
def invalid_kwargs(request):
return request.param
def test_raise_on_invalid_mode(invalid_kwargs):
with pytest.raises(exceptions.InvalidModeException):
main.cookiecutter('foo', replay=True, **invalid_kwargs)
def test_main_does_not_invoke_dump_but_load(mocker):
mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config')
mock_gen_context = mocker.patch('cookiecutter.main.generate_context')
mock_gen_files = mocker.patch('cookiecutter.main.generate_files')
mock_replay_dump = mocker.patch('cookiecutter.main.dump')
mock_replay_load = mocker.patch('cookiecutter.main.load')
main.cookiecutter('foobar', replay=True)
assert not mock_prompt.called
assert not mock_gen_context.called
assert not mock_replay_dump.called
assert mock_replay_load.called
assert mock_gen_files.called
def test_main_does_not_invoke_load_but_dump(mocker):
mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config')
mock_gen_context = mocker.patch('cookiecutter.main.generate_context')
mock_gen_files = mocker.patch('cookiecutter.main.generate_files')
mock_replay_dump = mocker.patch('cookiecutter.main.dump')
mock_replay_load = mocker.patch('cookiecutter.main.load')
main.cookiecutter('foobar', replay=False)
assert mock_prompt.called
assert mock_gen_context.called
assert mock_replay_dump.called
assert not mock_replay_load.called
assert mock_gen_files.called
| bsd-3-clause | Python |
19af4b5c8c849750dd0885ea4fcfb651545b7985 | Remove disallowed fields before resaving on migrations. | alphagov/backdrop,alphagov/backdrop,alphagov/backdrop | migrations/002_add_month_start.py | migrations/002_add_month_start.py | """
Add _week_start_at field to all documents in all collections
"""
from backdrop.core.bucket import utc
from backdrop.core.records import Record
import logging
log = logging.getLogger(__name__)
def up(db):
for name in db.collection_names():
log.info("Migrating collection: {0}".format(name))
collection = db[name]
query = {
"_timestamp": {"$exists": True},
"_month_start_at": {"$exists": False}
}
for document in collection.find(query):
document['_timestamp'] = utc(document['_timestamp'])
if '_week_start_at' in document:
document.pop('_week_start_at')
if '_updated_at' in document:
document.pop('_updated_at')
record = Record(document)
collection.save(record.to_mongo())
| """
Add _week_start_at field to all documents in all collections
"""
from backdrop.core.bucket import utc
from backdrop.core.records import Record
import logging
log = logging.getLogger(__name__)
def up(db):
for name in db.collection_names():
log.info("Migrating collection: {0}".format(name))
collection = db[name]
query = {
"_timestamp": {"$exists": True},
"_month_start_at": {"$exists": False}
}
for document in collection.find(query):
document['_timestamp'] = utc(document['_timestamp'])
if '_week_start_at' in document:
document.pop('_week_start_at')
record = Record(document)
collection.save(record.to_mongo())
| mit | Python |
39c6b5b14153fe3c7fb03f0e4f7c96fb90e0e91c | Fix openssl dependant recipe: scrypt (and grants python3 compatibility) | PKRoma/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,germn/python-for-android,kronenpj/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,germn/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,kivy/python-for-android,kivy/python-for-android,germn/python-for-android,PKRoma/python-for-android,kivy/python-for-android,germn/python-for-android,kronenpj/python-for-android,kivy/python-for-android,germn/python-for-android,germn/python-for-android,kronenpj/python-for-android,kivy/python-for-android | pythonforandroid/recipes/scrypt/__init__.py | pythonforandroid/recipes/scrypt/__init__.py | from pythonforandroid.recipe import CythonRecipe
class ScryptRecipe(CythonRecipe):
version = '0.8.6'
url = 'https://bitbucket.org/mhallin/py-scrypt/get/v{version}.zip'
depends = ['setuptools', 'openssl']
call_hostpython_via_targetpython = False
patches = ["remove_librt.patch"]
def get_recipe_env(self, arch, with_flags_in_cc=True):
"""
Adds openssl recipe to include and library path.
"""
env = super(ScryptRecipe, self).get_recipe_env(arch, with_flags_in_cc)
openssl_recipe = self.get_recipe('openssl', self.ctx)
env['CFLAGS'] += openssl_recipe.include_flags(arch)
env['LDFLAGS'] += ' -L{}'.format(self.ctx.get_libs_dir(arch.arch))
env['LDFLAGS'] += ' -L{}'.format(self.ctx.libs_dir)
env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch)
env['LIBS'] = env.get('LIBS', '') + openssl_recipe.link_libs_flags()
return env
recipe = ScryptRecipe()
| import os
from pythonforandroid.recipe import CythonRecipe
class ScryptRecipe(CythonRecipe):
version = '0.8.6'
url = 'https://bitbucket.org/mhallin/py-scrypt/get/v{version}.zip'
depends = [('python2', 'python3crystax'), 'setuptools', 'openssl']
call_hostpython_via_targetpython = False
patches = ["remove_librt.patch"]
def get_recipe_env(self, arch, with_flags_in_cc=True):
"""
Adds openssl recipe to include and library path.
"""
env = super(ScryptRecipe, self).get_recipe_env(arch, with_flags_in_cc)
openssl_build_dir = self.get_recipe(
'openssl', self.ctx).get_build_dir(arch.arch)
env['CFLAGS'] += ' -I{}'.format(os.path.join(openssl_build_dir, 'include'))
env['LDFLAGS'] += ' -L{}'.format(
self.ctx.get_libs_dir(arch.arch) +
'-L{}'.format(self.ctx.libs_dir)) + ' -L{}'.format(
openssl_build_dir)
# required additional library and path for Crystax
if self.ctx.ndk == 'crystax':
# only keeps major.minor (discards patch)
python_version = self.ctx.python_recipe.version[0:3]
ndk_dir_python = os.path.join(self.ctx.ndk_dir, 'sources/python/', python_version)
env['LDFLAGS'] += ' -L{}'.format(os.path.join(ndk_dir_python, 'libs', arch.arch))
env['LDFLAGS'] += ' -lpython{}m'.format(python_version)
# until `pythonforandroid/archs.py` gets merged upstream:
# https://github.com/kivy/python-for-android/pull/1250/files#diff-569e13021e33ced8b54385f55b49cbe6
env['CFLAGS'] += ' -I{}/include/python/'.format(ndk_dir_python)
return env
recipe = ScryptRecipe()
| mit | Python |
1bc2fe0ca44d6906bb0044a4d880bd606c2f44d6 | Update postgresql.py | sch3m4/intelmq,pkug/intelmq,robcza/intelmq,pkug/intelmq,aaronkaplan/intelmq,sch3m4/intelmq,certtools/intelmq,robcza/intelmq,sch3m4/intelmq,pkug/intelmq,robcza/intelmq,pkug/intelmq,sch3m4/intelmq,aaronkaplan/intelmq,robcza/intelmq,certtools/intelmq,certtools/intelmq,aaronkaplan/intelmq | src/bots/outputs/postgresql/postgresql.py | src/bots/outputs/postgresql/postgresql.py | import sys
import psycopg2
from lib.bot import *
from lib.utils import *
from lib.event import *
class PostgreSQLBot(Bot):
def init(self):
con = None
try:
self.con = psycopg2.connect(
database=self.parameters.database,
user=self.parameters.user,
#password=self.parameters.password, # FIXME
host=self.parameters.host,
port=self.parameters.port
)
self.logger.info("info: con = %r" %self.con)
except psycopg2.DatabaseError, e:
self.logger.error("Postgresql Problem. Could not connect to the database. Error: %s " % e.pgerror)
self.stop()
self.cur = self.con.cursor()
def process(self):
event = self.receive_message()
if event:
evdict = event.to_dict2() # FIXME: rename the method or use to_dict()
KEYS = ", ".join(evdict.keys())
VALUES = evdict.values()
FVALUES = len(VALUES) * "%s, "
QUERY = "INSERT INTO logentry (" + KEYS + ") VALUES (" + FVALUES[:-2] + ")"
try:
self.cur.execute(QUERY, VALUES)
except psycopg2.DatabaseError, e:
# FIXME: try to use the try:except from start method at lib/bot.py
self.logger.error("Postgresql Problem. Could not INSERT. Error: %s " % e.pgerror)
self.con.commit()
self.acknowledge_message()
if __name__ == "__main__":
bot = PostgreSQLBot(sys.argv[1])
bot.start()
| import sys
import psycopg2
import time
from lib.bot import *
from lib.utils import *
from lib.event import *
from lib.cache import *
class PostgreSQLBot(Bot):
def init(self):
con = None
try:
self.con = psycopg2.connect(
database=self.parameters.database,
user=self.parameters.user,
#password=self.parameters.password,
host=self.parameters.host,
port=self.parameters.port
)
self.logger.info("info: con = %r" %self.con)
except psycopg2.DatabaseError, e:
self.logger.error("Postgresql Problem. Could not connect to the database. Error: %s " % e.pgerror)
self.stop()
self.cur = self.con.cursor()
def process(self):
event = self.receive_message()
if event:
evdict = event.to_dict2()
KEYS = ", ".join(evdict.keys())
VALUES = evdict.values()
FVALUES = len(VALUES) * "%s, "
QUERY = "INSERT INTO logentry (" + KEYS + ") VALUES (" + FVALUES[:-2] + ")"
try:
self.cur.execute(QUERY, VALUES)
except psycopg2.DatabaseError, e:
print QUERY
print VALUES
print "\n\n"
print e.pgerror
self.logger.error("Postgresql Problem. Could not INSERT. Error: %s " % e.pgerror)
time.sleep(5)
self.con.commit()
self.acknowledge_message()
if __name__ == "__main__":
bot = PostgreSQLBot(sys.argv[1])
bot.start()
| agpl-3.0 | Python |
099855fa4e56d6fe54c7260cb149bb41640f0e43 | Add test for populateQueryMetrics | Azure/azure-sdk-for-python,Azure/azure-documentdb-python,Azure/azure-sdk-for-python,Azure/azure-sdk-for-python,Azure/azure-sdk-for-python | test/query_tests.py | test/query_tests.py | import unittest
import pydocumentdb.document_client as document_client
import pydocumentdb.documents as documents
import test.test_config as test_config
class QueryTest(unittest.TestCase):
"""Test to ensure escaping of non-ascii characters from partition key"""
host = test_config._test_config.host
masterKey = test_config._test_config.masterKey
testDbName = 'testDatabase'
testCollectionName = 'testCollection'
@classmethod
def cleanUpTestDatabase(cls):
global client
client = document_client.DocumentClient(cls.host,
{'masterKey': cls.masterKey})
query_iterable = client.QueryDatabases('SELECT * FROM root r WHERE r.id=\'' + cls.testDbName + '\'')
it = iter(query_iterable)
test_db = next(it, None)
if test_db is not None:
client.DeleteDatabase("/dbs/" + cls.testDbName + "/")
""" change """
@classmethod
def tearDownClass(cls):
QueryTest.cleanUpTestDatabase()
def setUp(self):
global created_collection
QueryTest.cleanUpTestDatabase()
created_db = client.CreateDatabase({ 'id': self.testDbName })
collection_definition = { 'id': self.testCollectionName, 'partitionKey': {'paths': ['/pk'],'kind': 'Hash'} }
collection_options = { 'offerThroughput': 10100 }
created_collection = client.CreateCollection(created_db['_self'], collection_definition, collection_options)
document_definition = {'pk': 'pk', 'id':'myId'}
created_doc = client.CreateDocument(created_collection['_self'], document_definition)
def test_first_and_last_slashes_trimmed_for_query_string (self):
query_options = {'partitionKey': 'pk'}
collectionLink = '/dbs/' + self.testDbName + '/colls/' + self.testCollectionName + '/'
query = 'SELECT * from ' + self.testCollectionName
query_iterable = client.QueryDocuments(collectionLink, query, query_options)
iter_list = list(query_iterable)
self.assertEqual(iter_list[0]['id'], 'myId')
def test_populate_query_metrics (self):
query_options = {'partitionKey': 'pk',
'populateQueryMetrics': True}
collectionLink = '/dbs/' + self.testDbName + '/colls/' + self.testCollectionName + '/'
query = 'SELECT * from ' + self.testCollectionName
query_iterable = client.QueryDocuments(collectionLink, query, query_options)
iter_list = list(query_iterable)
self.assertEqual(iter_list[0]['id'], 'myId')
METRICS_HEADER_NAME = 'x-ms-documentdb-query-metrics'
self.assertTrue(METRICS_HEADER_NAME in client.last_response_headers)
metrics_header = client.last_response_headers[METRICS_HEADER_NAME]
# Validate header is well-formed: "key1=value1;key2=value2;etc"
metrics = metrics_header.split(';')
self.assertTrue(len(metrics) > 1)
self.assertTrue(all(['=' in x for x in metrics]))
| import unittest
import pydocumentdb.document_client as document_client
import pydocumentdb.documents as documents
import test.test_config as test_config
class QueryTest(unittest.TestCase):
"""Test to ensure escaping of non-ascii characters from partition key"""
host = test_config._test_config.host
masterKey = test_config._test_config.masterKey
testDbName = 'testDatabase'
testCollectionName = 'testCollection'
@classmethod
def cleanUpTestDatabase(cls):
global client
client = document_client.DocumentClient(cls.host,
{'masterKey': cls.masterKey})
query_iterable = client.QueryDatabases('SELECT * FROM root r WHERE r.id=\'' + cls.testDbName + '\'')
it = iter(query_iterable)
test_db = next(it, None)
if test_db is not None:
client.DeleteDatabase("/dbs/" + cls.testDbName + "/")
""" change """
@classmethod
def tearDownClass(cls):
QueryTest.cleanUpTestDatabase()
def setUp(self):
global created_collection
QueryTest.cleanUpTestDatabase()
created_db = client.CreateDatabase({ 'id': self.testDbName })
collection_definition = { 'id': self.testCollectionName, 'partitionKey': {'paths': ['/pk'],'kind': 'Hash'} }
collection_options = { 'offerThroughput': 10100 }
created_collection = client.CreateCollection(created_db['_self'], collection_definition, collection_options)
def test_first_and_last_slashes_trimmed_for_query_string (self):
document_definition = {'pk': 'pk', 'id':'myId'}
created_doc = client.CreateDocument(created_collection['_self'], document_definition)
query_options = {'partitionKey': 'pk'}
collectionLink = '/dbs/' + self.testDbName + '/colls/' + self.testCollectionName + '/'
query = 'SELECT * from ' + self.testCollectionName
query_iterable = client.QueryDocuments(collectionLink, query, query_options)
iter_list = list(query_iterable)
self.assertEqual(iter_list[0]['id'], 'myId')
| mit | Python |
b615bce7bb9fcdab8d3c364e459d7ce516665feb | fix on messages type | messagebird/python-rest-api | messagebird/verify.py | messagebird/verify.py | from messagebird.base import Base
from messagebird.recipient import Recipient
from messagebird.message import Message
class Verify(Base):
def __init__(self):
self.id = None
self.href = None
self.type = None
self.originator = None
self.reference = None
self.template = None
self.timeout = None
self.tokenLength = None
self.voice = None
self.language = None
self.status = None
self._createdDatetime = None
self._validUntilDatetime= None
self._recipient = None
self._messages = None
@property
def createdDatetime(self):
return self._createdDatetime
@createdDatetime.setter
def createdDatetime(self, value):
self._createdDatetime = self.value_to_time(value)
@property
def validUntilDatetime(self):
return self._validUntilDatetime
@validUntilDatetime.setter
def validUntilDatetime(self, value):
self._validUntilDatetime = self.value_to_time(value)
@property
def recipient(self):
return self._recipient
@recipient.setter
def recipient(self, value):
self._recipient = Recipient().load(value)
@property
def messages(self):
return self._messages
@messages.setter
def messages(self, value):
value = [Message().load(r) for r in value]
self._messages = value
| from messagebird.base import Base
from messagebird.recipient import Recipient
from messagebird.message import Message
class Verify(Base):
def __init__(self):
self.id = None
self.href = None
self.type = None
self.originator = None
self.reference = None
self.template = None
self.timeout = None
self.tokenLength = None
self.voice = None
self.language = None
self.status = None
self._createdDatetime = None
self._validUntilDatetime= None
self._recipient = None
self._messages = None
@property
def createdDatetime(self):
return self._createdDatetime
@createdDatetime.setter
def createdDatetime(self, value):
self._createdDatetime = self.value_to_time(value)
@property
def validUntilDatetime(self):
return self._validUntilDatetime
@validUntilDatetime.setter
def validUntilDatetime(self, value):
self._validUntilDatetime = self.value_to_time(value)
@property
def recipient(self):
return self._recipient
@recipient.setter
def recipient(self, value):
self._recipient = Recipient().load(value)
@property
def messages(self):
return self._messages
@messages.setter
def messages(self, value):
value['items'] = [Message().load(r) for r in value['items']]
self._messages = value
| bsd-2-clause | Python |
d9237f7546efa0552d8af255aa6247ba250db5e8 | fix environ test for mac/win | ponty/easyprocess,ponty/easyprocess,ponty/EasyProcess,ponty/EasyProcess | tests/test_fast/test_env.py | tests/test_fast/test_env.py | from easyprocess import EasyProcess
from nose.tools import eq_, ok_
import sys
import json
python = sys.executable
def pass_env(e):
prog = 'import os,json;print(json.dumps(dict(os.environ)))'
s = EasyProcess([python, '-c', prog], env=e).call().stdout
return json.loads(s)
def test_env():
ok_(len(pass_env(None)))
e = pass_env(None)
eq_(pass_env(e).get('FOO'), None)
e['FOO'] = '2'
eq_(pass_env(e).get('FOO'), '2')
| from easyprocess import EasyProcess
from nose.tools import eq_, ok_
import sys
python = sys.executable
def pass_env(e):
# py37 creates "LC_CTYPE" automatically
prog = 'import os;d=dict(os.environ);d.pop("LC_CTYPE",None);print(d)'
return EasyProcess([python, '-c', prog], env=e).call().stdout
def test_env():
ok_(len(pass_env(None)))
eq_(pass_env({}), '{}')
eq_(pass_env(dict(x='2')), "{'x': '2'}")
| bsd-2-clause | Python |
a31c3ca18473cfa65bdf8538e611606049e145c3 | Remove test_sparse_scipy for now | tum-pbs/PhiFlow,tum-pbs/PhiFlow | tests/test_poisson_solve.py | tests/test_poisson_solve.py | from unittest import TestCase
import numpy as np
from phi import math
from phi.flow import CLOSED, PERIODIC, OPEN, Domain, poisson_solve
from phi.physics.pressuresolver.sparse import SparseCG, SparseSciPy
def _test_solve_no_obstacles(domain, solver):
print('Testing domain with boundaries: %s' % (domain.boundaries,))
# --- Example 1 ---
ex1 = np.tile(np.linspace(1, 0, 5), [4, 1])
ex1 = math.expand_dims(math.expand_dims(ex1, -1), 0) - math.mean(ex1)
# --- Example 2 ---
ex2 = np.zeros([1, 4, 5, 1])
ex2[0, :, 2, 0] = 1
ex2 -= math.mean(ex2)
# --- Stack examples to batch ---
data_in = math.concat([ex1, ex2], axis=0)
p = poisson_solve(domain.centered_grid(data_in), domain, solver=solver)[0]
np.testing.assert_almost_equal(p.laplace().data[:, 1:-1, 1:-1, :], data_in[:, 1:-1, 1:-1, :], decimal=5)
if domain.boundaries is CLOSED:
np.testing.assert_almost_equal(p.laplace().data, data_in, decimal=5)
# rows = math.unstack(p.data, 1)
# for row in rows[1:]:
# np.testing.assert_almost_equal(row, rows[0], decimal=5)
DOMAINS = [
Domain([4, 5], boundaries=CLOSED),
Domain([4, 5], boundaries=OPEN),
Domain([4, 5], boundaries=PERIODIC),
Domain([4, 5], boundaries=[PERIODIC, CLOSED]),
Domain([4, 5], boundaries=[CLOSED, OPEN]),
]
class TestPoissonSolve(TestCase):
def test_sparse_cg(self):
solver = SparseCG()
for domain in DOMAINS:
_test_solve_no_obstacles(domain, solver)
# def test_sparse_scipy(self):
# solver = SparseSciPy()
# for domain in DOMAINS:
# _test_solve_no_obstacles(domain, solver)
| from unittest import TestCase
import numpy as np
from phi import math
from phi.flow import CLOSED, PERIODIC, OPEN, Domain, poisson_solve
from phi.physics.pressuresolver.sparse import SparseCG, SparseSciPy
def _test_solve_no_obstacles(domain, solver):
print('Testing domain with boundaries: %s' % (domain.boundaries,))
# --- Example 1 ---
ex1 = np.tile(np.linspace(1, 0, 5), [4, 1])
ex1 = math.expand_dims(math.expand_dims(ex1, -1), 0) - math.mean(ex1)
# --- Example 2 ---
ex2 = np.zeros([1, 4, 5, 1])
ex2[0, :, 2, 0] = 1
ex2 -= math.mean(ex2)
# --- Stack examples to batch ---
data_in = math.concat([ex1, ex2], axis=0)
p = poisson_solve(domain.centered_grid(data_in), domain, solver=solver)[0]
np.testing.assert_almost_equal(p.laplace().data[:, 1:-1, 1:-1, :], data_in[:, 1:-1, 1:-1, :], decimal=5)
if domain.boundaries is CLOSED:
np.testing.assert_almost_equal(p.laplace().data, data_in, decimal=5)
# rows = math.unstack(p.data, 1)
# for row in rows[1:]:
# np.testing.assert_almost_equal(row, rows[0], decimal=5)
DOMAINS = [
Domain([4, 5], boundaries=CLOSED),
Domain([4, 5], boundaries=OPEN),
Domain([4, 5], boundaries=PERIODIC),
Domain([4, 5], boundaries=[PERIODIC, CLOSED]),
Domain([4, 5], boundaries=[CLOSED, OPEN]),
]
class TestPoissonSolve(TestCase):
def test_sparse_cg(self):
solver = SparseCG()
for domain in DOMAINS:
_test_solve_no_obstacles(domain, solver)
def test_sparse_scipy(self):
solver = SparseSciPy()
for domain in DOMAINS:
_test_solve_no_obstacles(domain, solver)
| mit | Python |
a8a6f23ac4519acdfe744a4ad3d994cbc1c3ef36 | Update deprecated usage of django-guardian "assign" | michaelwisely/django-competition,michaelwisely/django-competition,michaelwisely/django-competition | src/competition/models/organizer_model.py | src/competition/models/organizer_model.py | from django.db import models
from django.dispatch import receiver
from django.db.models.signals import post_save, pre_delete
from django.contrib.auth.models import User
from guardian.shortcuts import assign_perm, remove_perm
from competition.validators import validate_name
from competition.models.competition_model import Competition
class OrganizerRole(models.Model):
class Meta:
app_label = 'competition'
name = models.CharField(max_length=50, validators=[validate_name])
description = models.TextField()
def __str__(self):
return self.name
class Organizer(models.Model):
class Meta:
app_label = 'competition'
competition = models.ForeignKey(Competition)
user = models.ForeignKey(User)
role = models.ManyToManyField(OrganizerRole)
def __str__(self):
return "%s: %s Organizer" % (self.user.username, self.competition.name)
@receiver(post_save, sender=Organizer)
def organizer_post_save(sender, instance, created, **kwargs):
"""Called after an Organizer is saved
Adds competition-specific permissions to corresponding user
"""
# If django is filling in fixtures, don't change anything
if kwargs['raw']:
return
# If we just made this organizer, grant them organizer permissions
if created:
for permission_code in Competition.get_organizer_permissions():
assign_perm(permission_code, instance.user, instance.competition)
@receiver(pre_delete, sender=Organizer)
def organizer_pre_delete(sender, instance, **kwargs):
"""Called before an Organizer is deleted
Removes competition-specific permissions from corresponding user
"""
for permission_code in Competition.get_organizer_permissions():
remove_perm(permission_code, instance.user, instance.competition)
| from django.db import models
from django.dispatch import receiver
from django.db.models.signals import post_save, pre_delete
from django.contrib.auth.models import User
from guardian.shortcuts import assign, remove_perm
from competition.validators import validate_name
from competition.models.competition_model import Competition
class OrganizerRole(models.Model):
class Meta:
app_label = 'competition'
name = models.CharField(max_length=50, validators=[validate_name])
description = models.TextField()
def __str__(self):
return self.name
class Organizer(models.Model):
class Meta:
app_label = 'competition'
competition = models.ForeignKey(Competition)
user = models.ForeignKey(User)
role = models.ManyToManyField(OrganizerRole)
def __str__(self):
return "%s: %s Organizer" % (self.user.username, self.competition.name)
@receiver(post_save, sender=Organizer)
def organizer_post_save(sender, instance, created, **kwargs):
"""Called after an Organizer is saved
Adds competition-specific permissions to corresponding user
"""
# If django is filling in fixtures, don't change anything
if kwargs['raw']:
return
# If we just made this organizer, grant them organizer permissions
if created:
for permission_code in Competition.get_organizer_permissions():
assign(permission_code, instance.user, instance.competition)
@receiver(pre_delete, sender=Organizer)
def organizer_pre_delete(sender, instance, **kwargs):
"""Called before an Organizer is deleted
Removes competition-specific permissions from corresponding user
"""
for permission_code in Competition.get_organizer_permissions():
remove_perm(permission_code, instance.user, instance.competition)
| bsd-3-clause | Python |
87b1bed2c23fa992f16fcd6c4a32a33c6bf8ceb3 | Use `pwndbg.lib.stdio.stdio` to refactor the code | pwndbg/pwndbg,pwndbg/pwndbg,pwndbg/pwndbg,pwndbg/pwndbg | pwndbg/commands/ipython_interactive.py | pwndbg/commands/ipython_interactive.py | """
Command to start an interactive IPython prompt.
"""
import sys
from contextlib import contextmanager
import gdb
import pwndbg.color.message as M
import pwndbg.commands
import pwndbg.lib.stdio
@contextmanager
def switch_to_ipython_env():
"""We need to change stdout/stderr to the default ones, otherwise we can't use tab or autocomplete"""
# Save GDB's excepthook
saved_excepthook = sys.excepthook
# Switch to default stdout/stderr
with pwndbg.lib.stdio.stdio:
yield
# Restore Python's default ps1, ps2, and excepthook for GDB's `pi` command
sys.ps1 = ">>> "
sys.ps2 = "... "
sys.excepthook = saved_excepthook
@pwndbg.commands.ArgparsedCommand("Start an interactive IPython prompt.")
def ipi():
with switch_to_ipython_env():
# Use `gdb.execute` to embed IPython into GDB's variable scope
try:
gdb.execute("pi import IPython")
except gdb.error:
print(
M.warn(
"Cannot import IPython.\n"
"You need to install IPython if you want to use this command.\n"
"Maybe you can try `pip install ipython` first."
)
)
return
code4ipython = """import jedi
import pwn
jedi.Interpreter._allow_descriptor_getattr_default = False
IPython.embed(colors='neutral',banner1='',confirm_exit=False,simple_prompt=False)
"""
gdb.execute(f"py\n{code4ipython}")
| """
Command to start an interactive IPython prompt.
"""
import sys
from contextlib import contextmanager
import gdb
import pwndbg.color.message as M
import pwndbg.commands
@contextmanager
def switch_to_ipython_env():
"""We need to change stdout/stderr to the default ones, otherwise we can't use tab or autocomplete"""
# Save GDB's stdout and stderr
saved_stdout = sys.stdout
saved_stderr = sys.stderr
saved_excepthook = sys.excepthook
# Use Python's default stdout and stderr
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
yield
# Restore GDB's stdout and stderr
sys.stdout = saved_stdout
sys.stderr = saved_stderr
# Restore Python's default ps1, ps2, and excepthook for GDB's `pi` command
sys.ps1 = ">>> "
sys.ps2 = "... "
sys.excepthook = saved_excepthook
@pwndbg.commands.ArgparsedCommand("Start an interactive IPython prompt.")
def ipi():
with switch_to_ipython_env():
# Use `gdb.execute` to embed IPython into GDB's variable scope
try:
gdb.execute("pi import IPython")
except gdb.error:
print(
M.warn(
"Cannot import IPython.\n"
"You need to install IPython if you want to use this command.\n"
"Maybe you can try `pip install ipython` first."
)
)
return
code4ipython = """import jedi
import pwn
jedi.Interpreter._allow_descriptor_getattr_default = False
IPython.embed(colors='neutral',banner1='',confirm_exit=False,simple_prompt=False)
"""
gdb.execute(f"py\n{code4ipython}")
| mit | Python |
b2fed60512b17b19850f06f4b0cdb7114dc3e27b | fix super method | LilliJane/psychic-waffle,LilliJane/psychic-waffle,LilliJane/psychic-waffle | statues/models.py | statues/models.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.db import models
from django.utils import timezone
from django.template.defaultfilters import slugify
# Create your models here.
class Statue(models.Model):
""" Default values for latitude and longitude are the ones from The Hague
"""
name = models.CharField(max_length=200)
slug = models.SlugField(max_length=32, default='', blank=True)
pub_date = models.DateTimeField('date published')
enable = models.BooleanField(default=False)
outdoor = models.BooleanField(default=True)
description = models.CharField(max_length=500)
latitute = models.FloatField(default=52.0715712)
longitude = models.FloatField(default=4.169786)
pictures = models.ImageField(upload_to = 'pic_folder/', default = 'pic_folder/no-img.png')
def __str__(self):
return self.name
def was_published_recently(self):
return self.pub_date >= timezone.now() - datetime.timedelta(days=1)
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(Statue, self).save(*args, **kwargs)
class Beacon(models.Model):
""" Beacon should be joined to a precised statue
"""
statue = models.ForeignKey(Statue, on_delete=models.CASCADE)
uuid = models.CharField(max_length=60)
min_value = models.IntegerField()
max_value = models.IntegerField()
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.db import models
from django.utils import timezone
from django.template.defaultfilters import slugify
# Create your models here.
class Statue(models.Model):
""" Default values for latitude and longitude are the ones from The Hague
"""
name = models.CharField(max_length=200)
slug = models.SlugField(max_length=32, default='', blank=True)
pub_date = models.DateTimeField('date published')
enable = models.BooleanField(default=False)
outdoor = models.BooleanField(default=True)
description = models.CharField(max_length=500)
latitute = models.FloatField(default=52.0715712)
longitude = models.FloatField(default=4.169786)
pictures = models.ImageField(upload_to = 'pic_folder/', default = 'pic_folder/no-img.png')
def __str__(self):
return self.name
def was_published_recently(self):
return self.pub_date >= timezone.now() - datetime.timedelta(days=1)
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super().save(*args, **kwargs)
class Beacon(models.Model):
""" Beacon should be joined to a precised statue
"""
statue = models.ForeignKey(Statue, on_delete=models.CASCADE)
uuid = models.CharField(max_length=60)
min_value = models.IntegerField()
max_value = models.IntegerField()
| apache-2.0 | Python |
ec22975ebfad85e0c7be92acfd326aa9b4f34214 | Remove unused variable from private_torrents | Danfocus/Flexget,xfouloux/Flexget,malkavi/Flexget,ibrahimkarahan/Flexget,ratoaq2/Flexget,dsemi/Flexget,spencerjanssen/Flexget,ianstalk/Flexget,X-dark/Flexget,Pretagonist/Flexget,OmgOhnoes/Flexget,patsissons/Flexget,ianstalk/Flexget,xfouloux/Flexget,tarzasai/Flexget,sean797/Flexget,asm0dey/Flexget,drwyrm/Flexget,Danfocus/Flexget,jawilson/Flexget,qk4l/Flexget,Danfocus/Flexget,JorisDeRieck/Flexget,X-dark/Flexget,Flexget/Flexget,oxc/Flexget,oxc/Flexget,cvium/Flexget,poulpito/Flexget,thalamus/Flexget,malkavi/Flexget,tobinjt/Flexget,vfrc2/Flexget,tarzasai/Flexget,tobinjt/Flexget,ZefQ/Flexget,tobinjt/Flexget,malkavi/Flexget,Flexget/Flexget,spencerjanssen/Flexget,sean797/Flexget,crawln45/Flexget,LynxyssCZ/Flexget,lildadou/Flexget,malkavi/Flexget,dsemi/Flexget,lildadou/Flexget,asm0dey/Flexget,offbyone/Flexget,grrr2/Flexget,ZefQ/Flexget,tvcsantos/Flexget,jawilson/Flexget,patsissons/Flexget,voriux/Flexget,grrr2/Flexget,cvium/Flexget,grrr2/Flexget,ibrahimkarahan/Flexget,drwyrm/Flexget,JorisDeRieck/Flexget,X-dark/Flexget,antivirtel/Flexget,sean797/Flexget,thalamus/Flexget,qvazzler/Flexget,OmgOhnoes/Flexget,qvazzler/Flexget,asm0dey/Flexget,spencerjanssen/Flexget,gazpachoking/Flexget,Flexget/Flexget,crawln45/Flexget,patsissons/Flexget,crawln45/Flexget,poulpito/Flexget,OmgOhnoes/Flexget,lildadou/Flexget,jacobmetrick/Flexget,LynxyssCZ/Flexget,ZefQ/Flexget,offbyone/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,tobinjt/Flexget,v17al/Flexget,LynxyssCZ/Flexget,Flexget/Flexget,ianstalk/Flexget,offbyone/Flexget,jawilson/Flexget,crawln45/Flexget,qvazzler/Flexget,antivirtel/Flexget,ratoaq2/Flexget,ibrahimkarahan/Flexget,vfrc2/Flexget,dsemi/Flexget,tsnoam/Flexget,camon/Flexget,jawilson/Flexget,xfouloux/Flexget,qk4l/Flexget,tarzasai/Flexget,voriux/Flexget,antivirtel/Flexget,jacobmetrick/Flexget,v17al/Flexget,v17al/Flexget,tsnoam/Flexget,qk4l/Flexget,tvcsantos/Flexget,cvium/Flexget,Pretagonist/Flexget,Jo
risDeRieck/Flexget,tsnoam/Flexget,drwyrm/Flexget,ratoaq2/Flexget,jacobmetrick/Flexget,camon/Flexget,poulpito/Flexget,oxc/Flexget,vfrc2/Flexget,Danfocus/Flexget,gazpachoking/Flexget,thalamus/Flexget,Pretagonist/Flexget | flexget/plugins/filter/private_torrents.py | flexget/plugins/filter/private_torrents.py | import logging
from flexget.plugin import register_plugin, priority
log = logging.getLogger('priv_torrents')
class FilterPrivateTorrents(object):
"""How to handle private torrents.
private_torrents: yes|no
Example::
private_torrents: no
This would reject all torrent entries with private flag.
Example::
private_torrents: yes
This would reject all public torrents.
Non-torrent content is not interviened.
"""
def validator(self):
from flexget import validator
return validator.factory('boolean')
@priority(127)
def on_feed_modify(self, feed):
private_torrents = feed.config['private_torrents']
for entry in feed.accepted:
if not 'torrent' in entry:
log.debug('`%s` is not a torrent' % entry['title'])
continue
private = entry['torrent'].private
if not private_torrents and private:
feed.reject(entry, 'torrent is marked as private', remember=True)
elif private_torrents and not private:
feed.reject(entry, 'public torrent', remember=True)
register_plugin(FilterPrivateTorrents, 'private_torrents')
| import logging
from flexget.plugin import register_plugin, priority
log = logging.getLogger('priv_torrents')
class FilterPrivateTorrents(object):
"""How to handle private torrents.
private_torrents: yes|no
Example::
private_torrents: no
This would reject all torrent entries with private flag.
Example::
private_torrents: yes
This would reject all public torrents.
Non-torrent content is not interviened.
"""
def validator(self):
from flexget import validator
return validator.factory('boolean')
@priority(127)
def on_feed_modify(self, feed):
private_torrents = feed.config['private_torrents']
rejected = False
for entry in feed.accepted:
if not 'torrent' in entry:
log.debug('`%s` is not a torrent' % entry['title'])
continue
private = entry['torrent'].private
if not private_torrents and private:
feed.reject(entry, 'torrent is marked as private', remember=True)
rejected = True
if private_torrents and not private:
feed.reject(entry, 'public torrent', remember=True)
rejected = True
register_plugin(FilterPrivateTorrents, 'private_torrents')
| mit | Python |
79c7f2c7294235cf14e4e6652b348f5eda9079df | bump v1.0.1 | ValvePython/steam | steam/__init__.py | steam/__init__.py | __version__ = "1.0.1"
__author__ = "Rossen Georgiev"
version_info = (1, 0, 1)
| __version__ = "1.0.0"
__author__ = "Rossen Georgiev"
version_info = (1, 0, 0)
| mit | Python |
33fcf7909421104844a952805d043e8de9e5dbcc | Test merge worker with no merges, but with commits | bussiere/gitfs,PressLabs/gitfs,rowhit/gitfs,PressLabs/gitfs,ksmaheshkumar/gitfs | tests/workers/test_merge.py | tests/workers/test_merge.py | import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
def test_on_idle_with_commits_and_merges(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle("commits", "merges")
mocked_commit.assert_called_once_with("commits")
assert mocked_want_to_merge.set.call_count == 1
assert commits == []
assert merges == []
def test_on_idle_with_merges_and_no_commits(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle(None, "merges")
assert mocked_want_to_merge.set.call_count == 1
assert commits is None
assert merges == []
def test_on_idle_with_commits_and_no_merges(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle("commits", None)
mocked_commit.assert_called_once_with("commits")
assert mocked_want_to_merge.set.call_count == 1
assert commits == []
assert merges is None
| import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
def test_on_idle_with_commits_and_merges(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle("commits", "merges")
mocked_commit.assert_called_once_with("commits")
assert mocked_want_to_merge.set.call_count == 1
assert commits == []
assert merges == []
def test_on_idle_with_merges_and_no_commits(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle(None, "merges")
assert mocked_want_to_merge.set.call_count == 1
assert commits == None
assert merges == []
| apache-2.0 | Python |
6a00d7272a29afd21ad40768f87859e7d20526a6 | Whitelist <br> tag | emre/steemrocks,emre/steemrocks | steemrocks/app.py | steemrocks/app.py | from flask import Flask, render_template, request, redirect, abort, g, url_for
from .tx_listener import listen
from .models import Account
from .utils import get_steem_conn, Pagination
from .settings import SITE_URL
import bleach
app = Flask(__name__)
PER_PAGE = 40
@app.cli.command()
def listen_transactions():
"""
This command starts listening transactions on the network and saves them\
into the database.
$ flask listen_transactions
"""
listen()
@app.route('/')
def index():
if request.query_string and request.args.get('account'):
return redirect('/' + request.args.get('account'))
return render_template('index.html')
@app.route('/<username>', defaults={'page': 1})
@app.route('/<username>/page/<int:page>')
def profile(username, page):
if username.startswith("@"):
username = username.replace("@", "")
account = Account(username, get_steem_conn()).set_account_deta()
if not account.account_data:
abort(404)
start = page * PER_PAGE
if page == 1:
start = 0
pagination = Pagination(page, PER_PAGE, account.get_operation_count())
operations = account.get_operations(start=start, end=PER_PAGE)
return render_template(
'profile.html', account=account,
operations=operations, site_url=SITE_URL, pagination=pagination)
@app.teardown_appcontext
def close_db(error):
"""Closes the database again at the end of the request."""
if hasattr(g, 'mysql_db'):
g.mysql_db.close()
def url_for_other_page(page):
args = request.view_args.copy()
args['page'] = page
return url_for(request.endpoint, **args)
def strip_tags(text):
return bleach.clean(text, tags=["strong", "a", "i", "small", "br"])
app.jinja_env.globals['url_for_other_page'] = url_for_other_page
app.jinja_env.globals['clean'] = strip_tags
| from flask import Flask, render_template, request, redirect, abort, g, url_for
from .tx_listener import listen
from .models import Account
from .utils import get_steem_conn, Pagination
from .settings import SITE_URL
import bleach
app = Flask(__name__)
PER_PAGE = 40
@app.cli.command()
def listen_transactions():
"""
This command starts listening transactions on the network and saves them\
into the database.
$ flask listen_transactions
"""
listen()
@app.route('/')
def index():
if request.query_string and request.args.get('account'):
return redirect('/' + request.args.get('account'))
return render_template('index.html')
@app.route('/<username>', defaults={'page': 1})
@app.route('/<username>/page/<int:page>')
def profile(username, page):
if username.startswith("@"):
username = username.replace("@", "")
account = Account(username, get_steem_conn()).set_account_deta()
if not account.account_data:
abort(404)
start = page * PER_PAGE
if page == 1:
start = 0
pagination = Pagination(page, PER_PAGE, account.get_operation_count())
operations = account.get_operations(start=start, end=PER_PAGE)
return render_template(
'profile.html', account=account,
operations=operations, site_url=SITE_URL, pagination=pagination)
@app.teardown_appcontext
def close_db(error):
"""Closes the database again at the end of the request."""
if hasattr(g, 'mysql_db'):
g.mysql_db.close()
def url_for_other_page(page):
args = request.view_args.copy()
args['page'] = page
return url_for(request.endpoint, **args)
def strip_tags(text):
return bleach.clean(text, tags=["strong", "a", "i", "small"])
app.jinja_env.globals['url_for_other_page'] = url_for_other_page
app.jinja_env.globals['clean'] = strip_tags
| mit | Python |
de5c0c9107156a073670d68fcb04e575e08f9b80 | Hide ctypes import error until Plot() is called. | kmacinnis/sympy,Curious72/sympy,meghana1995/sympy,MechCoder/sympy,saurabhjn76/sympy,VaibhavAgarwalVA/sympy,Designist/sympy,lidavidm/sympy,skidzo/sympy,beni55/sympy,Davidjohnwilson/sympy,jaimahajan1997/sympy,kmacinnis/sympy,MridulS/sympy,mcdaniel67/sympy,jbbskinny/sympy,pandeyadarsh/sympy,jaimahajan1997/sympy,mattpap/sympy-polys,lindsayad/sympy,souravsingh/sympy,ryanGT/sympy,Sumith1896/sympy,tovrstra/sympy,moble/sympy,emon10005/sympy,kaushik94/sympy,mafiya69/sympy,emon10005/sympy,Vishluck/sympy,Arafatk/sympy,MridulS/sympy,debugger22/sympy,madan96/sympy,Shaswat27/sympy,Titan-C/sympy,shikil/sympy,rahuldan/sympy,vipulroxx/sympy,toolforger/sympy,shipci/sympy,KevinGoodsell/sympy,cswiercz/sympy,abhiii5459/sympy,Gadal/sympy,jbaayen/sympy,ChristinaZografou/sympy,iamutkarshtiwari/sympy,rahuldan/sympy,kmacinnis/sympy,sahilshekhawat/sympy,asm666/sympy,chaffra/sympy,kevalds51/sympy,maniteja123/sympy,sunny94/temp,vipulroxx/sympy,drufat/sympy,shikil/sympy,saurabhjn76/sympy,mafiya69/sympy,atreyv/sympy,Curious72/sympy,hrashk/sympy,Mitchkoens/sympy,dqnykamp/sympy,ahhda/sympy,liangjiaxing/sympy,madan96/sympy,lindsayad/sympy,iamutkarshtiwari/sympy,VaibhavAgarwalVA/sympy,bukzor/sympy,wyom/sympy,flacjacket/sympy,sahmed95/sympy,Titan-C/sympy,bukzor/sympy,jamesblunt/sympy,kaushik94/sympy,cswiercz/sympy,sahilshekhawat/sympy,skirpichev/omg,dqnykamp/sympy,wyom/sympy,AunShiLord/sympy,cccfran/sympy,mafiya69/sympy,AkademieOlympia/sympy,Sumith1896/sympy,jbbskinny/sympy,kumarkrishna/sympy,atreyv/sympy,ga7g08/sympy,postvakje/sympy,farhaanbukhsh/sympy,pbrady/sympy,moble/sympy,ahhda/sympy,minrk/sympy,garvitr/sympy,skidzo/sympy,MridulS/sympy,meghana1995/sympy,ahhda/sympy,yashsharan/sympy,drufat/sympy,shipci/sympy,lindsayad/sympy,aktech/sympy,kaushik94/sympy,pernici/sympy,abloomston/sympy,atsao72/sympy,hrashk/sympy,minrk/sympy,sahmed95/sympy,sampadsaha5/sympy,wanglongqi/sympy,chaffra/sympy,hargup/sympy,Akademi
eOlympia/sympy,hargup/sympy,moble/sympy,yashsharan/sympy,garvitr/sympy,aktech/sympy,kevalds51/sympy,liangjiaxing/sympy,emon10005/sympy,atreyv/sympy,shipci/sympy,iamutkarshtiwari/sympy,hazelnusse/sympy-old,asm666/sympy,Davidjohnwilson/sympy,fperez/sympy,oliverlee/sympy,postvakje/sympy,yukoba/sympy,amitjamadagni/sympy,Vishluck/sympy,AunShiLord/sympy,kaichogami/sympy,AunShiLord/sympy,sunny94/temp,madan96/sympy,srjoglekar246/sympy,ChristinaZografou/sympy,Arafatk/sympy,cccfran/sympy,VaibhavAgarwalVA/sympy,abloomston/sympy,atsao72/sympy,ga7g08/sympy,Mitchkoens/sympy,Curious72/sympy,Designist/sympy,ga7g08/sympy,Shaswat27/sympy,ChristinaZografou/sympy,grevutiu-gabriel/sympy,mcdaniel67/sympy,hazelnusse/sympy-old,grevutiu-gabriel/sympy,kumarkrishna/sympy,toolforger/sympy,abhiii5459/sympy,yukoba/sympy,beni55/sympy,Gadal/sympy,jamesblunt/sympy,hargup/sympy,Gadal/sympy,jaimahajan1997/sympy,aktech/sympy,diofant/diofant,Mitchkoens/sympy,kaichogami/sympy,bukzor/sympy,souravsingh/sympy,sahmed95/sympy,toolforger/sympy,cswiercz/sympy,abloomston/sympy,MechCoder/sympy,Shaswat27/sympy,pbrady/sympy,wanglongqi/sympy,sampadsaha5/sympy,pandeyadarsh/sympy,drufat/sympy,yukoba/sympy,kaichogami/sympy,MechCoder/sympy,wanglongqi/sympy,Vishluck/sympy,abhiii5459/sympy,vipulroxx/sympy,AkademieOlympia/sympy,Arafatk/sympy,mcdaniel67/sympy,kumarkrishna/sympy,lidavidm/sympy,jerli/sympy,sampadsaha5/sympy,sahilshekhawat/sympy,debugger22/sympy,souravsingh/sympy,pandeyadarsh/sympy,grevutiu-gabriel/sympy,wyom/sympy,shikil/sympy,maniteja123/sympy,skidzo/sympy,jamesblunt/sympy,amitjamadagni/sympy,kevalds51/sympy,rahuldan/sympy,beni55/sympy,atsao72/sympy,oliverlee/sympy,jerli/sympy,Davidjohnwilson/sympy,saurabhjn76/sympy,postvakje/sympy,liangjiaxing/sympy,chaffra/sympy,dqnykamp/sympy,garvitr/sympy,maniteja123/sympy,yashsharan/sympy,oliverlee/sympy,Designist/sympy,asm666/sympy,farhaanbukhsh/sympy,jbbskinny/sympy,sunny94/temp,Sumith1896/sympy,farhaanbukhsh/sympy,cccfran/sympy,hrashk/sympy,Titan-C/sympy,debugger22/
sympy,pbrady/sympy,lidavidm/sympy,meghana1995/sympy,jerli/sympy | sympy/__init__.py | sympy/__init__.py |
__version__ = "0.5.0"
from sympy.core import *
from series import *
from simplify import *
from solvers import *
from matrices import *
from geometry import *
from polynomials import *
from utilities import *
#from specfun import *
from integrals import *
try:
from plotting import Plot
except ImportError, e:
class Plot(object):
def __init__(*args, **kwargs):
raise e
|
__version__ = "0.5.0"
from sympy.core import *
from series import *
from simplify import *
from solvers import *
from matrices import *
from geometry import *
from polynomials import *
from utilities import *
#from specfun import *
from integrals import *
try:
from plotting import Plot
except ImportError, e:
print str(e)
| bsd-3-clause | Python |
03d63ee7819719379d8289921c7bcf30fde1aea1 | Use str.encode('hex'). | probcomp/bayeslite,probcomp/bayeslite | tests/stochastic.py | tests/stochastic.py | # -*- coding: utf-8 -*-
# Copyright (c) 2010-2016, MIT Probabilistic Computing Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
class StochasticError(Exception):
def __init__(self, seed, exctype, excvalue):
self.seed = seed
self.exctype = exctype
self.excvalue = excvalue
def __str__(self):
hexseed = self.seed.encode('hex')
if hasattr(self.exctype, '__name__'):
typename = self.exctype.__name__
else:
typename = repr(self.exctype)
return '[seed %s]\n%s: %s' % (hexseed, typename, self.excvalue)
def stochastic(max_runs, min_passes):
assert 0 < max_runs
assert min_passes < max_runs
def wrap(f):
def f_(seed=None):
if seed is not None:
return f(seed)
npasses = 0
last_seed = None
last_exc_info = None
for i in xrange(max_runs):
seed = os.urandom(32)
try:
value = f(seed)
except:
last_seed = seed
last_exc_info = sys.exc_info()
else:
npasses += 1
if min_passes <= npasses:
return value
t, v, tb = last_exc_info
raise StochasticError, StochasticError(last_seed, t, v), tb
return f_
return wrap
| # -*- coding: utf-8 -*-
# Copyright (c) 2010-2016, MIT Probabilistic Computing Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
class StochasticError(Exception):
def __init__(self, seed, exctype, excvalue):
self.seed = seed
self.exctype = exctype
self.excvalue = excvalue
def __str__(self):
hexseed = ''.join('%02x' % (ord(b),) for b in self.seed)
if hasattr(self.exctype, '__name__'):
typename = self.exctype.__name__
else:
typename = repr(self.exctype)
return '[seed %s]\n%s: %s' % (hexseed, typename, self.excvalue)
def stochastic(max_runs, min_passes):
assert 0 < max_runs
assert min_passes < max_runs
def wrap(f):
def f_(seed=None):
if seed is not None:
return f(seed)
npasses = 0
last_seed = None
last_exc_info = None
for i in xrange(max_runs):
seed = os.urandom(32)
try:
value = f(seed)
except:
last_seed = seed
last_exc_info = sys.exc_info()
else:
npasses += 1
if min_passes <= npasses:
return value
t, v, tb = last_exc_info
raise StochasticError, StochasticError(last_seed, t, v), tb
return f_
return wrap
| apache-2.0 | Python |
019c23eea2cdf486b5ebfddcc38b07d855ac19a8 | Add tests | click-contrib/click-log | tests/test_basic.py | tests/test_basic.py | # -*- coding: utf-8 -*-
import logging
import click
from click.testing import CliRunner
import click_log
import pytest
test_logger = logging.getLogger(__name__)
@pytest.fixture
def runner():
return CliRunner()
def test_basic(runner):
@click.command()
@click_log.init()
def cli():
test_logger.info('hey')
test_logger.error('damn')
result = runner.invoke(cli, catch_exceptions=False)
assert not result.exception
assert result.output == 'hey\nerror: damn\n'
def test_multilines(runner):
@click.command()
@click_log.init()
def cli():
test_logger.warning("""
Lorem ipsum dolor sit amet,
consectetur adipiscing elit,
sed do eiusmod tempor incididunt""")
result = runner.invoke(cli, catch_exceptions=False)
assert not result.exception
assert result.output == (
'warning: \n'
'warning: Lorem ipsum dolor sit amet,\n'
'warning: consectetur adipiscing elit,\n'
'warning: sed do eiusmod tempor incididunt\n')
def test_unicode(runner):
@click.command()
@click_log.init()
def cli():
test_logger.error(u"""
❤️ 💔 💌 💕 💞 💓 💗 💖 💘
💝 💟 💜 💛 💚 💙""")
result = runner.invoke(cli, catch_exceptions=False)
assert not result.exception
assert result.output == (
'error: \n'
u'error: ❤️ 💔 💌 💕 💞 💓 💗 💖 💘\n'
u'error: 💝 💟 💜 💛 💚 💙\n')
def test_non_string_log(runner):
@click.command()
@click_log.init()
def cli():
test_logger.error(42)
result = runner.invoke(cli, catch_exceptions=False)
assert not result.exception
assert result.output == 'error: 42\n'
| # -*- coding: utf-8 -*-
import logging
import click
from click.testing import CliRunner
import click_log
import pytest
test_logger = logging.getLogger(__name__)
@pytest.fixture
def runner():
return CliRunner()
def test_basic(runner):
@click.command()
@click_log.init()
def cli():
test_logger.info('hey')
test_logger.error('damn')
result = runner.invoke(cli, catch_exceptions=False)
assert not result.exception
assert result.output == 'hey\nerror: damn\n'
def test_multilines(runner):
@click.command()
@click_log.init()
def cli():
test_logger.warning("""
Lorem ipsum dolor sit amet,
consectetur adipiscing elit,
sed do eiusmod tempor incididunt""")
result = runner.invoke(cli, catch_exceptions=False)
assert not result.exception
assert result.output == (
'warning: \n'
'warning: Lorem ipsum dolor sit amet,\n'
'warning: consectetur adipiscing elit,\n'
'warning: sed do eiusmod tempor incididunt\n')
def test_unicode(runner):
@click.command()
@click_log.init()
def cli():
test_logger.error(u"""
❤️ 💔 💌 💕 💞 💓 💗 💖 💘
💝 💟 💜 💛 💚 💙""")
result = runner.invoke(cli, catch_exceptions=False)
assert not result.exception
assert result.output == (
'error: \n'
u'error: ❤️ 💔 💌 💕 💞 💓 💗 💖 💘\n'
u'error: 💝 💟 💜 💛 💚 💙\n')
| mit | Python |
f951b38c96b87a4ea14ff0ff0e5aadb71d4357cb | Modify the MNIST testing script | Cysu/Person-Reid,Cysu/Person-Reid,Cysu/Person-Reid,Cysu/Person-Reid,Cysu/Person-Reid | tests/test_mnist.py | tests/test_mnist.py | #!/usr/bin/python2
# -*- coding: utf-8 -*-
import os
import cPickle
from reid.datasets import Datasets
from reid.optimization import sgd
from reid.models import cost_functions as costfuncs
from reid.models import active_functions as actfuncs
from reid.models.layers import FullConnLayer, ConvPoolLayer
from reid.models.neural_net import NeuralNet
from reid.models.evaluate import Evaluator
# Load the MNIST Dataset
with open(os.path.join('..', 'data', 'mnist', 'mnist.pkl'), 'rb') as f:
train_set, valid_set, test_set = cPickle.load(f)
train_set = (train_set[0], train_set[1].reshape(train_set[1].shape[0], 1))
valid_set = (valid_set[0], valid_set[1].reshape(valid_set[1].shape[0], 1))
test_set = (test_set[0], test_set[1].reshape(test_set[1].shape[0], 1))
datasets = Datasets(train_set=train_set, valid_set=valid_set, test_set=test_set)
# Build up the model and evaluator
layers = [ConvPoolLayer((20,1,5,5), (2,2), (1,28,28), actfuncs.tanh, False),
ConvPoolLayer((50,20,5,5), (2,2), None, actfuncs.tanh, True),
FullConnLayer(800, 500, actfuncs.tanh),
FullConnLayer(500, 10, actfuncs.softmax)]
model = NeuralNet(layers)
evaluator = Evaluator(model,
costfuncs.mean_negative_loglikelihood,
costfuncs.mean_number_misclassified)
# Train the model
sgd.train(evaluator, datasets, learning_rate=0.1,
batch_size=500, n_epoch=200,
learning_rate_decr=1.0)
| #!/usr/bin/python2
# -*- coding: utf-8 -*-
import os
import cPickle
from reid.datasets import Datasets
from reid.optimization import sgd
from reid.models import cost_functions as costfuncs
from reid.models import active_functions as actfuncs
from reid.models.layers import FullConnLayer, ConvPoolLayer
from reid.models.neural_net import NeuralNet
def _load_data():
with open(os.path.join('..', 'data', 'mnist', 'mnist.pkl'), 'rb') as f:
train_set, valid_set, test_set = cPickle.load(f)
train_set = (train_set[0], train_set[1].reshape(train_set[1].shape[0], 1))
valid_set = (valid_set[0], valid_set[1].reshape(valid_set[1].shape[0], 1))
test_set = (test_set[0], test_set[1].reshape(test_set[1].shape[0], 1))
return Datasets(train_set=train_set, valid_set=valid_set, test_set=test_set)
def test_model(model):
datasets = _load_data()
sgd.train(model, datasets,
costfuncs.mean_negative_loglikelihood,
costfuncs.mean_number_misclassified,
batch_size=500, n_epoch=200, learning_rate=0.1, learning_rate_decr=1.0)
if __name__ == '__main__':
layers = [ConvPoolLayer((20,1,5,5), (2,2), (1,28,28), actfuncs.tanh, False),
ConvPoolLayer((50,20,5,5), (2,2), None, actfuncs.tanh, True),
FullConnLayer(800, 500, actfuncs.tanh),
FullConnLayer(500, 10, actfuncs.softmax)]
model = NeuralNet(layers)
test_model(model)
| mit | Python |
1300848ed2deb253a4423a7b5826af95c91cb31a | Add application blacklist to run tests | percyfal/snakemakelib-rules,percyfal/snakemake-rules,percyfal/snakemake-rules,percyfal/snakemakelib-rules,percyfal/snakemakelib-rules | tests/test_rules.py | tests/test_rules.py | # Copyright (C) 2016 by Per Unneberg
from os.path import abspath, dirname, join, basename
import logging
import shutil
import subprocess as sp
import pytest
logger = logging.getLogger(__name__)
stderr = None if pytest.config.getoption("--show-workflow-output") else sp.STDOUT
applications = [pytest.config.getoption("--application")] if pytest.config.getoption("--application") else pytest.rules.__all__
if not set(applications).issubset(pytest.rules.__all__):
raise Exception("No such application '{}'".format(applications[0]))
blacklist = []
rules = [(y) for x in applications for y in getattr(pytest.rules, x) if not basename(y).rsplit(".rule") in blacklist]
@pytest.mark.parametrize("rule", rules)
def test_snakemake_list(rule):
output = sp.check_output(['snakemake', '-s', rule, '-l'], stderr=sp.STDOUT)
application_blacklist = ['utils']
applications = list(set(applications).difference(application_blacklist))
blacklist =[]
rules = [(y) for x in applications for y in getattr(pytest.rules, x) if not basename(y).rsplit(".rule") in blacklist]
@pytest.mark.skipif(not applications, reason="application '{}' in blacklist".format(pytest.config.getoption("--application")))
@pytest.mark.slow
@pytest.mark.parametrize("rule", rules)
def test_snakemake_run(rule, data):
target = pytest.make_output(rule)
if target is None:
pytest.skip("Unable to parse target for rule {}".format(basename(rule)))
args = ['snakemake', '-s', rule, '-d', str(data), '--configfile', join(str(data), 'config.yaml')]
if not target == "config":
args = args + [target]
output = sp.check_output(args, stderr=stderr)
| # Copyright (C) 2016 by Per Unneberg
from os.path import abspath, dirname, join, basename
import logging
import shutil
import subprocess as sp
import pytest
logger = logging.getLogger(__name__)
stderr = None if pytest.config.getoption("--show-workflow-output") else sp.STDOUT
applications = [pytest.config.getoption("--application")] if pytest.config.getoption("--application") else pytest.rules.__all__
if not set(applications).issubset(pytest.rules.__all__):
raise Exception("No such application '{}'".format(applications[0]))
blacklist = []
rules = [(y) for x in applications for y in getattr(pytest.rules, x) if not basename(y).rsplit(".rule") in blacklist]
@pytest.mark.parametrize("rule", rules)
def test_snakemake_list(rule):
output = sp.check_output(['snakemake', '-s', rule, '-l'], stderr=sp.STDOUT)
@pytest.mark.slow
@pytest.mark.parametrize("rule", rules)
def test_snakemake_run(rule, data, config):
target = pytest.make_output(rule)
if target is None:
pytest.skip("Unable to parse target for rule {}".format(basename(rule)))
args = ['snakemake', '-s', rule, '-d', str(data), '--configfile', join(str(data), 'config.yaml')]
if not target == "config":
args = args + [target]
output = sp.check_output(args, stderr=stderr)
| mit | Python |
c1472d16e1d0a7c9240cdff87dc27a769e14479a | Rename variable for flake8 | geographika/mappyfile,geographika/mappyfile | tests/test_utils.py | tests/test_utils.py | import logging
import tempfile
import mappyfile
import pytest
def test_open():
fn = './tests/sample_maps/256_overlay_res.map'
d = mappyfile.open(fn)
assert d["name"] == "TEST"
d = mappyfile.open(fn, expand_includes=False)
assert d["name"] == "TEST"
d = mappyfile.open(fn, include_position=True)
assert d["name"] == "TEST"
d = mappyfile.open(fn, include_comments=True)
assert d["name"] == "TEST"
def test_loads():
s = """MAP NAME "TEST" END"""
d = mappyfile.loads(s)
assert d["name"] == "TEST"
d = mappyfile.loads(s, expand_includes=False)
assert d["name"] == "TEST"
d = mappyfile.loads(s, include_position=True)
assert d["name"] == "TEST"
d = mappyfile.loads(s, include_comments=True)
assert d["name"] == "TEST"
def test_write():
s = """MAP NAME "TEST" END"""
fn = tempfile.mktemp()
d = mappyfile.loads(s)
mappyfile.write(d, fn)
d = mappyfile.open(fn)
assert d["name"] == "TEST"
mappyfile.write(d, fn, indent=2, spacer="\t", quote="'", newlinechar="")
d = mappyfile.open(fn)
assert d["name"] == "TEST"
def test_dump():
s = """MAP NAME "TEST" END"""
d = mappyfile.loads(s)
with tempfile.NamedTemporaryFile(mode="w+", delete=False) as fp:
mappyfile.dump(d, fp)
with open(fp.name) as fp:
d = mappyfile.load(fp)
assert d["name"] == "TEST"
def test_dictfind():
s = """
MAP
LAYER
NAME "Layer1"
TYPE POLYGON
END
LAYER
NAME "Layer2"
TYPE POLYGON
CLASS
NAME "Class1"
COLOR 0 0 -8
END
END
END
"""
d = mappyfile.loads(s)
pth = ["layers", 1]
cmp = mappyfile.dictfind(d, *pth)
assert cmp["name"] == "Layer2"
pth = ["layers", 1, "classes", 0]
cmp = mappyfile.dictfind(d, *pth)
assert cmp["name"] == "Class1"
def run_tests():
pytest.main(["tests/test_utils.py"])
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
# run_tests()
test_dump()
print("Done!")
| import logging
import tempfile
import mappyfile
import pytest
def test_open():
fn = './tests/sample_maps/256_overlay_res.map'
d = mappyfile.open(fn)
assert d["name"] == "TEST"
d = mappyfile.open(fn, expand_includes=False)
assert d["name"] == "TEST"
d = mappyfile.open(fn, include_position=True)
assert d["name"] == "TEST"
d = mappyfile.open(fn, include_comments=True)
assert d["name"] == "TEST"
def test_loads():
s = """MAP NAME "TEST" END"""
d = mappyfile.loads(s)
assert d["name"] == "TEST"
d = mappyfile.loads(s, expand_includes=False)
assert d["name"] == "TEST"
d = mappyfile.loads(s, include_position=True)
assert d["name"] == "TEST"
d = mappyfile.loads(s, include_comments=True)
assert d["name"] == "TEST"
def test_write():
s = """MAP NAME "TEST" END"""
fn = tempfile.mktemp()
d = mappyfile.loads(s)
mappyfile.write(d, fn)
d = mappyfile.open(fn)
assert d["name"] == "TEST"
mappyfile.write(d, fn, indent=2, spacer="\t", quote="'", newlinechar="")
d = mappyfile.open(fn)
assert d["name"] == "TEST"
def test_dump():
s = """MAP NAME "TEST" END"""
d = mappyfile.loads(s)
with tempfile.NamedTemporaryFile(mode="w+", delete=False) as fp:
mappyfile.dump(d, fp)
with open(fp.name) as fp:
d = mappyfile.load(fp)
assert d["name"] == "TEST"
def test_dictfind():
s = """
MAP
LAYER
NAME "Layer1"
TYPE POLYGON
END
LAYER
NAME "Layer2"
TYPE POLYGON
CLASS
NAME "Class1"
COLOR 0 0 -8
END
END
END
"""
d = mappyfile.loads(s)
pth = ["layers", 1]
l = mappyfile.dictfind(d, *pth)
assert l["name"] == "Layer2"
pth = ["layers", 1, "classes", 0]
l = mappyfile.dictfind(d, *pth)
print(l)
assert l["name"] == "Class1"
def run_tests():
pytest.main(["tests/test_utils.py"])
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
# run_tests()
test_dump()
print("Done!")
| mit | Python |
167787e36f282229b687fd10a03e9fbbb4b7d313 | Test utils unicode fix | alexgarciac/scrapi,fabianvf/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,jeffreyliu3230/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,fabianvf/scrapi,felliott/scrapi,erinspace/scrapi,ostwald/scrapi,mehanig/scrapi | tests/test_utils.py | tests/test_utils.py | import datetime
from dateutil.parser import parse
import six
from scrapi import util
class TestScrapiUtils(object):
def test_copy_to_unicode(self):
converted = util.copy_to_unicode('test')
assert converted == u'test'
assert isinstance(converted, six.text_type)
def test_timestamp(self):
timestamp = util.timestamp()
parsed = parse(timestamp)
assert isinstance(parsed, datetime.datetime)
def test_stamp_from_raw(self):
raw_doc = {'doc': 'Macho Man Story', 'timestamps': {}}
new_stamps = {'done': 'now'}
stamped_raw = util.stamp_from_raw(raw_doc, **new_stamps)
assert isinstance(stamped_raw, dict)
assert set(stamped_raw.keys()) == set(['done', 'normalizeFinished'])
| import datetime
from dateutil.parser import parse
from scrapi import util
class TestScrapiUtils(object):
def test_copy_to_unicode(self):
converted = util.copy_to_unicode('test')
assert converted == u'test'
assert isinstance(converted, unicode)
def test_timestamp(self):
timestamp = util.timestamp()
parsed = parse(timestamp)
assert isinstance(parsed, datetime.datetime)
def test_stamp_from_raw(self):
raw_doc = {'doc': 'Macho Man Story', 'timestamps': {}}
new_stamps = {'done': 'now'}
stamped_raw = util.stamp_from_raw(raw_doc, **new_stamps)
assert isinstance(stamped_raw, dict)
assert set(stamped_raw.keys()) == set(['done', 'normalizeFinished'])
| apache-2.0 | Python |
c3700f51e1de4d8a4f18286a7a134b3d490dc08b | Add tests for new utils | davesque/django-rest-framework-simplejwt,davesque/django-rest-framework-simplejwt | tests/test_utils.py | tests/test_utils.py | from __future__ import unicode_literals
from datetime import datetime
from django.utils import six, timezone
from django.test import TestCase
from mock import patch
from rest_framework_simplejwt.utils import (
aware_utcnow, datetime_from_timestamp, datetime_to_epoch, format_lazy,
make_utc
)
class TestMakeUtc(TestCase):
def test_it_should_return_the_correct_values(self):
# It should make a naive datetime into an aware, utc datetime if django
# is configured to use timezones and the datetime doesn't already have
# a timezone
# Naive datetime
dt = datetime(year=1970, month=12, day=1)
with self.settings(USE_TZ=False):
self.assertEqual(dt, make_utc(dt))
with self.settings(USE_TZ=True):
self.assertNotEqual(dt, make_utc(dt))
self.assertEqual(timezone.make_aware(dt, timezone=timezone.utc), make_utc(dt))
dt = timezone.now()
self.assertEqual(dt, make_utc(dt))
class TestAwareUtcnow(TestCase):
def test_it_should_return_the_correct_value(self):
now = datetime.utcnow()
with patch('rest_framework_simplejwt.utils.datetime') as fake_datetime:
fake_datetime.utcnow.return_value = now
# Should return aware utcnow if USE_TZ == True
with self.settings(USE_TZ=True):
self.assertEqual(timezone.make_aware(now, timezone=timezone.utc), aware_utcnow())
# Should return naive utcnow if USE_TZ == False
with self.settings(USE_TZ=False):
self.assertEqual(now, aware_utcnow())
class TestDatetimeToEpoch(TestCase):
def test_it_should_return_the_correct_values(self):
self.assertEqual(datetime_to_epoch(datetime(year=1970, month=1, day=1)), 0)
self.assertEqual(datetime_to_epoch(datetime(year=1970, month=1, day=1, second=1)), 1)
self.assertEqual(datetime_to_epoch(datetime(year=2000, month=1, day=1)), 946684800)
class TestDatetimeFromEpoch(TestCase):
def test_it_should_return_the_correct_values(self):
with self.settings(USE_TZ=False):
self.assertEqual(datetime_from_timestamp(0), datetime(year=1970, month=1, day=1))
self.assertEqual(datetime_from_timestamp(1), datetime(year=1970, month=1, day=1, second=1))
self.assertEqual(datetime_from_timestamp(946684800), datetime(year=2000, month=1, day=1), 946684800)
with self.settings(USE_TZ=True):
self.assertEqual(datetime_from_timestamp(0), make_utc(datetime(year=1970, month=1, day=1)))
self.assertEqual(datetime_from_timestamp(1), make_utc(datetime(year=1970, month=1, day=1, second=1)))
self.assertEqual(datetime_from_timestamp(946684800), make_utc(datetime(year=2000, month=1, day=1)))
class TestFormatLazy(TestCase):
def test_it_should_work(self):
obj = format_lazy('{} {}', 'arst', 'zxcv')
self.assertNotIsInstance(obj, str)
self.assertEqual(six.text_type(obj), 'arst zxcv')
| from __future__ import unicode_literals
from datetime import datetime
from unittest import TestCase
from django.utils import six
from rest_framework_simplejwt.utils import datetime_to_epoch, format_lazy
class TestDatetimeToEpoch(TestCase):
def test_it_should_return_the_correct_values(self):
self.assertEqual(datetime_to_epoch(datetime(year=1970, month=1, day=1)), 0)
self.assertEqual(datetime_to_epoch(datetime(year=1970, month=1, day=1, second=1)), 1)
self.assertEqual(datetime_to_epoch(datetime(year=2000, month=1, day=1)), 946684800)
class TestFormatLazy(TestCase):
def test_it_should_work(self):
obj = format_lazy('{} {}', 'arst', 'zxcv')
self.assertNotIsInstance(obj, str)
self.assertEqual(six.text_type(obj), 'arst zxcv')
| mit | Python |
b82dbd63aedf8a6a6af494b6d6be697a9f4230d5 | Add unit test for expand_axis_label | dwf/fuel,ejls/fuel,udibr/fuel,rizar/fuel,capybaralet/fuel,rizar/fuel,EderSantana/fuel,EderSantana/fuel,orhanf/fuel,aalmah/fuel,mila-udem/fuel,mjwillson/fuel,glewis17/fuel,orhanf/fuel,dhruvparamhans/fuel,hantek/fuel,lamblin/fuel,jbornschein/fuel,dribnet/fuel,markusnagel/fuel,udibr/fuel,harmdevries89/fuel,dribnet/fuel,glewis17/fuel,janchorowski/fuel,harmdevries89/fuel,chrishokamp/fuel,jbornschein/fuel,vdumoulin/fuel,codeaudit/fuel,aalmah/fuel,markusnagel/fuel,dmitriy-serdyuk/fuel,rodrigob/fuel,dwf/fuel,dmitriy-serdyuk/fuel,bouthilx/fuel,bouthilx/fuel,capybaralet/fuel,janchorowski/fuel,laurent-dinh/fuel,dhruvparamhans/fuel,mjwillson/fuel,chrishokamp/fuel,mila-udem/fuel,lamblin/fuel,ejls/fuel,vdumoulin/fuel,laurent-dinh/fuel,codeaudit/fuel,hantek/fuel,rodrigob/fuel | tests/test_utils.py | tests/test_utils.py | import pickle
from six.moves import range
from fuel.utils import do_not_pickle_attributes, expand_axis_label
@do_not_pickle_attributes("non_pickable", "bulky_attr")
class TestClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_pickable = lambda x: x
def test_do_not_pickle_attributes():
cl = TestClass()
dump = pickle.dumps(cl)
loaded = pickle.loads(dump)
assert loaded.bulky_attr == list(range(100))
assert loaded.non_pickable is not None
def test_expand_axis_label():
assert expand_axis_label('b') == 'batch'
assert expand_axis_label('c') == 'channel'
assert expand_axis_label('t') == 'time'
assert expand_axis_label('0') == 'axis_0'
assert expand_axis_label('1') == 'axis_1'
assert expand_axis_label('0b') == '0b'
assert expand_axis_label('') == ''
| import pickle
from six.moves import range
from fuel.utils import do_not_pickle_attributes
@do_not_pickle_attributes("non_pickable", "bulky_attr")
class TestClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_pickable = lambda x: x
def test_do_not_pickle_attributes():
cl = TestClass()
dump = pickle.dumps(cl)
loaded = pickle.loads(dump)
assert loaded.bulky_attr == list(range(100))
assert loaded.non_pickable is not None
| mit | Python |
3e6f835a88183182b6ebba25c61666735a69fc81 | Add more tests for the vault commandhelper | bdastur/vault-shell | tests/vaultshell.py | tests/vaultshell.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import vault_shell.vault_commandhelper as VaultHelper
class VaultShellTests(unittest.TestCase):
def test_basic(self):
print "test basic. Pass"
vaulthelper = VaultHelper.VaultCommandHelper()
self.failUnless(vaulthelper is not None)
def test_execute_vault_commands(self):
vaulthelper = VaultHelper.VaultCommandHelper()
output = vaulthelper.execute_vault_commands(['vault'])
self.failUnless(output is not None)
def test_get_commandkey_from_cmdlist(self):
vaulthelper = VaultHelper.VaultCommandHelper()
cmdkey = vaulthelper.get_commandkey_from_cmdlist(["token-create"])
self.assertEqual(cmdkey,
"vault_token-create",
msg="cmdkey did not match")
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
class VaultShellTests(unittest.TestCase):
def test_basic(self):
print "test basic. Pass"
| apache-2.0 | Python |
fd3babd8c1b2817c7b4873529f98de02cdb63096 | Bump to 4.9.1 | figarocms/thumbor,scorphus/thumbor,davduran/thumbor,gselva/thumbor,abaldwin1/thumbor,abaldwin1/thumbor,grevutiu-gabriel/thumbor,thumbor/thumbor,davduran/thumbor,marcelometal/thumbor,food52/thumbor,thumbor/thumbor,kkopachev/thumbor,raphaelfruneaux/thumbor,camargoanderso/thumbor,figarocms/thumbor,kkopachev/thumbor,suwaji/thumbor,BetterCollective/thumbor,scorphus/thumbor,fanhero/thumbor,jdunaravich/thumbor,jiangzhonghui/thumbor,wking/thumbor,MaTriXy/thumbor,food52/thumbor,suwaji/thumbor,aaxx/thumbor,wking/thumbor,voxmedia/thumbor,jiangzhonghui/thumbor,scorphus/thumbor,lfalcao/thumbor,gselva/thumbor,MaTriXy/thumbor,scorphus/thumbor,wking/thumbor,jdunaravich/thumbor,okor/thumbor,gi11es/thumbor,food52/thumbor,gselva/thumbor,wking/thumbor,fanhero/thumbor,thumbor/thumbor,felipemorais/thumbor,okor/thumbor,raphaelfruneaux/thumbor,voxmedia/thumbor,felipemorais/thumbor,BetterCollective/thumbor,2947721120/thumbor,Jimdo/thumbor,thumbor/thumbor,figarocms/thumbor,Jimdo/thumbor,jiangzhonghui/thumbor,lfalcao/thumbor,gselva/thumbor,grevutiu-gabriel/thumbor,marcelometal/thumbor,aaxx/thumbor,voxmedia/thumbor,camargoanderso/thumbor,kkopachev/thumbor,aaxx/thumbor,okor/thumbor,adeboisanger/thumbor,camargoanderso/thumbor,Jimdo/thumbor,MaTriXy/thumbor,jdunaravich/thumbor,2947721120/thumbor,camargoanderso/thumbor,grevutiu-gabriel/thumbor,suwaji/thumbor,adeboisanger/thumbor,2947721120/thumbor,raphaelfruneaux/thumbor,felipemorais/thumbor,aaxx/thumbor,davduran/thumbor,lfalcao/thumbor,adeboisanger/thumbor,Jimdo/thumbor,figarocms/thumbor,fanhero/thumbor,Bladrak/thumbor,grevutiu-gabriel/thumbor,adeboisanger/thumbor,fanhero/thumbor,abaldwin1/thumbor,davduran/thumbor,MaTriXy/thumbor,2947721120/thumbor,felipemorais/thumbor,jdunaravich/thumbor,raphaelfruneaux/thumbor,gi11es/thumbor,kkopachev/thumbor,food52/thumbor,lfalcao/thumbor,abaldwin1/thumbor,BetterCollective/thumbor,gi11es/thumbor,jiangzhonghui/thumbor,Bladrak/thumbor,suwaji/thumbor,marce
lometal/thumbor | thumbor/__init__.py | thumbor/__init__.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/globocom/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
'''This is the main module in thumbor'''
__version__ = "4.9.1"
| #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/globocom/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
'''This is the main module in thumbor'''
__version__ = "4.9.0"
| mit | Python |
4d54261dbc751b1cbacc984a9b449da89c5a9205 | Update api | wallstreetweb/django-time-metrics,wallstreetweb/django-time-metrics | time_metrics/api.py | time_metrics/api.py | from .models import Metric, MetricItem
def put_metric(slug, object=None, count=1, **kwargs):
from django.conf import settings
from django.contrib.sites.models import Site
try:
metric = Metric.objects.get(slug=slug)
except Metric.DoesNotExist:
metric = Metric.objects.create(slug=slug, name=slug)
site = Site.objects.get(pk=settings.SITE_ID)
MetricItem.objects.create(
metric=metric,
content_object=object,
site=site,
count=count
)
| from wsw_stats.utils import get_backend
def put_metric(slug, object=None, count=1, **kwargs):
backend = get_backend()
backend.put_metric(slug, object=object, count=count, **kwargs) | mit | Python |
cc309ec0061b10c2bf0fb4114dd4ed9bfdc8078f | Allow dash in name of junit_tests rule | GerritCodeReview/gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,WANdisco/gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,WANdisco/gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit | tools/bzl/junit.bzl | tools/bzl/junit.bzl | # Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Skylark rule to generate a Junit4 TestSuite
# Assumes srcs are all .java Test files
# Assumes junit4 is already added to deps by the user.
# See https://github.com/bazelbuild/bazel/issues/1017 for background.
_OUTPUT = """import org.junit.runners.Suite;
import org.junit.runner.RunWith;
@RunWith(Suite.class)
@Suite.SuiteClasses({%s})
public class %s {}
"""
_PREFIXES = ("org", "com", "edu")
def _SafeIndex(l, val):
for i, v in enumerate(l):
if val == v:
return i
return -1
def _AsClassName(fname):
fname = [x.path for x in fname.files][0]
toks = fname[:-5].split("/")
findex = -1
for s in _PREFIXES:
findex = _SafeIndex(toks, s)
if findex != -1:
break
if findex == -1:
fail("%s does not contain any of %s" % (fname, _PREFIXES))
return ".".join(toks[findex:]) + ".class"
def _impl(ctx):
classes = ",".join(
[_AsClassName(x) for x in ctx.attr.srcs],
)
ctx.actions.write(output = ctx.outputs.out, content = _OUTPUT % (
classes,
ctx.attr.outname,
))
_GenSuite = rule(
attrs = {
"srcs": attr.label_list(allow_files = True),
"outname": attr.string(),
},
outputs = {"out": "%{name}.java"},
implementation = _impl,
)
def junit_tests(name, srcs, **kwargs):
s_name = name.replace("-", "_") + "TestSuite"
_GenSuite(
name = s_name,
srcs = srcs,
outname = s_name,
)
native.java_test(
name = name,
test_class = s_name,
srcs = srcs + [":" + s_name],
**kwargs
)
| # Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Skylark rule to generate a Junit4 TestSuite
# Assumes srcs are all .java Test files
# Assumes junit4 is already added to deps by the user.
# See https://github.com/bazelbuild/bazel/issues/1017 for background.
_OUTPUT = """import org.junit.runners.Suite;
import org.junit.runner.RunWith;
@RunWith(Suite.class)
@Suite.SuiteClasses({%s})
public class %s {}
"""
_PREFIXES = ("org", "com", "edu")
def _SafeIndex(l, val):
for i, v in enumerate(l):
if val == v:
return i
return -1
def _AsClassName(fname):
fname = [x.path for x in fname.files][0]
toks = fname[:-5].split("/")
findex = -1
for s in _PREFIXES:
findex = _SafeIndex(toks, s)
if findex != -1:
break
if findex == -1:
fail("%s does not contain any of %s" % (fname, _PREFIXES))
return ".".join(toks[findex:]) + ".class"
def _impl(ctx):
classes = ",".join(
[_AsClassName(x) for x in ctx.attr.srcs],
)
ctx.actions.write(output = ctx.outputs.out, content = _OUTPUT % (
classes,
ctx.attr.outname,
))
_GenSuite = rule(
attrs = {
"srcs": attr.label_list(allow_files = True),
"outname": attr.string(),
},
outputs = {"out": "%{name}.java"},
implementation = _impl,
)
def junit_tests(name, srcs, **kwargs):
s_name = name + "TestSuite"
_GenSuite(
name = s_name,
srcs = srcs,
outname = s_name,
)
native.java_test(
name = name,
test_class = s_name,
srcs = srcs + [":" + s_name],
**kwargs
)
| apache-2.0 | Python |
f08692645d2fe8895016acfe000e8d461dff66b7 | Change CHROME_34_REVISION to one that is available in the continuous archive (251854); the previous revision (251904) was only available in the snapshots archive, but this is not where we download from for testing. | Just-D/chromium-1,Jonekee/chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,ltilve/chromium,jaruba/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,dednal/chromium.src,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,M4sse/chromium.src,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,Jonekee/chromium.src,littlstar/chromium.src,littlstar/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,dednal/chromium.src,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,littlstar/chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ltilve/chromium,bright-sparks/chromium-spacewalk,M4sse/chromium.src,Jonekee/chromium.src,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,Just-D/chromium-1,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswa
lk,ltilve/chromium,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,ltilve/chromium,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,markYoungH/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,dednal/chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,Just-D/chromium-1,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,ltilve/chromium,ondra-novak/c
hromium.src,markYoungH/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,littlstar/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,Chilledheart/chromium,dushu1203/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,Just-D/chromium-1,dednal/chromium.src,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,M4sse/chromium.src,littlstar/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,dednal/chro
mium.src,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,Chilledheart/chromium,hgl888/chromium-crosswalk,jaruba/chromium.src,Chilledheart/chromium,dednal/chromium.src,ondra-novak/chromium.src | chrome/test/chromedriver/archive.py | chrome/test/chromedriver/archive.py | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Downloads items from the Chromium continuous archive."""
import os
import platform
import urllib
import util
CHROME_32_REVISION = '232870'
CHROME_33_REVISION = '241075'
CHROME_34_REVISION = '251854'
_SITE = 'http://commondatastorage.googleapis.com'
class Site(object):
  """Known Chromium build archive base URLs."""
  # Continuous archive: only revisions that passed the full builder cycle.
  CONTINUOUS = _SITE + '/chromium-browser-continuous'
  # Snapshot archive: per-revision snapshots; not every revision is in both.
  SNAPSHOT = _SITE + '/chromium-browser-snapshots'
def GetLatestRevision(site=Site.CONTINUOUS):
  """Returns the latest revision (as a string) available for this platform.
  Args:
    site: the archive site to check against, default to the continuous one.
  Returns:
    The revision string read from the archive's LAST_CHANGE marker file.
  """
  url = site + '/%s/LAST_CHANGE'
  return urllib.urlopen(url % _GetDownloadPlatform()).read()
def DownloadChrome(revision, dest_dir, site=Site.CONTINUOUS):
  """Downloads the packaged Chrome from the archive to the given directory.
  Args:
    revision: the revision of Chrome to download.
    dest_dir: the directory to download Chrome to.
    site: the archive site to download from, default to the continuous one.
  Returns:
    The path to the unzipped Chrome binary.
  """
  # Archive package name differs per platform.
  def GetZipName():
    if util.IsWindows():
      return 'chrome-win32'
    elif util.IsMac():
      return 'chrome-mac'
    elif util.IsLinux():
      return 'chrome-linux'
  # Path of the browser executable inside the unzipped package.
  def GetChromePathFromPackage():
    if util.IsWindows():
      return 'chrome.exe'
    elif util.IsMac():
      return 'Chromium.app/Contents/MacOS/Chromium'
    elif util.IsLinux():
      return 'chrome'
  zip_path = os.path.join(dest_dir, 'chrome-%s.zip' % revision)
  # Reuse a previously downloaded zip for this revision if present.
  if not os.path.exists(zip_path):
    url = site + '/%s/%s/%s.zip' % (_GetDownloadPlatform(), revision,
                                    GetZipName())
    print 'Downloading', url, '...'
    urllib.urlretrieve(url, zip_path)
  util.Unzip(zip_path, dest_dir)
  return os.path.join(dest_dir, GetZipName(), GetChromePathFromPackage())
def _GetDownloadPlatform():
  """Returns the name for this platform on the archive site."""
  if util.IsWindows():
    return 'Win'
  if util.IsMac():
    return 'Mac'
  if util.IsLinux():
    is_64bit = platform.architecture()[0] == '64bit'
    return 'Linux_x64' if is_64bit else 'Linux'
| # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Downloads items from the Chromium continuous archive."""
import os
import platform
import urllib
import util
# Pinned Chromium revisions used for compatibility testing. These must exist
# in the *continuous* archive (Site.CONTINUOUS), which is where DownloadChrome
# fetches from by default.
CHROME_32_REVISION = '232870'
CHROME_33_REVISION = '241075'
# 251904 was only available in the snapshots archive; 251854 is the nearest
# revision present in the continuous archive used for test downloads.
CHROME_34_REVISION = '251854'
_SITE = 'http://commondatastorage.googleapis.com'
class Site(object):
CONTINUOUS = _SITE + '/chromium-browser-continuous'
SNAPSHOT = _SITE + '/chromium-browser-snapshots'
def GetLatestRevision(site=Site.CONTINUOUS):
"""Returns the latest revision (as a string) available for this platform.
Args:
site: the archive site to check against, default to the continuous one.
"""
url = site + '/%s/LAST_CHANGE'
return urllib.urlopen(url % _GetDownloadPlatform()).read()
def DownloadChrome(revision, dest_dir, site=Site.CONTINUOUS):
"""Downloads the packaged Chrome from the archive to the given directory.
Args:
revision: the revision of Chrome to download.
dest_dir: the directory to download Chrome to.
site: the archive site to download from, default to the continuous one.
Returns:
The path to the unzipped Chrome binary.
"""
def GetZipName():
if util.IsWindows():
return 'chrome-win32'
elif util.IsMac():
return 'chrome-mac'
elif util.IsLinux():
return 'chrome-linux'
def GetChromePathFromPackage():
if util.IsWindows():
return 'chrome.exe'
elif util.IsMac():
return 'Chromium.app/Contents/MacOS/Chromium'
elif util.IsLinux():
return 'chrome'
zip_path = os.path.join(dest_dir, 'chrome-%s.zip' % revision)
if not os.path.exists(zip_path):
url = site + '/%s/%s/%s.zip' % (_GetDownloadPlatform(), revision,
GetZipName())
print 'Downloading', url, '...'
urllib.urlretrieve(url, zip_path)
util.Unzip(zip_path, dest_dir)
return os.path.join(dest_dir, GetZipName(), GetChromePathFromPackage())
def _GetDownloadPlatform():
"""Returns the name for this platform on the archive site."""
if util.IsWindows():
return 'Win'
elif util.IsMac():
return 'Mac'
elif util.IsLinux():
if platform.architecture()[0] == '64bit':
return 'Linux_x64'
else:
return 'Linux'
| bsd-3-clause | Python |
44348bafa8f084fe1cf9b22c7d3b9add1d08e912 | Update common.py | NeblioTeam/neblio,NeblioTeam/neblio,NeblioTeam/neblio,NeblioTeam/neblio,NeblioTeam/neblio | ci_scripts/neblio_ci_libs/common.py | ci_scripts/neblio_ci_libs/common.py | import os
from subprocess import call
import sys
import errno
import urllib
def mkdir_p(path):
    """Create *path* including missing parents; no error if it already exists.

    Equivalent to the shell's ``mkdir -p``. Any OSError other than "path
    already exists as a directory" is re-raised.
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # Only swallow the "already exists and is a directory" case.
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
def call_with_err_code(cmd):
    """Run *cmd* through the shell; terminate the process on failure.

    On a non-zero exit (other than 137) the command and its exit code are
    written to stderr and the whole script exits with that code.
    """
    err_code = call(cmd, shell=True)
    # Error code 137 is thrown by the timeout command when it times out; it is
    # tolerated because RPi builds are deliberately run under a time limit.
    if (err_code != 0 and err_code != 137):
        print("")
        print("")
        sys.stderr.write('call \'' + cmd + '\' exited with error code ' + str(err_code) + ' \n')
        print("")
        exit(err_code)
def install_packages_debian(packages_to_install):
    """Refresh the apt index and install the given packages (Debian/Ubuntu).

    The install step is skipped when the package list is empty.
    """
    call_with_err_code('sudo apt-get update')
    if len(packages_to_install) > 0:
        call_with_err_code('sudo apt-get -y install ' + " ".join(packages_to_install))
def install_packages_osx(packages_to_install):
    """Refresh Homebrew and install the given packages (macOS).

    Mirrors install_packages_debian: the install step is skipped when the
    package list is empty (running the install command with no package
    names would fail and abort the build via call_with_err_code).
    """
    call_with_err_code('sudo brew update')
    if len(packages_to_install) > 0:
        # NOTE(review): ``brew`` has no '-y' flag and normally refuses to run
        # under sudo -- confirm this command works in the target CI image.
        call_with_err_code('sudo brew -y install ' + " ".join(packages_to_install))
def setup_travis_or_gh_actions_env_vars():
    """Normalise CI environment variables across Travis CI and GitHub Actions.

    Detects which CI system is running and maps its native variables onto
    the common BUILD_DIR / BRANCH / COMMIT names the build scripts expect.
    Exits with status 1 when neither CI system is detected.
    """
    if os.environ.get('TRAVIS_BUILD_DIR') is not None:
        # Travis Detected
        print("Travis CI Detected. Setting Up Environment Variables.")
        os.environ['BUILD_DIR'] = os.environ.get('TRAVIS_BUILD_DIR')
        os.environ['BRANCH'] = os.environ.get('TRAVIS_BRANCH')
        os.environ['COMMIT'] = os.environ.get('TRAVIS_COMMIT')
    elif os.environ.get('GITHUB_ACTIONS') is not None:
        # GitHub Actions Detected
        print("GitHub Actions Detected. Setting Up Environment Variables.")
        os.environ['BUILD_DIR'] = os.environ['GITHUB_WORKSPACE']
        # GITHUB_REF looks like 'refs/heads/<branch>'; keep the last segment.
        os.environ['BRANCH'] = os.environ['GITHUB_REF'].rsplit('/', 1)[1]
        os.environ['COMMIT'] = os.environ.get('GITHUB_SHA')
        # ccache settings: compressed cache capped at 150M inside the workspace.
        os.environ['CCACHE_DIR'] = os.path.join(os.environ['GITHUB_WORKSPACE'],'.ccache')
        os.environ['CCACHE_COMPRESS'] = "1"
        os.environ['CCACHE_COMPRESSLEVEL'] = "9"
        os.environ['CCACHE_MAXSIZE'] = "150M"
        os.environ['FASTLANE_DONT_STORE_PASSWORD'] = "1"
        # macOS SDK headers for builds on the GitHub-hosted macOS image.
        os.environ['CPATH'] = '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include'
    else:
        print("Neither Travis CI nor GitHub Actions Detected. Aborting...")
        exit(1)
    print("BUILD_DIR: " + os.environ['BUILD_DIR'])
    print("BRANCH: " + os.environ['BRANCH'])
    print("COMMIT: " + os.environ['COMMIT'])
| import os
from subprocess import call
import sys
import errno
import urllib
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def call_with_err_code(cmd):
err_code = call(cmd, shell=True)
# Error code 137 is thrown by the timeout command when it timesout, used in RPi building
if (err_code != 0 and err_code != 137):
print("")
print("")
sys.stderr.write('call \'' + cmd + '\' exited with error code ' + str(err_code) + ' \n')
print("")
exit(err_code)
def install_packages_debian(packages_to_install):
call_with_err_code('sudo apt-get update')
if len(packages_to_install) > 0:
call_with_err_code('sudo apt-get -y install ' + " ".join(packages_to_install))
def install_packages_osx(packages_to_install):
call_with_err_code('sudo brew update')
call_with_err_code('sudo brew -y install ' + " ".join(packages_to_install))
def setup_travis_or_gh_actions_env_vars():
if os.environ.get('TRAVIS_BUILD_DIR') is not None:
# Travis Detected
print("Travis CI Detected. Setting Up Environment Variables.")
os.environ['BUILD_DIR'] = os.environ.get('TRAVIS_BUILD_DIR')
os.environ['BRANCH'] = os.environ.get('TRAVIS_BRANCH')
os.environ['COMMIT'] = os.environ.get('TRAVIS_COMMIT')
elif os.environ.get('GITHUB_ACTIONS') is not None:
# GitHub Actions Detected
print("GitHub Actions Detected. Setting Up Environment Variables.")
os.environ['BUILD_DIR'] = os.environ['GITHUB_WORKSPACE']
os.environ['BRANCH'] = os.environ['GITHUB_REF'].rsplit('/', 1)[1]
os.environ['COMMIT'] = os.environ.get('GITHUB_SHA')
os.environ['CCACHE_DIR'] = os.path.join(os.environ['GITHUB_WORKSPACE'],'.ccache')
os.environ['CCACHE_COMPRESS'] = "1"
os.environ['CCACHE_COMPRESSLEVEL'] = "9"
os.environ['CCACHE_MAXSIZE'] = "150M"
os.environ['FASTLANE_DONT_STORE_PASSWORD'] = "1"
else:
print("Neither Travis CI nor GitHub Actions Detected. Aborting...")
exit(1)
print("BUILD_DIR: " + os.environ['BUILD_DIR'])
print("BRANCH: " + os.environ['BRANCH'])
print("COMMIT: " + os.environ['COMMIT'])
| mit | Python |
5c58bdf408122c5809880bbd93cdfccff6ca19f9 | rearrange test cases | texastribune/armstrong.apps.related_content,texastribune/armstrong.apps.related_content,armstrong/armstrong.apps.related_content,armstrong/armstrong.apps.related_content | armstrong/apps/related_content/tests/fields.py | armstrong/apps/related_content/tests/fields.py | from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.generic import GenericForeignKey
from django.db import models
from ._utils import *
from ..models import RelatedContent
from ..models import RelatedType
from .models import generate_model
class RelatedContentFieldTestCase(TestCase):
    """The ``related_content`` relation exposes the RelatedContent rows."""
    def test_related_contains_all_related_models(self):
        # generate_model() builds a source, a destination and the link row.
        one, two, c = generate_model()
        related_content = one.related_content.all()
        self.assertEqual(1, related_content.count())
        self.assertEqual(related_content[0].destination_object, two)
class RelatedObjectsFieldTestCase(TestCase):
    """The ``related`` accessor yields destination objects directly."""
    def test_contains_all_related_objects_for_given_source(self):
        one, two, c = generate_model()
        related_content = one.related.all()
        self.assertEqual(1, related_content.count())
        self.assertEqual(related_content[0], two)
class ReverseRelatedObjectsFieldTestCase(TestCase):
    """The ``reverse_related`` accessor walks the relation backwards."""
    def test_contains_all_related_objects_for_given_source(self):
        one, two, c = generate_model()
        related_content = two.reverse_related.all()
        self.assertEqual(1, related_content.count())
        self.assertEqual(related_content[0], one)
| from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.generic import GenericForeignKey
from django.db import models
from ._utils import *
from ..models import RelatedContent
from ..models import RelatedType
from .models import generate_model
class RelatedContentFieldTestCase(TestCase):
def test_related_contains_all_related_models(self):
one, two, c = generate_model()
related_content = one.related_content.all()
self.assertEqual(1, related_content.count())
self.assertEqual(related_content[0].destination_object, two)
class ReverseRelatedObjectsFieldTestCase(TestCase):
def test_contains_all_related_objects_for_given_source(self):
one, two, c = generate_model()
related_content = two.reverse_related.all()
self.assertEqual(1, related_content.count())
self.assertEqual(related_content[0], one)
class RelatedObjectsFieldTestCase(TestCase):
def test_contains_all_related_objects_for_given_source(self):
one, two, c = generate_model()
related_content = one.related.all()
self.assertEqual(1, related_content.count())
self.assertEqual(related_content[0], two)
| apache-2.0 | Python |
22e8a7cb8f8412c4bb79f46e8e7ca2a540bf317c | Add stdOut and stdError to Django app data model. | artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history | src/dashboard/src/dashboard/models.py | src/dashboard/src/dashboard/models.py | # This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]'
# into your database.
from django.db import models
class Job(models.Model):
    """A processing job, mapped onto the legacy ``Jobs`` table.

    Auto-generated via inspectdb; field names mirror the existing
    mixed-case MySQL columns through ``db_column``.
    """
    jobuuid = models.CharField(max_length=150, primary_key=True, db_column='jobUUID')
    jobtype = models.CharField(max_length=750, db_column='jobType', blank=True)
    createdtime = models.DateTimeField(db_column='createdTime')
    directory = models.CharField(max_length=750, blank=True)
    sipuuid = models.CharField(max_length=150, db_column='SIPUUID', blank=True)
    currentstep = models.CharField(max_length=150, db_column='currentStep', blank=True)
    class Meta:
        db_table = u'Jobs'
class Task(models.Model):
    """A task executed by a client as part of a Job (legacy ``Tasks`` table).

    ``stdout``/``stderror`` capture the executed command's output streams.
    """
    taskuuid = models.CharField(max_length=150, primary_key=True, db_column='taskUUID')
    # jobuuid = models.CharField(max_length=150, db_column='jobUUID', blank=True)
    job = models.ForeignKey(Job, db_column='jobuuid', to_field = 'jobuuid')
    createdtime = models.DateTimeField(db_column='createdTime')
    fileuuid = models.CharField(max_length=150, db_column='fileUUID', blank=True)
    filename = models.CharField(max_length=300, db_column='fileName', blank=True)
    # 'exec' is a reserved word in Python, hence the '_field' suffix.
    exec_field = models.CharField(max_length=150, db_column='exec', blank=True)
    arguments = models.CharField(max_length=3000, blank=True)
    starttime = models.DateTimeField(db_column='startTime')
    client = models.CharField(max_length=150, blank=True)
    endtime = models.DateTimeField(db_column='endTime')
    exitcode = models.IntegerField(null=True, db_column='exitCode', blank=True)
    stdout = models.TextField(db_column='stdOut', blank=True)
    stderror = models.TextField(db_column='stdError', blank=True)
    class Meta:
        db_table = u'Tasks'
class JobStepCompleted(models.Model):
    """Record of a completed step of a Job (legacy ``jobStepCompleted`` table)."""
    # NOTE(review): naming a field 'pk' shadows Django's built-in pk alias --
    # confirm this auto-generated mapping behaves as intended.
    pk = models.IntegerField(primary_key=True)
    # jobuuid = models.CharField(max_length=150, db_column='jobUUID', blank=True)
    job = models.ForeignKey(Job, db_column='jobuuid', to_field = 'jobuuid')
    completedtime = models.DateTimeField(db_column='completedTime')
    step = models.CharField(max_length=150, blank=True)
    class Meta:
        db_table = u'jobStepCompleted'
| # This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]'
# into your database.
from django.db import models
class Job(models.Model):
jobuuid = models.CharField(max_length=150, primary_key=True, db_column='jobUUID')
jobtype = models.CharField(max_length=750, db_column='jobType', blank=True)
createdtime = models.DateTimeField(db_column='createdTime')
directory = models.CharField(max_length=750, blank=True)
sipuuid = models.CharField(max_length=150, db_column='SIPUUID', blank=True)
currentstep = models.CharField(max_length=150, db_column='currentStep', blank=True)
class Meta:
db_table = u'Jobs'
class Task(models.Model):
taskuuid = models.CharField(max_length=150, primary_key=True, db_column='taskUUID')
# jobuuid = models.CharField(max_length=150, db_column='jobUUID', blank=True)
job = models.ForeignKey(Job, db_column='jobuuid', to_field = 'jobuuid')
createdtime = models.DateTimeField(db_column='createdTime')
fileuuid = models.CharField(max_length=150, db_column='fileUUID', blank=True)
filename = models.CharField(max_length=300, db_column='fileName', blank=True)
exec_field = models.CharField(max_length=150, db_column='exec', blank=True)
arguments = models.CharField(max_length=3000, blank=True)
starttime = models.DateTimeField(db_column='startTime')
client = models.CharField(max_length=150, blank=True)
endtime = models.DateTimeField(db_column='endTime')
exitcode = models.IntegerField(null=True, db_column='exitCode', blank=True)
class Meta:
db_table = u'Tasks'
class JobStepCompleted(models.Model):
pk = models.IntegerField(primary_key=True)
# jobuuid = models.CharField(max_length=150, db_column='jobUUID', blank=True)
job = models.ForeignKey(Job, db_column='jobuuid', to_field = 'jobuuid')
completedtime = models.DateTimeField(db_column='completedTime')
step = models.CharField(max_length=150, blank=True)
class Meta:
db_table = u'jobStepCompleted'
| agpl-3.0 | Python |
60ac05bbda83191a0e794322299969a3fef2f9cd | Update distancia.py | Alan-Jairo/topgeo | topgeo/distancia.py | topgeo/distancia.py | # Calculo de distancias entre puntos
## Profesor Revisor:
### Dr Ramon Solano Barajas
## Elaborado por:
### Jairo Avalos Velazquez
### Alan Mauricio Cruz Otero
def caldist(csv):
    """Compute horizontal, vertical and slope distances between survey points.

    Interactively prompts for the points CSV file, the arbitrary station
    coordinates (x, y, z), and the instrument/prism heights; adds the
    Dist_H / Dist_V / Dist_I columns, writes 'Puntos_Dist.csv' and returns
    the head of the resulting DataFrame.
    """
    # NOTE(review): the ``csv`` parameter is never used -- the file name is
    # read interactively below. Confirm whether it should replace input().
    # Import the numpy and pandas modules.
    import numpy as np
    import pandas as pd
    # Load the CSV file of surveyed points.
    pts = pd.read_csv(input("Ingresar archivo CSV puntos:"))
    # Extract the coordinate columns we need from the table.
    X = pts['Coor_X']
    Y = pts['Coor_Y']
    Z = pts['Coor_Z']
    # Read the arbitrary coordinates (X, Y, Z) where the total-station survey started.
    xx = float(input("Ingresar coordenada arbitraria donde se inicio el levantamiento x: "))
    yy = float(input("Ingresar coordenada arbitraria donde se inicio el levantamiento y: "))
    zz = float(input("Ingresar coordenada arbitraria donde se inicio el levantamiento Z: "))
    # Read the instrument height and the prism height.
    Alt_A = float(input("Ingresar altura del Aparato Estación Total: "))
    Alt_P = float(input("Ingresar altura del prisma: "))
    # Add new columns: horizontal distance and vertical distance.
    n = (((xx-X)**2)+((yy-Y)**2))
    pts ['Dist_H'] = np.sqrt(n)
    pts ['Dist_V'] = (-zz - Alt_A + Alt_P + Z)
    pts.head()
    # Keep the freshly computed columns in local variables.
    DH = pts['Dist_H']
    DV = pts['Dist_V']
    # Compute the slope (inclined) distance from the other two.
    h = ((DH**2)+(DV**2))
    pts ['Dist_I'] = np.sqrt(h)
    pts.head()
    DI = pts['Dist_I']
    # Save the table with the computed columns to a CSV file.
    pts.to_csv('Puntos_Dist.csv')
    # Show the computed data on screen.
    return pts.head()
caldist('Puntos.csv')
| # Calculo de distancias entre puntos
## Profesor Revisor:
### Dr Ramon Solano Barajas
## Elaborado por:
### Jairo Avalos Velazquez
### Alan Mauricio Cruz Otero
def caldist(csv):
"""
Esta funcion sirve para realizar el calculo de distancias entre dos puntos.
"""
# Importamos los modulos numpy y pandas
import numpy as np
import pandas as pd
#Almacenamos en la variable pts el archivo csv
pts_1 = input("Ingresar archivo CSV puntos:")
pts = pd.read_csv(pts_1)
#Se almacenan las nuevas variables en las columnas que queremos de la tabla
X = pts['Coor_X']
Y = pts['Coor_Y']
Z = pts['Coor_Z']
# Escribir las coordenadas arbitrarias donde se inicio el levantamiento con Estacion Total(X,Y,Z)
xx = float(input("Ingresar coordenada arbitraria donde se inicio el levantamiento x: "))
yy = float(input("Ingresar coordenada arbitraria donde se inicio el levantamiento y: "))
zz = float(input("Ingresar coordenada arbitraria donde se inicio el levantamiento Z: "))
# Escribir el altura del aparato y altura del prisma
Alt_A = float(input("Ingresar altura del Aparato Estación Total: "))
Alt_P = float(input("Ingresar altura del prisma: "))
#Se crean nuevas columnas en nuestra tabla y se hace el calculo de ellas
n = (((xx-X)**2)+((yy-Y)**2))
pts ['Dist_H'] = np.sqrt(n)
pts ['Dist_V'] = (-zz - Alt_A + Alt_P + Z)
pts.head()
#Se almacenan las variables en las columnas que queremos del csv
DH = pts['Dist_H']
DV = pts['Dist_V']
# Calculamos la distancia inclinada
h = ((DH**2)+(DV**2))
pts ['Dist_I'] = np.sqrt(h)
pts.head()
DI = pts['Dist_I']
#Se guarda la tabla terminada con los clculos realizados en un csv
pts.to_csv('Puntos_Dist.csv')
# Se muestran los datos calculados en el archivo csv se muestran en pantalla
return pts.head()
caldist('Puntos.csv')
| mit | Python |
e4a7eaa570964fbc2489bbce20913857b1c5c0aa | bump minor version | hammerlab/topiary,hammerlab/topiary | topiary/__init__.py | topiary/__init__.py |
from .lazy_ligandome_dict import LazyLigandomeDict, AlleleNotFound
from .converters import (
epitopes_to_dataframe,
epitopes_to_csv
)
from .predict_epitopes import (
predict_epitopes_from_args,
predict_epitopes_from_variants,
predict_epitopes_from_mutation_effects,
)
from .epitope_prediction import (
build_epitope_collection_from_binding_predictions,
MutantEpitopePrediction,
)
from .sequence_helpers import (
check_padding_around_mutation,
peptide_mutation_interval,
contains_mutant_residues,
protein_subsequences_around_mutations,
)
from . import commandline_args
__version__ = '0.1.1'
__all__ = [
"LazyLigandomeDict",
"AlleleNotFound",
"commandline_args",
"epitopes_to_dataframe",
"epitopes_to_csv",
"predict_epitopes_from_variants",
"predict_epitopes_from_mutation_effects",
"predict_epitopes_from_args",
"build_epitope_collection_from_binding_predictions",
"contains_mutant_residues",
"check_padding_around_mutation",
"peptide_mutation_interval",
"protein_subsequences_around_mutations",
"MutantEpitopePrediction",
]
|
from .lazy_ligandome_dict import LazyLigandomeDict, AlleleNotFound
from .converters import (
epitopes_to_dataframe,
epitopes_to_csv
)
from .predict_epitopes import (
predict_epitopes_from_args,
predict_epitopes_from_variants,
predict_epitopes_from_mutation_effects,
)
from .epitope_prediction import (
build_epitope_collection_from_binding_predictions,
MutantEpitopePrediction,
)
from .sequence_helpers import (
check_padding_around_mutation,
peptide_mutation_interval,
contains_mutant_residues,
protein_subsequences_around_mutations,
)
from . import commandline_args
__version__ = '0.1.0'
__all__ = [
"LazyLigandomeDict",
"AlleleNotFound",
"commandline_args",
"epitopes_to_dataframe",
"epitopes_to_csv",
"predict_epitopes_from_variants",
"predict_epitopes_from_mutation_effects",
"predict_epitopes_from_args",
"build_epitope_collection_from_binding_predictions",
"contains_mutant_residues",
"check_padding_around_mutation",
"peptide_mutation_interval",
"protein_subsequences_around_mutations",
"MutantEpitopePrediction",
]
| apache-2.0 | Python |
9d6a2862c536ef83bee085a01ea70900ec116c8c | add europarl.2019-05-23 to Broxtowe | DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_broxtowe.py | polling_stations/apps/data_collection/management/commands/import_broxtowe.py | import os
from data_collection.github_importer import BaseGitHubImporter
class Command(BaseGitHubImporter):
    """Import polling districts and stations for Broxtowe (E07000172).

    Stations come from a GitHub scraper repo (GeoJSON, WGS84); districts
    come from a local shapefile (British National Grid, EPSG:27700).
    """
    council_id = "E07000172"
    elections = ["local.2019-05-02", "europarl.2019-05-23"]
    # This one is a bit of a mish-mash
    # The stations are on GitHub
    srid = 4326
    scraper_name = "wdiv-scrapers/DC-PollingStations-Broxtowe"
    geom_type = "geojson"
    # but we need to import the districts from a file on S3
    districts_srid = 27700
    districts_name = "local.2019-05-02/Version 1/Polling_Districts_Split"
    districts_filetype = "shp"
    local_files = True
    def get_districts(self):
        # Override: load districts from the local shapefile instead of GitHub.
        districts_file = os.path.join(self.base_folder_path, self.districts_name)
        return self.get_data(self.districts_filetype, districts_file)
    def district_record_to_dict(self, record):
        # Shapefile fields are positional: record[1] is the ward name,
        # record[6] the polling district code.
        return {
            "internal_council_id": record[6].strip(),
            "name": " - ".join([record[1].strip(), record[6].strip()]),
            "polling_station_id": record[6].strip(),
        }
    def station_record_to_dict(self, record):
        location = self.extract_geometry(
            record, self.geom_type, self.get_srid("stations")
        )
        # Avoid duplicating the venue name when ADDRESS already starts with it.
        if record["ADDRESS"].lower().startswith(record["LABEL"].lower()):
            address = record["ADDRESS"]
        else:
            address = "\n".join([record["LABEL"], record["ADDRESS"]])
        return {
            "internal_council_id": record["POLL_DIST"],
            "address": address,
            "postcode": "",
            "location": location,
        }
| import os
from data_collection.github_importer import BaseGitHubImporter
class Command(BaseGitHubImporter):
council_id = "E07000172"
elections = ["local.2019-05-02"]
# This one is a bit of a mish-mash
# The stations are on GitHub
srid = 4326
scraper_name = "wdiv-scrapers/DC-PollingStations-Broxtowe"
geom_type = "geojson"
# but we need to import the districts from a file on S3
districts_srid = 27700
districts_name = "local.2019-05-02/Version 1/Polling_Districts_Split"
districts_filetype = "shp"
local_files = True
def get_districts(self):
districts_file = os.path.join(self.base_folder_path, self.districts_name)
return self.get_data(self.districts_filetype, districts_file)
def district_record_to_dict(self, record):
return {
"internal_council_id": record[6].strip(),
"name": " - ".join([record[1].strip(), record[6].strip()]),
"polling_station_id": record[6].strip(),
}
def station_record_to_dict(self, record):
location = self.extract_geometry(
record, self.geom_type, self.get_srid("stations")
)
if record["ADDRESS"].lower().startswith(record["LABEL"].lower()):
address = record["ADDRESS"]
else:
address = "\n".join([record["LABEL"], record["ADDRESS"]])
return {
"internal_council_id": record["POLL_DIST"],
"address": address,
"postcode": "",
"location": location,
}
| bsd-3-clause | Python |
91004a866474c12031055a9d6ee3ee72262b2877 | Allow custom response from update_from_dict | wichert/rest_toolkit | src/rest_toolkit/views.py | src/rest_toolkit/views.py | from pyramid.httpexceptions import HTTPMethodNotAllowed
from pyramid.httpexceptions import HTTPNoContent
from .state import RestState
def unsupported_method_view(resource, request):
request.response.status_int = 405
return {'message': 'Unsupported HTTP method'}
def default_options_view(resource, request, methods=None):
"""Default OPTIONS view for resources."""
response = HTTPNoContent()
if methods is None:
state = RestState.from_resource(resource)
methods = state.supported_methods()
response.headers['Access-Control-Allow-Methods'] = ', '.join(methods)
return response
def default_delete_view(resource, request):
resource.delete()
return HTTPNoContent()
def default_get_view(resource, request):
return resource.to_dict()
def default_patch_view(resource, request):
try:
data = request.json_body
except ValueError:
request.response.status_int = 400
return {'message': 'No JSON data provided.'}
resource.validate(data, partial=True)
r = resource.update_from_dict(data, replace=False)
return r if r is not None else resource.to_dict()
def default_put_view(resource, request):
try:
data = request.json_body
except ValueError:
request.response.status_int = 400
return {'message': 'No JSON data provided.'}
resource.validate(data, partial=False)
r = resource.update_from_dict(data, replace=True)
return r if r is not None else resource.to_dict()
def default_post_view(resource, request):
try:
data = request.json_body
except ValueError:
request.response.status_int = 400
return {'message': 'No JSON data provided.'}
resource.validate_child(data)
request.response.status_int = 201
return resource.add_child(data)
| from pyramid.httpexceptions import HTTPMethodNotAllowed
from pyramid.httpexceptions import HTTPNoContent
from .state import RestState
def unsupported_method_view(resource, request):
request.response.status_int = 405
return {'message': 'Unsupported HTTP method'}
def default_options_view(resource, request, methods=None):
"""Default OPTIONS view for resources."""
response = HTTPNoContent()
if methods is None:
state = RestState.from_resource(resource)
methods = state.supported_methods()
response.headers['Access-Control-Allow-Methods'] = ', '.join(methods)
return response
def default_delete_view(resource, request):
resource.delete()
return HTTPNoContent()
def default_get_view(resource, request):
return resource.to_dict()
def default_patch_view(resource, request):
try:
data = request.json_body
except ValueError:
request.response.status_int = 400
return {'message': 'No JSON data provided.'}
resource.validate(data, partial=True)
resource.update_from_dict(data, replace=False)
return resource.to_dict()
def default_put_view(resource, request):
try:
data = request.json_body
except ValueError:
request.response.status_int = 400
return {'message': 'No JSON data provided.'}
resource.validate(data, partial=False)
resource.update_from_dict(data, replace=True)
return resource.to_dict()
def default_post_view(resource, request):
try:
data = request.json_body
except ValueError:
request.response.status_int = 400
return {'message': 'No JSON data provided.'}
resource.validate_child(data)
request.response.status_int = 201
return resource.add_child(data)
| bsd-2-clause | Python |
cc5f5e3c15c20d27f71afadf7a993ca7001c3f48 | Set version to 0.11-dev. | glorpen/webassets,heynemann/webassets,wijerasa/webassets,JDeuce/webassets,aconrad/webassets,glorpen/webassets,heynemann/webassets,scorphus/webassets,heynemann/webassets,scorphus/webassets,aconrad/webassets,john2x/webassets,JDeuce/webassets,aconrad/webassets,florianjacob/webassets,wijerasa/webassets,john2x/webassets,florianjacob/webassets,glorpen/webassets | src/webassets/__init__.py | src/webassets/__init__.py | __version__ = (0, 11, 'dev')
# Make a couple frequently used things available right here.
from .bundle import Bundle
from .env import Environment
| __version__ = (0, 10)
# Make a couple frequently used things available right here.
from .bundle import Bundle
from .env import Environment
| bsd-2-clause | Python |
779359f87698a421ae0102b58d214232921a3dfc | add python 2.4 requirement | guziy/basemap,matplotlib/basemap,guziy/basemap,matplotlib/basemap | setup-data.py | setup-data.py | import sys, glob, os
major, minor1, minor2, s, tmp = sys.version_info
if major==2 and minor1<=3:
# setuptools monkeypatches distutils.core.Distribution to support
# package_data
#try: import setuptools
#except ImportError:
# raise SystemExit("""
#matplotlib requires setuptools for installation. Please download
#http://peak.telecommunity.com/dist/ez_setup.py and run it (as su if
#you are doing a system wide install) to install the proper version of
#setuptools for your system""")
raise SystemExit("""The basemap toolkit requires python 2.4.""")
from distutils.core import setup
packages = ['matplotlib.toolkits.basemap.data']
package_dirs = {'':'lib'}
boundaryfiles = glob.glob("lib/matplotlib/toolkits/basemap/data/*_f.dat")
basemap_datafiles = [os.path.basename(bfile) for bfile in boundaryfiles]
package_data = {'matplotlib.toolkits.basemap.data':basemap_datafiles}
setup(
name = "basemap-data-fullres",
version = "0.9.7",
description = "full-resolution boundary data for basemap",
url = "http://matplotlib.sourceforge.net/toolkits.html",
download_url = "http://sourceforge.net/projects/matplotlib",
author = "Jeff Whitaker",
author_email = "jeffrey.s.whitaker@noaa.gov",
platforms = ["any"],
license = "OSI Approved",
keywords = ["python","plotting","plots","graphs","charts","GIS","mapping","map projections","maps"],
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved",
"Topic :: Scientific/Engineering :: Visualization",
"Topic :: Software Development :: Libraries :: Python Modules",
"Operating System :: OS Independent"],
packages = packages,
package_dir = package_dirs,
package_data = package_data
)
| import sys, glob, os
packages = ['matplotlib.toolkits.basemap.data']
package_dirs = {'':'lib'}
boundaryfiles = glob.glob("lib/matplotlib/toolkits/basemap/data/*_f.dat")
basemap_datafiles = [os.path.basename(bfile) for bfile in boundaryfiles]
package_data = {'matplotlib.toolkits.basemap.data':basemap_datafiles}
setup(
name = "basemap-data-fullres",
version = "0.9.7",
description = "full-resolution boundary data for basemap",
url = "http://matplotlib.sourceforge.net/toolkits.html",
download_url = "http://sourceforge.net/projects/matplotlib",
author = "Jeff Whitaker",
author_email = "jeffrey.s.whitaker@noaa.gov",
platforms = ["any"],
license = "OSI Approved",
keywords = ["python","plotting","plots","graphs","charts","GIS","mapping","map projections","maps"],
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved",
"Topic :: Scientific/Engineering :: Visualization",
"Topic :: Software Development :: Libraries :: Python Modules",
"Operating System :: OS Independent"],
packages = packages,
package_dir = package_dirs,
package_data = package_data
)
| mit | Python |
be1faa2f8fda8b7c8ed8e6f3fd61abd16b7c7509 | update read repo example | fullstackpython/blog-code-examples,fullstackpython/blog-code-examples,fullstackpython/blog-code-examples | first-steps-gitpython/read_repo.py | first-steps-gitpython/read_repo.py | import os
from git import Repo
COMMITS_TO_PRINT = 5
def print_commit(commit):
print('----')
print(str(commit.hexsha))
print("\"{}\" by {} ({})".format(commit.summary,
commit.author.name,
commit.author.email))
print(str(commit.authored_datetime))
print(str("count: {} and size: {}".format(commit.count(),
commit.size)))
def print_repository(repo):
print('Repo description: {}'.format(repo.description))
print('Repo active branch is {}'.format(repo.active_branch))
for remote in repo.remotes:
print('Remote named "{}" with URL "{}"'.format(remote, remote.url))
print('Last commit for repo is {}.'.format(str(repo.head.commit.hexsha)))
if __name__ == "__main__":
repo_path = os.getenv('GIT_REPO_PATH')
# Repo object used to programmatically interact with Git repositories
repo = Repo(repo_path)
# check that the repository loaded correctly
if not repo.bare:
print('Repo at {} successfully loaded.'.format(repo_path))
print_repository(repo)
# create list of commits then print some of them to stdout
commits = list(repo.iter_commits('master'))[:COMMITS_TO_PRINT]
for commit in commits:
print_commit(commit)
pass
else:
print('Could not load repository at {} :('.format(repo_path))
| import os
from git import Repo
COMMITS_TO_PRINT = 5
def print_commit(commit):
print('----')
print(str(commit.hexsha))
print("\"{}\" by {} ({})".format(commit.summary,
commit.author.name,
commit.author.email))
print(str(commit.authored_datetime))
print(str("count: {} and size: {}".format(commit.count(),
commit.size)))
def print_repository(repo):
print('Repo active branch is {}'.format(repo.active_branch))
print('Repo description: {}'.format(repo.description))
print('Repo active branch is {}'.format(repo.active_branch))
for remote in repo.remotes:
print('Remote named "{}" with URL "{}"'.format(remote, remote.url))
print('Last commit for repo is {}.'.format(str(repo.head.commit.hexsha)))
if __name__ == "__main__":
repo_path = os.getenv('GIT_REPO_PATH')
# Repo object used to programmatically interact with Git repositories
repo = Repo(repo_path)
# check that the repository loaded correctly
if not repo.bare:
print('Repo at {} successfully loaded.'.format(repo_path))
print_repository(repo)
# create list of commits then print some of them to stdout
commits = list(repo.iter_commits('master'))[:COMMITS_TO_PRINT]
for commit in commits:
print_commit(commit)
pass
else:
print('Could not load repository at {} :('.format(repo_path))
| mit | Python |
20dce56f93b35ae492c8c3b57c4b75265b0002a0 | add robustness to init | KatiRG/flyingpigeon,KatiRG/flyingpigeon,KatiRG/flyingpigeon,KatiRG/flyingpigeon,bird-house/flyingpigeon,KatiRG/flyingpigeon | flyingpigeon/processes/__init__.py | flyingpigeon/processes/__init__.py | __all__ = [
"wps_subset_continents",
"wps_subset_countries",
"wps_subset_regionseurope",
"wps_subset_points",
"wps_indices_simple",
"wps_indices_percentile",
"wps_weatherregimes_ra",
"wps_weatherregimes_model",
"wps_weatherregimes_projection",
"wps_analogs_detection",
"wps_analogs_model",
"wps_analogs_compare",
"wps_analogs_viewer",
"wps_segetalflora",
"wps_sdm_gbif",
"wps_sdm_csv",
"wps_robustness",
"wps_plot_timeseries",
"wps_fetch",
# climate for impact processes
"wps_c4i_simple_indice",
# processes under development
"wps_spatial_analog",
# "wps_eobs2cordex",
# TODO: c4i processes with multiple input sources
#"wps_c4i_multivar_indice",
#"wps_c4i_percentile_indice",
#"wps_c4i_compound_indice",
]
| __all__ = [
"wps_subset_continents",
"wps_subset_countries",
"wps_subset_regionseurope",
"wps_subset_points",
"wps_indices_simple",
"wps_indices_percentile",
"wps_weatherregimes_ra",
"wps_weatherregimes_model",
"wps_weatherregimes_projection",
"wps_analogs_detection",
"wps_analogs_model",
"wps_analogs_compare",
"wps_analogs_viewer",
"wps_segetalflora",
"wps_sdm_gbif",
"wps_sdm_csv",
"wps_plot_timeseries",
"wps_fetch",
# climate for impact processes
"wps_c4i_simple_indice",
# processes under development
"wps_spatial_analog",
# "wps_eobs2cordex",
# "wps_robustness",
# TODO: c4i processes with multiple input sources
#"wps_c4i_multivar_indice",
#"wps_c4i_percentile_indice",
#"wps_c4i_compound_indice",
]
| apache-2.0 | Python |
87e093f7e5309fa7ad0e96d70bb2f04ec0e29479 | bump version | eyaler/tensorpack,haamoon/tensorpack,eyaler/tensorpack,haamoon/tensorpack,ppwwyyxx/tensorpack,ppwwyyxx/tensorpack,haamoon/tensorpack | tensorpack/libinfo.py | tensorpack/libinfo.py |
# issue#1924 may happen on old systems
import cv2 # noqa
import os
# issue#7378 may happen with custom opencv. It doesn't hurt to disable opencl
os.environ['OPENCV_OPENCL_RUNTIME'] = ''
os.environ['TF_ENABLE_WINOGRAD_NONFUSED'] = '1' # issue#9339
os.environ['TF_AUTOTUNE_THRESHOLD'] = '3' # use more warm-up
os.environ['TF_AVGPOOL_USE_CUDNN'] = '1' # issue#8566
__version__ = '0.1.9'
|
# issue#1924 may happen on old systems
import cv2 # noqa
import os
# issue#7378 may happen with custom opencv. It doesn't hurt to disable opencl
os.environ['OPENCV_OPENCL_RUNTIME'] = ''
os.environ['TF_ENABLE_WINOGRAD_NONFUSED'] = '1' # issue#9339
os.environ['TF_AUTOTUNE_THRESHOLD'] = '3' # use more warm-up
os.environ['TF_AVGPOOL_USE_CUDNN'] = '1' # issue#8566
__version__ = '0.1.8'
| apache-2.0 | Python |
db6304c6bc897b18c6e71b57c22e9bda6464a13a | Add post function. | lohayon5/slacknotif | slacknotif.py | slacknotif.py | import socket
import io
import shutil
import requests
import json
key = input("Be sure to run this as root/sudo. Press enter to continue.")
webhook = input("Paste your slack webhook url: ")
filename = "slacknotif"
def post_to_slack():
slack_username = print(socket.gethostname())
slack_message = print("Test.")
slack_url = webhook
payload = {'username:' : 'slack_username' : 'text' : "slack_message"}
r = requests.get('https://hooks.slack.com/services/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', params=payload)
post_to_slack()
with io.FileIO("%s" % filename, "w") as file:
file.write(script)
shutil.move("./slacknotif", "/opt/%s" % filename) | import io
import shutil
print("Be sure to run this as sudo/root. Press enter to continue...")
url = input("Paste your slack webhook url: ")
filename = "slacknotif"
script = """#!/bin/bash
function post_to_slack () {
SLACK_USERNAME="$(hostname)"
SLACK_MESSAGE="$1"
SLACK_URL=$SLACK_URL
curl -sSL --data "payload={\"username\": \"${SLACK_USERNAME}\", \"text\": \"${SLACK_ICON} ${SLACK_MESSAGE}\"}" ${SLACK_URL}
}
post_to_slack "$(cat)"
exit 0""".encode()
with io.FileIO("slacknotif", "w") as file:
file.write(script)
shutil.move("./slacknotif", "/opt/%s" % filename) | mit | Python |
7f1745f3967cc9c07eb2b06b9dbcbdfa6aa13251 | Update cybergis-script-geoserver-import-styles.py | state-hiu/cybergis-scripts,state-hiu/cybergis-scripts | bin/cybergis-script-geoserver-import-styles.py | bin/cybergis-script-geoserver-import-styles.py | from base64 import b64encode
from optparse import make_option
import json
import urllib
import urllib2
import argparse
import time
import os
import subprocess
#==#
import _geoserver_import_styles
#==#
parser = argparse.ArgumentParser(description='')
parser.add_argument("--path", help="The location in the filesystem of the styles directory")
parser.add_argument("--prefix", help="The prefix to prepend to all the styles when loaded into GeoServer")
parser.add_argument('-gs', '--geoserver', help="The url of the target GeoServer.")
parser.add_argument("--username", help="The username to use for basic auth requests.")
parser.add_argument("--password", help="The password to use for basic auth requests.")
parser.add_argument('--verbose', '-v', default=0, action='count', help="Print out intermediate status messages.")
args = parser.parse_args()
#==#
_geoserver_import_styles.run(args)
| from base64 import b64encode
from optparse import make_option
import json
import urllib
import urllib2
import argparse
import time
import os
import subprocess
#==#
import _geoserver_import_styles
#==#
parser = argparse.ArgumentParser(description='')
parser.add_argument("--path", help="The location in the filesystem of the styles directory")
parser.add_argument('-gs', '--geoserver', help="The url of the target GeoServer.")
parser.add_argument("--username", help="The username to use for basic auth requests.")
parser.add_argument("--password", help="The password to use for basic auth requests.")
parser.add_argument('--verbose', '-v', default=0, action='count', help="Print out intermediate status messages.")
args = parser.parse_args()
#==#
_geoserver_import_styles.run(args)
| mit | Python |
231d3cadcbdcdd15dd7bfe4c97e46e12e0d5444b | Fix the CloudFormation ValidationError message (#788) | gjtempleton/moto,kefo/moto,ZuluPro/moto,okomestudio/moto,botify-labs/moto,whummer/moto,okomestudio/moto,okomestudio/moto,william-richard/moto,rocky4570/moto,ZuluPro/moto,whummer/moto,dbfr3qs/moto,spulec/moto,gjtempleton/moto,spulec/moto,Brett55/moto,heddle317/moto,william-richard/moto,heddle317/moto,spulec/moto,heddle317/moto,Brett55/moto,whummer/moto,silveregg/moto,Affirm/moto,dbfr3qs/moto,Brett55/moto,dbfr3qs/moto,ZuluPro/moto,Affirm/moto,okomestudio/moto,2rs2ts/moto,2rs2ts/moto,Affirm/moto,william-richard/moto,gjtempleton/moto,kefo/moto,Affirm/moto,Affirm/moto,whummer/moto,botify-labs/moto,dbfr3qs/moto,william-richard/moto,dbfr3qs/moto,okomestudio/moto,2rs2ts/moto,spulec/moto,2rs2ts/moto,kefo/moto,william-richard/moto,Brett55/moto,botify-labs/moto,okomestudio/moto,botify-labs/moto,rocky4570/moto,heddle317/moto,gjtempleton/moto,kefo/moto,william-richard/moto,Brett55/moto,braintreeps/moto,2rs2ts/moto,rocky4570/moto,heddle317/moto,dbfr3qs/moto,rocky4570/moto,Brett55/moto,botify-labs/moto,gjtempleton/moto,rocky4570/moto,spulec/moto,botify-labs/moto,ZuluPro/moto,spulec/moto,Affirm/moto,ZuluPro/moto,whummer/moto,ZuluPro/moto,kefo/moto,rocky4570/moto,whummer/moto | moto/cloudformation/exceptions.py | moto/cloudformation/exceptions.py | from __future__ import unicode_literals
from werkzeug.exceptions import BadRequest
from jinja2 import Template
class UnformattedGetAttTemplateException(Exception):
description = 'Template error: resource {0} does not support attribute type {1} in Fn::GetAtt'
status_code = 400
class ValidationError(BadRequest):
def __init__(self, name_or_id, message=None):
if message is None:
message="Stack with id {0} does not exist".format(name_or_id)
template = Template(ERROR_RESPONSE)
super(ValidationError, self).__init__()
self.description = template.render(
code="ValidationError",
message=message,
)
class MissingParameterError(BadRequest):
def __init__(self, parameter_name):
template = Template(ERROR_RESPONSE)
super(MissingParameterError, self).__init__()
self.description = template.render(
code="Missing Parameter",
message="Missing parameter {0}".format(parameter_name),
)
ERROR_RESPONSE = """<ErrorResponse xmlns="http://cloudformation.amazonaws.com/doc/2010-05-15/">
<Error>
<Type>Sender</Type>
<Code>{{ code }}</Code>
<Message>{{ message }}</Message>
</Error>
<RequestId>cf4c737e-5ae2-11e4-a7c9-ad44eEXAMPLE</RequestId>
</ErrorResponse>
"""
| from __future__ import unicode_literals
from werkzeug.exceptions import BadRequest
from jinja2 import Template
class UnformattedGetAttTemplateException(Exception):
description = 'Template error: resource {0} does not support attribute type {1} in Fn::GetAtt'
status_code = 400
class ValidationError(BadRequest):
def __init__(self, name_or_id, message=None):
if message is None:
message="Stack:{0} does not exist".format(name_or_id),
template = Template(ERROR_RESPONSE)
super(ValidationError, self).__init__()
self.description = template.render(
code="ValidationError",
message=message,
)
class MissingParameterError(BadRequest):
def __init__(self, parameter_name):
template = Template(ERROR_RESPONSE)
super(MissingParameterError, self).__init__()
self.description = template.render(
code="Missing Parameter",
message="Missing parameter {0}".format(parameter_name),
)
ERROR_RESPONSE = """<ErrorResponse xmlns="http://cloudformation.amazonaws.com/doc/2010-05-15/">
<Error>
<Type>Sender</Type>
<Code>{{ code }}</Code>
<Message>{{ message }}</Message>
</Error>
<RequestId>cf4c737e-5ae2-11e4-a7c9-ad44eEXAMPLE</RequestId>
</ErrorResponse>
"""
| apache-2.0 | Python |
540af49f11ae8162f73faae320da99689332dc11 | Bump to 3.8.1 | vimalloc/flask-jwt-extended | flask_jwt_extended/__init__.py | flask_jwt_extended/__init__.py | from .jwt_manager import JWTManager
from .view_decorators import (
jwt_required, fresh_jwt_required, jwt_refresh_token_required, jwt_optional
)
from .utils import (
create_refresh_token, create_access_token, get_jwt_identity,
get_jwt_claims, set_access_cookies, set_refresh_cookies,
unset_jwt_cookies, get_raw_jwt, get_current_user, current_user,
get_jti, decode_token, get_csrf_token
)
__version__ = '3.8.1'
| from .jwt_manager import JWTManager
from .view_decorators import (
jwt_required, fresh_jwt_required, jwt_refresh_token_required, jwt_optional
)
from .utils import (
create_refresh_token, create_access_token, get_jwt_identity,
get_jwt_claims, set_access_cookies, set_refresh_cookies,
unset_jwt_cookies, get_raw_jwt, get_current_user, current_user,
get_jti, decode_token, get_csrf_token
)
__version__ = '3.8.0'
| mit | Python |
cbac03ff028c84cf9b7e3a092108144fb399b49c | add test | uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw | myuw/test/api/test_affiliation.py | myuw/test/api/test_affiliation.py | import json
from myuw.test.api import MyuwApiTest, require_url
class TestApiAffiliation(MyuwApiTest):
@require_url('myuw_affiliation')
def test_javerage(self):
self.set_user('fffjjj')
response = self.get_response_by_reverse('myuw_affiliation')
self.assertEquals(response.status_code, 400)
self.set_user('javerage')
response = self.get_response_by_reverse('myuw_affiliation')
self.assertEquals(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(data["class_level"], "SENIOR")
self.assertFalse(data["instructor"])
self.assertFalse(data["applicant"])
self.assertFalse(data["grad"])
self.assertFalse(data["grad_c2"])
self.assertFalse(data["undergrad_c2"])
self.assertFalse(data["employee"])
self.assertFalse(data["faculty"])
self.assertFalse(data["clinician"])
self.assertFalse(data["staff_employee"])
self.assertFalse(data["bothell"])
self.assertFalse(data["tacoma"])
self.assertFalse(data["F1"])
self.assertFalse(data["J1"])
self.assertFalse(data["intl_stud"])
self.assertFalse(data["fyp"])
self.assertFalse(data["aut_transfer"])
self.assertFalse(data["win_transfer"])
self.assertFalse(data["grad"])
self.assertFalse(data["alum_asso"])
self.assertFalse(data["alumni"])
self.assertFalse(data["retiree"])
self.assertFalse(data["past_employee"])
self.assertFalse(data["past_stud"])
self.assertFalse(data["no_1st_class_affi"])
self.assertFalse(data["official_bothell"])
self.assertFalse(data["official_tacoma"])
self.assertTrue(data["undergrad"])
self.assertTrue(data["registered_stud"])
self.assertTrue(data["pce"])
self.assertTrue(data["stud_employee"])
self.assertTrue(data["seattle"])
self.assertTrue(data["official_seattle"])
self.assertTrue(data["hxt_viewer"])
self.assertTrue(data["enrolled_stud"])
self.assertTrue(data["2fa_permitted"])
self.assertEqual(self.request.session.get_expiry_age(), 60)
| import json
from myuw.test.api import MyuwApiTest, require_url
class TestApiAffiliation(MyuwApiTest):
@require_url('myuw_affiliation')
def test_javerage(self):
self.set_user('fffjjj')
response = self.get_response_by_reverse('myuw_affiliation')
self.assertEquals(response.status_code, 400)
self.set_user('javerage')
response = self.get_response_by_reverse('myuw_affiliation')
self.assertEquals(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(data["class_level"], "SENIOR")
self.assertFalse(data["instructor"])
self.assertFalse(data["applicant"])
self.assertFalse(data["grad"])
self.assertFalse(data["grad_c2"])
self.assertFalse(data["undergrad_c2"])
self.assertFalse(data["employee"])
self.assertFalse(data["faculty"])
self.assertFalse(data["clinician"])
self.assertFalse(data["staff_employee"])
self.assertFalse(data["bothell"])
self.assertFalse(data["tacoma"])
self.assertFalse(data["F1"])
self.assertFalse(data["J1"])
self.assertFalse(data["intl_stud"])
self.assertFalse(data["fyp"])
self.assertFalse(data["aut_transfer"])
self.assertFalse(data["win_transfer"])
self.assertFalse(data["grad"])
self.assertFalse(data["alum_asso"])
self.assertFalse(data["alumni"])
self.assertFalse(data["retiree"])
self.assertFalse(data["past_employee"])
self.assertFalse(data["past_stud"])
self.assertFalse(data["no_1st_class_affi"])
self.assertFalse(data["official_bothell"])
self.assertFalse(data["official_tacoma"])
self.assertTrue(data["undergrad"])
self.assertTrue(data["registered_stud"])
self.assertTrue(data["pce"])
self.assertTrue(data["stud_employee"])
self.assertTrue(data["seattle"])
self.assertTrue(data["official_seattle"])
self.assertTrue(data["hxt_viewer"])
self.assertTrue(data["enrolled_stud"])
self.assertTrue(data["2fa_permitted"])
| apache-2.0 | Python |
7b50c9290a8c8d3481d9147ebb66d3b7868ad7fc | Write to bouncer config file | m-lab/ooni-support,hellais/ooni-support,m-lab/ooni-support,hellais/ooni-support | bouncer-plumbing/mlab-to-bouncer/makeconfig.py | bouncer-plumbing/mlab-to-bouncer/makeconfig.py | #!/usr/bin/env python
import sys
import yaml
def read_parts_from_stdin():
data = sys.stdin.read()
parts_string = data.split("----")
parts_parsed = []
for part in parts_string:
part_parsed = yaml.safe_load(part)
parts_parsed.append(part_parsed)
return parts_parsed
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.dump(bouncer_config)
def write_bouncer_config(path, bouncer_config_contents):
try:
f = open(path, 'w')
f.write(bouncer_config_contents)
f.close()
except IOError:
print "Couldn't write to bouncer config file."
exit(1)
bouncer_config_path = '/home/mlab/data/bouncer.yaml'
if len(sys.argv) >= 2:
bouncer_config_path = sys.argv[1]
# FIXME: Read from the mlab-ns simulator.
parts = read_parts_from_stdin()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config_path, bouncer_config)
| #!/usr/bin/env python
import sys
import yaml
def read_parts_from_stdin():
data = sys.stdin.read()
parts_string = data.split("----")
parts_parsed = []
for part in parts_string:
part_parsed = yaml.safe_load(part)
parts_parsed.append(part_parsed)
return parts_parsed
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.dump(bouncer_config)
def write_bouncer_config(bouncer_config, path):
print bouncer_config
parts = read_parts_from_stdin()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config, '/home/mlab/data/bouncer.yaml')
| apache-2.0 | Python |
8170ad6cdfd2346bc24a3d743663b4866416ca83 | Add functions to add shapes and iterate over each shape to render. | thebillington/pyPhys3D | Engine.py | Engine.py | #Imports
import pygame
from pygame.locals import *
from OpenGL.GL import *
from OpenGL.GLU import *
from shapes import Shape, Cube
#Create a game class
class Game(object):
#Constructor
def __init__(self, title, width, height, bgcolour):
#Initialise pygame
pygame.init()
#Set the size of the window
self.size = self.width, self.height = width, height
#Set the default perspective and clipping distances
self.fov = 45.0
self.aspectratio = width / height
self.minrender = 0.1
self.maxrender = 80
#Set the pygame mode to use double buffering and open gl
pygame.set_mode(self.size, DOUBLEBUF|OPENGL)
#Set the perspective
self.setperspective()
#Create an empty list of shapes to render
self.shapes = []
#Create a function to update the perspective
def setperspective(self):
#Set the perspective
gluPerspective(self.fov, self.aspectratio, self.minrender, self.maxrender)
#Create a function to add a shape
def addshape(self, s):
self.shapes.append(s)
#Create a function to render the shapes
def render(self):
#For each of the shapes, check the type and render it
for s in shapes:
#If the shape is a cube, call the rendercube method
if s.type == Shape.CUBE:
rendercube(s)
| #Imports
import pygame
from pygame.locals import *
from OpenGL.GL import *
from OpenGL.GLU import *
from shapes import Shape, Cube
#Create a game class
class Game(object):
#Constructor
def __init__(self, title, width, height, bgcolour):
#Initialise pygame
pygame.init()
#Set the size of the window
self.size = self.width, self.height = width, height
#Set the default perspective and clipping distances
self.fov = 45.0
self.aspectratio = width / height
self.minrender = 0.1
self.maxrender = 80
#Set the pygame mode to use double buffering and open gl
pygame.set_mode(self.size, DOUBLEBUF|OPENGL)
#Set the perspective
self.setperspective()
#Create an empty list of shapes to render
self.shapes = []
#Create a function to update the perspective
def setperspective(self):
#Set the perspective
gluPerspective(self.fov, self.aspectratio, self.minrender, self.maxrender)
| mit | Python |
c95b2dd92bb0b7e7b6bd9b57b3a5031310fdd591 | add warning message about large downloads | bthirion/nipy,nipy/nireg,alexis-roche/nipy,nipy/nipy-labs,alexis-roche/nipy,arokem/nipy,bthirion/nipy,bthirion/nipy,arokem/nipy,alexis-roche/nipy,alexis-roche/niseg,alexis-roche/nireg,alexis-roche/nireg,nipy/nipy-labs,nipy/nireg,alexis-roche/register,arokem/nipy,alexis-roche/register,bthirion/nipy,alexis-roche/register,alexis-roche/nipy,arokem/nipy,alexis-roche/niseg | examples/FIAC/batch.py | examples/FIAC/batch.py | import os, time
print "WARNING: Running this program will involve downloading up to 3 gigs of data and takes a number of hours to run."
ttoc = time.time()
for subj in range(16):
os.system('python2.4 ./subject.py %d' % subj)
ttic = time.time()
print 'total time for %d subjects (minutes): %02f' % ((subj+1), ((ttic-ttoc)/60))
ttoc = time.time()
for design in ['block', 'event']:
for which in ['delays', 'contrasts']:
for contrast in ['overall', 'speaker', 'sentence', 'interaction']:
for stat in ['t', 'sd', 'effect']:
os.system('python2.4 ./plots-fixed.py --which=%s --contrast=%s --design=%s --stat=%s' %(which, contrast, design, stat))
print 'plots done', which, contrast, design, stat
ttic = time.time()
print 'total time for fixed plots : %02f' % ((ttic-ttoc)/60)
ttoc = time.time()
for what in ['rho', 'fwhmOLS']:
os.system('python2.4 ./plots-run.py --what=%s' % what)
ttic = time.time()
print 'total time for runs plots : %02f' % ((ttic-ttoc)/60)
ttoc = time.time()
for design in ['block', 'event']:
for which in ['delays', 'contrasts']:
for contrast in ['overall', 'speaker', 'sentence', 'interaction']:
cmd = """
python2.4 ./multi.py --which=%s
--design=%s --contrast=%s --clobber
""" % (which, design, contrast)
cmd = " ".join(cmd.replace('\n', ' ').strip().split())
print cmd
os.system(cmd)
ttic = time.time()
print 'total time for fixed effects group analysis : %02f' % ((ttic-ttoc)/60)
| import os, time
ttoc = time.time()
for subj in range(16):
os.system('python2.4 ./subject.py %d' % subj)
ttic = time.time()
print 'total time for %d subjects (minutes): %02f' % ((subj+1), ((ttic-ttoc)/60))
ttoc = time.time()
for design in ['block', 'event']:
for which in ['delays', 'contrasts']:
for contrast in ['overall', 'speaker', 'sentence', 'interaction']:
for stat in ['t', 'sd', 'effect']:
os.system('python2.4 ./plots-fixed.py --which=%s --contrast=%s --design=%s --stat=%s' %(which, contrast, design, stat))
print 'plots done', which, contrast, design, stat
ttic = time.time()
print 'total time for fixed plots : %02f' % ((ttic-ttoc)/60)
ttoc = time.time()
for what in ['rho', 'fwhmOLS']:
os.system('python2.4 ./plots-run.py --what=%s' % what)
ttic = time.time()
print 'total time for runs plots : %02f' % ((ttic-ttoc)/60)
ttoc = time.time()
for design in ['block', 'event']:
for which in ['delays', 'contrasts']:
for contrast in ['overall', 'speaker', 'sentence', 'interaction']:
cmd = """
python2.4 ./multi.py --which=%s
--design=%s --contrast=%s --clobber
""" % (which, design, contrast)
cmd = " ".join(cmd.replace('\n', ' ').strip().split())
print cmd
os.system(cmd)
ttic = time.time()
print 'total time for fixed effects group analysis : %02f' % ((ttic-ttoc)/60)
| bsd-3-clause | Python |
e9278a2e229953accb8ab9a3d44eae8e455bcb6a | fix linting | CamDavidsonPilon/lifelines | examples/cure_model.py | examples/cure_model.py | # -*- coding: utf-8 -*-
from lifelines.fitters import ParametricRegressionFitter
import autograd.numpy as np
from autograd.scipy.special import expit
import matplotlib.pyplot as plt
from scipy.stats import weibull_min
class CureModel(ParametricRegressionFitter):
_fitted_parameter_names = ["lambda_", "beta_", "rho_"]
def _cumulative_hazard(self, params, T, Xs):
c = expit(np.dot(Xs["beta_"], params["beta_"]))
lambda_ = np.exp(np.dot(Xs["lambda_"], params["lambda_"]))
rho_ = np.exp(np.dot(Xs["rho_"], params["rho_"]))
survival = np.exp(-((T / lambda_) ** rho_))
return -np.log((1 - c) * 1.0 + c * survival)
from lifelines.datasets import load_rossi
swf = CureModel(penalizer=0.0)
rossi = load_rossi()
rossi["intercept"] = 1.0
covariates = {"lambda_": rossi.columns, "rho_": ["intercept"], "beta_": rossi.columns}
swf.fit(rossi, "week", event_col="arrest", regressors=covariates) # TODO: name
swf.print_summary(4)
# swf.plot()
# plt.show()
| # -*- coding: utf-8 -*-
from lifelines.fitters import ParametricRegressionFitter
import autograd.numpy as np
from autograd.scipy.special import expit
import matplotlib.pyplot as plt
from scipy.stats import weibull_min
class CureModel(ParametricRegressionFitter):
_fitted_parameter_names = ["lambda_", "beta_", "rho_"]
def _cumulative_hazard(self, params, T, Xs):
c = expit(np.dot(Xs["beta_"], params["beta_"]))
lambda_ = np.exp(np.dot(Xs["lambda_"], params["lambda_"]))
rho_ = np.exp(np.dot(Xs["rho_"], params["rho_"]))
survival = np.exp(-(T / lambda_) ** rho_)
return -np.log((1 - c) * 1.0 + c * survival)
from lifelines.datasets import load_rossi
swf = CureModel(penalizer=0.0)
rossi = load_rossi()
rossi["intercept"] = 1.0
covariates = {"lambda_": rossi.columns, "rho_": ["intercept"], "beta_": rossi.columns}
swf.fit(rossi, "week", event_col="arrest", regressors=covariates) # TODO: name
swf.print_summary(4)
# swf.plot()
# plt.show()
| mit | Python |
4d270e3a0b9ab144fab26a1add81048dd4a3e3c9 | Test `back_current_density` return unit | jrsmith3/tec,jrsmith3/tec | test/test_Langmuir.py | test/test_Langmuir.py | # -*- coding: utf-8 -*-
import numpy as np
from astropy import units
import unittest
from tec.electrode import Metal
from tec.models import Langmuir
em = Metal(temp=1000., barrier=2., richardson=10.)
co = Metal(temp=300., barrier=1., richardson=10., position=10.)
class Base(unittest.TestCase):
"""
Base class for tests
This class is intended to be subclassed so that I don't have to rewrite the same `setUp` method for each class containing tests.
"""
def setUp(self):
"""
Create new Langmuir object for every test
"""
if em.position > co.position:
raise ValueError("Initialization em.position > co.position.")
self.t = Langmuir(em, co)
self.em = em
self.co = co
class MethodsInput(Base):
"""
Tests methods which take input parameters
Tests include: passing invalid input, etc.
"""
pass
class MethodsReturnType(Base):
"""
Tests methods' output types
"""
def test_back_current_density(self):
"""
back_current_density should return astropy.units.Quantity
"""
self.assertIsInstance(self.t.back_current_density(), units.Quantity)
def test_normalization_length(self):
"""
normalization_length should return astropy.units.Quantity
"""
current_density = units.Quantity(1, "A cm-2")
self.assertIsInstance(self.t.normalization_length(current_density), units.Quantity)
class MethodsReturnUnits(Base):
"""
Tests methods' output units where applicable
"""
def test_back_current_density(self):
"""
back_current_density should return a value with unit A/cm2
"""
self.assertEqual(self.t.back_current_density().unit, units.Unit("A/cm2"))
class MethodsReturnValues(Base):
"""
Tests values of methods against known values
"""
pass
| # -*- coding: utf-8 -*-
import numpy as np
from astropy import units
import unittest
from tec.electrode import Metal
from tec.models import Langmuir
em = Metal(temp=1000., barrier=2., richardson=10.)
co = Metal(temp=300., barrier=1., richardson=10., position=10.)
class Base(unittest.TestCase):
"""
Base class for tests
This class is intended to be subclassed so that I don't have to rewrite the same `setUp` method for each class containing tests.
"""
def setUp(self):
"""
Create new Langmuir object for every test
"""
if em.position > co.position:
raise ValueError("Initialization em.position > co.position.")
self.t = Langmuir(em, co)
self.em = em
self.co = co
class MethodsInput(Base):
"""
Tests methods which take input parameters
Tests include: passing invalid input, etc.
"""
pass
class MethodsReturnType(Base):
"""
Tests methods' output types
"""
def test_back_current_density(self):
"""
back_current_density should return astropy.units.Quantity
"""
self.assertIsInstance(self.t.back_current_density(), units.Quantity)
def test_normalization_length(self):
"""
normalization_length should return astropy.units.Quantity
"""
current_density = units.Quantity(1, "A cm-2")
self.assertIsInstance(self.t.normalization_length(current_density), units.Quantity)
class MethodsReturnUnits(Base):
"""
Tests methods' output units where applicable
"""
pass
class MethodsReturnValues(Base):
"""
Tests values of methods against known values
"""
pass
| mit | Python |
597b0505ff47de8447c7f7318f9bf0a56c890caa | Store access data as well as last event info | lshift/scrutiny,lshift/scrutiny | list-repos.py | list-repos.py | import github
import yaml
import os.path as path
from datetime import datetime, timedelta
import collections
config = yaml.load(open("backup.yaml"))
g = github.Github(login_or_token=config["admin-token"])
repos = {}
oldest_when = datetime.now() - timedelta(days=90) # Github doesn't return events more than 90 days ago, so assume repos with that timestamp had something just before then https://developer.github.com/v3/activity/events/
org = g.get_organization(config["org"])
Access = collections.namedtuple('Access', ['who', 'why', 'what'])
admins = []
for member in org.get_members(role="admin"):
admins.append(member.login)
def max_permission(perms):
if "admin" in perms:
return "admin"
elif "push" in perms:
return "push"
elif "pull" in perms:
return "pull"
else:
raise Exception, perms
for repo in org.get_repos():
print "repo", repo.name
repos[repo.name] = {}
access = []
def new_access(adding):
existing = [x for x in access if x.who == adding.who]
if len(existing) == 1:
existing = existing[0]
new_perm = max_permission([adding.what, existing.what])
if new_perm != existing.what:
raise Exception, (existing, adding)
elif len(existing) == 0:
access.append(adding)
else:
raise Exception, existing
for admin in admins:
new_access(Access(admin, "[Owner]", "admin"))
for team in repo.get_teams():
for user in team.get_members():
new_access(Access(user.login, team.name, team.permission))
for collaborator in repo.get_collaborators():
perms = collaborator._rawData['permissions']
perms = max_permission({k: v for k,v in perms.items() if v}.keys())
new_access(Access(collaborator.login, "Collaborator", perms))
if not repo.private:
new_access(Access("Everyone", "[Public access]", "pull"))
repos[repo.name]["access"] = [dict(x._asdict()) for x in access]
events = list(repo.get_events().get_page(0))
if len(events) > 0:
when = events[0].created_at
else:
when = oldest_when.replace() # Can't do copy, but replace works!
repos[repo.name]["last_event"] = when
with open(path.join(config["folder"], config["repos"]), "w") as reposfile:
reposfile.write(yaml.safe_dump(repos))
| import github
import yaml
import os.path as path
from datetime import datetime, timedelta
config = yaml.load(open("backup.yaml"))
g = github.Github(login_or_token=config["admin-token"])
repos = {}
oldest_when = datetime.now() - timedelta(days=90) # Github doesn't return events more than 90 days ago, so assume repos with that timestamp had something just before then https://developer.github.com/v3/activity/events/
for repo in g.get_organization(config["org"]).get_repos():
events = list(repo.get_events().get_page(0))
if len(events) > 0:
when = events[0].created_at
else:
when = oldest_when.replace() # Can't do copy, but replace works!
repos[repo.name] = {"last_event": when}
with open(path.join(config["folder"], config["repos"]), "w") as reposfile:
reposfile.write(yaml.safe_dump(repos))
| agpl-3.0 | Python |
2d359a318ed1fa835ca986a336fc5c26ea7e326e | Fix #225, Add author in builds API | frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq | frigg/builds/serializers.py | frigg/builds/serializers.py | from rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
'still_running',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'author',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'author',
'color',
'pull_request_url',
'commit_url',
)
| from rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
'still_running',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
| mit | Python |
53e1ff21bb219495f1b99f84dbb31624fdd35231 | Fix that crazy error that would cause enless looping... | jaredmanning/learning,jaredmanning/learning | lpthw/ex33.py | lpthw/ex33.py | #i = 0
#numbers = []
#while i < 6:
# print "At the top i is %d" % i
# numbers.append(i)
#
# i += 1
# print "Numbers now: ", numbers
# print "At the bottom i is %d" % i
#
#
#print "The numbers: "
#
#for num in numbers:
# print num
#Study Drills
print "What's the limit of the list?"
a = int(raw_input("> "))
def list_numbers(a):
"""This function might add numbers to a list?"""
i = 0
numbers = []
while i < a:
print "At the top i is %d" % i
numbers.append(i)
i += 1
print "Numbers now: ", numbers
print "At the bottom i is %d" % i
print "The numbers: "
for num in numbers:
print num
return
list_numbers(a)
| #i = 0
#numbers = []
#while i < 6:
# print "At the top i is %d" % i
# numbers.append(i)
#
# i += 1
# print "Numbers now: ", numbers
# print "At the bottom i is %d" % i
#
#
#print "The numbers: "
#
#for num in numbers:
# print num
#Study Drills
print "What's the limit of the list?"
a = raw_input("> ")
def list_numbers(a):
"""This function might add numbers to a list?"""
i = 0
numbers = []
while i < a:
print "At the top i is %d" % i
numbers.append(i)
i += 1
print "Numbers now: ", numbers
print "At the bottom i is %d" % i
print "The numbers: "
for num in numbers:
print num
return
list_numbers(a)
| mit | Python |
5df45d88d7547dafe459ef211f0ef284b18ffacc | update bash_command | bichocj/slack-deploy-github,bichocj/slack-deploy-github | lu/plugins.py | lu/plugins.py | import re
import subprocess
from slackbot.bot import listen_to
from slackbot.bot import respond_to
@respond_to('hi', re.IGNORECASE)
def hi(message):
message.reply('hi-me -> jaime XD')
message.react('+1')
@respond_to('help', re.IGNORECASE)
def help(message):
message.reply('I going to do: ')
with open('bash_commands.sh', "r") as f:
for line in f:
message.reply("$ " + line)
message.reply("write 'deploy' if you want I run the script")
message.reply("also you can give me your own command using 'exec (.*)'")
@respond_to('deploy', re.IGNORECASE)
def deploy(message):
message.reply('ok!, I\'m beginning with de deploy')
p = subprocess.Popen(['./bash_commands.sh'], shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
text = ''
for line in p.stdout.readlines():
text += line.decode("utf-8")
message.reply(text)
# uncomment to exec line by line
# with open('bash_commands.sh', "r") as f:
# for line in f:
# p = subprocess.Popen(line, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
# text = ''
# for line in p.stdout.readlines():
# text += line.decode("utf-8")
# message.reply(text)
# f.close()
@respond_to('exec (.*)')
def exec_command(message, something):
p = subprocess.Popen(something, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
text = ''
for line in p.stdout.readlines():
text += line.decode("utf-8")
message.reply(text)
import subprocess
| import re
import subprocess
from slackbot.bot import listen_to
from slackbot.bot import respond_to
@respond_to('hi', re.IGNORECASE)
def hi(message):
message.reply('hi-me -> jaime XD')
message.react('+1')
@respond_to('help', re.IGNORECASE)
def help(message):
message.reply('I going to do: ')
with open('bash_commands.sh', "r") as f:
for line in f:
message.reply("$ " + line)
message.reply("write 'deploy' if you want I run the script")
message.reply("also you can give me your own command using 'exec (.*)'")
@respond_to('deploy', re.IGNORECASE)
def deploy(message):
message.reply('ok!, I\'m beginning with de deploy')
with open('bash_commands.sh', "r") as f:
for line in f:
p = subprocess.Popen(line, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
text = ''
for line in p.stdout.readlines():
text += line.decode("utf-8")
message.reply(text)
f.close()
@respond_to('exec (.*)')
def exec_command(message, something):
p = subprocess.Popen(something, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
text = ''
for line in p.stdout.readlines():
text += line.decode("utf-8")
message.reply(text) | apache-2.0 | Python |
f51bae0d0787e879fd7dafe5d460568c2bc8ef31 | fix e2e test fixtures to post list of jobs | kapy2010/treeherder,tojonmz/treeherder,edmorley/treeherder,wlach/treeherder,wlach/treeherder,adusca/treeherder,deathping1994/treeherder,glenn124f/treeherder,gbrmachado/treeherder,tojon/treeherder,jgraham/treeherder,moijes12/treeherder,edmorley/treeherder,avih/treeherder,akhileshpillai/treeherder,gbrmachado/treeherder,moijes12/treeherder,gbrmachado/treeherder,moijes12/treeherder,vaishalitekale/treeherder,sylvestre/treeherder,jgraham/treeherder,sylvestre/treeherder,akhileshpillai/treeherder,kapy2010/treeherder,glenn124f/treeherder,adusca/treeherder,edmorley/treeherder,gbrmachado/treeherder,kapy2010/treeherder,jgraham/treeherder,parkouss/treeherder,moijes12/treeherder,jgraham/treeherder,deathping1994/treeherder,avih/treeherder,moijes12/treeherder,edmorley/treeherder,akhileshpillai/treeherder,tojonmz/treeherder,tojon/treeherder,deathping1994/treeherder,rail/treeherder,vaishalitekale/treeherder,tojonmz/treeherder,akhileshpillai/treeherder,parkouss/treeherder,tojonmz/treeherder,avih/treeherder,avih/treeherder,vaishalitekale/treeherder,rail/treeherder,KWierso/treeherder,vaishalitekale/treeherder,adusca/treeherder,glenn124f/treeherder,tojon/treeherder,parkouss/treeherder,glenn124f/treeherder,sylvestre/treeherder,wlach/treeherder,adusca/treeherder,parkouss/treeherder,rail/treeherder,gbrmachado/treeherder,parkouss/treeherder,akhileshpillai/treeherder,KWierso/treeherder,vaishalitekale/treeherder,parkouss/treeherder,glenn124f/treeherder,kapy2010/treeherder,kapy2010/treeherder,rail/treeherder,sylvestre/treeherder,sylvestre/treeherder,gbrmachado/treeherder,jgraham/treeherder,KWierso/treeherder,wlach/treeherder,tojonmz/treeherder,avih/treeherder,akhileshpillai/treeherder,avih/treeherder,glenn124f/treeherder,rail/treeherder,adusca/treeherder,wlach/treeherder,deathping1994/treeherder,deathping1994/treeherder,tojonmz/treeherder,vaishalitekale/treeherder,tojon/treeherder,deathping1994/treeherder,wl
ach/treeherder,adusca/treeherder,KWierso/treeherder,sylvestre/treeherder,rail/treeherder,moijes12/treeherder,jgraham/treeherder | tests/e2e/conftest.py | tests/e2e/conftest.py | from django.core.urlresolvers import reverse
from django.template import Context, Template
import pytest
from webtest.app import TestApp
import simplejson as json
from treeherder.webapp.wsgi import application
import os
@pytest.fixture
def pending_jobs():
"""returns a list of buildapi pending jobs"""
return json.loads(open(
os.path.join(os.path.dirname(__file__), "pending.json")
).read())
@pytest.fixture
def running_jobs():
"""returns a list of buildapi running jobs"""
return json.loads(open(
os.path.join(os.path.dirname(__file__), "running.json")
).read())
@pytest.fixture
def completed_jobs(sample_data):
"""returns a list of pulse completed jobs"""
base_dir = os.path.dirname(__file__)
content = open(
os.path.join(os.path.dirname(__file__), "finished.json")
).read()
t = Template(content)
c = Context({"base_dir": base_dir})
return json.loads(t.render(c))
@pytest.fixture
def pending_jobs_stored(jm, pending_jobs, result_set_stored):
"""
stores a list of buildapi pending jobs into the jobs store
using BuildApiTreeHerderAdapter
"""
pending_jobs.update(result_set_stored[0])
url = reverse("jobs-list", kwargs={"project": jm.project})
TestApp(application).post_json(url, params=[pending_jobs])
@pytest.fixture
def running_jobs_stored(jm, running_jobs, result_set_stored):
"""
stores a list of buildapi running jobs into the objectstore
using BuildApiTreeHerderAdapter
"""
running_jobs.update(result_set_stored[0])
url = reverse("jobs-list", kwargs={"project": jm.project})
TestApp(application).post_json(url, params=[running_jobs])
@pytest.fixture
def completed_jobs_stored(jm, completed_jobs, result_set_stored):
"""
stores a list of buildapi completed jobs into the objectstore
using BuildApiTreeHerderAdapter
"""
completed_jobs['revision_hash'] = result_set_stored[0]['revision_hash']
url = reverse('objectstore-list', kwargs={"project": jm.project})
TestApp(application).post_json(url, params=[completed_jobs])
@pytest.fixture
def completed_jobs_loaded(jm, completed_jobs_stored):
jm.process_objects(1, raise_errors=True)
| from django.core.urlresolvers import reverse
from django.template import Context, Template
import pytest
from webtest.app import TestApp
import simplejson as json
from treeherder.webapp.wsgi import application
import os
@pytest.fixture
def pending_jobs():
"""returns a list of buildapi pending jobs"""
return json.loads(open(
os.path.join(os.path.dirname(__file__), "pending.json")
).read())
@pytest.fixture
def running_jobs():
"""returns a list of buildapi running jobs"""
return json.loads(open(
os.path.join(os.path.dirname(__file__), "running.json")
).read())
@pytest.fixture
def completed_jobs(sample_data):
"""returns a list of pulse completed jobs"""
base_dir = os.path.dirname(__file__)
content = open(
os.path.join(os.path.dirname(__file__), "finished.json")
).read()
t = Template(content)
c = Context({"base_dir": base_dir})
return json.loads(t.render(c))
@pytest.fixture
def pending_jobs_stored(jm, pending_jobs, result_set_stored):
"""
stores a list of buildapi pending jobs into the jobs store
using BuildApiTreeHerderAdapter
"""
pending_jobs.update(result_set_stored)
url = reverse("resultset-add-job",
kwargs={"project": jm.project, "pk": pending_jobs['resultset_id']})
TestApp(application).post_json(url, params=pending_jobs)
@pytest.fixture
def running_jobs_stored(jm, running_jobs, result_set_stored):
"""
stores a list of buildapi running jobs into the objectstore
using BuildApiTreeHerderAdapter
"""
running_jobs.update(result_set_stored)
url = reverse("resultset-add-job",
kwargs={"project": jm.project, "pk": running_jobs['resultset_id']})
TestApp(application).post_json(url, params=running_jobs)
@pytest.fixture
def completed_jobs_stored(jm, completed_jobs, result_set_stored):
"""
stores a list of buildapi completed jobs into the objectstore
using BuildApiTreeHerderAdapter
"""
completed_jobs['revision_hash'] = result_set_stored['revision_hash']
url = reverse('objectstore-list', kwargs={"project": jm.project})
TestApp(application).post_json(url, params=completed_jobs)
@pytest.fixture
def completed_jobs_loaded(jm, completed_jobs_stored):
jm.process_objects(1, raise_errors=True)
| mpl-2.0 | Python |
2401e8c11fb88b2da6ea1c2c376023150e2f97d6 | clean up | zujko/manage-vm,zujko/manage-vm,zujko/manage-vm | main/views.py | main/views.py | from django.shortcuts import render
from .forms import *
from proxmoxer import ProxmoxAPI
from managevm import secrets
def index(request):
return render(request, 'index.html')
def manage(request):
return render(request,'manage.html')
def create_vm(request):
if request.method == 'POST':
vm_form = VM_Form(data=request.POST)
drive_form = CD_DVD(data=request.POST)
disk_form = Disk(data=request.POST)
cpu_form = CPU(data=request.POST)
net_form = Network(data=request.POST)
if vm_form.is_valid() and drive_form.is_valid() and disk_form.is_valid() and cpu_form.is_valid() and net_form.is_valid():
proxmox = ProxmoxAPI(secrets.PROXMOX_HOST,user=secrets.PROXMOX_USER,password=secrets.PROXMOX_PASS,verify_ssl=False)
node = proxmox.nodes(vm_form.cleaned_data['node'])
vm_id = int(proxmox.cluster.nextid.get())
testdata = node.qemu.create(vmid=vm_id,
name=vm_form.cleaned_data['name'],
ostype=vm_form.cleaned_data['ostype'],
ide2=drive_form.cleaned_data['iso']+',media=cdrom',
ide0=disk_form.cleaned_data['storage']+':'+str(disk_form.cleaned_data['size'])+',format='+disk_form.cleaned_data['disk_format'],
sockets=1,
cores=cpu_form.cleaned_data['cores'],
numa=0,
memory=vm_form.cleaned_data['memory'],
net0=net_form.cleaned_data['model']+',bridge='+net_form.cleaned_data['bridge'])
else:
vm_form = VM_Form()
drive_form = CD_DVD()
disk_form = Disk()
cpu_form = CPU()
net_form = Network()
return render(request, 'create.html',{'vm_form': vm_form,'drive_form': drive_form,'disk_form': disk_form,'cpu_form': cpu_form,'net_form': net_form})
| from django.shortcuts import render
from .forms import *
from proxmoxer import ProxmoxAPI
from managevm import secrets
def index(request):
return render(request, 'index.html')
def manage(request):
return render(request,'manage.html')
def create_vm(request):
if request.method == 'POST':
vm_form = VM_Form(data=request.POST)
drive_form = CD_DVD(data=request.POST)
disk_form = Disk(data=request.POST)
cpu_form = CPU(data=request.POST)
net_form = Network(data=request.POST)
if vm_form.is_valid() and drive_form.is_valid() and disk_form.is_valid() and cpu_form.is_valid() and net_form.is_valid():
proxmox = ProxmoxAPI(secrets.PROXMOX_HOST,user=secrets.PROXMOX_USER,password=secrets.PROXMOX_PASS,verify_ssl=False)
node = proxmox.nodes(vm_form.cleaned_data['node'])
# [TESTING] Create openvz container
vm_id = int(proxmox.cluster.nextid.get())
testdata = node.qemu.create(vmid=vm_id,
name=vm_form.cleaned_data['name'],
ostype=vm_form.cleaned_data['ostype'],
ide2=drive_form.cleaned_data['iso']+',media=cdrom',
ide0=disk_form.cleaned_data['storage']+':'+str(disk_form.cleaned_data['size'])+',format='+disk_form.cleaned_data['disk_format'],
sockets=1,
cores=cpu_form.cleaned_data['cores'],
numa=0,
memory=vm_form.cleaned_data['memory'],
net0=net_form.cleaned_data['model']+',bridge='+net_form.cleaned_data['bridge'])
print(str(testdata))
else:
vm_form = VM_Form()
drive_form = CD_DVD()
disk_form = Disk()
cpu_form = CPU()
net_form = Network()
return render(request, 'create.html',{'vm_form': vm_form,'drive_form': drive_form,'disk_form': disk_form,'cpu_form': cpu_form,'net_form': net_form})
| mit | Python |
2342adc6339c0cb2ae794185800286dd6d2d17f6 | introduce a new kind of exception: RedirectWarning (warning with an additional redirection button) | MarkusTeufelberger/openobject-server,MarkusTeufelberger/openobject-server,MarkusTeufelberger/openobject-server | openerp/exceptions.py | openerp/exceptions.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" OpenERP core exceptions.
This module defines a few exception types. Those types are understood by the
RPC layer. Any other exception type bubbling until the RPC layer will be
treated as a 'Server error'.
"""
class Warning(Exception):
pass
class RedirectWarning(Exception):
""" Warning with a possibility to redirect the user instead of simply
discarding the warning message.
"""
def __init__(self, msg, action_id, button_text):
"""
:param int action_id: id of the action required to perform the
redirection
:param string button_text: text to put on the button which will trigger
the redirection
"""
super(RedirectWarning, self).__init__(msg, action_id, button_text)
class AccessDenied(Exception):
""" Login/password error. No message, no traceback. """
def __init__(self):
super(AccessDenied, self).__init__('Access denied.')
self.traceback = ('', '', '')
class AccessError(Exception):
""" Access rights error. """
class DeferredException(Exception):
""" Exception object holding a traceback for asynchronous reporting.
Some RPC calls (database creation and report generation) happen with
an initial request followed by multiple, polling requests. This class
is used to store the possible exception occuring in the thread serving
the first request, and is then sent to a polling request.
('Traceback' is misleading, this is really a exc_info() triple.)
"""
def __init__(self, msg, tb):
self.message = msg
self.traceback = tb
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" OpenERP core exceptions.
This module defines a few exception types. Those types are understood by the
RPC layer. Any other exception type bubbling until the RPC layer will be
treated as a 'Server error'.
"""
class Warning(Exception):
pass
class AccessDenied(Exception):
""" Login/password error. No message, no traceback. """
def __init__(self):
super(AccessDenied, self).__init__('Access denied.')
self.traceback = ('', '', '')
class AccessError(Exception):
""" Access rights error. """
class DeferredException(Exception):
""" Exception object holding a traceback for asynchronous reporting.
Some RPC calls (database creation and report generation) happen with
an initial request followed by multiple, polling requests. This class
is used to store the possible exception occuring in the thread serving
the first request, and is then sent to a polling request.
('Traceback' is misleading, this is really a exc_info() triple.)
"""
def __init__(self, msg, tb):
self.message = msg
self.traceback = tb
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
218587b68756be4f7393324fdaf04ed04d8baf3c | add offsets to tseries.api | pandas-dev/pandas,rs2/pandas,cython-testbed/pandas,GuessWhoSamFoo/pandas,louispotok/pandas,nmartensen/pandas,Winand/pandas,harisbal/pandas,toobaz/pandas,amolkahat/pandas,GuessWhoSamFoo/pandas,jreback/pandas,datapythonista/pandas,linebp/pandas,louispotok/pandas,zfrenchee/pandas,cbertinato/pandas,GuessWhoSamFoo/pandas,harisbal/pandas,DGrady/pandas,dsm054/pandas,amolkahat/pandas,winklerand/pandas,cython-testbed/pandas,MJuddBooth/pandas,Winand/pandas,nmartensen/pandas,harisbal/pandas,linebp/pandas,jmmease/pandas,jorisvandenbossche/pandas,cython-testbed/pandas,pandas-dev/pandas,jorisvandenbossche/pandas,TomAugspurger/pandas,kdebrab/pandas,jmmease/pandas,toobaz/pandas,cython-testbed/pandas,cbertinato/pandas,cython-testbed/pandas,amolkahat/pandas,jreback/pandas,jorisvandenbossche/pandas,linebp/pandas,winklerand/pandas,nmartensen/pandas,zfrenchee/pandas,pandas-dev/pandas,datapythonista/pandas,MJuddBooth/pandas,rs2/pandas,zfrenchee/pandas,GuessWhoSamFoo/pandas,linebp/pandas,gfyoung/pandas,toobaz/pandas,jreback/pandas,cbertinato/pandas,toobaz/pandas,louispotok/pandas,pandas-dev/pandas,DGrady/pandas,gfyoung/pandas,pratapvardhan/pandas,pratapvardhan/pandas,datapythonista/pandas,toobaz/pandas,cbertinato/pandas,gfyoung/pandas,gfyoung/pandas,winklerand/pandas,dsm054/pandas,amolkahat/pandas,kdebrab/pandas,pratapvardhan/pandas,jmmease/pandas,rs2/pandas,Winand/pandas,DGrady/pandas,dsm054/pandas,jmmease/pandas,DGrady/pandas,amolkahat/pandas,kdebrab/pandas,linebp/pandas,Winand/pandas,winklerand/pandas,linebp/pandas,DGrady/pandas,pratapvardhan/pandas,nmartensen/pandas,TomAugspurger/pandas,MJuddBooth/pandas,jreback/pandas,cbertinato/pandas,louispotok/pandas,rs2/pandas,nmartensen/pandas,winklerand/pandas,TomAugspurger/pandas,nmartensen/pandas,dsm054/pandas,TomAugspurger/pandas,MJuddBooth/pandas,harisbal/pandas,jreback/pandas,jmmease/pandas,jmmease/pandas,zfrenchee/pandas,zfrenchee/pandas,pratapvardhan/pandas,kdebrab/p
andas,winklerand/pandas,gfyoung/pandas,Winand/pandas,louispotok/pandas,MJuddBooth/pandas,kdebrab/pandas,Winand/pandas,jorisvandenbossche/pandas,dsm054/pandas,DGrady/pandas,datapythonista/pandas,GuessWhoSamFoo/pandas,harisbal/pandas | pandas/tseries/api.py | pandas/tseries/api.py | """
"""
from pandas.tseries.index import DatetimeIndex, date_range, bdate_range
from pandas.tseries.offsets import *
from pandas.tseries.period import PeriodIndex, period_range, pnow
from pandas.tseries.resample import TimeGrouper
| """
"""
from pandas.tseries.index import DatetimeIndex, date_range, bdate_range
from pandas.tseries.period import PeriodIndex, period_range, pnow
from pandas.tseries.resample import TimeGrouper
| bsd-3-clause | Python |
97f608ee5e2b73108de6af69db8764c9d009a873 | reorder failing presets | NervanaSystems/coach,NervanaSystems/coach,NervanaSystems/coach | rl_coach/tests/presets/test_presets.py | rl_coach/tests/presets/test_presets.py | # nasty hack to deal with issue #46
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
import pytest
import os
import time
import shutil
from subprocess import Popen, DEVNULL
from rl_coach.logger import screen
FAILING_PRESETS = [
'Fetch_DDPG_HER_baselines',
'MontezumaRevenge_BC',
'ControlSuite_DDPG',
'Doom_Basic_BC',
'CARLA_CIL',
'CARLA_DDPG',
'CARLA_Dueling_DDQN',
'CARLA_3_Cameras_DDPG',
'Starcraft_CollectMinerals_A3C',
'Starcraft_CollectMinerals_Dueling_DDQN',
]
def all_presets():
    """Collect the names of runnable preset modules, skipping known failures."""
    names = []
    for filename in sorted(os.listdir('rl_coach/presets')):
        if not filename.endswith('.py') or filename == '__init__.py':
            continue
        preset_name = filename.split('.')[0]
        if preset_name in FAILING_PRESETS:
            continue
        names.append(preset_name)
    return names
@pytest.fixture(params=all_presets())
def preset(request):
    """Parametrized fixture: yields each runnable preset name in turn, so any
    test taking `preset` runs once per preset. Note that all_presets() is
    evaluated once, at collection time.
    """
    return request.param
@pytest.mark.integration_test
def test_preset_runs(preset):
    """Smoke-test one preset: launch coach.py with it and require that the
    process is still alive (has not crashed) after a 10 second grace period.
    """
    test_failed = False
    print("Testing preset {}".format(preset))
    # TODO: this is a temporary workaround for presets which define more than a single available level.
    # we should probably do this in a more robust way
    level = ""
    if "Atari" in preset:
        level = "breakout"
    elif "Mujoco" in preset:
        level = "inverted_pendulum"
    elif "ControlSuite" in preset:
        level = "pendulum:swingup"
    experiment_name = ".test-" + preset
    params = ["python3", "rl_coach/coach.py", "-p", preset, "-ns", "-e", experiment_name]
    if level != "":
        params += ["-lvl", level]
    p = Popen(params, stdout=DEVNULL)
    # wait 10 seconds overhead of initialization etc.
    time.sleep(10)
    # poll() returns None while the child is still running; any exit code this
    # early is treated as a crash of the preset.
    return_value = p.poll()
    if return_value is None:
        screen.success("{} passed successfully".format(preset))
    else:
        test_failed = True
        screen.error("{} failed".format(preset), crash=False)
    p.kill()
    # clean up the temporary experiment directory created by coach.py
    if os.path.exists("experiments/" + experiment_name):
        shutil.rmtree("experiments/" + experiment_name)
    assert not test_failed
| # nasty hack to deal with issue #46
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
import pytest
import os
import time
import shutil
from subprocess import Popen, DEVNULL
from rl_coach.logger import screen
FAILING_PRESETS = [
'Fetch_DDPG_HER_baselines',
'MontezumaRevenge_BC',
'CARLA_CIL',
'ControlSuite_DDPG',
'CARLA_DDPG',
'Doom_Basic_BC',
'CARLA_Dueling_DDQN',
'CARLA_3_Cameras_DDPG',
'Starcraft_CollectMinerals_A3C',
'Starcraft_CollectMinerals_Dueling_DDQN',
]
def all_presets():
result = []
for f in sorted(os.listdir('rl_coach/presets')):
if f.endswith('.py') and f != '__init__.py':
preset = f.split('.')[0]
if preset not in FAILING_PRESETS:
result.append(preset)
return result
@pytest.fixture(params=all_presets())
def preset(request):
return request.param
@pytest.mark.integration_test
def test_preset_runs(preset):
test_failed = False
print("Testing preset {}".format(preset))
# TODO: this is a temporary workaround for presets which define more than a single available level.
# we should probably do this in a more robust way
level = ""
if "Atari" in preset:
level = "breakout"
elif "Mujoco" in preset:
level = "inverted_pendulum"
elif "ControlSuite" in preset:
level = "pendulum:swingup"
experiment_name = ".test-" + preset
params = ["python3", "rl_coach/coach.py", "-p", preset, "-ns", "-e", experiment_name]
if level != "":
params += ["-lvl", level]
p = Popen(params, stdout=DEVNULL)
# wait 10 seconds overhead of initialization etc.
time.sleep(10)
return_value = p.poll()
if return_value is None:
screen.success("{} passed successfully".format(preset))
else:
test_failed = True
screen.error("{} failed".format(preset), crash=False)
p.kill()
if os.path.exists("experiments/" + experiment_name):
shutil.rmtree("experiments/" + experiment_name)
assert not test_failed
| apache-2.0 | Python |
3dd4ab569931cda3015600364a29611b0b060701 | implement FireModule | AlexandruBurlacu/keras_squeezenet,AlexandruBurlacu/keras_squeezenet | squezeenet.py | squezeenet.py | from keras.models import Model
from keras.layers import (Input, Dense, Convolution2D, MaxPooling2D,
Dropout, BatchNormalization, Flatten, merge)
from keras.optimizers import RMSProp
from keras.utils import np_utils
import numpy as np
import theano as tn
import multiprocessing as mp
tn.config.openmp = True
OMP_NUM_THREADS = mp.cpu_count()
class FireModule:
    """SqueezeNet Fire module: a 1x1 'squeeze' convolution followed by two
    parallel 'expand' convolutions (1x1 and 3x3) whose outputs are
    concatenated along the channel axis.
    """

    def __init__(self, squeeze_size, expand_size):
        # Number of filters in the squeeze layer and in each expand branch.
        self.sqz_size = squeeze_size
        self.expn_size = expand_size

    def __call__(self, data):
        """Apply the fire module to a 4D input tensor and return the merged output."""
        # squeeze layer
        sqz_layer = Convolution2D(self.sqz_size, 1, 1, activation = "relu")(data)
        # expand layer: two parallel branches over the squeezed features
        conv_1x1 = Convolution2D(self.expn_size, 1, 1, activation = "relu")(sqz_layer)
        conv_3x3 = Convolution2D(self.expn_size, 3, 3, activation = "relu")(sqz_layer)
        # bug fix: was misspelled `megre`, which raised NameError at call time;
        # `merge` is the name imported at the top of this file.
        # concat_axis=1 concatenates channels (channels-first / Theano layout).
        return merge([conv_1x1, conv_3x3], mode = "concat", concat_axis = 1)
| from keras.models import Model
from keras.layers import (Input, Dense, Convolution2D, MaxPooling2D,
Dropout, BatchNormalization, Flatten, Merge)
from keras.optimizers import RMSProp
from keras.utils import np_utils
import numpy as np
import theano as tn
import multiprocessing as mp
tn.config.openmp = True
OMP_NUM_THREADS = mp.cpu_count()
class FireModule:
def __init__(self):
pass
def __call__(self, data):
# data = ...(data)
# data = ...(data)
# data = ...(data)
# data = ...(data)
pass
return data
| mit | Python |
b77cc71a96d3cd9aa9475189428378b34f5f590c | fix typo in run_regression_tests.py | archos-sa/libtorrent-avp,archos-sa/libtorrent-avp,archos-sa/libtorrent-avp,archos-sa/libtorrent-avp,archos-sa/libtorrent-avp,archos-sa/libtorrent-avp | tools/run_regression_tests.py | tools/run_regression_tests.py | #!/bin/python
import run_tests
import os
import time
import subprocess
import sys
# returns a list of new revisions
def svn_fetch():
	"""Run `svn up` and return the range of revision numbers fetched since
	the current working-copy revision (exclusive) up to the new head
	(inclusive). Exits the process if the update output cannot be parsed.
	"""
	current_version = run_tests.svn_info()[0]
	p = subprocess.Popen(['svn', 'up'], stdout=subprocess.PIPE)
	revision = -1
	output = ''
	for l in p.stdout:
		# `svn up` ends with a line like "At revision 1234."; strip the
		# trailing period and keep the number.
		if 'At revision ' in l:
			revision = int(l.split('At revision')[1].strip()[0:-1])
		output += l
	if revision == -1:
		print '\n\nsvn update failed\n\n%s' % output
		sys.exit(1)
	return range(current_version + 1, revision + 1)
def svn_up(revision):
	"""Update the working copy to the given (integer) revision."""
	os.system('svn up %d' % revision)
def print_usage():
	"""Print command-line usage for this script."""
	print '''usage: run_regression_tests.py remote-path [options] toolset [toolset...]
toolset are bjam toolsets. For instance clang, gcc, darwin, msvc etc.
remote-path is an scp path where the results are copied. This path has
the form: user@hostname:/path
if the remote-path is set to "-", no copying will be done
options:
   -j<n>    use n parallel processes for running tests
'''
def loop():
	"""Poll svn forever: for each new revision, update the working copy,
	run the test suite, and (unless remote-path is "-") scp the results
	directory to the remote path given on the command line.
	"""
	remote_path = sys.argv[1]
	root_path = os.path.join(os.getcwd(), 'regression_tests')
	if len(sys.argv) < 3:
		print_usage()
		sys.exit(1)
	while True:
		revs = svn_fetch()
		# reverse the list to always run the tests for the
		# latest version first, then fill in with the history
		revs.reverse()
		for r in revs:
			print '\n\nREVISION %d ===\n' % r
			svn_up(r)
			run_tests.main(sys.argv[2:])
			if remote_path != '-':
				os.system('scp -r %s %s' % (os.path.join(root_path, '%d' % r), remote_path))
		time.sleep(120)
if __name__ == "__main__":
loop()
| #!/bin/python
import run_tests
import os
import time
import subprocess
import sys
# returns a list of new revisions
def svn_fetch():
current_version = run_tests.svn_info()[0]
p = subprocess.Popen(['svn', 'up'], stdout=subprocess.PIPE)
revision = -1
output = ''
for l in p.stdout:
if 'At revision ' in l:
revision = int(l.split('At revision')[1].strip()[0:-1])
output += l
if revision == -1:
print '\n\nsvn update failed\n\n%s' % ouput
sys.exit(1)
return range(current_version + 1, revision + 1)
def svn_up(revision):
os.system('svn up %d' % revision)
def print_usage():
print '''usage: run_regression_tests.py remote-path [options] toolset [toolset...]
toolset are bjam toolsets. For instance clang, gcc, darwin, msvc etc.
remote-path is an scp path where the results are copied. This path has
the form: user@hostname:/path
if the remote-path is set to "-", no copying will be done
options:
-j<n> use n parallel processes for running tests
'''
def loop():
remote_path = sys.argv[1]
root_path = os.path.join(os.getcwd(), 'regression_tests')
if len(sys.argv) < 3:
print_usage()
sys.exit(1)
while True:
revs = svn_fetch()
# reverse the list to always run the tests for the
# latest version first, then fill in with the history
revs.reverse()
for r in revs:
print '\n\nREVISION %d ===\n' % r
svn_up(r)
run_tests.main(sys.argv[2:])
if remote_path != '-':
os.system('scp -r %s %s' % (os.path.join(root_path, '%d' % r), remote_path))
time.sleep(120)
if __name__ == "__main__":
loop()
| bsd-3-clause | Python |
32e9fb1549365be7405ba7c8c575b2dd381eeb4b | remove useless EsxTemplatePool class | OpenTouch/vsphere-client | src/template.py | src/template.py | from pyVmomi import vim
from tabulate import tabulate
from vm import vm_guess_folder
from misc import esx_name, esx_objects
###########
# HELPERS #
###########
def template_get_all(service):
    """Return every VM marked as a template, wrapped as EsxTemplate objects."""
    templates = []
    for machine in esx_objects(service, vim.VirtualMachine):
        if machine.summary.config.template:
            templates.append(EsxTemplate(service, machine))
    return templates
def template_print_details(templates):
tabs = []
headers = [ "Name", "Folder", "OS", "CPUs", "Mem (MB)", "NIC" ]
for t in templates:
info = t.info()
vals = [ info.name, info.folder, info.os, info.cpu, info.mem, info.nic ]
tabs.append(vals)
tabs.sort(reverse=False)
print tabulate(tabs, headers)
def template_list(s, opt):
    """CLI action: print details for all templates reachable via service `s`."""
    templates = template_get_all(s)
    template_print_details(templates)
def template_parser(service, opt):
    # Dispatch the `template` sub-command based on parsed CLI options.
    if opt['list'] == True: template_list(service, opt)
###########
# CLASSES #
###########
class EsxTemplateInfo:
    """Plain-data snapshot of a template VM's summary configuration."""
    def __init__(self, t):
        summary = t.summary
        config = summary.config
        self.name = config.name
        # folder is derived from the VM's parent chain, not stored in config
        self.folder = vm_guess_folder(t)
        self.os = config.guestFullName
        self.cpu = config.numCpu
        self.mem = config.memorySizeMB
        self.nic = config.numEthernetCards
class EsxTemplate:
    """Pairs an ESX service connection with a single template VM."""
    def __init__(self, service, template):
        self.service = service
        self.template = template
        self.name = template.name
    def info(self):
        """Return a fresh EsxTemplateInfo snapshot for this template."""
        return EsxTemplateInfo(self.template)
    def __str__(self):
        return self.name
| from pyVmomi import vim
from tabulate import tabulate
from vm import vm_guess_folder
from misc import esx_name, esx_objects
###########
# HELPERS #
###########
def template_get_all(service):
l = []
vms = esx_objects(service, vim.VirtualMachine)
for v in vms:
if not v.summary.config.template:
continue
vm = EsxTemplate(service, v)
l.append(vm)
return l
def template_list(s, opt):
pool = EsxTemplatePool(s)
tmpls = pool.list()
tabs = []
headers = [ "Name", "Folder", "OS", "CPUs", "Mem (MB)", "NIC" ]
for t in tmpls:
info = t.info()
vals = [ info.name, info.folder, info.os, info.cpu, info.mem, info.nic ]
tabs.append(vals)
tabs.sort(reverse=False)
print tabulate(tabs, headers)
def template_parser(service, opt):
if opt['list'] == True: template_list(service, opt)
###########
# CLASSES #
###########
class EsxTemplateInfo:
def __init__(self, t):
summary = t.summary
config = summary.config
self.name = config.name
self.folder = vm_guess_folder(t)
self.os = config.guestFullName
self.cpu = config.numCpu
self.mem = config.memorySizeMB
self.nic = config.numEthernetCards
class EsxTemplate:
def __init__(self, service, template):
self.service = service
self.template = template
self.name = template.name
def info(self):
return EsxTemplateInfo(self.template)
def __str__(self):
return self.name
class EsxTemplatePool:
def __init__(self, service):
self.templates = template_get_all(service)
def list(self):
return self.templates
def get(self, name):
for t in self.templates:
if t.name == name:
return t
return None
def __str__(self):
r = "ESXi Templates:\n"
for t in self.templates:
r += str(t)
r += "\n"
return r
| apache-2.0 | Python |
04b8abde8e44dd6f1ccd9bbec55d0c03e89dfd49 | Update librispeech_test.py | tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets | tensorflow_datasets/audio/librispeech_test.py | tensorflow_datasets/audio/librispeech_test.py | # coding=utf-8
# Copyright 2019 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for librispeech dataset module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
import tensorflow_datasets.public_api as tfds
from tensorflow_datasets.audio import librispeech
class LibrispeechClean100Test(testing.DatasetBuilderTestCase):
  """Tests the `clean-100` Librispeech config.

  Renamed from `LibrispeechTest`: the file defines a second test class with
  that same name, which shadowed this one so its tests never ran.
  """
  DATASET_CLASS = librispeech.Librispeech
  BUILDER_CONFIG_NAMES_TO_TEST = ["clean-100"]
  SPLITS = {
      "train": 2,
      "test": 1,
      "dev": 1,
  }
  # Maps each split to the fake extracted archive directories it reads.
  DL_EXTRACT_RESULT = {
      tfds.Split.TRAIN: ["train-clean-100"],
      tfds.Split.TEST: ["test-clean"],
      tfds.Split.VALIDATION: ["dev-clean"],
  }
class LibrispeechClean360Test(testing.DatasetBuilderTestCase):
  """Tests the `clean-360` Librispeech config.

  Renamed from `LibrispeechTest`: the file defined two classes with that
  name, so the first definition was shadowed and never ran.
  """
  DATASET_CLASS = librispeech.Librispeech
  BUILDER_CONFIG_NAMES_TO_TEST = ["clean-360"]
  SPLITS = {
      "train": 1,
      "test": 1,
      "dev": 1,
  }
  # clean-360 builds on clean-100, hence both train archives are listed.
  DL_EXTRACT_RESULT = {
      tfds.Split.TRAIN: ["train-clean-100", "train-clean-360"],
      tfds.Split.TEST: ["test-clean"],
      tfds.Split.VALIDATION: ["dev-clean"],
  }
if __name__ == "__main__":
testing.test_main()
| # coding=utf-8
# Copyright 2019 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for lm1b dataset module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
import tensorflow_datasets.public_api as tfds
from tensorflow_datasets.audio import librispeech
class LibrispeechTest(testing.DatasetBuilderTestCase):
DATASET_CLASS = librispeech.Librispeech
BUILDER_CONFIG_NAMES_TO_TEST = ["clean-100"]
SPLITS = {
"train": 2,
"test": 1,
"dev": 1,
}
DL_EXTRACT_RESULT = {
tfds.Split.TRAIN: ["train-clean-100"],
tfds.Split.TEST: ["test-clean"],
tfds.Split.VALIDATION: ["dev-clean"],
}
class LibrispeechTest(testing.DatasetBuilderTestCase):
DATASET_CLASS = librispeech.Librispeech
BUILDER_CONFIG_NAMES_TO_TEST = ["clean-360"]
SPLITS = {
"train": 1,
"test": 1,
"dev": 1,
}
DL_EXTRACT_RESULT = {
tfds.Split.TRAIN: ["train-clean-100", "train-clean-360"],
tfds.Split.TEST: ["test-clean"],
tfds.Split.VALIDATION: ["dev-clean"],
}
if __name__ == "__main__":
testing.test_main()
| apache-2.0 | Python |
4f88836f604bd5dbd7e38b55428f3b30d814c88a | add glsar comparison to example_gls.py | matthew-brett/draft-statsmodels,matthew-brett/draft-statsmodels | scikits/statsmodels/examples/example_gls.py | scikits/statsmodels/examples/example_gls.py | """
Example: scikits.statsmodels.GLS
"""
import scikits.statsmodels as sm
import numpy as np
data = sm.datasets.longley.Load()
data.exog = sm.add_constant(data.exog)
# The Longley dataset is a time series dataset
# Let's assume that the data is heteroskedastic and that we know
# the nature of the heteroskedasticity. We can then define
# `sigma` and use it to give us a GLS model
# First we will obtain the residuals from an OLS fit
ols_resid = sm.OLS(data.endog, data.exog).fit().resid
# Assume that the error terms follow an AR(1) process with a trend
# resid[i] = beta_0 + rho*resid[i-1] + e[i]
# where e ~ N(0,some_sigma**2)
# and that rho is simply the correlation of the residuals
# a consistent estimator for rho is to regress the residuals
# on the lagged residuals
resid_fit = sm.OLS(ols_resid[1:], sm.add_constant(ols_resid[:-1])).fit()
print resid_fit.t(0)
print resid_fit.pvalues[0]
# While we don't have strong evidence that the errors follow an AR(1)
# process we continue
rho = resid_fit.params[0]
# As we know, an AR(1) process means that near-neighbors have a stronger
# relation so we can give this structure by using a toeplitz matrix
from scipy.linalg import toeplitz
# # for example
# >>> toeplitz(range(5))
# array([[0, 1, 2, 3, 4],
# [1, 0, 1, 2, 3],
# [2, 1, 0, 1, 2],
# [3, 2, 1, 0, 1],
# [4, 3, 2, 1, 0]])
order = toeplitz(range(len(ols_resid)))
# so that our error covariance structure is actually rho**order
# which defines an autocorrelation structure
sigma = rho**order
gls_model = sm.GLS(data.endog, data.exog, sigma=sigma)
gls_results = gls_model.fit()
# of course, the exact rho in this instance is not known so it
# it might make more sense to use feasible gls, which currently only
# has experimental support
# We can use the GLSAR model with one lag, to get to a similar result
glsar_model = sm.GLSAR(data.endog, data.exog, 1)
glsar_results = glsar_model.iterative_fit(0)
# comparing gls and glsar results, we see that there are some small
# differences in the parameter estimates and the resulting standard
# errors of the parameter estimates. This might be due to numerical
# differences in the algorithms, e.g. the treatment of initial conditions,
# combined with the small number of observations in the longley dataset.
print gls_results.params
print glsar_results.params
print gls_results.bse
print glsar_results.bse
| """
Example: scikis.statsmodels.GLS
"""
import scikits.statsmodels as sm
import numpy as np
data = sm.datasets.longley.Load()
data.exog = sm.add_constant(data.exog)
# The Longley dataset is a time series dataset
# Let's assume that the data is heteroskedastic and that we know
# the nature of the heteroskedasticity. We can then define
# `sigma` and use it to give us a GLS model
# First we will obtain the residuals from an OLS fit
ols_resid = sm.OLS(data.endog, data.exog).fit().resid
# Assume that the error terms follow an AR(1) process with a trend
# resid[i] = beta_0 + rho*resid[i-1] + e[i]
# where e ~ N(0,some_sigma**2)
# and that rho is simply the correlation of the residuals
# a consistent estimator for rho is to regress the residuals
# on the lagged residuals
resid_fit = sm.OLS(ols_resid[1:], sm.add_constant(ols_resid[:-1])).fit()
print resid_fit.t(0)
print resid_fit.pvalues[0]
# While we don't have strong evidence that the errors follow an AR(1)
# process we continue
rho = resid_fit.params[0]
# As we know, an AR(1) process means that near-neighbors have a stronger
# relation so we can give this structure by using a toeplitz matrix
from scipy.linalg import toeplitz
# # for example
# >>> toeplitz(range(5))
# array([[0, 1, 2, 3, 4],
# [1, 0, 1, 2, 3],
# [2, 1, 0, 1, 2],
# [3, 2, 1, 0, 1],
# [4, 3, 2, 1, 0]])
order = toeplitz(range(len(ols_resid)))
# so that our error covariance structure is actually rho**order
# which defines an autocorrelation structure
sigma = rho**order
gls_model = sm.GLS(data.endog, data.exog, sigma=sigma)
gls_results = gls_model.fit()
# of course, the exact rho in this instance is not known so it
# it might make more sense to use feasible gls, which currently only
# has experimental support
| bsd-3-clause | Python |
c0d02e1f9eccf7c4aa6d28da9a3fa7f027885a11 | Remove unused import | Eric89GXL/vispy,Eric89GXL/vispy,Eric89GXL/vispy | vispy/visuals/tests/test_axis.py | vispy/visuals/tests/test_axis.py | # -*- coding: utf-8 -*-
"""
Tests for AxisVisual
"""
from vispy.scene import visuals
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main)
@requires_application()
def test_axis():
    """Smoke test: an AxisVisual can be created and drawn without error."""
    with TestingCanvas() as c:
        axis = visuals.Axis(pos=[[-1.0, 0], [1.0, 0]], parent=c.scene)
        c.draw_visual(axis)
@requires_application()
def test_axis_zero_domain():
    """An axis with a degenerate (zero-width) domain must still draw."""
    # Regression test for a bug that caused an overflow error when the domain
    # min was same as max
    with TestingCanvas() as c:
        axis = visuals.Axis(pos=[[-1.0, 0], [1.0, 0]], domain=(0.5, 0.5), parent=c.scene)
        c.draw_visual(axis)
run_tests_if_main()
| # -*- coding: utf-8 -*-
"""
Tests for AxisVisual
"""
from vispy import scene
from vispy.scene import visuals
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main)
@requires_application()
def test_axis():
with TestingCanvas() as c:
axis = visuals.Axis(pos=[[-1.0, 0], [1.0, 0]], parent=c.scene)
c.draw_visual(axis)
@requires_application()
def test_axis_zero_domain():
# Regression test for a bug that caused an overflow error when the domain
# min was same as max
with TestingCanvas() as c:
axis = visuals.Axis(pos=[[-1.0, 0], [1.0, 0]], domain=(0.5, 0.5), parent=c.scene)
c.draw_visual(axis)
run_tests_if_main()
| bsd-3-clause | Python |
4368ea7128fd850d3ee2b67243db47d92e75836f | fix test_context to use get_current_context | swenger/glitter,swenger/glitter,swenger/glitter | tests/test_context.py | tests/test_context.py | import numpy
from glitter import EnumConstant
from glitter.contexts import get_current_context
def check_property(context, name):
    """Round-trip one GL context property: write candidate value(s) and assert
    the context reads the same value back. Read-only properties (which raise
    AttributeError on set) are silently skipped.
    """
    value = getattr(context, name)
    try:
        if isinstance(value, EnumConstant):
            if name in ("draw_buffer", "read_buffer"):
                return # avoid problems with unavailable stereo buffers
            # exercise every legal enum value for this property
            valid_values = value._enum._reverse_dict.values()
            for value in valid_values:
                setattr(context, name, value)
                assert numpy.all(getattr(context, name) == value), "property %s is broken" % name
        else:
            # for floats, perturb the value twice so a stale cached read
            # cannot pass by accident
            if type(value) is float:
                value *= 0.5
                setattr(context, name, value)
                assert numpy.all(getattr(context, name) == value), "property %s is broken" % name
                value += 0.5
            setattr(context, name, value)
            assert numpy.all(getattr(context, name) == value), "property %s is broken" % name
    except AttributeError:
        pass # "AttributeError: can't set attribute" is okay for read-only attributes
def test_property_generator():
    """Nose-style test generator: yields one check per public context property."""
    context = get_current_context()
    properties = [x for x in dir(context) if not x.startswith("_")]
    for p in properties:
        yield check_property, context, p
| import numpy
from glitter import EnumConstant
from glitter.contexts import Context
def check_property(context, name):
value = getattr(context, name)
try:
if isinstance(value, EnumConstant):
if name in ("draw_buffer", "read_buffer"):
return # avoid problems with unavailable stereo buffers
valid_values = value._enum._reverse_dict.values()
for value in valid_values:
setattr(context, name, value)
assert numpy.all(getattr(context, name) == value), "property %s is broken" % name
else:
if type(value) is float:
value *= 0.5
setattr(context, name, value)
assert numpy.all(getattr(context, name) == value), "property %s is broken" % name
value += 0.5
setattr(context, name, value)
assert numpy.all(getattr(context, name) == value), "property %s is broken" % name
except AttributeError:
pass # "AttributeError: can't set attribute" is okay for read-only attributes
def test_property_generator():
context = Context()
properties = [x for x in dir(context) if not x.startswith("_")]
for p in properties:
yield check_property, context, p
| mit | Python |
dec0bbfa4ab4c824ef93cdded281c8dd954f0c77 | Add conditional user switching only if user is not sdkman_user | Comcast/ansible-sdkman | tests/test_default.py | tests/test_default.py | sdkman_user = 'jenkins'
sdkman_group = 'jenkins'
def script_wrap(host, cmds):
    """Join `cmds` into one interactive-bash invocation (interactive so that
    .bashrc — and hence the sdkman init — is sourced), prefixed with sudo
    when the connected user is not `sdkman_user`.
    """
    shell_cmd = "/bin/bash -i -c '%s'" % '; '.join(cmds)
    if host.user.name != sdkman_user:
        shell_cmd = "sudo -H -u %s %s" % (sdkman_user, shell_cmd)
    return shell_cmd
def check_run_for_rc_and_result(cmds, expected, host, check_stderr=False):
    """Run `cmds` on the host via script_wrap; require exit code 0 and that
    `expected` appears in stdout (or stderr when check_stderr is True).
    """
    outcome = host.run(script_wrap(host, cmds))
    assert outcome.rc == 0
    stream = outcome.stderr if check_stderr else outcome.stdout
    assert stream.find(expected) != -1
def test_config_file(host):
    """SDKMAN's etc/config must exist, be owned by the sdkman user/group,
    and have non-interactive auto-answer enabled.
    """
    result = host.run(script_wrap(host, ['echo $SDKMAN_DIR']))
    config_file_path = "{0}/etc/config".format(result.stdout)
    f = host.file(config_file_path)
    assert f.exists
    assert f.is_file
    # NOTE(review): 0o654/0o655 are unusual modes — presumably observed on
    # some provisioned images; confirm whether only 0o644 should be accepted.
    assert f.mode in [0o644, 0o654, 0o655]
    assert f.user == sdkman_user
    assert f.group == sdkman_group
    assert f.contains('sdkman_auto_answer=true')
def test_gradle_installed(host):
    """The default gradle on PATH should be the pinned 4.6 release."""
    cmds = ['gradle --version']
    expected = 'Gradle 4.6'
    check_run_for_rc_and_result(cmds, expected, host)
def test_other_gradle_installed(host):
    """`sdk use` can switch the shell to the secondary gradle (3.5.1)."""
    cmds = ['sdk use gradle 3.5.1', 'gradle --version']
    expected = 'Gradle 3.5.1'
    check_run_for_rc_and_result(cmds, expected, host)
def test_offline(host):
    """Offline mode is on by default and can be disabled via `sdk offline`."""
    cmds = ['sdk list gradle']
    expected = 'Offline: only showing installed gradle versions'
    check_run_for_rc_and_result(cmds, expected, host)
    # after disabling offline mode the remote catalogue becomes visible
    cmds = ['sdk offline disable', 'sdk list gradle']
    expected = 'Available Gradle Versions'
    check_run_for_rc_and_result(cmds, expected, host)
| sdkman_user = 'jenkins'
sdkman_group = 'jenkins'
def script_wrap(cmds):
# run as interactive shell under user to ensure .bashrc is sourced
return "sudo -H -u {0} /bin/bash -i -c '{1}'".format(
sdkman_user,
'; '.join(cmds)
)
def check_run_for_rc_and_result(cmds, expected, host, check_stderr=False):
result = host.run(script_wrap(cmds))
assert result.rc == 0
if check_stderr:
assert result.stderr.find(expected) != -1
else:
assert result.stdout.find(expected) != -1
def test_config_file(host):
result = host.run(script_wrap(['echo $SDKMAN_DIR']))
config_file_path = "{0}/etc/config".format(result.stdout)
f = host.file(config_file_path)
assert f.exists
assert f.is_file
assert f.mode in [0o644, 0o654, 0o655]
assert f.user == sdkman_user
assert f.group == sdkman_group
assert f.contains('sdkman_auto_answer=true')
def test_gradle_installed(host):
cmds = ['gradle --version']
expected = 'Gradle 4.6'
check_run_for_rc_and_result(cmds, expected, host)
def test_other_gradle_installed(host):
cmds = ['sdk use gradle 3.5.1', 'gradle --version']
expected = 'Gradle 3.5.1'
check_run_for_rc_and_result(cmds, expected, host)
def test_offline(host):
cmds = ['sdk list gradle']
expected = 'Offline: only showing installed gradle versions'
check_run_for_rc_and_result(cmds, expected, host)
cmds = ['sdk offline disable', 'sdk list gradle']
expected = 'Available Gradle Versions'
check_run_for_rc_and_result(cmds, expected, host)
| apache-2.0 | Python |
043b2f26f836567f3d8d755d0756522559063a0d | Fix last commit | branall1/cli53,ftahmed/cli53,branall1/cli53,jefflaplante/cli53,Collaborne/cli53,branall1/cli53,jefflaplante/cli53,Collaborne/cli53,ftahmed/cli53 | tests/test_domains.py | tests/test_domains.py | import sys
import unittest
import subprocess
import random
# copied from python 2.7 for python 2.6
def check_output(*popenargs, **kwargs):
    """Run a command and return its stdout; raise CalledProcessError (with the
    captured output attached) when the exit status is non-zero. Backport of
    subprocess.check_output; stdout may not be overridden by the caller.
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    output, unused_err = process.communicate()
    retcode = process.poll()
    if not retcode:
        return output
    cmd = kwargs.get("args")
    if cmd is None:
        cmd = popenargs[0]
    raise subprocess.CalledProcessError(retcode, cmd, output=output)
class DomainsTest(unittest.TestCase):
    """End-to-end tests that drive the cli53 script as a subprocess."""
    def _cmd(self, cmd, *args):
        # Invoke `scripts/cli53 <cmd> [args...]`; stderr is folded into the
        # returned output so assertions can match either stream.
        pargs = ('scripts/cli53', cmd) + args
        return check_output(pargs, stderr=subprocess.STDOUT)
    def _unique_name(self):
        # Random throwaway domain name. sys.maxint means this file targets
        # Python 2 (the attribute was removed in Python 3).
        return 'temp%d.com' % random.randint(0, sys.maxint)
    def test_usage(self):
        assert 'usage' in self._cmd('-h')
    def test_create_delete(self):
        # A created zone must appear in `list` and disappear after `delete`.
        name = self._unique_name()
        self._cmd('create', name)
        assert name in self._cmd('list')
        self._cmd('delete', name)
        assert name not in self._cmd('list')
| import unittest
import subprocess
import random
# copied from python 2.7 for python 2.6
def check_output(*popenargs, **kwargs):
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd, output=output)
return output
class DomainsTest(unittest.TestCase):
def _cmd(self, cmd, *args):
pargs = ('scripts/cli53', cmd) + args
return check_output(pargs, stderr=subprocess.STDOUT)
def _unique_name(self):
return 'temp%d.com' % random.randint(0, sys.maxint)
def test_usage(self):
assert 'usage' in self._cmd('-h')
def test_create_delete(self):
name = self._unique_name()
self._cmd('create', name)
assert name in self._cmd('list')
self._cmd('delete', name)
assert name not in self._cmd('list')
| mit | Python |
80efd0cd3b050fe75fe4a96b26b35000b1d0a90b | Update test_elo_bot.py | kevswanberg/mnl_elo_bot,jdrager/mnl_elo_bot | tests/test_elo_bot.py | tests/test_elo_bot.py | import unittest
import sys
from mnl_elo_bot import elo_bot
sys.path.append(".")
class IntegrationTest(unittest.TestCase):
    # Just see if it initializes and runs...
    def test_process(self):
        """Run the full bot pipeline (without posting) and sanity-check that
        every returned team carries the expected attributes."""
        teams = elo_bot.main(False, None, '')
        for team in teams.values():
            self.assertIsNotNone(team.name)
            self.assertIsNotNone(team.color)
            self.assertIsNotNone(team.emoji)
            self.assertIsNotNone(team.history)
unittest.main()
| import unittest
import sys
from mnl_elo_bot import elo_bot
sys.path.append(".")
class IntegrationTest(unittest.TestCase):
# Just see if it initializes and runs...
def test_process(self):
teams = elo_bot.main(False, None, '')
for team in teams.values():
self.assertIsNotNone(team.name)
self.assertIsNotNone(team.color)
self.assertIsNotNone(team.emoji)
self.assertIsNotNone(team.history)
if __name__ == '__main__':
unittest.main()
| mit | Python |
de69b8cce482edc7877d2b203accbc846656ae04 | fix test_factory attribute lookup | tyarkoni/transitions,pytransitions/transitions,pytransitions/transitions | tests/test_factory.py | tests/test_factory.py | try:
from builtins import object
except ImportError:
pass
from unittest import TestCase
from transitions.extensions import MachineFactory
class TestFactory(TestCase):
    """Checks that MachineFactory mixes the right extension features into
    each generated machine class."""
    def setUp(self):
        self.factory = MachineFactory()
    def test_mixins(self):
        """Each feature flag must add its marker attribute, and flag
        combinations must compose the corresponding mixins."""
        # plain machine: no graph support
        machine_cls = self.factory.get_predefined()
        self.assertFalse(hasattr(machine_cls, 'set_edge_state'))
        # graph=True adds the diagram machinery
        graph_cls = self.factory.get_predefined(graph=True)
        self.assertTrue(hasattr(graph_cls, '_get_graph'))
        # nested=True adds hierarchical-state traversal but no graphing
        nested_cls = self.factory.get_predefined(nested=True)
        self.assertFalse(hasattr(nested_cls, '_get_graph'))
        self.assertTrue(hasattr(nested_cls, '_traverse'))
        # locked=True overrides __getattribute__ on the class itself
        locked_cls = self.factory.get_predefined(locked=True)
        self.assertFalse(hasattr(locked_cls, '_get_graph'))
        self.assertFalse(hasattr(locked_cls, '_traverse'))
        self.assertTrue('__getattribute__' in locked_cls.__dict__)
        # combined flags reuse the single-feature implementations
        locked_nested_cls = self.factory.get_predefined(nested=True, locked=True)
        self.assertFalse(hasattr(locked_nested_cls, '_get_graph'))
        self.assertTrue(hasattr(locked_nested_cls, '_traverse'))
        self.assertEqual(locked_nested_cls.__getattribute__, locked_cls.__getattribute__)
        self.assertNotEqual(machine_cls.__getattribute__, locked_cls.__getattribute__)
        graph_locked_cls = self.factory.get_predefined(graph=True, locked=True)
        self.assertTrue(hasattr(graph_locked_cls, '_get_graph'))
        self.assertEqual(graph_locked_cls.__getattribute__, locked_cls.__getattribute__)
        # graph variants install their own transition/event subclasses
        graph_nested_cls = self.factory.get_predefined(graph=True, nested=True)
        self.assertNotEqual(nested_cls._create_transition, graph_nested_cls._create_transition)
        locked_nested_graph_cls = self.factory.get_predefined(nested=True, locked=True, graph=True)
        self.assertNotEqual(locked_nested_graph_cls._create_event, graph_cls._create_event)
| try:
from builtins import object
except ImportError:
pass
from unittest import TestCase
from transitions.extensions import MachineFactory
class TestFactory(TestCase):
def setUp(self):
self.factory = MachineFactory()
def test_mixins(self):
machine_cls = self.factory.get_predefined()
self.assertFalse(hasattr(machine_cls, 'set_edge_state'))
graph_cls = self.factory.get_predefined(graph=True)
self.assertTrue(hasattr(graph_cls, 'set_edge_state'))
nested_cls = self.factory.get_predefined(nested=True)
self.assertFalse(hasattr(nested_cls, 'set_edge_state'))
self.assertTrue(hasattr(nested_cls, '_traverse'))
locked_cls = self.factory.get_predefined(locked=True)
self.assertFalse(hasattr(locked_cls, 'set_edge_state'))
self.assertFalse(hasattr(locked_cls, '_traverse'))
self.assertTrue('__getattribute__' in locked_cls.__dict__)
locked_nested_cls = self.factory.get_predefined(nested=True, locked=True)
self.assertFalse(hasattr(locked_nested_cls, 'set_edge_state'))
self.assertTrue(hasattr(locked_nested_cls, '_traverse'))
self.assertEqual(locked_nested_cls.__getattribute__, locked_cls.__getattribute__)
self.assertNotEqual(machine_cls.__getattribute__, locked_cls.__getattribute__)
graph_locked_cls = self.factory.get_predefined(graph=True, locked=True)
self.assertTrue(hasattr(graph_locked_cls, 'set_edge_state'))
self.assertEqual(graph_locked_cls.__getattribute__, locked_cls.__getattribute__)
graph_nested_cls = self.factory.get_predefined(graph=True, nested=True)
self.assertNotEqual(nested_cls._create_transition, graph_nested_cls._create_transition)
locked_nested_graph_cls = self.factory.get_predefined(nested=True, locked=True, graph=True)
self.assertNotEqual(locked_nested_graph_cls._create_event, graph_cls._create_event)
| mit | Python |
d10826960d5bafc6616dc408e0d47faf1b305269 | Fix the tests | amolenaar/gaphor,amolenaar/gaphor | gaphor/ui/tests/test_elementeditor.py | gaphor/ui/tests/test_elementeditor.py | import pytest
from gaphor.ui.elementeditor import ElementEditor
class DiagramsStub:
def get_current_view(self):
return None
@pytest.fixture
def diagrams():
return DiagramsStub()
class DummyProperties(dict):
def set(self, key, val):
self[key] = val
def test_reopen_of_window(event_manager, element_factory, diagrams):
properties = DummyProperties()
editor = ElementEditor(event_manager, element_factory, diagrams, properties)
editor.open()
editor.close()
editor.open()
editor.close()
| import pytest
from gaphor.ui.elementeditor import ElementEditor
class DiagramsStub:
def get_current_view(self):
return None
@pytest.fixture
def diagrams():
return DiagramsStub()
def test_reopen_of_window(event_manager, element_factory, diagrams):
editor = ElementEditor(event_manager, element_factory, diagrams, properties={})
editor.open()
editor.close()
editor.open()
editor.close()
| lgpl-2.1 | Python |
203425afc4a65c9ff41e86bf09da01931612b005 | Add repr test for recipe | chrisgilmerproj/brewday,chrisgilmerproj/brewday | tests/test_recipes.py | tests/test_recipes.py | import unittest
from brew.constants import IMPERIAL_UNITS
from brew.constants import SI_UNITS
from brew.recipes import Recipe
from fixtures import grain_additions
from fixtures import hop_additions
from fixtures import recipe
from fixtures import yeast
class TestRecipe(unittest.TestCase):
def setUp(self):
# Define Grains
self.grain_additions = grain_additions
# Define Hops
self.hop_additions = hop_additions
# Define Yeast
self.yeast = yeast
# Define Recipes
self.recipe = recipe
self.assertEquals(self.recipe.units, IMPERIAL_UNITS)
def test_str(self):
out = str(self.recipe)
self.assertEquals(out, 'pale ale')
def test_repr(self):
out = repr(self.recipe)
self.assertEquals(out, "Recipe('pale ale', grain_additions=[GrainAddition(Grain('pale 2-row', short_name='2-row', color=2.0, hwe=308.78), weight=13.96, grain_type='cereal'), GrainAddition(Grain('crystal C20', short_name='C20', color=20.0, hwe=292.09), weight=0.78, grain_type='cereal')], hop_additions=[HopAddition(Hop('centennial', percent_alpha_acids=0.14), weight=0.57, boil_time=60.0, hop_type='pellet', utilization_cls=HopsUtilizationGlennTinseth, units='imperial'), HopAddition(Hop('cascade', percent_alpha_acids=0.07), weight=0.76, boil_time=5.0, hop_type='pellet', utilization_cls=HopsUtilizationGlennTinseth, units='imperial')], yeast=Yeast('Wyeast 1056', percent_attenuation=0.75), percent_brew_house_yield='0.7', start_volume=7.0, final_volume=5.0, units=imperial)") # nopep8
def test_set_units(self):
self.assertEquals(self.recipe.units, IMPERIAL_UNITS)
self.recipe.set_units(SI_UNITS)
self.assertEquals(self.recipe.units, SI_UNITS)
self.recipe.set_units(IMPERIAL_UNITS)
self.assertEquals(self.recipe.units, IMPERIAL_UNITS)
def test_set_raises(self):
with self.assertRaises(Exception):
self.recipe.set_units('bad')
def test_grains_units_mismatch_raises(self):
grain_additions = [g.change_units() for g in self.grain_additions]
with self.assertRaises(Exception):
Recipe(name='pale ale',
grain_additions=grain_additions,
hop_additions=self.hop_additions,
yeast=self.yeast)
def test_hops_units_mismatch_raises(self):
hop_additions = [h.change_units() for h in self.hop_additions]
with self.assertRaises(Exception):
Recipe(name='pale ale',
grain_additions=self.grain_additions,
hop_additions=hop_additions,
yeast=self.yeast)
| import unittest
from brew.constants import IMPERIAL_UNITS
from brew.constants import SI_UNITS
from brew.recipes import Recipe
from fixtures import grain_additions
from fixtures import hop_additions
from fixtures import recipe
from fixtures import yeast
class TestRecipe(unittest.TestCase):
def setUp(self):
# Define Grains
self.grain_additions = grain_additions
# Define Hops
self.hop_additions = hop_additions
# Define Yeast
self.yeast = yeast
# Define Recipes
self.recipe = recipe
self.assertEquals(self.recipe.units, IMPERIAL_UNITS)
def test_str(self):
out = str(self.recipe)
self.assertEquals(out, 'pale ale')
def test_set_units(self):
self.assertEquals(self.recipe.units, IMPERIAL_UNITS)
self.recipe.set_units(SI_UNITS)
self.assertEquals(self.recipe.units, SI_UNITS)
self.recipe.set_units(IMPERIAL_UNITS)
self.assertEquals(self.recipe.units, IMPERIAL_UNITS)
def test_set_raises(self):
with self.assertRaises(Exception):
self.recipe.set_units('bad')
def test_grains_units_mismatch_raises(self):
grain_additions = [g.change_units() for g in self.grain_additions]
with self.assertRaises(Exception):
Recipe(name='pale ale',
grain_additions=grain_additions,
hop_additions=self.hop_additions,
yeast=self.yeast)
def test_hops_units_mismatch_raises(self):
hop_additions = [h.change_units() for h in self.hop_additions]
with self.assertRaises(Exception):
Recipe(name='pale ale',
grain_additions=self.grain_additions,
hop_additions=hop_additions,
yeast=self.yeast)
| mit | Python |
f5cf4944ee092301b0e6ddf31b14906cba2e198c | Improve documentation | pwdyson/inflect.py,hugovk/inflect.py,jazzband/inflect | tests/test_unicode.py | tests/test_unicode.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import unittest
import inflect
class TestUnicode(unittest.TestCase):
""" Unicode compatibility test cases """
def test_unicode_plural(self):
""" Unicode compatibility test cases for plural """
engine = inflect.engine()
unicode_test_cases = {
'cliché': 'clichés',
'ångström': 'ångströms'
}
for singular, plural in unicode_test_cases.items():
self.assertEqual(plural, engine.plural(singular))
| #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import unittest
import inflect
class TestUnicode(unittest.TestCase):
def test_unicode(self):
engine = inflect.engine()
# Unicode compatability test
unicode_test_cases = {
'cliché': 'clichés',
'ångström': 'ångströms'
}
for singular, plural in unicode_test_cases.items():
self.assertEqual(plural, engine.plural(singular))
| mit | Python |
07034e25419b0bca9cd07828acf14a03bdd0726b | Clarify broken line | Kitware/tangelo,Kitware/tangelo,Kitware/tangelo | tests/web/settings.py | tests/web/settings.py | import cherrypy
import tangelo
# This service reports the value of cherrypy's thread pool setting
def run(**kwargs):
if kwargs.get('pool'):
tangelo.util.set_server_setting('server.thread_pool', int(kwargs['pool']))
response = 'pool="%r"' % cherrypy.config.get('server.thread_pool')
return response
| import cherrypy
import tangelo
# This service reports the value of cherrypy's thread pool setting
def run(**kwargs):
if kwargs.get('pool'):
tangelo.util.set_server_setting('server.thread_pool',
int(kwargs['pool']))
response = 'pool="%r"' % cherrypy.config.get('server.thread_pool')
return response
| apache-2.0 | Python |
d0089aea33e773ec844964c1751157d04e27ea33 | fix implicit relative import | InFoCusp/tf_cnnvis | tf_cnnvis/__init__.py | tf_cnnvis/__init__.py | from .tf_cnnvis import get_visualization
from .tf_cnnvis import image_normalization
from .tf_cnnvis import convert_into_grid
__all__ = ["get_visualization", "image_normalization", "convert_into_grid"]
| from tf_cnnvis import *
| mit | Python |
cb934c9570eefa0aa7616f486395f1bfbc8c08cd | Update Bazel to latest version | google/copybara,google/copybara,google/copybara | third_party/bazel.bzl | third_party/bazel.bzl | # Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
bazel_version = "867bada0117df8c57a91c6c0005206ba45ffbada"
| # Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
bazel_version = "51421724b6038844838a74886e894df16b1bcaf1"
| apache-2.0 | Python |
3d74a8cab311acefb499c3b1c5969b2ff9cdcd07 | Update Bazel to latest version | google/copybara,google/copybara,google/copybara | third_party/bazel.bzl | third_party/bazel.bzl | # Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
bazel_version = "0289418a45e0ca220474d1f55e11a4c9bffff489"
bazel_sha256 = "d168f896b064193c3fc22a8f17127738865970c56a0dfba8d3ec88106fdf4887"
| # Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
bazel_version = "da5ee491b7af41d18d035e42228f518077164f65"
bazel_sha256 = "8820cad1f844cdf528a7c7d66dc221f91fb4cf9eb3ae98e7f1fe2b0ac7760e46"
| apache-2.0 | Python |
80bf936ad8068a32313411291b5be5675decf235 | Update Bazel to latest version | google/copybara,google/copybara,google/copybara | third_party/bazel.bzl | third_party/bazel.bzl | # Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
bazel_version = "dfcda9e1fb417af94884b319d40ce747b2b7c871"
bazel_sha256 = "ab7650f4f3c615cfa8da149be53792a3b86d20c4e5990bb2c9ffd7268a048cf7"
| # Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
bazel_version = "b85d0b71c878029384caf94041701ad7839796c6"
bazel_sha256 = "79fff96688010cf4a2d857c1824d42d1addd806ac610fa9d11eb076decbf0bc2"
| apache-2.0 | Python |
92adc02daae13f6ef24ae1ec2eafac77ce528a74 | Update script to start, stop and status by name. | EricSchles/veyepar,CarlFK/veyepar,yoe/veyepar,yoe/veyepar,xfxf/veyepar,EricSchles/veyepar,yoe/veyepar,xfxf/veyepar,CarlFK/veyepar,xfxf/veyepar,CarlFK/veyepar,EricSchles/veyepar,CarlFK/veyepar,xfxf/veyepar,xfxf/veyepar,CarlFK/veyepar,yoe/veyepar,yoe/veyepar,EricSchles/veyepar,EricSchles/veyepar | setup/timvideos/streaming/list_aws_hosts.py | setup/timvideos/streaming/list_aws_hosts.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# list_aws_hosts.py
# list all active ec2 hosts
"""
Start / stop by name.
Start mission "list_aws_hosts.py start mission"
Stop mission "list_aws_hosts.py stop mission"
Status mission "list_aws_hosts.py status mission"
mission i-b59966c7 **OFF** stopped
"""
from boto import ec2
import pw
creds = pw.stream['aws']
def aws_start(instance, name):
if name == instance.tags['Name']:
instance.start()
def aws_stop(instance, name):
if name == instance.tags['Name']:
instance.stop()
def aws_status(instance, name=None):
if name and not name == instance.tags['Name']:
return
if not instance.dns_name:
print instance.tags['Name'], instance.id, '**OFF**', instance.state
else:
print instance.tags['Name'], instance.id, instance.dns_name, instance.state
def do_command(command, name):
ec2conn = ec2.connection.EC2Connection(creds['id'], creds['key'])
reservations = ec2conn.get_all_instances()
instances = [i for r in reservations for i in r.instances]
for i in instances:
command(i, name)
def do_status():
ec2conn = ec2.connection.EC2Connection(creds['id'], creds['key'])
reservations = ec2conn.get_all_instances()
instances = [i for r in reservations for i in r.instances]
for i in instances:
aws_status(i)
if __name__ == '__main__':
import sys
if len(sys.argv) == 3:
command, name = sys.argv[1:]
if command == 'start':
do_command(aws_start, name)
elif command == 'stop':
do_command(aws_stop, name)
else:
do_command(aws_status, name)
else:
do_status()
| # list_aws_hosts.py
# list all active ec2 hosts
from boto import ec2
import pw
creds = pw.stream['aws']
ec2conn = ec2.connection.EC2Connection(creds['id'], creds['key'] )
reservations = ec2conn.get_all_instances()
instances = [i for r in reservations for i in r.instances]
for i in instances:
if not i.dns_name:
continue
print i.tags['Name'], i.id, i.dns_name
| mit | Python |
d2a7d772826773941ad11fd92e823ec81080635d | Add SF package registry for PIO Plus | platformio/platformio-core,platformio/platformio,platformio/platformio-core | platformio/pioplus.py | platformio/pioplus.py | # Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
from os.path import dirname, join
from platform import system
from platformio import exception, util
from platformio.managers.package import PackageManager
PACKAGE_DEPS = {"pysite": {"name": "pysite-pioplus",
"requirements": ">=0.1.0"},
"tool": {"name": "tool-pioplus",
"requirements": ">=0.2.0"}}
class PioPlusPackageManager(PackageManager):
def __init__(self):
PackageManager.__init__(self, join(util.get_home_dir(), "packages"), [
"https://dl.bintray.com/platformio/dl-packages/manifest.json",
"https://sourceforge.net/projects/platformio-storage/files/"
"packages/manifest.json/download",
"https://dl.platformio.org/packages/manifest.json"
])
def pioplus_install():
pm = PioPlusPackageManager()
for item in PACKAGE_DEPS.values():
pm.install(item['name'], item['requirements'], silent=True)
def pioplus_update():
pm = PioPlusPackageManager()
for item in PACKAGE_DEPS.values():
package_dir = pm.get_package_dir(item['name'], item['requirements'])
if package_dir:
pm.update(item['name'], item['requirements'])
def pioplus_call(args, **kwargs):
pioplus_install()
pm = PioPlusPackageManager()
pioplus_path = join(
pm.get_package_dir(PACKAGE_DEPS['tool']['name'],
PACKAGE_DEPS['tool']['requirements']), "pioplus")
if system() == "Linux":
os.environ['LD_LIBRARY_PATH'] = dirname(pioplus_path)
os.environ['PYTHONEXEPATH'] = util.get_pythonexe_path()
os.environ['PYTHONPYSITEDIR'] = pm.get_package_dir(
PACKAGE_DEPS['pysite']['name'], PACKAGE_DEPS['pysite']['requirements'])
util.copy_pythonpath_to_osenv()
if subprocess.call([pioplus_path] + args, **kwargs) != 0:
raise exception.ReturnErrorCode()
| # Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
from os.path import dirname, join
from platform import system
from platformio import exception, util
from platformio.managers.package import PackageManager
PACKAGE_DEPS = {"pysite": {"name": "pysite-pioplus",
"requirements": ">=0.1.0"},
"tool": {"name": "tool-pioplus",
"requirements": ">=0.2.0"}}
class PioPlusPackageManager(PackageManager):
def __init__(self):
PackageManager.__init__(
self, join(util.get_home_dir(), "packages"),
["https://dl.bintray.com/platformio/dl-packages/manifest.json",
"https://dl.platformio.org/packages/manifest.json"])
def pioplus_install():
pm = PioPlusPackageManager()
for item in PACKAGE_DEPS.values():
pm.install(item['name'], item['requirements'], silent=True)
def pioplus_update():
pm = PioPlusPackageManager()
for item in PACKAGE_DEPS.values():
package_dir = pm.get_package_dir(item['name'], item['requirements'])
if package_dir:
pm.update(item['name'], item['requirements'])
def pioplus_call(args, **kwargs):
pioplus_install()
pm = PioPlusPackageManager()
pioplus_path = join(
pm.get_package_dir(PACKAGE_DEPS['tool']['name'],
PACKAGE_DEPS['tool']['requirements']), "pioplus")
if system() == "Linux":
os.environ['LD_LIBRARY_PATH'] = dirname(pioplus_path)
os.environ['PYTHONEXEPATH'] = util.get_pythonexe_path()
os.environ['PYTHONPYSITEDIR'] = pm.get_package_dir(
PACKAGE_DEPS['pysite']['name'], PACKAGE_DEPS['pysite']['requirements'])
util.copy_pythonpath_to_osenv()
if subprocess.call([pioplus_path] + args, **kwargs) != 0:
raise exception.ReturnErrorCode()
| apache-2.0 | Python |
58a9f7ad0da5b405d5925ed45bf425e37f978fdd | increase version to 1.4.1 | SCIP-Interfaces/PySCIPOpt,SCIP-Interfaces/PySCIPOpt,mattmilten/PySCIPOpt,mattmilten/PySCIPOpt | src/pyscipopt/__init__.py | src/pyscipopt/__init__.py | __version__ = '1.4.1'
# export user-relevant objects:
from pyscipopt.Multidict import multidict
from pyscipopt.scip import Model
from pyscipopt.scip import Branchrule
from pyscipopt.scip import Conshdlr
from pyscipopt.scip import Eventhdlr
from pyscipopt.scip import Heur
from pyscipopt.scip import Presol
from pyscipopt.scip import Pricer
from pyscipopt.scip import Prop
from pyscipopt.scip import Sepa
from pyscipopt.scip import LP
from pyscipopt.scip import quicksum
from pyscipopt.scip import exp
from pyscipopt.scip import log
from pyscipopt.scip import sqrt
from pyscipopt.scip import PY_SCIP_RESULT as SCIP_RESULT
from pyscipopt.scip import PY_SCIP_PARAMSETTING as SCIP_PARAMSETTING
from pyscipopt.scip import PY_SCIP_PARAMEMPHASIS as SCIP_PARAMEMPHASIS
from pyscipopt.scip import PY_SCIP_STATUS as SCIP_STATUS
from pyscipopt.scip import PY_SCIP_STAGE as SCIP_STAGE
from pyscipopt.scip import PY_SCIP_PROPTIMING as SCIP_PROPTIMING
from pyscipopt.scip import PY_SCIP_PRESOLTIMING as SCIP_PRESOLTIMING
from pyscipopt.scip import PY_SCIP_HEURTIMING as SCIP_HEURTIMING
from pyscipopt.scip import PY_SCIP_EVENTTYPE as SCIP_EVENTTYPE
| __version__ = '1.4.0'
# export user-relevant objects:
from pyscipopt.Multidict import multidict
from pyscipopt.scip import Model
from pyscipopt.scip import Branchrule
from pyscipopt.scip import Conshdlr
from pyscipopt.scip import Eventhdlr
from pyscipopt.scip import Heur
from pyscipopt.scip import Presol
from pyscipopt.scip import Pricer
from pyscipopt.scip import Prop
from pyscipopt.scip import Sepa
from pyscipopt.scip import LP
from pyscipopt.scip import quicksum
from pyscipopt.scip import exp
from pyscipopt.scip import log
from pyscipopt.scip import sqrt
from pyscipopt.scip import PY_SCIP_RESULT as SCIP_RESULT
from pyscipopt.scip import PY_SCIP_PARAMSETTING as SCIP_PARAMSETTING
from pyscipopt.scip import PY_SCIP_PARAMEMPHASIS as SCIP_PARAMEMPHASIS
from pyscipopt.scip import PY_SCIP_STATUS as SCIP_STATUS
from pyscipopt.scip import PY_SCIP_STAGE as SCIP_STAGE
from pyscipopt.scip import PY_SCIP_PROPTIMING as SCIP_PROPTIMING
from pyscipopt.scip import PY_SCIP_PRESOLTIMING as SCIP_PRESOLTIMING
from pyscipopt.scip import PY_SCIP_HEURTIMING as SCIP_HEURTIMING
from pyscipopt.scip import PY_SCIP_EVENTTYPE as SCIP_EVENTTYPE
| mit | Python |
d2e8afc2abf2ded55b0c05b0eb01f9bd1fb1992b | Fix importing style | wkentaro/fcn | fcn/models/__init__.py | fcn/models/__init__.py | from fcn.models.fcn8s import FCN8s
from fcn.models.vgg16 import VGG16
| from apc2015or.models.fcn8s import *
from apc2015or.models.vgg16 import *
| mit | Python |
d267f9a8a9383a97884bbe8109aa6a1a6e478b8f | Remove f-string | beetbox/beets,beetbox/beets,beetbox/beets,beetbox/beets | beetsplug/mbsubmit.py | beetsplug/mbsubmit.py | # This file is part of beets.
# Copyright 2016, Adrian Sampson and Diego Moreda.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Aid in submitting information to MusicBrainz.
This plugin allows the user to print track information in a format that is
parseable by the MusicBrainz track parser [1]. Programmatic submitting is not
implemented by MusicBrainz yet.
[1] https://wiki.musicbrainz.org/History:How_To_Parse_Track_Listings
"""
from beets import ui
from beets.autotag import Recommendation
from beets.plugins import BeetsPlugin
from beets.ui.commands import PromptChoice
from beetsplug.info import print_data
class MBSubmitPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
self.config.add({
'format': '$track. $title - $artist ($length)',
'threshold': 'medium',
})
# Validate and store threshold.
self.threshold = self.config['threshold'].as_choice({
'none': Recommendation.none,
'low': Recommendation.low,
'medium': Recommendation.medium,
'strong': Recommendation.strong
})
self.register_listener('before_choose_candidate',
self.before_choose_candidate_event)
def before_choose_candidate_event(self, session, task):
if task.rec <= self.threshold:
return [PromptChoice('p', 'Print tracks', self.print_tracks)]
def print_tracks(self, session, task):
for i in sorted(task.items, key=lambda i: i.track):
print_data(None, i, self.config['format'].as_str())
def commands(self):
"""Add beet UI commands for mbsubmit."""
mbsubmit_cmd = ui.Subcommand(
'mbsubmit', help='Submit Tracks to MusicBrainz')
def func(lib, opts, args):
items = lib.items(ui.decargs(args))
self._mbsubmit(items)
mbsubmit_cmd.func = func
return [mbsubmit_cmd]
def _mbsubmit(self, items):
"""Print track information to be submitted to MusicBrainz."""
for i in sorted(items, key=lambda i: i.track):
print_data(None, i, self.config['format'].as_str())
| # This file is part of beets.
# Copyright 2016, Adrian Sampson and Diego Moreda.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Aid in submitting information to MusicBrainz.
This plugin allows the user to print track information in a format that is
parseable by the MusicBrainz track parser [1]. Programmatic submitting is not
implemented by MusicBrainz yet.
[1] https://wiki.musicbrainz.org/History:How_To_Parse_Track_Listings
"""
from beets import ui
from beets.autotag import Recommendation
from beets.plugins import BeetsPlugin
from beets.ui.commands import PromptChoice
from beetsplug.info import print_data
class MBSubmitPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
self.config.add({
'format': '$track. $title - $artist ($length)',
'threshold': 'medium',
})
# Validate and store threshold.
self.threshold = self.config['threshold'].as_choice({
'none': Recommendation.none,
'low': Recommendation.low,
'medium': Recommendation.medium,
'strong': Recommendation.strong
})
self.register_listener('before_choose_candidate',
self.before_choose_candidate_event)
def before_choose_candidate_event(self, session, task):
if task.rec <= self.threshold:
return [PromptChoice('p', 'Print tracks', self.print_tracks)]
def print_tracks(self, session, task):
for i in sorted(task.items, key=lambda i: i.track):
print_data(None, i, self.config['format'].as_str())
def commands(self):
"""Add beet UI commands for mbsubmit."""
mbsubmit_cmd = ui.Subcommand(
'mbsubmit', help=f'Submit Tracks to MusicBrainz')
def func(lib, opts, args):
items = lib.items(ui.decargs(args))
self._mbsubmit(items)
mbsubmit_cmd.func = func
return [mbsubmit_cmd]
def _mbsubmit(self, items):
"""Print track information to be submitted to MusicBrainz."""
for i in sorted(items, key=lambda i: i.track):
print_data(None, i, self.config['format'].as_str())
| mit | Python |
fa1b8059b03f7a4210dff7dccb44f24c41772c55 | fix smarttags message | mnot/redbot,mnot/redbot,mnot/redbot | redbot/message/headers/x_meta_mssmarttagspreventparsing.py | redbot/message/headers/x_meta_mssmarttagspreventparsing.py | #!/usr/bin/env python
from redbot.message import headers
from redbot.speak import Note, categories, levels
from redbot.type import AddNoteMethodType
class x_meta_mssmarttagspreventparsing(headers.HttpHeader):
canonical_name = "X-Meta-MSSmartTagsPreventParsing"
list_header = False
deprecated = False
valid_in_requests = False
valid_in_responses = True
def evaluate(self, add_note: AddNoteMethodType) -> None:
add_note(SMART_TAG_NO_WORK)
class SMART_TAG_NO_WORK(Note):
category = categories.GENERAL
level = levels.WARN
summary = "The %(field_name)s header doesn't have any effect on smart tags."
text = """\
This header doesn't have any effect on Microsoft Smart Tags, except in certain beta versions of
IE6. To turn them off, you'll need to make changes in the HTML content itself."""
| #!/usr/bin/env python
from redbot.message import headers
from redbot.speak import Note, categories, levels
from redbot.type import AddNoteMethodType
class x_meta_mssmarttagspreventparsing(headers.HttpHeader):
canonical_name = "X-Meta-MSSmartTagsPreventParsing"
list_header = False
deprecated = False
valid_in_requests = False
valid_in_responses = True
def evaluate(self, add_note: AddNoteMethodType) -> None:
add_note(SMART_TAG_NO_WORK)
class SMART_TAG_NO_WORK(Note):
category = categories.GENERAL
level = levels.WARN
summary = "The %(field_name)s header doesn't have any effect on smart tags."
text = """\
This header doesn't have any effect on Microsoft Smart Tags, except in certain beta versions of
IE6. To turn them off, you'll need to make changes in the HTML content it"""
| mit | Python |
6da69eb8f13dc56cc19d06a09d74005395de8989 | Add missing attributes in TPSProcessor. | release-engineering/fedmsg_meta_umb | fedmsg_meta_umb/tps.py | fedmsg_meta_umb/tps.py | # Copyright (C) 2017 Red Hat, Inc.
#
# fedmsg_meta_umb is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg_meta_umb is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Gowrishankar Rajaiyan <grajaiya@redhat.com>
from fedmsg.meta.base import BaseProcessor
class TPSProcessor(BaseProcessor):
topic_prefix_re = r'/topic/VirtualTopic\.eng'
__name__ = 'tps'
__description__ = 'package sanity testing of brew builds'
__obj__ = 'Test Package Sanity'
__docs__ = 'https://mojo.redhat.com/docs/DOC-0000000'
__link__ = 'https://sometpslink.engineering.redhat.com'
def title(self, msg, **config):
return msg['topic'].split('.', 2)[-1]
def packages(self, msg, **config):
return set([msg['headers']['component'].rsplit('-', 2)[0]])
| # Copyright (C) 2017 Red Hat, Inc.
#
# fedmsg_meta_umb is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg_meta_umb is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Gowrishankar Rajaiyan <grajaiya@redhat.com>
from fedmsg.meta.base import BaseProcessor
class TPSProcessor(BaseProcessor):
topic_prefix_re = r'/topic/VirtualTopic\.eng'
__name__ = 'tps'
def title(self, msg, **config):
return msg['topic'].split('.', 2)[-1]
def packages(self, msg, **config):
return set([msg['headers']['component'].rsplit('-', 2)[0]])
| lgpl-2.1 | Python |
733720bf1b4ce6b559e0ef62074762e29df11ba7 | bump version to 3.0 | kanarelo/reportlab,kanarelo/reportlab,Distrotech/reportlab,Distrotech/reportlab,Distrotech/reportlab,Distrotech/reportlab,kanarelo/reportlab,kanarelo/reportlab,Distrotech/reportlab,kanarelo/reportlab | src/reportlab/__init__.py | src/reportlab/__init__.py | #Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/__init__.py
__version__=''' $Id$ '''
__doc__="""The Reportlab PDF generation library."""
Version = "3.0"
import sys
if sys.version_info[0:2]!=(2, 7) and sys.version_info<(3, 3):
raise ImportError("""reportlab requires Python 2.7+ or 3.3+; 3.0-3.2 are not supported.""")
#define these early in reportlab's life
isPy3 = sys.version_info[0]==3
if isPy3:
def cmp(a,b):
return -1 if a<b else (1 if a>b else 0)
import builtins
builtins.cmp = cmp
builtins.xrange = range
del cmp, builtins
else:
from future_builtins import ascii
import __builtin__
__builtin__.ascii = ascii
del ascii, __builtin__
#the module reportlab.local_rl_mods can be used to customize just about anything
try:
import reportlab.local_rl_mods
except ImportError:
pass
| #Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/__init__.py
__version__=''' $Id$ '''
__doc__="""The Reportlab PDF generation library."""
Version = "3.0b2"
import sys
if sys.version_info[0:2]!=(2, 7) and sys.version_info<(3, 3):
raise ImportError("""reportlab requires Python 2.7+ or 3.3+; 3.0-3.2 are not supported.""")
#define these early in reportlab's life
isPy3 = sys.version_info[0]==3
if isPy3:
def cmp(a,b):
return -1 if a<b else (1 if a>b else 0)
import builtins
builtins.cmp = cmp
builtins.xrange = range
del cmp, builtins
else:
from future_builtins import ascii
import __builtin__
__builtin__.ascii = ascii
del ascii, __builtin__
#the module reportlab.local_rl_mods can be used to customize just about anything
try:
import reportlab.local_rl_mods
except ImportError:
pass
| bsd-3-clause | Python |
35e95fc3fe2d11b5db9ea676fdac272069e18c18 | bump version number | Distrotech/reportlab,Distrotech/reportlab,kanarelo/reportlab,kanarelo/reportlab,Distrotech/reportlab,kanarelo/reportlab,kanarelo/reportlab,kanarelo/reportlab,Distrotech/reportlab,Distrotech/reportlab | src/reportlab/__init__.py | src/reportlab/__init__.py | #Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/__init__.py
__version__=''' $Id$ '''
__doc__="""The Reportlab PDF generation library."""
Version = "3.0a1"
import sys
if sys.version_info[0:2]!=(2, 7) and sys.version_info<(3, 3):
raise ImportError("""reportlab requires Python 2.7+ or 3.3+; 3.0-3.2 are not supported.""")
#define these early in reportlab's life
isPy3 = sys.version_info[0]==3
if isPy3:
def cmp(a,b):
return -1 if a<b else (1 if a>b else 0)
import builtins
builtins.cmp = cmp
del cmp, builtins
else:
from future_builtins import ascii
import __builtin__
__builtin__.ascii = ascii
del ascii, __builtins__
| #Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/__init__.py
__version__=''' $Id$ '''
__doc__="""The Reportlab PDF generation library."""
Version = "2.7"
import sys
if sys.version_info[0:2]!=(2, 7) and sys.version_info<(3, 3):
raise ImportError("""reportlab requires Python 2.7+ or 3.3+; 3.0-3.2 are not supported.""")
#define these early in reportlab's life
isPy3 = sys.version_info[0]==3
if isPy3:
def cmp(a,b):
return -1 if a<b else (1 if a>b else 0)
import builtins
builtins.cmp = cmp
del cmp, builtins
else:
from future_builtins import ascii
import __builtin__
__builtin__.ascii = ascii
del ascii, __builtins__
| bsd-3-clause | Python |
2ab74ea127c1e1c82a6f3df81c35aa0aec73c494 | Update cachequery.py | Nik0l/UTemPro,Nik0l/UTemPro | utils/cachequery.py | utils/cachequery.py | import shelve
import sqlite3
# a part of the code is taken from: http://www.cs.berkeley.edu/~bjoern/projects/stackoverflow/
shelvefile = '../../db/querycache.shelve'
def query(cursor,query,parameters=()):
shlv = shelve.open(shelvefile)
key = makekey(query,parameters)
if(shlv.has_key(key)):
print("found query in cache")
return shlv[key]
else:
# query does not exist - run it and save result in cache
print("did NOT find query in cache")
cursor.execute(query,parameters)
result = cursor.fetchall()
shlv[key]=result
return result
def clear(query,parameters=()):
shlv = shelve.open(shelvefile)
key = makekey(query,parameters)
if shlv.has_key(key):
del shlv[key]
return True
else:
return False #nothing to do
def makekey(query,parameters):
return query+str(parameters)
| mit | Python | |
00d91e5b8911a8f1519281cf152b5f79cb8a15fe | correct default settings | allink/woodstock | woodstock/default_settings.py | woodstock/default_settings.py | from django.conf import settings
LANGUAGES = getattr(settings, 'LANGUAGES')
LANGUAGE_CODE = getattr(settings, 'LANGUAGE_CODE')
INVITEE_GENERATES_PASSWORD = getattr(settings, 'WOODSTOCK_INVITEE_GENERATES_PASSWORD', False)
SUBSCRIPTION_NEEDS_INVITATION = getattr(settings, 'WOODSTOCK_SUBSCRIPTION_NEEDS_INVITATION', False)
SUBSCRIPTION_CONSUMES_INVITATION = getattr(settings, 'WOODSTOCK_SUBSCRIPTION_CONSUMES_INVITATION', False)
SUBSCRIPTION_ALLOW_MULTIPLE_EVENTS = getattr(settings, 'WOODSTOCK_SUBSCRIPTION_ALLOW_MULTIPLE_EVENTS', True)
SUBSCRIPTION_ALLOW_MULTIPLE_EVENTPARTS = getattr(settings, 'WOODSTOCK_SUBSCRIPTION_ALLOW_MULTIPLE_EVENTPARTS', True)
SUBSCRIPTION_NEEDS_ACTIVATION = getattr(settings, 'WOODSTOCK_SUBSCRIPTION_NEEDS_ACTIVATION', False)
OVERBOOKING_ALLOWED = getattr(settings, 'WOODSTOCK_OVERBOOKING_ALLOWED', False)
# if set to None only password will be used
USERNAME_FIELD = getattr(settings, 'WOODSTOCK_USERNAME_FIELD', 'email')
PERSON_EMAIL_UNIQUE = getattr(settings, 'WOODSTOCK_PERSON_EMAIL_UNIQUE', True)
LOST_PASSWORD_NEWSLETTER = getattr(settings, 'WOODSTOCK_LOST_PASSWORD_NEWSLETTER', 'LOST PASSWORD')
ACTIVATION_NEWSLETTER = getattr(settings, 'WOODSTOCK_ACTIVATION_NEWSLETTER', 'ACTIVATION')
# customize admin view
ADMIN_HAS_PARTICIPANT = getattr(settings, 'WOODSTOCK_ADMIN_HAS_PARTICIPANT', True)
ADMIN_HAS_EVENT = getattr(settings, 'WOODSTOCK_ADMIN_HAS_EVENT', True)
ADMIN_HAS_INVITEE = getattr(settings, 'WOODSTOCK_ADMIN_HAS_INVITEE', True)
ADMIN_HAS_GROUP = getattr(settings, 'WOODSTOCK_ADMIN_HAS_GROUP', True)
ADMIN_HAS_SALUTATION = getattr(settings, 'WOODSTOCK_ADMIN_HAS_SALUTATION', True) | from django.conf import settings
LANGUAGES = getattr(settings, 'LANGUAGES')
LANGUAGE_CODE = getattr(settings, 'LANGUAGE_CODE')
INVITEE_GENERATES_PASSWORD = getattr(settings, 'WOODSTOCK_INVITEE_GENERATES_PASSWORD', False)
SUBSCRIPTION_NEEDS_INVITATION = getattr(settings, 'WOODSTOCK_SUBSCRIPTION_NEEDS_INVITATION', False)
SUBSCRIPTION_CONSUMES_INVITATION = getattr(settings, 'WOODSTOCK_SUBSCRIPTION_CONSUMES_INVITATION', False)
SUBSCRIPTION_ALLOW_MULTIPLE_EVENTS = getattr(settings, 'WOODSTOCK_SUBSCRIPTION_ALLOW_MULTIPLE_EVENTS', True)
SUBSCRIPTION_ALLOW_MULTIPLE_EVENTPARTS = getattr(settings, 'WOODSTOCK_SUBSCRIPTION_ALLOW_MULTIPLE_EVENTPARTS', True)
SUBSCRIPTION_NEEDS_ACTIVATION = getattr(settings, 'WOODSTOCK_SUBSCRIPTION_NEEDS_ACTIVATION', False)
OVERBOOKING_ALLOWED = getattr(settings, 'WOODSTOCK_OVERBOOKING_ALLOWED', False)
# if set to None only password will be used
USERNAME_FIELD = getattr(settings, 'WOODSTOCK_USERNAME_FIELD', 'email')
PERSON_EMAIL_UNIQUE = getattr(settings, 'WOODSTOCK_PARTICIPANT_EMAIL_UNIQUE', True)
LOST_PASSWORD_NEWSLETTER = getattr(settings, 'WOODSTOCK_LOST_PASSWORD_NEWSLETTER', 'LOST PASSWORD')
ACTIVATION_NEWSLETTER = getattr(settings, 'WOODSTOCK_ACTIVATION_NEWSLETTER', 'ACTIVATION')
# customize admin view
ADMIN_HAS_PARTICIPANT = getattr(settings, 'WOODSTOCK_ADMIN_HAS_PARTICIPANT', True)
ADMIN_HAS_EVENT = getattr(settings, 'WOODSTOCK_ADMIN_HAS_EVENT', True)
ADMIN_HAS_INVITEE = getattr(settings, 'WOODSTOCK_ADMIN_HAS_INVITEE', True)
ADMIN_HAS_GROUP = getattr(settings, 'WOODSTOCK_ADMIN_HAS_GROUP', True)
ADMIN_HAS_SALUTATION = getattr(settings, 'WOODSTOCK_ADMIN_HAS_SALUTATION', True) | bsd-3-clause | Python |
153ed6a519d6836adb02b934cff44974a7132b6d | Fix doc test failure parsing | softwaredoug/flake8_doctest | flake8/parseDocTest.py | flake8/parseDocTest.py | def parseFailDetails(failDetails):
""" Parse the line number of the doctest failure
>>> parseFailDetails("blah")
-1
"""
import re
failDetails = failDetails.split(',')
lineNo = -1
if len(failDetails) == 3:
match = re.search("line.*?(\d+)", failDetails[1])
if match is None:
return lineNo
lineNo = int(match.group(1))
return lineNo
def parseDocTestResult(docTestResStr):
""" Extract the line number and filename of the doctest failure"""
lines = docTestResStr.split("\n")
for lineNo, line in enumerate(lines):
failure = line.find("Failed example:")
if failure != -1:
failDetails = lines[lineNo - 1]
yield parseFailDetails(failDetails)
if __name__ == "__main__":
from doctest import testmod
testmod()
| def parseFailDetails(failDetails):
""" Parse the line number of the doctest failure"""
import re
failDetails = failDetails.split(',')
lineNo = -1
if len(failDetails) == 3:
match = re.search("line.*?(\d+)", failDetails[1])
if match is None:
return lineNo
lineNo = int(match.group(1))
return lineNo
def parseDocTestResult(docTestResStr):
""" Extract the line number and filename of the doctest failure"""
lines = docTestResStr.split("\n")
for lineNo, line in enumerate(lines):
failure = line.find("Failed example:")
if failure != -1:
failDetails = lines[lineNo - 1]
yield parseFailDetails(failDetails)
| mit | Python |
e048948d5758c6bcc7ca3d5e45f20a654022de7f | Update app version | lingthio/Flask-User,lingthio/Flask-User | flask_user/__init__.py | flask_user/__init__.py | __title__ = 'Flask-User'
__description__ = 'Customizable User Authentication, User Management, and more.'
__version__ = '1.0.2.2'
__url__ = 'https://github.com/lingthio/Flask-User'
__author__ = 'Ling Thio'
__author_email__= 'ling.thio@gmail.com'
__maintainer__ = 'Ling Thio'
__license__ = 'MIT'
__copyright__ = '(c) 2013 Ling Thio'
# Define Flask-User Exceptions early on
class ConfigError(Exception):
pass
class EmailError(Exception):
pass
# Export Flask-Login's current user
from flask_login import current_user # pass through Flask-Login's current_user
# Export v0.6 legacy classes DbAdapter and SQLAlchemyAdapter
# To display an Incompatibility error message the v0.6 API is used on a v1.0+ install
from .legacy_error import DbAdapter, SQLAlchemyAdapter
from .user_mixin import UserMixin
from .user_manager import UserManager
from .email_manager import EmailManager
from .password_manager import PasswordManager
from .token_manager import TokenManager
# Export Flask-User decorators
from .decorators import *
# Export Flask-User signals
from .signals import *
| __title__ = 'Flask-User'
__description__ = 'Customizable User Authentication, User Management, and more.'
__version__ = '1.0.2.0'
__url__ = 'https://github.com/lingthio/Flask-User'
__author__ = 'Ling Thio'
__author_email__= 'ling.thio@gmail.com'
__maintainer__ = 'Ling Thio'
__license__ = 'MIT'
__copyright__ = '(c) 2013 Ling Thio'
# Define Flask-User Exceptions early on
class ConfigError(Exception):
pass
class EmailError(Exception):
pass
# Export Flask-Login's current user
from flask_login import current_user # pass through Flask-Login's current_user
# Export v0.6 legacy classes DbAdapter and SQLAlchemyAdapter
# To display an Incompatibility error message the v0.6 API is used on a v1.0+ install
from .legacy_error import DbAdapter, SQLAlchemyAdapter
from .user_mixin import UserMixin
from .user_manager import UserManager
from .email_manager import EmailManager
from .password_manager import PasswordManager
from .token_manager import TokenManager
# Export Flask-User decorators
from .decorators import *
# Export Flask-User signals
from .signals import *
| mit | Python |
0c87ace4f8fed068b961269e74bd1b64fae59c5f | Fix little typo | lingthio/Flask-User,lingthio/Flask-User | flask_user/__init__.py | flask_user/__init__.py | __title__ = 'Flask-User'
__description__ = 'Customizable User Authentication, User Management, and more.'
__version__ = '1.0.2.0'
__url__ = 'https://github.com/lingthio/Flask-User'
__author__ = 'Ling Thio'
__author_email__= 'ling.thio@gmail.com'
__maintainer__ = 'Ling Thio'
__license__ = 'MIT'
__copyright__ = '(c) 2013 Ling Thio'
# Define Flask-User Exceptions early on
class ConfigError(Exception):
pass
class EmailError(Exception):
pass
# Export Flask-Login's current user
from flask_login import current_user # pass through Flask-Login's current_user
# Export v0.6 legacy classes DbAdapter and SQLAlchemyAdapter
# To display an Incompatibility error message the v0.6 API is used on a v1.0+ install
from .legacy_error import DbAdapter, SQLAlchemyAdapter
from .user_mixin import UserMixin
from .user_manager import UserManager
from .email_manager import EmailManager
from .password_manager import PasswordManager
from .token_manager import TokenManager
# Export Flask-User decorators
from .decorators import *
# Export Flask-User signals
from .signals import *
| __title__ = 'Flask-User'
__description__ = 'Customizable User Authentication, User Management, and more.'
__version__ = '1.0.2.0'
__url__ = 'https://github.com/lingthio/Flask-User'
__author__ = 'Ling Thio'
__author_email__= 'ling.thio@gmail.com'
__maintainer__ = 'Ling Thio'
__license__ = 'MIT'
__copyright__ = '(c) 2013 Ling Thio'
# Define Flask-User Exceptions early on
class ConfigError(Exception):
pass
class EmailError(Exception):
pass
# Export Flask-Login's current user
from flask_login import current_user # pass through Flask-Login's current_user
# Export v0.6 legacy classes DbAdapter and SQLAlchemyAdapter
# To display an Incompatibilty error message the v0.6 API is used on a v1.0+ install
from .legacy_error import DbAdapter, SQLAlchemyAdapter
from .user_mixin import UserMixin
from .user_manager import UserManager
from .email_manager import EmailManager
from .password_manager import PasswordManager
from .token_manager import TokenManager
# Export Flask-User decorators
from .decorators import *
# Export Flask-User signals
from .signals import *
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.