commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
cd5053ac36e13b57e95eeb1241032c97b48a4a85 | Drop try/catch that causes uncaught errors in the Observer to be silently ignored | planetstack/openstack_observer/backend.py | planetstack/openstack_observer/backend.py | import threading
import time
from observer.event_loop import PlanetStackObserver
from observer.event_manager import EventListener
from util.logger import Logger, logging
logger = Logger(level=logging.INFO)
class Backend:
def run(self):
# start the openstack observer
observer = PlanetStackObserver()
observer_thread = threading.Thread(target=observer.run)
observer_thread.start()
# start event listene
event_manager = EventListener(wake_up=observer.wake_up)
event_manager_thread = threading.Thread(target=event_manager.run)
event_manager_thread.start()
| import threading
import time
from observer.event_loop import PlanetStackObserver
from observer.event_manager import EventListener
from util.logger import Logger, logging
logger = Logger(level=logging.INFO)
class Backend:
def run(self):
try:
# start the openstack observer
observer = PlanetStackObserver()
observer_thread = threading.Thread(target=observer.run)
observer_thread.start()
# start event listene
event_manager = EventListener(wake_up=observer.wake_up)
event_manager_thread = threading.Thread(target=event_manager.run)
event_manager_thread.start()
except:
logger.log_exc("Exception in child thread")
| Python | 0 |
b725ef74f8e6f0887737e13783062b987fb3dd77 | bump to 7.0.3 final | device_inventory/__init__.py | device_inventory/__init__.py | VERSION = (7, 0, 3, 'final', 0)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
| VERSION = (7, 0, 3, 'beta', 6)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
| Python | 0.000002 |
584c2f69df66bd08ace0652da7337e8e71a72099 | Use bool for zero_mask. Requires pytorch 1.7+ | projects/transformers/models/sparse_embedding.py | projects/transformers/models/sparse_embedding.py | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2021, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import numpy as np
import torch
from nupic.torch.modules.sparse_weights import SparseWeightsBase
__all__ = ["SparseEmbeddings"]
class SparseEmbeddings(SparseWeightsBase):
"""
This wraps a torch.nn.Embedding module to sparsify the weights where the sparsity is
applied per embedding. The embedding of an arbitrary index j will have the desired
sparsity specified through the init.
Note: A torch.nn.Embedding is already sparse in one sense. Specifically, it's input
is expected to be sparse (i.e. an integer specifying the index of the embedding).
In contrast, this introduces sparsity in the weights of the embedding layer, which
effectively yields sparse output embeddings.
:param module: A torch.nn.Embedding module
:param sparsity: Sparsity to apply to the weights; each output embedding will have
this level of sparsity.
"""
def __init__(self, module, sparsity=None):
assert len(module.weight.shape) == 2, "Should resemble a nn.Embedding"
super(SparseEmbeddings, self).__init__(
module, sparsity=sparsity
)
# For each unit, decide which weights are going to be zero
num_embeddings = self.module.num_embeddings
embedding_dim = self.module.embedding_dim
num_nz = int(round((1 - self.sparsity) * embedding_dim))
zero_mask = torch.ones(num_embeddings, embedding_dim, dtype=torch.bool,
device=module.weight.device)
for embedding_j in range(num_embeddings):
on_indices = np.random.choice(embedding_dim, num_nz, replace=False)
zero_mask[embedding_j, on_indices] = False
self.register_buffer("zero_mask", zero_mask)
self.rezero_weights()
def rezero_weights(self):
self.module.weight.data[self.zero_mask] = 0
| # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2021, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import numpy as np
import torch
from nupic.torch.modules.sparse_weights import SparseWeightsBase
__all__ = ["SparseEmbeddings"]
class SparseEmbeddings(SparseWeightsBase):
"""
This wraps a torch.nn.Embedding module to sparsify the weights where the sparsity is
applied per embedding. The embedding of an arbitrary index j will have the desired
sparsity specified through the init.
Note: A torch.nn.Embedding is already sparse in one sense. Specifically, it's input
is expected to be sparse (i.e. an integer specifying the index of the embedding).
In contrast, this introduces sparsity in the weights of the embedding layer, which
effectively yields sparse output embeddings.
:param module: A torch.nn.Embedding module
:param sparsity: Sparsity to apply to the weights; each output embedding will have
this level of sparsity.
"""
def __init__(self, module, sparsity=None):
assert len(module.weight.shape) == 2, "Should resemble a nn.Embedding"
super(SparseEmbeddings, self).__init__(
module, sparsity=sparsity
)
# For each unit, decide which weights are going to be zero
num_embeddings = self.module.num_embeddings
embedding_dim = self.module.embedding_dim
num_nz = int(round((1 - self.sparsity) * embedding_dim))
zero_mask = torch.ones(num_embeddings, embedding_dim, dtype=torch.bool)
for embedding_j in range(num_embeddings):
on_indices = np.random.choice(embedding_dim, num_nz, replace=False)
zero_mask[embedding_j, on_indices] = False
# Use float16 because pytorch distributed nccl doesn't support bools
self.register_buffer("zero_mask", zero_mask.half())
self.rezero_weights()
def rezero_weights(self):
self.module.weight.data[self.zero_mask.bool()] = 0
| Python | 0.000001 |
3587c608cde4f273d33a572c0bf44dbe2b003250 | better initial negative rate | python/alpenglow/experiments/FactorExperiment.py | python/alpenglow/experiments/FactorExperiment.py | import alpenglow.Getter as rs
import alpenglow as prs
class FactorExperiment(prs.OnlineExperiment):
"""FactorExperiment(dimension=10,begin_min=-0.01,begin_max=0.01,learning_rate=0.05,regularization_rate=0.0,negative_rate=0.0)
This class implements an online version of the well-known matrix factorization recommendation model [Koren2009]_
and trains it via stochastic gradient descent. The model is able to train on implicit data
using negative sample generation, see [X.He2016]_ and the **negative_rate** parameter.
.. [Koren2009] Koren, Yehuda, Robert Bell, and Chris Volinsky. "Matrix factorization techniques for recommender systems." Computer 42.8 (2009).
.. [X.He2016] X. He, H. Zhang, M.-Y. Kan, and T.-S. Chua. Fast matrix factorization for online recommendation with implicit feedback. In SIGIR, pages 549–558, 2016.
Parameters
----------
dimension : int
The latent factor dimension of the factormodel.
begin_min : double
The factors are initialized randomly, sampling each element uniformly from the interval (begin_min, begin_max).
begin_max : double
See begin_min.
learning_rate : double
The learning rate used in the stochastic gradient descent updates.
regularization_rate : double
The coefficient for the L2 regularization term.
negative_rate : int
The number of negative samples generated after each update. Useful for implicit recommendation.
"""
def _config(self, top_k, seed):
#config = self.parameter_defaults(
# top_k=100,
# min_time=0,
# seed=0,
# out_file=None,
# filters=[],
# loggers=[],
#)
model = rs.FactorModel(**self.parameter_defaults(
begin_min=-0.01,
begin_max=0.01,
dimension=10,
initialize_all=False,
))
updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
learning_rate=0.05,
regularization_rate=0.0
))
updater.set_model(model)
point_wise = rs.ObjectiveMSE()
gradient_computer = rs.GradientComputerPointWise()
gradient_computer.set_objective(point_wise)
gradient_computer.set_model(model)
gradient_computer.add_gradient_updater(updater)
negative_sample_generator = rs.UniformNegativeSampleGenerator(**self.parameter_defaults(
negative_rate=100,
initialize_all=False,
seed=67439852,
filter_repeats=False,
))
negative_sample_generator.add_updater(gradient_computer)
return (model, [negative_sample_generator], [], [])
| import alpenglow.Getter as rs
import alpenglow as prs
class FactorExperiment(prs.OnlineExperiment):
"""FactorExperiment(dimension=10,begin_min=-0.01,begin_max=0.01,learning_rate=0.05,regularization_rate=0.0,negative_rate=0.0)
This class implements an online version of the well-known matrix factorization recommendation model [Koren2009]_
and trains it via stochastic gradient descent. The model is able to train on implicit data
using negative sample generation, see [X.He2016]_ and the **negative_rate** parameter.
.. [Koren2009] Koren, Yehuda, Robert Bell, and Chris Volinsky. "Matrix factorization techniques for recommender systems." Computer 42.8 (2009).
.. [X.He2016] X. He, H. Zhang, M.-Y. Kan, and T.-S. Chua. Fast matrix factorization for online recommendation with implicit feedback. In SIGIR, pages 549–558, 2016.
Parameters
----------
dimension : int
The latent factor dimension of the factormodel.
begin_min : double
The factors are initialized randomly, sampling each element uniformly from the interval (begin_min, begin_max).
begin_max : double
See begin_min.
learning_rate : double
The learning rate used in the stochastic gradient descent updates.
regularization_rate : double
The coefficient for the L2 regularization term.
negative_rate : int
The number of negative samples generated after each update. Useful for implicit recommendation.
"""
def _config(self, top_k, seed):
#config = self.parameter_defaults(
# top_k=100,
# min_time=0,
# seed=0,
# out_file=None,
# filters=[],
# loggers=[],
#)
model = rs.FactorModel(**self.parameter_defaults(
begin_min=-0.01,
begin_max=0.01,
dimension=10,
initialize_all=False,
))
updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
learning_rate=0.05,
regularization_rate=0.0
))
updater.set_model(model)
point_wise = rs.ObjectiveMSE()
gradient_computer = rs.GradientComputerPointWise()
gradient_computer.set_objective(point_wise)
gradient_computer.set_model(model)
gradient_computer.add_gradient_updater(updater)
negative_sample_generator = rs.UniformNegativeSampleGenerator(**self.parameter_defaults(
negative_rate=0.0,
initialize_all=False,
seed=67439852,
filter_repeats=False,
))
negative_sample_generator.add_updater(gradient_computer)
return (model, [negative_sample_generator], [], [])
| Python | 0.998618 |
37fa40a9b5260f8090adaa8c15d3767c0867574f | Create a list of messages that contain system time. | python/fusion_engine_client/messages/__init__.py | python/fusion_engine_client/messages/__init__.py | from .core import *
from . import ros
message_type_to_class = {
# Navigation solution messages.
PoseMessage.MESSAGE_TYPE: PoseMessage,
PoseAuxMessage.MESSAGE_TYPE: PoseAuxMessage,
GNSSInfoMessage.MESSAGE_TYPE: GNSSInfoMessage,
GNSSSatelliteMessage.MESSAGE_TYPE: GNSSSatelliteMessage,
# Sensor measurement messages.
IMUMeasurement.MESSAGE_TYPE: IMUMeasurement,
# ROS messages.
ros.PoseMessage.MESSAGE_TYPE: ros.PoseMessage,
ros.GPSFixMessage.MESSAGE_TYPE: ros.GPSFixMessage,
ros.IMUMessage.MESSAGE_TYPE: ros.IMUMessage,
# Command and control messages.
CommandResponseMessage.MESSAGE_TYPE: CommandResponseMessage,
MessageRequest.MESSAGE_TYPE: MessageRequest,
ResetRequest.MESSAGE_TYPE: ResetRequest,
VersionInfoMessage.MESSAGE_TYPE: VersionInfoMessage,
EventNotificationMessage.MESSAGE_TYPE: EventNotificationMessage,
}
messages_with_system_time = [t for t, c in message_type_to_class.items() if hasattr(c(), 'system_time_ns')]
| from .core import *
from . import ros
message_type_to_class = {
# Navigation solution messages.
PoseMessage.MESSAGE_TYPE: PoseMessage,
PoseAuxMessage.MESSAGE_TYPE: PoseAuxMessage,
GNSSInfoMessage.MESSAGE_TYPE: GNSSInfoMessage,
GNSSSatelliteMessage.MESSAGE_TYPE: GNSSSatelliteMessage,
# Sensor measurement messages.
IMUMeasurement.MESSAGE_TYPE: IMUMeasurement,
# ROS messages.
ros.PoseMessage.MESSAGE_TYPE: ros.PoseMessage,
ros.GPSFixMessage.MESSAGE_TYPE: ros.GPSFixMessage,
ros.IMUMessage.MESSAGE_TYPE: ros.IMUMessage,
# Command and control messages.
CommandResponseMessage.MESSAGE_TYPE: CommandResponseMessage,
MessageRequest.MESSAGE_TYPE: MessageRequest,
ResetRequest.MESSAGE_TYPE: ResetRequest,
VersionInfoMessage.MESSAGE_TYPE: VersionInfoMessage,
EventNotificationMessage.MESSAGE_TYPE: EventNotificationMessage,
}
| Python | 0.00003 |
82f68c3a0bd734dc9a639d9c257b26f5720c0d9c | add prepare_dir | decorators.py | decorators.py | import os
from functools import wraps
from requests import Timeout, ConnectionError
from socket import timeout as socket_timeout
import logging
from .models import ArbitraryAccessObject
from shutil import get_terminal_size
timeouts = (Timeout, socket_timeout, ConnectionError)
__author__ = 'zz'
def threading_lock(lock):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
with lock:
return func(*args, **kwargs)
return wrapper
return decorator
def retry_connect(retry_times, timeout, error=None):
if error is None:
error=ArbitraryAccessObject()
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try_times = 0
while True:
try:
ret = func(*args, timeout=timeout, **kwargs)
if ret.status_code != 200:
logging.warning('%s is %s', ret.url, ret.status_code)
if ret.status_code == 404:
raise Timeout
except timeouts:
try_times += 1
error.reconnect(try_times)
else:
return ret
if try_times >= retry_times:
raise Timeout
return wrapper
return decorator
def semalock_for_class(func):
@wraps(func)
def wrapper(self, s, *args, **kwargs):
with s:
return func(self, *args, **kwargs)
return wrapper
def semalock(func):
@wraps(func)
def wrapper(s, *args, **kwargs):
with s:
return func(*args, **kwargs)
return wrapper
def loop(func):
@wraps(func)
def wrapper(*args, **kwargs):
while True:
ret = func(*args, **kwargs)
if ret:
break
return wrapper
def resolve_timeout(replace_value):
"""
return replace value instead of raise timeout
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except timeouts as e:
return replace_value
return wrapper
return decorator
def clear_output(func):
terminal_width, _ = get_terminal_size()
@wraps(func)
def wrapper(*args, **kwargs):
print(' ' * terminal_width, end='\r')
return func(*args, **kwargs)
return wrapper
def prepare_dir(dirname):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
if not os.path.exists(dirname):
os.mkdir(dirname)
return func(*args, **kwargs)
return wrapper
return decorator | __author__ = 'zz'
from functools import wraps
from requests import Timeout, ConnectionError
from socket import timeout as socket_timeout
import logging
from .models import ArbitraryAccessObject
from shutil import get_terminal_size
timeouts = (Timeout, socket_timeout, ConnectionError)
def threading_lock(lock):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
with lock:
return func(*args, **kwargs)
return wrapper
return decorator
def retry_connect(retry_times, timeout, error=None):
if error is None:
error=ArbitraryAccessObject()
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try_times = 0
while True:
try:
ret = func(*args, timeout=timeout, **kwargs)
if ret.status_code != 200:
logging.warning('%s is %s', ret.url, ret.status_code)
if ret.status_code == 404:
raise Timeout
except timeouts:
try_times += 1
error.reconnect(try_times)
else:
return ret
if try_times >= retry_times:
raise Timeout
return wrapper
return decorator
def semalock_for_class(func):
@wraps(func)
def wrapper(self, s, *args, **kwargs):
with s:
return func(self, *args, **kwargs)
return wrapper
def semalock(func):
@wraps(func)
def wrapper(s, *args, **kwargs):
with s:
return func(*args, **kwargs)
return wrapper
def loop(func):
@wraps(func)
def wrapper(*args, **kwargs):
while True:
ret = func(*args, **kwargs)
if ret:
break
return wrapper
def resolve_timeout(replace_value):
"""
return replace value instead of raise timeout
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except timeouts as e:
return replace_value
return wrapper
return decorator
def clear_output(func):
terminal_width, _ = get_terminal_size()
@wraps(func)
def wrapper(*args, **kwargs):
print(' ' * terminal_width, end='\r')
return func(*args, **kwargs)
return wrapper | Python | 0.000001 |
9ff314c9481605e174769416dec1b71e16936b83 | Fix unicode error when creating SHA1 sum for ical UID | demo/utils.py | demo/utils.py | from datetime import datetime, time, timedelta
import hashlib
def export_event(event, format='ical'):
# Only ical format supported at the moment
if format != 'ical':
return
# Begin event
# VEVENT format: http://www.kanzaki.com/docs/ical/vevent.html
ical_components = [
'BEGIN:VCALENDAR',
'VERSION:2.0',
'PRODID:-//Torchbox//wagtail//EN',
]
# Work out number of days the event lasts
if event.date_to is not None:
days = (event.date_to - event.date_from).days + 1
else:
days = 1
for day in range(days):
# Get date
date = event.date_from + timedelta(days=day)
# Get times
if event.time_from is not None:
start_time = event.time_from
else:
start_time = time.min
if event.time_to is not None:
end_time = event.time_to
else:
end_time = time.max
# Combine dates and times
start_datetime = datetime.combine(
date,
start_time
)
end_datetime = datetime.combine(date, end_time)
def add_slashes(string):
string.replace('"', '\\"')
string.replace('\\', '\\\\')
string.replace(',', '\\,')
string.replace(':', '\\:')
string.replace(';', '\\;')
string.replace('\n', '\\n')
return string
# Make a uid
event_string = event.url + str(start_datetime)
uid = hashlib.sha1(event_string.encode('utf-8')).hexdigest() + '@wagtaildemo'
# Make event
ical_components.extend([
'BEGIN:VEVENT',
'UID:' + add_slashes(uid),
'URL:' + add_slashes(event.url),
'DTSTAMP:' + start_time.strftime('%Y%m%dT%H%M%S'),
'SUMMARY:' + add_slashes(event.title),
'DESCRIPTION:' + add_slashes(event.search_description),
'LOCATION:' + add_slashes(event.location),
'DTSTART;TZID=Europe/London:' + start_datetime.strftime('%Y%m%dT%H%M%S'),
'DTEND;TZID=Europe/London:' + end_datetime.strftime('%Y%m%dT%H%M%S'),
'END:VEVENT',
])
# Finish event
ical_components.extend([
'END:VCALENDAR',
])
# Join components
return '\r'.join(ical_components)
| from datetime import datetime, time, timedelta
import hashlib
def export_event(event, format='ical'):
# Only ical format supported at the moment
if format != 'ical':
return
# Begin event
# VEVENT format: http://www.kanzaki.com/docs/ical/vevent.html
ical_components = [
'BEGIN:VCALENDAR',
'VERSION:2.0',
'PRODID:-//Torchbox//wagtail//EN',
]
# Work out number of days the event lasts
if event.date_to is not None:
days = (event.date_to - event.date_from).days + 1
else:
days = 1
for day in range(days):
# Get date
date = event.date_from + timedelta(days=day)
# Get times
if event.time_from is not None:
start_time = event.time_from
else:
start_time = time.min
if event.time_to is not None:
end_time = event.time_to
else:
end_time = time.max
# Combine dates and times
start_datetime = datetime.combine(
date,
start_time
)
end_datetime = datetime.combine(date, end_time)
def add_slashes(string):
string.replace('"', '\\"')
string.replace('\\', '\\\\')
string.replace(',', '\\,')
string.replace(':', '\\:')
string.replace(';', '\\;')
string.replace('\n', '\\n')
return string
# Make a uid
uid = hashlib.sha1(event.url + str(start_datetime)).hexdigest() + '@wagtaildemo'
# Make event
ical_components.extend([
'BEGIN:VEVENT',
'UID:' + add_slashes(uid),
'URL:' + add_slashes(event.url),
'DTSTAMP:' + start_time.strftime('%Y%m%dT%H%M%S'),
'SUMMARY:' + add_slashes(event.title),
'DESCRIPTION:' + add_slashes(event.search_description),
'LOCATION:' + add_slashes(event.location),
'DTSTART;TZID=Europe/London:' + start_datetime.strftime('%Y%m%dT%H%M%S'),
'DTEND;TZID=Europe/London:' + end_datetime.strftime('%Y%m%dT%H%M%S'),
'END:VEVENT',
])
# Finish event
ical_components.extend([
'END:VCALENDAR',
])
# Join components
return '\r'.join(ical_components)
| Python | 0.000184 |
d99dfa94a42d70900e31c36023602bea3e5efdfb | Bump forgotten version to 3.2 | debinterface/__init__.py | debinterface/__init__.py | # -*- coding: utf-8 -*-
"""Imports for easier use"""
from .adapter import NetworkAdapter
from .adapterValidation import NetworkAdapterValidation
from .dnsmasqRange import (DnsmasqRange,
DEFAULT_CONFIG as DNSMASQ_DEFAULT_CONFIG)
from .hostapd import Hostapd
from .interfaces import Interfaces
from .interfacesReader import InterfacesReader
from .interfacesWriter import InterfacesWriter
__version__ = '3.2.0'
__all__ = [
'NetworkAdapter',
'NetworkAdapterValidation',
'DnsmasqRange',
'DNSMASQ_DEFAULT_CONFIG',
'Hostapd',
'Interfaces',
'InterfacesReader',
'InterfacesWriter'
]
| # -*- coding: utf-8 -*-
"""Imports for easier use"""
from .adapter import NetworkAdapter
from .adapterValidation import NetworkAdapterValidation
from .dnsmasqRange import (DnsmasqRange,
DEFAULT_CONFIG as DNSMASQ_DEFAULT_CONFIG)
from .hostapd import Hostapd
from .interfaces import Interfaces
from .interfacesReader import InterfacesReader
from .interfacesWriter import InterfacesWriter
__version__ = '3.1.0'
__all__ = [
'NetworkAdapter',
'NetworkAdapterValidation',
'DnsmasqRange',
'DNSMASQ_DEFAULT_CONFIG',
'Hostapd',
'Interfaces',
'InterfacesReader',
'InterfacesWriter'
]
| Python | 0 |
e9e6d5a6c42ff1522010f003fbed2cd324eab48e | Update cluster config | configs/config_cluster.py | configs/config_cluster.py | CDNA = '/home/cmb-panasas2/skchoudh/genomes/hg19/kallisto/hg19'
GENOMES_DIR='/home/cmb-panasas2/skchoudh/genomes'
OUT_DIR = '/home/cmb-panasas2/skchoudh/HuR_results/human/rna_seq_star_hg38_annotated'
SRC_DIR = '/home/cmb-panasas2/skchoudh/github_projects/clip_seq_pipeline/scripts'
RAWDATA_DIR ='/home/cmb-06/as/skchoudh/data/HuR_Mouse_Human_liver/rna-seq/Penalva_L_08182016'
SAMPLES=['HepG2_CTRL1_S31_L004', 'HepG2_CTRL_2_S33_L004',
'HepG2_CTRL_7_S35_L004', 'HepG2_HuR_KD_1_S32_L004',
'HepG2_HuR_KD_2_S34_L004', 'HepG2_HuR_KD_7_S36_L004']
GENOME_BUILD = 'hg38'
GENOME_FASTA = GENOMES_DIR + '/' + GENOME_BUILD + '/fasta/'+ GENOME_BUILD+ '.fa'
STAR_INDEX = GENOMES_DIR + '/' + GENOME_BUILD + '/star_annotated'
GTF = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.v25.annotation.gtf'
GENE_NAMES = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + GENOME_BUILD+'_gene_names_stripped.tsv'
GENE_LENGTHS = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.v25.coding_lengths.tsv' #+ GENOME_BUILD+'_gene_lengths.tsv'
GENE_NAME_MAP = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + GENOME_BUILD + '_gene_names_stripped.tsv'
| CDNA = '/home/cmb-panasas2/skchoudh/genomes/hg19/kallisto/hg19'
GENOMES_DIR='/home/cmb-panasas2/skchoudh/genomes'
OUT_DIR = '/home/cmb-panasas2/skchoudh/HuR_results/analysis/rna_seq_star_hg38_annotated'
RAWDATA_DIR ='/home/cmb-06/as/skchoudh/data/HuR_Mouse_Human_liver/rna-seq/Penalva_L_08182016'
SAMPLES=['HepG2_CTRL1_S31_L004', 'HepG2_CTRL_2_S33_L004',
'HepG2_CTRL_7_S35_L004', 'HepG2_HuR_KD_1_S32_L004',
'HepG2_HuR_KD_2_S34_L004', 'HepG2_HuR_KD_7_S36_L004']
GENOME_BUILD = 'hg38'
GENOME_FASTA = GENOMES_DIR + '/' + GENOME_BUILD + '/fasta/'+ GENOME_BUILD+ '.fa'
STAR_INDEX = GENOMES_DIR + '/' + GENOME_BUILD + '/star_annotated'
GTF = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.v25.annotation.gtf'
| Python | 0.000001 |
79eb9241ac8ce36b14512287bc473a426db50cf1 | Use elif to make it faster. | Example/Pluton/Plugins/Example/Example.py | Example/Pluton/Plugins/Example/Example.py | import clr
import sys
clr.AddReferenceByPartialName("UnityEngine")
clr.AddReferenceByPartialName("Pluton")
import UnityEngine
import Pluton
from Pluton import InvItem
from System import *
from UnityEngine import *
class Example:
def On_PlayerConnected(self, player):
for p in Server.ActivePlayers:
if(p.Name != player.Name):
p.Message(String.Format("{0} has joined the server!", player.Name))
def On_PlayerDisconnected(self, player):
for p in Server.ActivePlayers:
if(p.Name != player.Name):
p.Message(String.Format("{0} has left the server!", player.Name))
def On_Command(self, cmd):
try:
if(cmd.cmd == "kit"):
if(Server.LoadOuts.ContainsKey(cmd.quotedArgs[0])):
loadout = Server.LoadOuts[cmd.quotedArgs[0]]
loadout.ToInv(cmd.User.Inventory)
elif(cmd.cmd == "apple"):
cmd.User.Message("An apple a day keeps the doctor away!")
item = InvItem("Apple")
item.Instantiate(Vector3(cmd.User.X + 3, cmd.User.Y + 3, cmd.User.Z + 3))
elif(cmd.cmd == "help"):
cmd.User.Message("Usable command: /whereami, /kit starter")
except:
Debug.Log(String.Format("Something went wrong while executing: /{0} args", cmd.cmd, String.Join(" ", cmd.args)))
| import clr
import sys
clr.AddReferenceByPartialName("UnityEngine")
clr.AddReferenceByPartialName("Pluton")
import UnityEngine
import Pluton
from Pluton import InvItem
from System import *
from UnityEngine import *
class Example:
def On_PlayerConnected(self, player):
for p in Server.ActivePlayers:
if(p.Name != player.Name):
p.Message(String.Format("{0} has joined the server!", player.Name))
def On_PlayerDisconnected(self, player):
for p in Server.ActivePlayers:
if(p.Name != player.Name):
p.Message(String.Format("{0} has left the server!", player.Name))
def On_Command(self, cmd):
try:
if(cmd.cmd == "kit"):
if(Server.LoadOuts.ContainsKey(cmd.quotedArgs[0])):
loadout = Server.LoadOuts[cmd.quotedArgs[0]]
loadout.ToInv(cmd.User.Inventory)
if(cmd.cmd == "apple"):
cmd.User.Message("An apple a day keeps the doctor away!")
item = InvItem("Apple")
item.Instantiate(Vector3(cmd.User.X + 3, cmd.User.Y + 3, cmd.User.Z + 3))
if(cmd.cmd == "help"):
cmd.User.Message("Usable command: /whereami, /kit starter")
except:
Debug.Log(String.Format("Something went wrong while executing: /{0} args", cmd.cmd, String.Join(" ", cmd.args))) | Python | 0 |
9af1cbe0676ca71edecfa6d44c66690a5a583b01 | Rewrite for clarity | constructive_hierarchy.py | constructive_hierarchy.py | '''Reason about a directed graph in which the (non-)existence of some edges
must be inferred by the disconnectedness of certain vertices. Collect (truthy)
evidence for boolean function return values.'''
def transitive_closure_dict(known_vertices, edges):
'''Find the transitive closure of a dict mapping vertices to their paths.'''
found_vertices = {b: known_vertices[a] + ((a, b),)
for a, b in edges if a in known_vertices}
if all(v in known_vertices for v in found_vertices):
return known_vertices
found_vertices.update(known_vertices)
return transitive_closure_dict(found_vertices, edges)
def transitive_closure(vertex, edges):
closure = transitive_closure_dict({vertex: ()}, edges)
# Use a (truthy) loop instead of an empty path
closure[vertex] = (vertex, vertex)
return closure
def downward_closure(vertex, edges):
'''Find the downward closure of a vertex.'''
return transitive_closure(vertex, edges)
def upward_closure(vertex, edges):
'''Find the upward closure of a vertex.'''
return transitive_closure(vertex, {(b, a) for a, b in edges})
def is_connected(a, b, edges):
'''Check if there is a path from a to b.'''
return downward_closure(a, edges).get(b, False)
def is_separated(a, b, edges, disconnections):
'''Check that a and b will remain not connected even if edges are added to
the graph, as long as the vertex pairs listed in disconnections remain
disconnected.'''
for p, p_path in upward_closure(a, edges).items():
for q, q_path in downward_closure(b, edges).items():
if (p, q) in disconnections:
# Should reverse p_path
return p_path, q_path
return False
def find_possible_connections(vertices, edges, disconnections):
'''Find which edges can be added to create new connections, without
connecting any pairs in disconnections.'''
return {(a, b) for a in vertices for b in vertices
if not is_connected(a, b, edges)
if not is_separated(a, b, edges, disconnections)}
def is_redundant_edge(edge, edges):
'''Give alternate path if one exists.'''
return is_connected(*edge, edges - {edge})
def spanning_tree(edges):
for edge in edges:
if is_redundant_edge(edge, edges):
return spanning_tree(edges - {edge})
return edges
def rank_possible_edge(edge, vertices, edges, disconnections):
evaluator = lambda x, y: len(find_possible_connections(vertices, x, y))
exists_rank = evaluator(edges | {edge}, disconnections)
not_exists_rank = evaluator(edges, disconnections | {edge})
return abs(exists_rank) + abs(not_exists_rank)
| '''Reason about a directed graph in which the (non-)existence of some edges
must be inferred by the disconnectedness of certain vertices. Collect (truthy)
evidence for boolean function return values.'''
def transitive_closure_dict(vertices, edges):
'''Find the transitive closure of a dict mapping vertices to their paths.'''
neighbours = {b: vertices[a] + ((a, b),)
for a, b in edges if a in vertices}
if set(neighbours).issubset(set(vertices)):
return vertices
return transitive_closure_dict(dict(neighbours, **vertices), edges)
def transitive_closure(vertex, edges):
closure = transitive_closure_dict({vertex: ()}, edges)
# Use a (truthy) loop instead of an empty path
closure[vertex] = (vertex, vertex)
return closure
def downward_closure(vertex, edges):
'''Find the downward closure of a vertex.'''
return transitive_closure(vertex, edges)
def upward_closure(vertex, edges):
'''Find the upward closure of a vertex.'''
return transitive_closure(vertex, {(b, a) for a, b in edges})
def is_connected(a, b, edges):
    '''Check if there is a path from a to b.'''
    # A missing key means "unreachable" (False); a present key holds a
    # (truthy) path that serves as evidence of the connection.
    reachable = downward_closure(a, edges)
    return reachable.get(b, False)
def is_separated(a, b, edges, disconnections):
    '''Check that a and b will remain not connected even if edges are added to
    the graph, as long as the vertex pairs listed in disconnections remain
    disconnected.'''
    # If some ancestor p of a must stay disconnected from some descendant q of
    # b, then linking a to b would create a forbidden p -> q path.  The pair
    # of witnessing paths is returned as (truthy) evidence.
    for p, p_path in upward_closure(a, edges).items():
        for q, q_path in downward_closure(b, edges).items():
            if (p, q) in disconnections:
                # Should reverse p_path
                # NOTE(review): p_path comes from the reversed-edge closure,
                # so its pairs read backwards relative to the real graph.
                return p_path, q_path
    return False
def find_possible_connections(vertices, edges, disconnections):
    '''Find which edges can be added to create new connections, without
    connecting any pairs in disconnections.'''
    possible = set()
    for a in vertices:
        for b in vertices:
            # Keep pairs that are not linked yet but could legally be linked.
            if is_connected(a, b, edges):
                continue
            if is_separated(a, b, edges, disconnections):
                continue
            possible.add((a, b))
    return possible
def is_redundant_edge(edge, edges):
    '''Give alternate path if one exists.'''
    # Without the edge itself, any remaining path between its endpoints
    # proves the edge redundant.
    remaining = edges - {edge}
    return is_connected(*edge, remaining)
def spanning_tree(edges):
    '''Remove redundant edges one at a time until none remain.'''
    tree = edges
    pruning = True
    while pruning:
        pruning = False
        for edge in tree:
            if is_redundant_edge(edge, tree):
                # Drop this edge and rescan, mirroring the original recursion.
                tree = tree - {edge}
                pruning = True
                break
    return tree
def rank_possible_edge(edge, vertices, edges, disconnections):
    '''Rank *edge* by the number of candidate connections left open in the two
    worlds: one where the edge exists and one where it is forbidden.'''
    exists_rank = len(find_possible_connections(
        vertices, edges | {edge}, disconnections))
    not_exists_rank = len(find_possible_connections(
        vertices, edges, disconnections | {edge}))
    return abs(exists_rank) + abs(not_exists_rank)
| Python | 0.000008 |
7760d75bb5ca38d2c96924e0ea1d65485cdc5c6f | Update version 0.12.2 -> 0.12.3 | dimod/__init__.py | dimod/__init__.py | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# version is used by serialization below so we need it before everything
__version__ = '0.12.3'
from dimod.constrained import *
import dimod.constrained
from dimod.core import *
import dimod.core
from dimod.cyutilities import *
from dimod.reference import *
import dimod.reference
from dimod.roof_duality import fix_variables
from dimod.binary import *
import dimod.binary
from dimod.discrete import *
import dimod.testing
from dimod.converters import *
import dimod.decorators
import dimod.generators
from dimod.exceptions import *
import dimod.exceptions
from dimod.higherorder import make_quadratic, make_quadratic_cqm, reduce_binary_polynomial, poly_energy, poly_energies, BinaryPolynomial
import dimod.higherorder
from dimod.package_info import __version__, __author__, __authoremail__, __description__
from dimod.quadratic import *
import dimod.quadratic
from dimod.traversal import *
from dimod.sampleset import *
from dimod.serialization.format import set_printoptions
import dimod.lp
from dimod.utilities import *
import dimod.utilities
from dimod.vartypes import *
# flags for some global features
REAL_INTERACTIONS = False
| # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# version is used by serialization below so we need it before everything
__version__ = '0.12.2'
from dimod.constrained import *
import dimod.constrained
from dimod.core import *
import dimod.core
from dimod.cyutilities import *
from dimod.reference import *
import dimod.reference
from dimod.roof_duality import fix_variables
from dimod.binary import *
import dimod.binary
from dimod.discrete import *
import dimod.testing
from dimod.converters import *
import dimod.decorators
import dimod.generators
from dimod.exceptions import *
import dimod.exceptions
from dimod.higherorder import make_quadratic, make_quadratic_cqm, reduce_binary_polynomial, poly_energy, poly_energies, BinaryPolynomial
import dimod.higherorder
from dimod.package_info import __version__, __author__, __authoremail__, __description__
from dimod.quadratic import *
import dimod.quadratic
from dimod.traversal import *
from dimod.sampleset import *
from dimod.serialization.format import set_printoptions
import dimod.lp
from dimod.utilities import *
import dimod.utilities
from dimod.vartypes import *
# flags for some global features
REAL_INTERACTIONS = False
| Python | 0.000001 |
8d72c58ac607f75c0a10ca9b79be9da59907cc7a | Update dev setting | src/server/settings.py | src/server/settings.py | """
Django settings for server project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 't9+m%qyni5%=__s8brz#tf#lv^1wy6)zj#m_2re&(_c(!_pixl'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
import sys
TESTING = sys.argv[1:2] == ['test']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_assets',
'south',
'edge',
)
if TESTING:
INSTALLED_APPS += ('django_nose',)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
SOUTH_TESTS_MIGRATE = False
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'server.urls'
WSGI_APPLICATION = 'server.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'sqlite': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
},
'mysql': {
'ENGINE': 'django.db.backends.mysql',
'OPTIONS': { "init_command": "SET storage_engine=INNODB;" },
'HOST': '',
'PORT': '',
'NAME' : 'toolbox_dev',
'USER': 'root',
'PASSWORD': 'password',
'ATOMIC_REQUESTS': True,
}
}
DEFAULT_DB = 'mysql'
DATABASES['default'] = DATABASES[DEFAULT_DB]
if TESTING:
other_dbs = [db for db in DATABASES if db != 'default']
for db in other_dbs:
del DATABASES[db]
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler'
}
},
'loggers': {
#'django.db.backends': { 'level': 'DEBUG', 'handlers': ['console'], },
},
}
# NCBI blast
NCBI_DIR = BASE_DIR+'/../ncbi'
NCBI_BIN_DIR = NCBI_DIR+'/bin'
NCBI_DATA_DIR = NCBI_DIR+'/blastdb'
# Primer3
PRIMER3_DIR = BASE_DIR+'/../primer3'
| """
Django settings for server project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 't9+m%qyni5%=__s8brz#tf#lv^1wy6)zj#m_2re&(_c(!_pixl'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
import sys
TESTING = sys.argv[1:2] == ['test']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_assets',
'south',
'edge',
)
if TESTING:
INSTALLED_APPS += ('django_nose',)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
SOUTH_TESTS_MIGRATE = False
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'server.urls'
WSGI_APPLICATION = 'server.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'sqlite': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
},
'mysql': {
'ENGINE': 'django.db.backends.mysql',
'OPTIONS': { "init_command": "SET storage_engine=INNODB;" },
'HOST': '',
'PORT': '',
'NAME' : 'edge',
'USER': 'root',
'PASSWORD': 'password',
'ATOMIC_REQUESTS': True,
}
}
DEFAULT_DB = 'mysql'
DATABASES['default'] = DATABASES[DEFAULT_DB]
if TESTING:
other_dbs = [db for db in DATABASES if db != 'default']
for db in other_dbs:
del DATABASES[db]
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler'
}
},
'loggers': {
#'django.db.backends': { 'level': 'DEBUG', 'handlers': ['console'], },
},
}
# NCBI blast
NCBI_DIR = BASE_DIR+'/../ncbi'
NCBI_BIN_DIR = NCBI_DIR+'/bin'
NCBI_DATA_DIR = NCBI_DIR+'/blastdb'
# Primer3
PRIMER3_DIR = BASE_DIR+'/../primer3'
| Python | 0 |
4d983708981029f0c0c5d103f8329427ff824b1f | add user output when generating key pair | conda_build/main_sign.py | conda_build/main_sign.py | # (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
import os
import sys
from os.path import isdir, join
try:
from Crypto.PublicKey import RSA
from Crypto import Random
except ImportError:
sys.exit("""\
Error: could not import Crypto (required for "conda sign").
Run the following command:
$ conda install -n root pycrypto
""")
from conda.utils import sha256_file
from conda.signature import KEYS_DIR, sig2ascii, verify
def keygen(name):
    """Generate a 1024-bit RSA key pair and store it under KEYS_DIR.

    The private key is written to ``KEYS_DIR/<name>`` and the public key to
    ``KEYS_DIR/<name>.pub``, each terminated with a newline.
    """
    print("Generating public/private key pair...")
    key = RSA.generate(1024, Random.new().read)
    if not isdir(KEYS_DIR):
        os.makedirs(KEYS_DIR)
    # Write the private and public halves with identical mechanics; only the
    # status message, filename and exported bytes differ.
    targets = (
        ("Storing private key: %s", name, key.exportKey()),
        ("Storing public key : %s", '%s.pub' % name, key.publickey().exportKey()),
    )
    for message, filename, exported in targets:
        path = join(KEYS_DIR, filename)
        print(message % path)
        with open(path, 'wb') as fo:
            fo.write(exported)
            fo.write(b'\n')
def get_default_keyname():
    """Return the first non-``.pub`` filename in KEYS_DIR, or None if absent."""
    if not isdir(KEYS_DIR):
        return None
    private_keys = (fn for fn in os.listdir(KEYS_DIR) if not fn.endswith('.pub'))
    return next(private_keys, None)
def sign(path, key):
    """Return the ASCII form of the RSA signature over the file's SHA-256."""
    digest = sha256_file(path)
    signature = key.sign(digest, '')[0]
    return sig2ascii(signature)
def main():
    """Command-line entry point for signing and verifying conda packages."""
    from optparse import OptionParser
    p = OptionParser(
        usage="usage: %prog [option] [FILE ...]",
        description="tool for signing conda packages")

    p.add_option('-k', '--keygen',
                 action="store",
                 help="generate a public-private "
                      "key pair ~/.conda/keys/<NAME>(.pub)",
                 metavar="NAME")

    p.add_option('-v', '--verify',
                 action="store_true",
                 help="verify FILE(s)")

    opts, args = p.parse_args()

    if opts.keygen:
        if args:
            p.error('no arguments expected for --keygen')
        keygen(opts.keygen)
        return

    if opts.verify:
        for path in args:
            print('%-65s %s' % (path, verify(path)))
        return

    key_name = get_default_keyname()
    if key_name is None:
        # Previously a missing key crashed with TypeError in join(); fail
        # with a clear, actionable message instead.
        sys.exit("Error: no private key found in %s; "
                 "run with --keygen NAME first" % KEYS_DIR)
    print("Using private key '%s' for signing." % key_name)
    key = RSA.importKey(open(join(KEYS_DIR, key_name)).read())
    for path in args:
        print('signing: %s' % path)
        # Signature file format: "<key name> <ascii signature>\n"
        with open('%s.sig' % path, 'w') as fo:
            fo.write('%s ' % key_name)
            fo.write(sign(path, key))
            fo.write('\n')
fo.write('\n')
if __name__ == '__main__':
main()
| # (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
import os
import sys
from os.path import isdir, join
try:
from Crypto.PublicKey import RSA
from Crypto import Random
except ImportError:
sys.exit("""\
Error: could not import Crypto (required for "conda sign").
Run the following command:
$ conda install -n root pycrypto
""")
from conda.utils import sha256_file
from conda.signature import KEYS_DIR, sig2ascii, verify
def keygen(name):
random_generator = Random.new().read
key = RSA.generate(1024, random_generator)
if not isdir(KEYS_DIR):
os.makedirs(KEYS_DIR)
with open(join(KEYS_DIR, name), 'wb') as fo:
fo.write(key.exportKey())
fo.write(b'\n')
with open(join(KEYS_DIR, '%s.pub' % name), 'wb') as fo:
fo.write(key.publickey().exportKey())
fo.write(b'\n')
def get_default_keyname():
if isdir(KEYS_DIR):
for fn in os.listdir(KEYS_DIR):
if not fn.endswith('.pub'):
return fn
return None
def sign(path, key):
return sig2ascii(key.sign(sha256_file(path), '')[0])
def main():
from optparse import OptionParser
p = OptionParser(
usage="usage: %prog [option] [FILE ...]",
description="tool for signing conda packages")
p.add_option('-k', '--keygen',
action="store",
help="generate a public-private "
"key pair ~/.conda/keys/<NAME>(.pub)",
metavar="NAME")
p.add_option('-v', '--verify',
action="store_true",
help="verify FILE(s)")
opts, args = p.parse_args()
if opts.keygen:
if args:
p.error('no arguments expected for --keygen')
keygen(opts.keygen)
return
if opts.verify:
for path in args:
print('%-65s %s' % (path, verify(path)))
return
key_name = get_default_keyname()
print("Using private key '%s' for signing." % key_name)
key = RSA.importKey(open(join(KEYS_DIR, key_name)).read())
for path in args:
print('signing: %s' % path)
with open('%s.sig' % path, 'w') as fo:
fo.write('%s ' % key_name)
fo.write(sign(path, key))
fo.write('\n')
if __name__ == '__main__':
main()
| Python | 0.000004 |
00b7cf15877dc17d07d591c893671decb6b869e2 | Enable touch events for smoothness tests. | tools/perf/measurements/smoothness.py | tools/perf/measurements/smoothness.py | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from metrics import power
from measurements import smoothness_controller
from telemetry.page import page_measurement
class Smoothness(page_measurement.PageMeasurement):
    """Measures rendering smoothness and power usage while a page's
    'RunSmoothness' actions execute."""

    def __init__(self):
        super(Smoothness, self).__init__('RunSmoothness')
        # Created fresh per page in WillRunActions.
        self._power_metric = None
        self._smoothness_controller = None

    def CustomizeBrowserOptions(self, options):
        # GPU benchmarking hooks are needed for smoothness instrumentation.
        options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
        # Enable touch events so touch-driven page actions can be exercised.
        options.AppendExtraBrowserArgs('--touch-events=enabled')
        power.PowerMetric.CustomizeBrowserOptions(options)

    def WillRunActions(self, page, tab):
        # Begin collecting power and smoothness data just before actions run.
        self._power_metric = power.PowerMetric()
        self._power_metric.Start(page, tab)
        self._smoothness_controller = smoothness_controller.SmoothnessController()
        self._smoothness_controller.Start(page, tab)

    def DidRunActions(self, page, tab):
        self._power_metric.Stop(page, tab)
        self._smoothness_controller.Stop(tab)

    def MeasurePage(self, page, tab, results):
        self._power_metric.AddResults(tab, results)
        self._smoothness_controller.AddResults(tab, results)

    def CleanUpAfterPage(self, _, tab):
        self._smoothness_controller.CleanUp(tab)
| # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from metrics import power
from measurements import smoothness_controller
from telemetry.page import page_measurement
class Smoothness(page_measurement.PageMeasurement):
def __init__(self):
super(Smoothness, self).__init__('RunSmoothness')
self._power_metric = None
self._smoothness_controller = None
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
power.PowerMetric.CustomizeBrowserOptions(options)
def WillRunActions(self, page, tab):
self._power_metric = power.PowerMetric()
self._power_metric.Start(page, tab)
self._smoothness_controller = smoothness_controller.SmoothnessController()
self._smoothness_controller.Start(page, tab)
def DidRunActions(self, page, tab):
self._power_metric.Stop(page, tab)
self._smoothness_controller.Stop(tab)
def MeasurePage(self, page, tab, results):
self._power_metric.AddResults(tab, results)
self._smoothness_controller.AddResults(tab, results)
def CleanUpAfterPage(self, _, tab):
self._smoothness_controller.CleanUp(tab)
| Python | 0.00001 |
afb37f495f32ab03ea1a2b2dff566ae3d20eff5b | fix exception raising in svg2pdf | IPython/nbconvert/transformers/svg2pdf.py | IPython/nbconvert/transformers/svg2pdf.py | """Module containing a transformer that converts outputs in the notebook from
one format to another.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import base64
import io
import os
import sys
import subprocess
from IPython.utils.tempdir import TemporaryDirectory
from IPython.utils.traitlets import Unicode
from .convertfigures import ConvertFiguresTransformer
#-----------------------------------------------------------------------------
# Constants
#-----------------------------------------------------------------------------
INKSCAPE_COMMAND = 'inkscape --without-gui --export-pdf="{to_filename}" "{from_filename}"'
INKSCAPE_OSX_COMMAND = '/Applications/Inkscape.app/Contents/Resources/bin/inkscape --without-gui --export-pdf="{to_filename}" "{from_filename}"'
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class SVG2PDFTransformer(ConvertFiguresTransformer):
    """
    Converts all of the outputs in a notebook from SVG to PDF.
    """

    from_format = Unicode('svg', config=True, help='Format the converter accepts')
    to_format = Unicode('pdf', config=False, help='Format the converter writes')

    command = Unicode(config=True,
        help="""The command to use for converting SVG to PDF
        This string is a template, which will be formatted with the keys
        to_filename and from_filename.
        The conversion call must read the SVG from {from_filename},
        and write a PDF to {to_filename}.
        """)

    def _command_default(self):
        # Pick a platform-appropriate Inkscape invocation.
        if sys.platform == "darwin":
            return INKSCAPE_OSX_COMMAND
        elif sys.platform == "win32":
            # windows not yet supported
            return ""
        else:
            return INKSCAPE_COMMAND

    def convert_figure(self, data_format, data):
        """
        Convert a single SVG figure to PDF.  Returns converted data.

        Raises TypeError if the conversion produced no output file.
        """
        # Work in a temporary directory so intermediates are cleaned up.
        with TemporaryDirectory() as tmpdir:
            # Write fig to a temp file; SVG data is unicode text.
            input_filename = os.path.join(tmpdir, 'figure.' + data_format)
            with io.open(input_filename, 'w', encoding='utf8') as f:
                f.write(data)

            # Call the conversion application.  Shell=True is okay since the
            # command template comes from trusted configuration.
            output_filename = os.path.join(tmpdir, 'figure.pdf')
            shell = self.command.format(from_filename=input_filename,
                                        to_filename=output_filename)
            subprocess.call(shell, shell=True)

            # Read output from drive; PDF is a notebook-supported binary
            # data type, so base64 encode it for the caller.
            if os.path.isfile(output_filename):
                with open(output_filename, 'rb') as f:
                    return base64.encodestring(f.read())
            else:
                # Fixed: the message previously said "png" although this
                # transformer produces PDF.
                raise TypeError("Inkscape svg to pdf conversion failed")
| """Module containing a transformer that converts outputs in the notebook from
one format to another.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import base64
import io
import os
import sys
import subprocess
from IPython.utils.tempdir import TemporaryDirectory
from IPython.utils.traitlets import Unicode
from .convertfigures import ConvertFiguresTransformer
#-----------------------------------------------------------------------------
# Constants
#-----------------------------------------------------------------------------
INKSCAPE_COMMAND = 'inkscape --without-gui --export-pdf="{to_filename}" "{from_filename}"'
INKSCAPE_OSX_COMMAND = '/Applications/Inkscape.app/Contents/Resources/bin/inkscape --without-gui --export-pdf="{to_filename}" "{from_filename}"'
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class SVG2PDFTransformer(ConvertFiguresTransformer):
    """
    Converts all of the outputs in a notebook from SVG to PDF.
    """

    from_format = Unicode('svg', config=True, help='Format the converter accepts')
    to_format = Unicode('pdf', config=False, help='Format the converter writes')

    command = Unicode(config=True,
        help="""The command to use for converting SVG to PDF
        This string is a template, which will be formatted with the keys
        to_filename and from_filename.
        The conversion call must read the SVG from {from_filename},
        and write a PDF to {to_filename}.
        """)

    def _command_default(self):
        # Pick a platform-appropriate Inkscape invocation.
        if sys.platform == "darwin":
            return INKSCAPE_OSX_COMMAND
        elif sys.platform == "win32":
            # windows not yet supported
            return ""
        else:
            return INKSCAPE_COMMAND

    def convert_figure(self, data_format, data):
        """
        Convert a single SVG figure to PDF.  Returns converted data.

        Raises TypeError if the conversion produced no output file.
        """
        # Work in a temporary directory so intermediates are cleaned up.
        with TemporaryDirectory() as tmpdir:
            # Write fig to a temp file; SVG data is unicode text.
            input_filename = os.path.join(tmpdir, 'figure.' + data_format)
            with io.open(input_filename, 'w', encoding='utf8') as f:
                f.write(data)

            # Call the conversion application.  Shell=True is okay since the
            # command template comes from trusted configuration.
            output_filename = os.path.join(tmpdir, 'figure.pdf')
            shell = self.command.format(from_filename=input_filename,
                                        to_filename=output_filename)
            subprocess.call(shell, shell=True)

            # Read output from drive; PDF is a notebook-supported binary
            # data type, so base64 encode it for the caller.
            if os.path.isfile(output_filename):
                with open(output_filename, 'rb') as f:
                    return base64.encodestring(f.read())
            else:
                # Fixed: the error was previously *returned* instead of
                # raised, so conversion failures were silently delivered as
                # an exception object; also corrected "png" -> "pdf".
                raise TypeError("Inkscape svg to pdf conversion failed")
| Python | 0.000001 |
cf2004cec6e84cbec213f9e70dd8245327af541d | Update api.py | example/services/api.py | example/services/api.py | # external imports
from nautilus import APIGateway
from graphene import Schema, ObjectType, String, Mutation, Boolean
from nautilus.api import ServiceObjectType
from nautilus.api.fields import Connection
from nautilus.network import dispatchAction
from nautilus.conventions import getCRUDAction
# local imports
from .recipes import service as RecipeService
from .ingredients import service as IngredientService
# create the schema based on the query object
schema = Schema(name='Product Schema')
## define the schema that encapsulates the cloud
class Recipe(ServiceObjectType):
    """GraphQL object type backed by the recipe service."""

    class Meta:
        service = RecipeService

    # connections are resolved/joined using the appropriate connection service
    # you can avoid circular/undefined references using strings - nautilus will look
    # for the corresponding ServiceObjectType
    ingredients = Connection('Ingredient', description = 'The ingredients in this recipe.')
class Ingredient(ServiceObjectType):
    """GraphQL object type backed by the ingredient service."""

    class Meta:
        service = IngredientService

    # Reverse side of Recipe.ingredients.
    recipes = Connection(Recipe, description = 'The recipes with this ingredient')
# add the query to the schema
# NOTE(review): `Query` is never defined in this module, so this assignment
# raises NameError at import time -- a Query ObjectType must be defined above.
schema.query = Query
class AddRecipeMutation(Mutation):
    """Mutation that dispatches a create-recipe action onto the queue."""

    class Input:
        name = String()

    success = Boolean(description="Wether or not the dispatch was successful")

    @classmethod
    def mutate(cls, instance, args, info):
        """ perform the mutation """
        # send the new production action into the queue
        # NOTE(review): nothing is returned here, so the `success` field is
        # never populated in the response -- confirm whether a return value
        # (e.g. an instance with success=True) is required.
        dispatchAction({
            'type': getCRUDAction('create', 'recipe'),
            'payload': args
        })
class Mutation(ObjectType):
    """ the list of mutations that the api supports """
    # NOTE(review): `Field` is not imported in this module; it needs
    # `from graphene import Field` (or similar) for this line to work.
    addRecipe = Field(AddRecipeMutation)
sceham.mutation = Mutation
# create a nautilus service with just the schema
service = APIGateway(schema=schema)
| # external imports
from nautilus import APIGateway
from graphene import Schema, ObjectType, String, Mutation, Boolean
from nautilus.api import ServiceObjectType
from nautilus.api.fields import Connection
from nautilus.network import dispatchAction
from nautilus.conventions import getCRUDAction
# local imports
from .recipes import service as RecipeService
from .ingredients import service as IngredientService
# create the schema based on the query object
schema = Schema(name='Product Schema')
## define the schema that encapsulates the cloud
class Recipe(ServiceObjectType):
class Meta:
service = RecipeService
# connections are resolved/joined using the appropriate connection service
# you can avoid circular/undefined references using strings - nautilus will look
# for the corresponding ServiceObjectType
ingredients = Connection('Ingredient', description = 'The ingredients in this recipe.')
class Ingredient(ServiceObjectType):
class Meta:
service = IngredientService
recipes = Connection(Recipe, description = 'The recipes with this ingredient')
# add the query to the schema
schema.query = Query
# third party imports
class AddRecipeMutation(Mutation):
class Input:
name = String()
success = Boolean(description="Wether or not the dispatch was successful")
@classmethod
def mutate(cls, instance, args, info):
""" perform the mutation """
# send the new production action into the queue
dispatchAction({
'type': getCRUDAction('create', 'recipe'),
'payload': args
})
class Mutation(ObjectType):
""" the list of mutations that the api supports """
addRecipe = Field(AddRecipeMutation)
sceham.mutation = Mutation
# create a nautilus service with just the schema
service = APIGateway(schema=schema)
| Python | 0.000001 |
c4b83c9554ca0f501ac42c63a53394ff8b90c2af | bump version to 20190807 | acbs/__init__.py | acbs/__init__.py | __version__ = '20190807'
| __version__ = '20181007'
| Python | 0 |
ec831928b9e065b523eae2621f51091a8e332c71 | Be more verbose | pissuu/api.py | pissuu/api.py | import requests
import md5
import json
class IssuuAPI(object):
    """Minimal client for the Issuu HTTP API.

    Requests are authenticated by an MD5-based signature over the sorted
    string parameters, computed in :meth:`_sign`.

    NOTE(review): this is Python 2 code (``import md5``, ``unicode``,
    ``StandardError``); it will not run unmodified on Python 3.
    """

    def __init__(self, key, secret):
        """
        Initialize an API client with the given ``key`` and ``secret``.
        """
        self.key = key
        self.secret = secret

    def add_bookmark(self):
        """
        Add a bookmark.
        """
        raise NotImplementedError()

    def list_bookmarks(self):
        """
        List bookmarks.
        """
        raise NotImplementedError()

    def update_bookmark(self):
        """
        Update a bookmark.
        """
        raise NotImplementedError()

    def delete_bookmark(self, names):
        """
        Delete a bookmark.
        """
        raise NotImplementedError()

    def list_documents(self):
        """
        List documents for this user.
        """
        return self._query(
            url = 'http://api.issuu.com/1_0',
            action = 'issuu.documents.list'
        )

    def upload_document(self, file, title=''):
        """
        Upload the given ``file``.
        """
        response = self._query(
            url = 'http://upload.issuu.com/1_0',
            action = 'issuu.document.upload',
            data = {
                'file': file,
                'title': title
            }
        )
        # The upload endpoint wraps the new document's id inside its payload.
        return response['_content']['document']['documentId']

    def update_document(self):
        """
        Update a document.
        """
        raise NotImplementedError()

    def delete_document(self, id):
        """
        Delete a document.

        :param id: A string describing a document ID.
        """
        self.delete_documents([id])

    def delete_documents(self, ids):
        """
        Delete the documents with the given ``ids``.

        :param ids: A list of strings describing document IDs.
        """
        self._query(
            url = 'http://api.issuu.com/1_0',
            action = 'issuu.document.delete',
            data = {
                'names': ','.join(ids)
            }
        )

    def add_folder(self):
        """
        Create a folder.
        """
        raise NotImplementedError()

    def list_folders(self):
        """
        List folders.
        """
        raise NotImplementedError()

    def update_folder(self):
        """
        Update a folder.
        """
        raise NotImplementedError()

    def delete_folder(self):
        """
        Delete a folder.
        """
        raise NotImplementedError()

    def _query(self, url, action, data=None):
        """
        Low-level access to the Issuu API.
        """
        if not data:
            data = {}
        # Every request carries the API key, response format and action name,
        # plus a signature computed over the string-valued parameters.
        data.update({
            'apiKey': self.key,
            'format': 'json',
            'action': action
        })
        data['signature'] = self._sign(data)
        # File-like values are split out so requests sends them as multipart
        # uploads rather than plain form fields.
        files = {}
        for key in data:
            if hasattr(data[key], 'read'):
                files[key] = data[key]
        for key in files:
            data.pop(key)
        response = requests.post(
            url = url,
            data = data,
            files = files
        )
        try:
            data = json.loads(response.content)['rsp']
        except ValueError:
            raise self.Error('API response could not be parsed as JSON: %s' % response.content)
        if data['stat'] == 'fail':
            raise self.Error(data['_content']['error']['message'])
        else:
            return data

    def _sign(self, data):
        """
        Create a signature of the given ``data``.
        """
        # The signature is MD5(secret + key1 + value1 + key2 + value2 ...)
        # over the string-valued parameters, in sorted key order.
        signature = self.secret
        data.update({
            'apiKey': self.key
        })
        keys = data.keys()
        for key in sorted(keys):
            if isinstance(data[key], (str, unicode)):
                signature += key + data[key]
        return md5.new(signature).hexdigest()

    class Error(StandardError):
        pass
| import requests
import md5
import json
class IssuuAPI(object):
def __init__(self, key, secret):
"""
Initialize an API client with the given ``key`` and ``secret``.
"""
self.key = key
self.secret = secret
def add_bookmark(self):
"""
Add a bookmark.
"""
raise NotImplementedError()
def list_bookmarks(self):
"""
List bookmarks.
"""
raise NotImplementedError()
def update_bookmark(self):
"""
Update a bookmark.
"""
raise NotImplementedError()
def delete_bookmark(self, names):
"""
Delete a bookmark.
"""
raise NotImplementedError()
def list_documents(self):
"""
List documents for this user.
"""
return self._query(
url = 'http://api.issuu.com/1_0',
action = 'issuu.documents.list'
)
def upload_document(self, file, title=''):
"""
Upload the given ``file``.
"""
response = self._query(
url = 'http://upload.issuu.com/1_0',
action = 'issuu.document.upload',
data = {
'file': file,
'title': title
}
)
return response['_content']['document']['documentId']
def update_document(self):
"""
Update a document.
"""
raise NotImplementedError()
def delete_document(self, id):
"""
Delete a document.
:param id: A string describing a document ID.
"""
self.delete_documents([id])
def delete_documents(self, ids):
"""
Delete the documents with the given ``ids``.
:param ids: A list of strings describing document IDs.
"""
self._query(
url = 'http://api.issuu.com/1_0',
action = 'issuu.document.delete',
data = {
'names': ','.join(ids)
}
)
def add_folder(self):
"""
Create a folder.
"""
raise NotImplementedError()
def list_folders(self):
"""
List folders.
"""
raise NotImplementedError()
def update_folder(self):
"""
Update a folder.
"""
raise NotImplementedError()
def delete_folder(self):
"""
Delete a folder.
"""
raise NotImplementedError()
def _query(self, url, action, data=None):
"""
Low-level access to the Issuu API.
"""
if not data:
data = {}
data.update({
'apiKey': self.key,
'format': 'json',
'action': action
})
data['signature'] = self._sign(data)
files = {}
for key in data:
if hasattr(data[key], 'read'):
files[key] = data[key]
for key in files:
data.pop(key)
response = requests.post(
url = url,
data = data,
files = files
)
try:
data = json.loads(response.content)['rsp']
except ValueError:
raise self.Error('API response could not be parsed as JSON')
if data['stat'] == 'fail':
raise self.Error(data['_content']['error']['message'])
else:
return data
def _sign(self, data):
"""
Create a signature of the given ``data``.
"""
signature = self.secret
data.update({
'apiKey': self.key
})
keys = data.keys()
for key in sorted(keys):
if isinstance(data[key], (str, unicode)):
signature += key + data[key]
return md5.new(signature).hexdigest()
class Error(StandardError):
pass
| Python | 0.999847 |
2b20e803733db09ad4643be00b2af11ecea1eeb8 | Increase version to 0.11.0 (#394) | opsdroid/const.py | opsdroid/const.py | """Constants used by OpsDroid."""
import os
__version__ = "0.11.0"
DEFAULT_GIT_URL = "https://github.com/opsdroid/"
MODULES_DIRECTORY = "opsdroid-modules"
DEFAULT_ROOT_PATH = os.path.expanduser("~/.opsdroid")
DEFAULT_LOG_FILENAME = os.path.join(DEFAULT_ROOT_PATH, 'output.log')
DEFAULT_MODULES_PATH = os.path.join(DEFAULT_ROOT_PATH, "modules")
DEFAULT_MODULE_DEPS_PATH = os.path.join(DEFAULT_ROOT_PATH, "site-packages")
DEFAULT_CONFIG_PATH = os.path.join(DEFAULT_ROOT_PATH, "configuration.yaml")
DEFAULT_MODULE_BRANCH = "master"
EXAMPLE_CONFIG_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"configuration/example_configuration.yaml")
REGEX_MAX_SCORE = 0.6
RASANLU_DEFAULT_URL = "http://localhost:5000"
RASANLU_DEFAULT_PROJECT = "opsdroid"
| """Constants used by OpsDroid."""
import os
__version__ = "0.10.0"
DEFAULT_GIT_URL = "https://github.com/opsdroid/"
MODULES_DIRECTORY = "opsdroid-modules"
DEFAULT_ROOT_PATH = os.path.expanduser("~/.opsdroid")
DEFAULT_LOG_FILENAME = os.path.join(DEFAULT_ROOT_PATH, 'output.log')
DEFAULT_MODULES_PATH = os.path.join(DEFAULT_ROOT_PATH, "modules")
DEFAULT_MODULE_DEPS_PATH = os.path.join(DEFAULT_ROOT_PATH, "site-packages")
DEFAULT_CONFIG_PATH = os.path.join(DEFAULT_ROOT_PATH, "configuration.yaml")
DEFAULT_MODULE_BRANCH = "master"
EXAMPLE_CONFIG_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"configuration/example_configuration.yaml")
REGEX_MAX_SCORE = 0.6
RASANLU_DEFAULT_URL = "http://localhost:5000"
RASANLU_DEFAULT_PROJECT = "opsdroid"
| Python | 0 |
5f43ac2dbca1caba21b2d6f4afbc798323a0d79f | Clear memory more actively | osmhm/__init__.py | osmhm/__init__.py | import fetch
import filters
import inserts
import tables
import config
import send_notification
def run(time_type='hour', history=False, suspicious=False, monitor=True,
notification=False, notifier=send_notification.send_mail):
"""
"""
import osmhm
import osmdt
import datetime
import time
while True:
sequence = osmhm.fetch.fetch_last_read()
if not sequence:
osmhm.fetch.fetch_next(time_type=time_type, reset=True)
sequence = osmhm.fetch.fetch_last_read()
if sequence['read_flag'] is False:
print "Processing sequence %s." % (sequence['sequencenumber'])
count = 0
while True:
try:
count += 1
data_stream = osmdt.fetch(sequence['sequencenumber'], time=time_type)
break
except:
if count == 5:
msg = 'Current state file not retrievable after five times.'
raise Exception(msg)
print "File not reachable; waiting 60 more seconds..."
time.sleep(60)
data_object = osmdt.process(data_stream)
del data_stream
changesets = osmdt.extract_changesets(data_object)
objects = osmdt.extract_objects(data_object)
users = osmdt.extract_users(data_object)
del data_object
if history:
osmhm.inserts.insert_all_changesets(changesets)
if suspicious:
osmhm.filters.suspicious_filter(changesets)
if monitor:
osmhm.filters.object_filter(objects, notification=notification, notifier=notifier)
osmhm.filters.user_filter(changesets, notification=notification, notifier=notifier)
#osmhm.filters.user_object_filter(objects, notification=notification, notifier=notifier) # not implemented yet
osmhm.filters.key_filter(objects, notification=notification, notifier=notifier)
del changesets, objects, users
osmhm.inserts.insert_file_read()
print "Finished processing %s." % (sequence['sequencenumber'])
if sequence['timetype'] == 'minute':
delta_time = 1
extra_time = 10
elif sequence['timetype'] == 'hour':
delta_time = 60
extra_time = 120
elif sequence['timetype'] == 'day':
delta_time = 1440
extra_time = 300
next_time = datetime.datetime.strptime(sequence['timestamp'],
"%Y-%m-%dT%H:%M:%SZ") + datetime.timedelta(minutes=delta_time)
if datetime.datetime.utcnow() < next_time:
sleep_time = (next_time - datetime.datetime.utcnow()).seconds + delta_time
print "Waiting %2.1f seconds for the next file." % (sleep_time)
else:
sleep_time = 1
time.sleep(sleep_time)
count = 0
while True:
try:
count += 1
osmhm.fetch.fetch_next(sequence['sequencenumber'], time_type=time_type)
break
except:
if count == 5:
msg = 'New state file not retrievable after five times.'
raise Exception(msg)
print "Waiting %2.1f more seconds..." % (extra_time)
time.sleep(extra_time)
| import fetch
import filters
import inserts
import tables
import config
import send_notification
def run(time_type='hour', history=False, suspicious=False, monitor=True,
notification=False, notifier=send_notification.send_mail):
"""
"""
import osmhm
import osmdt
import datetime
import time
while True:
sequence = osmhm.fetch.fetch_last_read()
if not sequence:
osmhm.fetch.fetch_next(time_type=time_type, reset=True)
sequence = osmhm.fetch.fetch_last_read()
if sequence['read_flag'] is False:
print "Processing sequence %s." % (sequence['sequencenumber'])
count = 0
while True:
try:
count += 1
data_stream = osmdt.fetch(sequence['sequencenumber'], time=time_type)
break
except:
if count == 5:
msg = 'Current state file not retrievable after five times.'
raise Exception(msg)
print "File not reachable; waiting 60 more seconds..."
time.sleep(60)
data_object = osmdt.process(data_stream)
changesets = osmdt.extract_changesets(data_object)
objects = osmdt.extract_objects(data_object)
users = osmdt.extract_users(data_object)
if history:
osmhm.inserts.insert_all_changesets(changesets)
if suspicious:
osmhm.filters.suspicious_filter(changesets)
if monitor:
osmhm.filters.object_filter(objects, notification=notification, notifier=notifier)
osmhm.filters.user_filter(changesets, notification=notification, notifier=notifier)
#osmhm.filters.user_object_filter(objects, notification=notification, notifier=notifier) # not implemented yet
osmhm.filters.key_filter(objects, notification=notification, notifier=notifier)
osmhm.inserts.insert_file_read()
print "Finished processing %s." % (sequence['sequencenumber'])
if sequence['timetype'] == 'minute':
delta_time = 1
extra_time = 10
elif sequence['timetype'] == 'hour':
delta_time = 60
extra_time = 120
elif sequence['timetype'] == 'day':
delta_time = 1440
extra_time = 300
next_time = datetime.datetime.strptime(sequence['timestamp'],
"%Y-%m-%dT%H:%M:%SZ") + datetime.timedelta(minutes=delta_time)
if datetime.datetime.utcnow() < next_time:
sleep_time = (next_time - datetime.datetime.utcnow()).seconds + delta_time
print "Waiting %2.1f seconds for the next file." % (sleep_time)
else:
sleep_time = 1
time.sleep(sleep_time)
count = 0
while True:
try:
count += 1
osmhm.fetch.fetch_next(sequence['sequencenumber'], time_type=time_type)
break
except:
if count == 5:
msg = 'New state file not retrievable after five times.'
raise Exception(msg)
print "Waiting %2.1f more seconds..." % (extra_time)
time.sleep(extra_time)
| Python | 0 |
8f4f1e8cc45daa8cf49f050200ce17a48f008e5a | Fix process entity migration | resolwe/flow/migrations/0023_process_entity_2.py | resolwe/flow/migrations/0023_process_entity_2.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-10-01 03:15
from __future__ import unicode_literals
from django.db import migrations
def migrate_flow_collection(apps, schema_editor):
"""Migrate 'flow_collection' field to 'entity_type'."""
Process = apps.get_model('flow', 'Process')
DescriptorSchema = apps.get_model('flow', 'DescriptorSchema')
for process in Process.objects.all():
process.entity_type = process.flow_collection
process.entity_descriptor_schema = process.flow_collection
if (process.entity_descriptor_schema is not None and
not DescriptorSchema.objects.filter(slug=process.entity_descriptor_schema).exists()):
raise LookupError(
"Descriptow schema '{}' referenced in 'entity_descriptor_schema' not "
"found.".format(process.entity_descriptor_schema)
)
process.save()
class Migration(migrations.Migration):
dependencies = [
('flow', '0022_process_entity_1'),
]
operations = [
migrations.RunPython(migrate_flow_collection)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-10-01 03:15
from __future__ import unicode_literals
from django.db import migrations
def migrate_flow_collection(apps, schema_editor):
"""Migrate 'flow_collection' field to 'entity_type'."""
Process = apps.get_model('flow', 'Process')
DescriptorSchema = apps.get_model('flow', 'DescriptorSchema')
for process in Process.objects.all():
process.entity_type = process.flow_collection
process.entity_descriptor_schema = process.flow_collection
if not DescriptorSchema.objects.filter(slug=process.entity_descriptor_schema).exists():
raise LookupError(
"Descriptow schema '{}' referenced in 'entity_descriptor_schema' not "
"found.".format(process.entity_descriptor_schema)
)
process.save()
class Migration(migrations.Migration):
dependencies = [
('flow', '0022_process_entity_1'),
]
operations = [
migrations.RunPython(migrate_flow_collection)
]
| Python | 0.000006 |
21b453946bfa35c7730d5ab15e62b48d299170ed | Update password loading test | osfclient/tests/test_listing.py | osfclient/tests/test_listing.py | """Test `osf ls` command"""
from unittest import mock
from unittest.mock import patch, MagicMock, PropertyMock, mock_open
from osfclient import OSF
from osfclient.cli import list_
from osfclient.tests.mocks import MockProject
@patch('osfclient.cli.OSF')
def test_anonymous_doesnt_use_password(MockOSF):
args = MagicMock()
username = PropertyMock(return_value=None)
type(args).username = username
list_(args)
MockOSF.assert_called_once_with(username=None, password=None)
@patch('osfclient.cli.OSF')
def test_username_password(MockOSF):
args = MagicMock()
username = PropertyMock(return_value='joe@example.com')
type(args).username = username
def simple_getenv(key):
if key == 'OSF_PASSWORD':
return 'secret'
with patch('osfclient.cli.os.getenv',
side_effect=simple_getenv) as mock_getenv:
list_(args)
MockOSF.assert_called_once_with(username='joe@example.com',
password='secret')
mock_getenv.assert_called_with('OSF_PASSWORD')
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_get_project(OSF_project):
args = MagicMock()
username = PropertyMock(return_value=None)
type(args).username = username
project = PropertyMock(return_value='1234')
type(args).project = project
output = PropertyMock(return_value=None)
type(args).output = output
list_(args)
OSF_project.assert_called_once_with('1234')
# check that the project and the files have been printed
for store in OSF_project.return_value.storages:
assert store._name_mock.called
for f in store.files:
assert f._path_mock.called
| """Test `osf ls` command"""
from unittest import mock
from unittest.mock import patch, MagicMock, PropertyMock, mock_open
from osfclient import OSF
from osfclient.cli import list_
from osfclient.tests.mocks import MockProject
@patch('osfclient.cli.OSF')
def test_anonymous_doesnt_use_password(MockOSF):
args = MagicMock()
username = PropertyMock(return_value=None)
type(args).username = username
list_(args)
MockOSF.assert_called_once_with(username=None, password=None)
@patch('osfclient.cli.OSF')
def test_username_password(MockOSF):
args = MagicMock()
username = PropertyMock(return_value='joe@example.com')
type(args).username = username
mock_open_func = mock_open(read_data="secret")
with patch('osfclient.cli.open', mock_open_func, create=True):
list_(args)
MockOSF.assert_called_once_with(username='joe@example.com',
password='secret')
assert mock_open_func.called
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_get_project(OSF_project):
args = MagicMock()
username = PropertyMock(return_value=None)
type(args).username = username
project = PropertyMock(return_value='1234')
type(args).project = project
output = PropertyMock(return_value=None)
type(args).output = output
list_(args)
OSF_project.assert_called_once_with('1234')
# check that the project and the files have been printed
for store in OSF_project.return_value.storages:
assert store._name_mock.called
for f in store.files:
assert f._path_mock.called
| Python | 0 |
e9060c166987a18aa9faf3b790b80135b319ecca | Update example.py | libs/python/example.py | libs/python/example.py | #!/usr/bin/env python
import postscriptbarcode
c=postscriptbarcode.BWIPP("../../build/monolithic_package/barcode.ps")
c.get_version()
| #!/usr/bin/env python
import postscriptbarcode
c=postscriptbarcode.BWIPP("../barcode.ps")
c.get_version()
| Python | 0.000001 |
068a94a455448b3fc2ee552616658d9f980104ea | Add comment. | numpy/distutils/command/bdist_rpm.py | numpy/distutils/command/bdist_rpm.py | import os
import sys
from distutils.command.bdist_rpm import bdist_rpm as old_bdist_rpm
class bdist_rpm(old_bdist_rpm):
def _make_spec_file(self):
spec_file = old_bdist_rpm._make_spec_file(self)
# Replace hardcoded setup.py script name
# with the real setup script name.
setup_py = os.path.basename(sys.argv[0])
if setup_py == 'setup.py':
return spec_file
new_spec_file = []
for line in spec_file:
line = line.replace('setup.py',setup_py)
new_spec_file.append(line)
return new_spec_file
| import os
import sys
from distutils.command.bdist_rpm import bdist_rpm as old_bdist_rpm
class bdist_rpm(old_bdist_rpm):
def _make_spec_file(self):
spec_file = old_bdist_rpm._make_spec_file(self)
setup_py = os.path.basename(sys.argv[0])
if setup_py == 'setup.py':
return spec_file
new_spec_file = []
for line in spec_file:
line = line.replace('setup.py',setup_py)
new_spec_file.append(line)
return new_spec_file
| Python | 0.000001 |
6af3eacec303abfe6f260581687a38d89f7b7474 | Fix wavelength issue for QE65000 | oceanoptics/spectrometers/QE65xxx.py | oceanoptics/spectrometers/QE65xxx.py | # tested
# ----------------------------------------------------------
from oceanoptics.base import OceanOpticsBase as _OOBase
from oceanoptics.base import OceanOpticsTEC as _OOTEC
import struct
#----------------------------------------------------------
class _QE65xxx(_OOBase, _OOTEC):
def _set_integration_time(self, time_us):
""" send command 0x02 """
# XXX: The QE65000 requires the time set in Milliseconds!
# This overides the provided function of OOBase
time_ms = int(time_us/1000)
self._usb_send(struct.pack('<BI', 0x02, time_ms))
def _query_status(self):
""" 0xFE query status """
# XXX: The QE65000 also returns the time in Milliseconds!
# This overides the provided function of OOBase
# and pretends to return us
ret = self._usb_query(struct.pack('<B', 0xFE))
data = struct.unpack('<HLBBBBBBBBBB', ret[:])
ret = { 'pixels' : data[0],
'integration_time' : data[1] * 1000, # ms to us
'lamp_enable' : data[2],
'trigger_mode' : data[3],
'acquisition_status' : data[4],
'packets_in_spectrum' : data[5],
'power_down' : data[6],
'packets_in_endpoint' : data[7],
'usb_speed' : data[10] }
return ret
#--------
# tested
#--------
class QE65000(_QE65xxx):
def __init__(self):
super(QE65000, self).__init__('QE65000')
# The QE65000 needs a -10 offset for calculating the wavelengths
# due to some legacy issues...
self._wl = sum( self._wl_factors[i] *
np.arange(-10, self._pixels - 10, dtype=np.float64)**i for i in range(4) )
self.initialize_TEC()
#----------
# untested
#----------
class QE65pro(_QE65xxx):
def __init__(self):
super(QE65pro, self).__init__('QE65pro')
self.initialize_TEC()
| # tested
# ----------------------------------------------------------
from oceanoptics.base import OceanOpticsBase as _OOBase
from oceanoptics.base import OceanOpticsTEC as _OOTEC
import struct
#----------------------------------------------------------
class _QE65xxx(_OOBase, _OOTEC):
def _set_integration_time(self, time_us):
""" send command 0x02 """
# XXX: The QE65000 requires the time set in Milliseconds!
# This overides the provided function of OOBase
time_ms = int(time_us/1000)
self._usb_send(struct.pack('<BI', 0x02, time_ms))
def _query_status(self):
""" 0xFE query status """
# XXX: The QE65000 also returns the time in Milliseconds!
# This overides the provided function of OOBase
# and pretends to return us
ret = self._usb_query(struct.pack('<B', 0xFE))
data = struct.unpack('<HLBBBBBBBBBB', ret[:])
ret = { 'pixels' : data[0],
'integration_time' : data[1] * 1000, # ms to us
'lamp_enable' : data[2],
'trigger_mode' : data[3],
'acquisition_status' : data[4],
'packets_in_spectrum' : data[5],
'power_down' : data[6],
'packets_in_endpoint' : data[7],
'usb_speed' : data[10] }
return ret
#--------
# tested
#--------
class QE65000(_QE65xxx):
def __init__(self):
super(QE65000, self).__init__('QE65000')
self.initialize_TEC()
#----------
# untested
#----------
class QE65pro(_QE65xxx):
def __init__(self):
super(QE65pro, self).__init__('QE65pro')
self.initialize_TEC()
| Python | 0.000001 |
7655ba80da745ef2491a7ef872683620d6328304 | Disable verbose logging by default | designateclient/shell.py | designateclient/shell.py | # Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from cliff.app import App
from cliff.commandmanager import CommandManager
from designateclient.version import version_info as version
class DesignateShell(App):
CONSOLE_MESSAGE_FORMAT = '%(levelname)s: %(message)s'
DEFAULT_VERBOSE_LEVEL = 0
def __init__(self):
super(DesignateShell, self).__init__(
description='Designate Client',
version=version.version_string(),
command_manager=CommandManager('designateclient.cli'),
)
self.log = logging.getLogger(__name__)
def build_option_parser(self, description, version):
parser = super(DesignateShell, self).build_option_parser(
description, version)
parser.add_argument('--os-endpoint',
default=os.environ.get('OS_DNS_ENDPOINT'),
help="Defaults to env[OS_DNS_ENDPOINT]")
parser.add_argument('--os-auth-url',
default=os.environ.get('OS_AUTH_URL'),
help="Defaults to env[OS_AUTH_URL]")
parser.add_argument('--os-username',
default=os.environ.get('OS_USERNAME'),
help="Defaults to env[OS_USERNAME]")
parser.add_argument('--os-password',
default=os.environ.get('OS_PASSWORD'),
help="Defaults to env[OS_PASSWORD]")
parser.add_argument('--os-tenant-id',
default=os.environ.get('OS_TENANT_ID'),
help="Defaults to env[OS_TENANT_ID]")
parser.add_argument('--os-tenant-name',
default=os.environ.get('OS_TENANT_NAME'),
help="Defaults to env[OS_TENANT_NAME]")
parser.add_argument('--os-token',
default=os.environ.get('OS_SERVICE_TOKEN'),
help="Defaults to env[OS_SERVICE_TOKEN]")
parser.add_argument('--os-service-type',
default=os.environ.get('OS_DNS_SERVICE_TYPE',
'dns'),
help=("Defaults to env[OS_DNS_SERVICE_TYPE], or "
"'dns'"))
parser.add_argument('--os-region-name',
default=os.environ.get('OS_REGION_NAME'),
help="Defaults to env[OS_REGION_NAME]")
parser.add_argument('--sudo-tenant-id',
default=os.environ.get('DESIGNATE_SUDO_TENANT_ID'),
help="Defaults to env[DESIGNATE_SUDO_TENANT_ID]")
parser.add_argument('--insecure', action='store_true',
help="Explicitly allow 'insecure' SSL requests")
return parser
| # Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from cliff.app import App
from cliff.commandmanager import CommandManager
from designateclient.version import version_info as version
class DesignateShell(App):
CONSOLE_MESSAGE_FORMAT = '%(levelname)s: %(message)s'
def __init__(self):
super(DesignateShell, self).__init__(
description='Designate Client',
version=version.version_string(),
command_manager=CommandManager('designateclient.cli'),
)
self.log = logging.getLogger(__name__)
def configure_logging(self):
super(DesignateShell, self).configure_logging()
# Set requests logging
requests_logger = logging.getLogger('requests')
if self.options.verbose_level <= 1:
requests_logger.setLevel(logging.WARN)
else:
requests_logger.setLevel(logging.DEBUG)
def build_option_parser(self, description, version):
parser = super(DesignateShell, self).build_option_parser(
description, version)
parser.add_argument('--os-endpoint',
default=os.environ.get('OS_DNS_ENDPOINT'),
help="Defaults to env[OS_DNS_ENDPOINT]")
parser.add_argument('--os-auth-url',
default=os.environ.get('OS_AUTH_URL'),
help="Defaults to env[OS_AUTH_URL]")
parser.add_argument('--os-username',
default=os.environ.get('OS_USERNAME'),
help="Defaults to env[OS_USERNAME]")
parser.add_argument('--os-password',
default=os.environ.get('OS_PASSWORD'),
help="Defaults to env[OS_PASSWORD]")
parser.add_argument('--os-tenant-id',
default=os.environ.get('OS_TENANT_ID'),
help="Defaults to env[OS_TENANT_ID]")
parser.add_argument('--os-tenant-name',
default=os.environ.get('OS_TENANT_NAME'),
help="Defaults to env[OS_TENANT_NAME]")
parser.add_argument('--os-token',
default=os.environ.get('OS_SERVICE_TOKEN'),
help="Defaults to env[OS_SERVICE_TOKEN]")
parser.add_argument('--os-service-type',
default=os.environ.get('OS_DNS_SERVICE_TYPE',
'dns'),
help=("Defaults to env[OS_DNS_SERVICE_TYPE], or "
"'dns'"))
parser.add_argument('--os-region-name',
default=os.environ.get('OS_REGION_NAME'),
help="Defaults to env[OS_REGION_NAME]")
parser.add_argument('--sudo-tenant-id',
default=os.environ.get('DESIGNATE_SUDO_TENANT_ID'),
help="Defaults to env[DESIGNATE_SUDO_TENANT_ID]")
parser.add_argument('--insecure', action='store_true',
help="Explicitly allow 'insecure' SSL requests")
return parser
| Python | 0.000001 |
ba81c1d04a9896f1e24ca43592b93b26047705ef | Clean up command output | openstackclient/compute/v2/server.py | openstackclient/compute/v2/server.py | # Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
"""
Server action implementations
"""
import logging
from cliff.command import Command
from openstackclient.common import utils
def _find_server(cs, server):
"""Get a server by name or ID."""
return utils.find_resource(cs.servers, server)
def _print_server(cs, server):
# By default when searching via name we will do a
# findall(name=blah) and due a REST /details which is not the same
# as a .get() and doesn't get the information about flavors and
# images. This fix it as we redo the call with the id which does a
# .get() to get all informations.
if not 'flavor' in server._info:
server = _find_server(cs, server.id)
networks = server.networks
info = server._info.copy()
for network_label, address_list in networks.items():
info['%s network' % network_label] = ', '.join(address_list)
flavor = info.get('flavor', {})
flavor_id = flavor.get('id', '')
info['flavor'] = _find_flavor(cs, flavor_id).name
image = info.get('image', {})
image_id = image.get('id', '')
info['image'] = _find_image(cs, image_id).name
info.pop('links', None)
info.pop('addresses', None)
utils.print_dict(info)
class List_Server(Command):
"List server command."
log = logging.getLogger(__name__)
def get_parser(self, prog_name):
parser = super(List_Server, self).get_parser(prog_name)
parser.add_argument(
'--long',
action='store_true',
default=False,
help='Additional fields are listed in output')
return parser
def run(self, parsed_args):
self.log.info('v2.List_Server.run(%s)' % parsed_args)
class Show_Server(Command):
"Show server command."
log = logging.getLogger(__name__)
def get_parser(self, prog_name):
parser = super(Show_Server, self).get_parser(prog_name)
parser.add_argument(
'server',
metavar='<server>',
help='Name or ID of server to display')
return parser
def run(self, parsed_args):
self.log.info('v2.Show_Server.run(%s)' % parsed_args)
#s = _find_server(cs, args.server)
#_print_server(cs, s)
| # Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
"""
Server action implementations
"""
import logging
from cliff.command import Command
from openstackclient.common import utils
def _find_server(cs, server):
"""Get a server by name or ID."""
return utils.find_resource(cs.servers, server)
def _print_server(cs, server):
# By default when searching via name we will do a
# findall(name=blah) and due a REST /details which is not the same
# as a .get() and doesn't get the information about flavors and
# images. This fix it as we redo the call with the id which does a
# .get() to get all informations.
if not 'flavor' in server._info:
server = _find_server(cs, server.id)
networks = server.networks
info = server._info.copy()
for network_label, address_list in networks.items():
info['%s network' % network_label] = ', '.join(address_list)
flavor = info.get('flavor', {})
flavor_id = flavor.get('id', '')
info['flavor'] = _find_flavor(cs, flavor_id).name
image = info.get('image', {})
image_id = image.get('id', '')
info['image'] = _find_image(cs, image_id).name
info.pop('links', None)
info.pop('addresses', None)
utils.print_dict(info)
class List_Server(Command):
"List server command."
log = logging.getLogger(__name__)
def get_parser(self, prog_name):
parser = super(List_Server, self).get_parser(prog_name)
parser.add_argument(
'--long',
action='store_true',
default=False,
help='Additional fields are listed in output')
return parser
def run(self, parsed_args):
self.log.info('List_Server()')
self.log.info(' run(%s)' % parsed_args)
self.app.stdout.write('hi!\n')
class Show_Server(Command):
"Show server command."
log = logging.getLogger(__name__)
def get_parser(self, prog_name):
parser = super(Show_Server, self).get_parser(prog_name)
parser.add_argument(
'server',
metavar='<server>',
help='Name or ID of server to display')
return parser
def run(self, parsed_args):
self.log.info('Show_Server()')
self.log.info(' run(%s)' % parsed_args)
self.app.stdout.write('hi!\n')
#s = _find_server(cs, args.server)
#_print_server(cs, s)
| Python | 0.999995 |
56ac633029c9d7ef40415e1881d2cb3c18c83d7b | Bump to version 0.17.1 | ckanny/__init__.py | ckanny/__init__.py | # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
ckanny
~~~~~~
Miscellaneous CKAN utility scripts
Examples:
literal blocks::
python example_google.py
Attributes:
module_level_variable1 (int): Module level variables may be documented in
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
from manager import Manager
from . import datastorer, filestorer, package, hdx
__version__ = '0.17.1'
__title__ = 'ckanny'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility scripts'
__email__ = 'reubano@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
manager = Manager()
manager.merge(datastorer.manager, namespace='ds')
manager.merge(filestorer.manager, namespace='fs')
manager.merge(hdx.manager, namespace='hdx')
manager.merge(package.manager, namespace='pk')
@manager.command
def ver():
"""Show ckanny version"""
print('v%s' % __version__)
if __name__ == '__main__':
manager.main()
| # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
ckanny
~~~~~~
Miscellaneous CKAN utility scripts
Examples:
literal blocks::
python example_google.py
Attributes:
module_level_variable1 (int): Module level variables may be documented in
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
from manager import Manager
from . import datastorer, filestorer, package, hdx
__version__ = '0.17.0'
__title__ = 'ckanny'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility scripts'
__email__ = 'reubano@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
manager = Manager()
manager.merge(datastorer.manager, namespace='ds')
manager.merge(filestorer.manager, namespace='fs')
manager.merge(hdx.manager, namespace='hdx')
manager.merge(package.manager, namespace='pk')
@manager.command
def ver():
"""Show ckanny version"""
print('v%s' % __version__)
if __name__ == '__main__':
manager.main()
| Python | 0 |
20ffbab08c244ec788e8a6114ccdbf38e39d97b6 | Fix unclassifiable problem | classifier/demo.py | classifier/demo.py | """
This is a demo about how to use LibLINEAR to do the prediction
==============================================================
Usage: python demo.py
Author: Wenjun Wang
Date: June 18, 2015
"""
import pickle
import datetime
from liblinearutil import *
from feature import convert_query
# Training (done offline; kept here only for reference):
#y, x = svm_read_problem(path_to_training_file)
#m = train(y, x, '-c 1 -s 1 -B 1 -e 0.01 -v 5 -q')
#save_model(name_of_model_file,m)
# Load the trained model, which is in the same directory as this script.
# Models are named 'model_YYYY-MM-DD'; fall back to yesterday's model when
# today's has not been produced yet (load_model returns None on failure).
date = str(datetime.date.today())
m = load_model('model_'+date)
if m == None:
    date = str(datetime.date.fromordinal(datetime.date.today().toordinal()-1))
    m = load_model('model_'+date)
# Load the pickled feature list, which is also in the same directory
infile = open('features')
feature_list = pickle.load(infile)
# Class labels known to the trained model
y = [1,2,3,4,5]
# Example query
query = 'Alan Black'
# Convert the raw query string into a LIBLINEAR feature vector
x = convert_query(query, feature_list, 'test')
# Do the prediction ('-b 0': decision values, not probability estimates)
p_label, p_val = predict(y, x, m, '-b 0')
# A zero decision value for the predicted label is treated as
# "cannot classify to any class" and reported as label -1.
if p_val[0][int(p_label[0])-1] == 0:
    p_label[0] = -1
print p_label #predicted class/label
print p_val #svm decision value for each class/label
| """
This is a demo about how to use LibLINEAR to do the prediction
==============================================================
Usage: python demo.py
Author: Wenjun Wang
Date: June 18, 2015
"""
import pickle
import datetime
from liblinearutil import *
from feature import convert_query
# Read training file
#y, x = svm_read_problem(path_to_training_file)
# Train and save model
#m = train(y, x, '-c 1 -s 1 -B 1 -e 0.01 -v 5 -q')
#save_model(name_of_model_file,m)
# Load the trained model, which is in the same directory as this script
date = str(datetime.date.today())
m = load_model('model_'+date)
if m == None:
date = str(datetime.date.fromordinal(datetime.date.today().toordinal()-1))
m = load_model('model_'+date)
# Load feature file, which is also in the same directory
infile = open('features')
feature_list = pickle.load(infile)
# Class labels
y = [1,2,3,4,5]
# Example query
query = 'next comment'
# Convert query
x = convert_query(query, feature_list, 'test')
# Do the prediction
p_label, p_val = predict(y, x, m, '-b 0')
print p_label #predict class/label
print p_val #svm value for each class/label
| Python | 0.999999 |
6c1f487aa7ac472fc7f726b21d26c841625b176d | Edit feed content | routes.py | routes.py | from flask import Flask, render_template, redirect, url_for, request, session,\
flash, jsonify
from werkzeug.contrib.atom import AtomFeed
import os
import psycopg2
from functools import wraps
import urlparse
import datetime
app = Flask(__name__)
app.secret_key = os.environ['SECRET_KEY']
def connectDB(wrapped):
    """Decorator that runs *wrapped* with an open PostgreSQL cursor.

    The wrapped view receives an open cursor as its first argument.  On a
    successful return the transaction is committed; in every case the
    cursor and connection are now closed (the original leaked both when
    the view raised, because there was no try/finally).
    """
    @wraps(wrapped)
    def inner(*args, **kwargs):
        # DATABASE_URL is a Heroku-style postgres:// URL.
        urlparse.uses_netloc.append("postgres")
        url = urlparse.urlparse(os.environ["DATABASE_URL"])
        conn = psycopg2.connect(
            database=url.path[1:],
            user=url.username,
            password=url.password,
            host=url.hostname,
            port=url.port
        )
        try:
            cur = conn.cursor()
            try:
                ret = wrapped(cur, *args, **kwargs)
                conn.commit()
            finally:
                cur.close()
        finally:
            conn.close()
        return ret
    return inner
def login_required(f):
    """Decorator: allow the view only for logged-in users, otherwise
    redirect the visitor to the home (login) page."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if 'username' in session:
            return f(*args, **kwargs)
        return redirect(url_for('home'))
    return wrapper
@app.route('/', methods=['GET', 'POST'])
@connectDB
def home(cur):
    """Login page (GET) or login handler (POST).

    ``cur`` is the psycopg2 cursor injected by the ``connectDB`` decorator.
    """
    if request.method == 'GET':
        if 'username' in session:
            return redirect(url_for('update'))
        session['type'] = 'unknown'
        return render_template('home.html', logged_in=False)
    else:
        # The ROOT table is expected to hold a single (username, password)
        # row; the submitted form must match it exactly.
        # NOTE(review): this compares the password in plain text -- the
        # stored credential is presumably unhashed; verify and consider
        # hashing.
        cur.execute("SELECT * FROM ROOT")
        auth = cur.fetchone()
        user_input = (request.form['username'], request.form['password'])
        if user_input == auth:
            session['username'] = request.form['username']
            return redirect(url_for('update'))
        else:
            return redirect(url_for('home'))
@app.route('/update', methods=['GET', 'POST'])
@login_required
@connectDB
def update(cur):
    """Render the update form (GET) or insert a new announcement (POST).

    ``cur`` is the psycopg2 cursor injected by the ``connectDB`` decorator.
    """
    if request.method == 'GET':
        return render_template('update.html', logged_in=True)
    else:
        cluster = request.form['cluster']
        announcement = request.form['announcement']
        timestamp = datetime.datetime.now()
        try:
            # Parameterized query: psycopg2 escapes the values, so the
            # user-supplied text is safe against SQL injection.
            cur.execute("INSERT INTO ANNOUNCEMENTS (CLUSTER, ANNOUNCEMENT, TIME) VALUES \
                (%s, %s, %s)", (cluster, announcement, timestamp))
            flash('The update has been posted.')
        except Exception:
            # The original bare ``except:`` also swallowed SystemExit and
            # KeyboardInterrupt; catching Exception keeps those propagating.
            flash('ERROR ! The update was NOT posted.')
        finally:
            # Note: returning from finally suppresses any in-flight
            # exception, preserving the original redirect-always behavior.
            return redirect(url_for('update'))
@app.route('/logout')
@login_required
def logout():
    """Clear the login session and return the user to the home page."""
    session.pop('username', None)
    session['type'] = 'unknown'
    return redirect(url_for('home'))
@app.route('/recent.atom')
@connectDB
def recent_feed(cur):
    """Atom feed of the 50 most recent announcements, newest first."""
    feed = AtomFeed('Recent announcements',
                    feed_url=request.url,
                    url=request.url_root)
    cur.execute("SELECT * FROM ANNOUNCEMENTS ORDER BY TIME DESC LIMIT 50")
    announcements = cur.fetchall()
    # Each row is (cluster, announcement, time); see the update() handler.
    for i, announcement in enumerate(announcements):
        feed.add(
            "ANNOUNCEMENT " + str(i + 1),
            unicode(announcement[0].strip() + ': ' + announcement[1]),
            author="KS",
            url=request.url,
            updated=announcement[2]
        )
    return feed.get_response()
@app.route('/api/count', methods=['GET'])
@connectDB
def count(cur):
    """JSON endpoint returning the total number of announcements."""
    cur.execute("SELECT COUNT(*) FROM ANNOUNCEMENTS")
    total = cur.fetchone()[0]
    return jsonify({'count': total})
if __name__ == '__main__':
app.run(host="127.0.0.1", port=6666, debug=True)
| from flask import Flask, render_template, redirect, url_for, request, session,\
flash, jsonify
from werkzeug.contrib.atom import AtomFeed
import os
import psycopg2
from functools import wraps
import urlparse
import datetime
app = Flask(__name__)
app.secret_key = os.environ['SECRET_KEY']
def connectDB(wrapped):
@wraps(wrapped)
def inner(*args, **kwargs):
urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.environ["DATABASE_URL"])
conn = psycopg2.connect(
database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port
)
cur = conn.cursor()
ret = wrapped(cur, *args, **kwargs)
conn.commit()
cur.close()
conn.close()
return ret
return inner
def login_required(f):
@wraps(f)
def function(*args, **kwargs):
if 'username' not in session:
return redirect(url_for('home'))
return f(*args, **kwargs)
return function
@app.route('/', methods=['GET', 'POST'])
@connectDB
def home(cur):
if request.method == 'GET':
if 'username' in session:
return redirect(url_for('update'))
session['type'] = 'unknown'
return render_template('home.html', logged_in=False)
else:
cur.execute("SELECT * FROM ROOT")
auth = cur.fetchone()
user_input = (request.form['username'], request.form['password'])
if user_input == auth:
session['username'] = request.form['username']
return redirect(url_for('update'))
else:
return redirect(url_for('home'))
@app.route('/update', methods=['GET', 'POST'])
@login_required
@connectDB
def update(cur):
if request.method == 'GET':
return render_template('update.html', logged_in=True)
else:
cluster = request.form['cluster']
announcement = request.form['announcement']
timestamp = datetime.datetime.now()
try:
cur.execute("INSERT INTO ANNOUNCEMENTS (CLUSTER, ANNOUNCEMENT, TIME) VALUES \
(%s, %s, %s)", (cluster, announcement, timestamp))
flash('The update has been posted.')
except:
flash('ERROR ! The update was NOT posted.')
finally:
return redirect(url_for('update'))
@app.route('/logout')
@login_required
def logout():
session.pop('username', None)
session['type'] = 'unknown'
return redirect(url_for('home'))
@app.route('/recent.atom')
@connectDB
def recent_feed(cur):
feed = AtomFeed('Recent announcements',
feed_url=request.url,
url=request.url_root)
cur.execute("SELECT * FROM ANNOUNCEMENTS ORDER BY TIME DESC LIMIT 50")
announcements = cur.fetchall()
for i, announcement in enumerate(announcements):
feed.add(
"ANNOUNCEMENT " + str(i),
unicode(announcement[0] + announcement[1]),
author="KS",
url=request.url,
updated=announcement[2]
)
return feed.get_response()
@app.route('/api/count', methods=['GET'])
@connectDB
def count(cur):
cur.execute("SELECT COUNT(*) FROM ANNOUNCEMENTS")
count = cur.fetchone()[0]
return jsonify({'count': count})
if __name__ == '__main__':
app.run(host="127.0.0.1", port=6666, debug=True)
| Python | 0.000001 |
da31be1c27c7568fa50c89f28b04ad763481f541 | Remove unused import | rparse.py | rparse.py | #!/usr/bin/env python
# Copyright 2015, Dmitry Veselov
from plyplus import Grammar, STransformer, \
ParseError, TokenizeError
try:
# Python 2.x and pypy
from itertools import imap as map
from itertools import ifilter as filter
except ImportError:
# Python 3.x already have lazy map
pass
__all__ = [
"parse"
]
grammar = Grammar(r"""
@start : package ;
package : name extras? specs? comment?;
name : string ;
specs : comparison version (',' comparison version)* ;
comparison : '<' | '<=' | '!=' | '==' | '>=' | '>' | '~=' | '===' ;
version : string ;
extras : '\[' (extra (',' extra)*)? '\]' ;
extra : string ;
comment : '\#.+' ;
@string : '[-A-Za-z0-9_\.]+' ;
SPACES: '[ \t\n]+' (%ignore) (%newline);
""")
class Requirement(object):
    """A single parsed requirement line.

    Attributes:
        name: distribution name, e.g. ``"flask"``.
        extras: list of extra names from ``pkg[extra1,extra2]``, or None.
        specs: list of ``(comparison, version)`` pairs, or None.
        comment: trailing ``#`` comment text, or None.
    """
    def __init__(self, name=None, extras=None, specs=None, comment=None):
        self.name = name
        self.extras = extras
        self.specs = specs
        self.comment = comment
    def __str__(self):
        # Bug fix: the original format string lacked the closing
        # parenthesis, producing "<Requirement(name='x'>".
        return "<{0}(name='{1}')>".format(self.__class__.__name__, self.name)
class RTransformer(STransformer):
    """plyplus tree transformer turning parse trees into Requirement objects.

    Each method handles the grammar rule of the same name.  Rules that feed
    Requirement attributes return ``(attribute_name, value)`` pairs, which
    ``package`` applies with setattr.
    """
    def package(self, node):
        requirement = Requirement()
        for key, value in node.tail:
            setattr(requirement, key, value)
        return requirement
    def name(self, node):
        return ("name", node.tail[0])
    def specs(self, node):
        # node.tail alternates comparison, version, comparison, version, ...
        comparisons, versions = node.tail[0::2], node.tail[1::2]
        return ("specs", list(zip(comparisons, versions)))
    def comparison(self, node):
        return node.tail[0]
    def version(self, node):
        return node.tail[0]
    def extras(self, node):
        return ("extras", [name for name in node.tail])
    def extra(self, node):
        return node.tail[0]
    def comment(self, node):
        return ("comment", " ".join([word for word in node.tail]))
    def comment_content(self, node):
        return node.tail[0]
def _parse(line, g=grammar):
    """Parse a single requirements line into a plyplus parse tree.

    Blank lines and ``#`` comment lines yield None; a line that cannot be
    tokenized or parsed raises ValueError.
    """
    stripped = line.strip()
    if not stripped or stripped.startswith("#"):
        return None
    try:
        return g.parse(stripped)
    except (ParseError, TokenizeError):
        raise ValueError("Invalid requirements line: '{0}'".format(stripped))
def parse(requirements):
    """Lazily parse a requirements file given as one string.

    Yields one Requirement per meaningful line; blank lines and comment
    lines are skipped.
    """
    transformer = RTransformer()
    trees = map(_parse, requirements.splitlines())
    return map(transformer.transform, filter(None, trees))
| #!/usr/bin/env python
# Copyright 2015, Dmitry Veselov
from re import sub
from plyplus import Grammar, STransformer, \
ParseError, TokenizeError
try:
# Python 2.x and pypy
from itertools import imap as map
from itertools import ifilter as filter
except ImportError:
# Python 3.x already have lazy map
pass
__all__ = [
"parse"
]
grammar = Grammar(r"""
@start : package ;
package : name extras? specs? comment?;
name : string ;
specs : comparison version (',' comparison version)* ;
comparison : '<' | '<=' | '!=' | '==' | '>=' | '>' | '~=' | '===' ;
version : string ;
extras : '\[' (extra (',' extra)*)? '\]' ;
extra : string ;
comment : '\#.+' ;
@string : '[-A-Za-z0-9_\.]+' ;
SPACES: '[ \t\n]+' (%ignore) (%newline);
""")
class Requirement(object):
def __init__(self, name=None, extras=None, specs=None, comment=None):
self.name = name
self.extras = extras
self.specs = specs
self.comment = comment
def __str__(self):
return "<{0}(name='{1}'>".format(self.__class__.__name__, self.name)
class RTransformer(STransformer):
def package(self, node):
requirement = Requirement()
for key, value in node.tail:
setattr(requirement, key, value)
return requirement
def name(self, node):
return ("name", node.tail[0])
def specs(self, node):
comparisons, versions = node.tail[0::2], node.tail[1::2]
return ("specs", list(zip(comparisons, versions)))
def comparison(self, node):
return node.tail[0]
def version(self, node):
return node.tail[0]
def extras(self, node):
return ("extras", [name for name in node.tail])
def extra(self, node):
return node.tail[0]
def comment(self, node):
return ("comment", " ".join([word for word in node.tail]))
def comment_content(self, node):
return node.tail[0]
def _parse(line, g=grammar):
line = line.strip()
if line.startswith("#"):
return None
try:
if line:
return g.parse(line)
else:
return None
except (ParseError, TokenizeError):
message = "Invalid requirements line: '{0}'".format(line)
raise ValueError(message)
def parse(requirements):
"""
Parses given requirements line-by-line.
"""
transformer = RTransformer()
return map(transformer.transform, filter(None, map(_parse, requirements.splitlines())))
| Python | 0.000001 |
c3951f942633438e91e43b523a814bf1a3528295 | Add impl to analyzer. | analyze.py | analyze.py | #!/bin/python
from __future__ import print_function, division
import cv
import cv2
import argparse
import preprocess
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Analyze shogi board state in a photo""",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        'photo', metavar='PHOTO', nargs=1, type=str,
        help='Photo image path')
    parser.add_argument(
        '--output-visualization', nargs='?', metavar='VISUALIZATION_PATH',
        type=str, default=None, const=True,
        help='Output path of pretty visualization image')
    args = parser.parse_args()
    img = cv2.imread(args.photo[0])
    # TODO: Refactoring required
    # The argparse namespace doubles as the 'derive' options object that
    # preprocess.detect_board expects; these flags disable the extra
    # derivation passes.
    args.derive_emptiness = False
    args.derive_types_up = False
    args.derive_validness = False
    detected = preprocess.detect_board("", img, visualize=False, derive=args)
    print("Detected?: %s" % detected)
| #!/bin/python
from __future__ import print_function, division
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="""Analyze shogi board state in a photo""",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
args = parser.parse_args()
| Python | 0 |
6c2adf0ff9f5026a4280b3e374429dcb7ef48dce | Enable using script directly | openfisca_web_api_preview/scripts/serve.py | openfisca_web_api_preview/scripts/serve.py | # -*- coding: utf-8 -*-
import sys
import imp
import os.path
import logging
import argparse
from gunicorn.app.base import BaseApplication
from gunicorn.six import iteritems
from gunicorn import config
from openfisca_core.scripts import add_minimal_tax_benefit_system_arguments
from openfisca_web_api_preview.app import create_app
from imp import load_module
"""
Define the `openfisca serve` command line interface.
"""
DEFAULT_PORT = '5000'
HOST = '127.0.0.1'
DEFAULT_WORKERS_NUMBER = '3'
log = logging.getLogger(__name__)
def define_command_line_options(parser):
    """Attach OpenFisca and web-server options to *parser*; return it."""
    # Country/extension arguments shared with the other OpenFisca commands.
    parser = add_minimal_tax_benefit_system_arguments(parser)
    # Server-specific arguments.
    server_options = [
        (('-p', '--port'), dict(action = 'store', default = DEFAULT_PORT, help = "port to serve on", type = int)),
        (('--tracker_url',), dict(action = 'store', help = "tracking service url", type = str)),
        (('--tracker_idsite',), dict(action = 'store', help = "tracking service id site", type = int)),
        (('-f', '--configuration_file'), dict(action = 'store', help = "gunicorn configuration file", type = str)),
    ]
    for flags, options in server_options:
        parser.add_argument(*flags, **options)
    return parser
def read_user_configuration(default_configuration, command_line_parser):
    """Merge configuration from three sources, lowest to highest priority:

    1. *default_configuration* (mutated in place),
    2. a gunicorn-style configuration file (``-f``), if given,
    3. the command line itself.

    Returns the merged configuration dict.
    """
    configuration = default_configuration
    args, unknown_args = command_line_parser.parse_known_args()
    if args.configuration_file:
        # Configuration file overloads default configuration.  The file is
        # imported as a Python module; every non-dunder attribute with a
        # truthy value becomes a configuration key.
        module_name = os.path.splitext(os.path.basename(args.configuration_file))[0]
        module_directory = os.path.dirname(args.configuration_file)
        module = imp.load_module(module_name, *imp.find_module(module_name, [module_directory]))
        file_configuration = [item for item in dir(module) if not item.startswith("__")]
        for key in file_configuration:
            value = getattr(module, key)
            if value:
                configuration[key] = value
                if key == "port":
                    # Rebuild 'bind' from its host part instead of chopping
                    # the last 4 characters, which silently corrupted the
                    # address for ports that are not 4 digits long.
                    host = configuration['bind'].rsplit(':', 1)[0]
                    configuration['bind'] = '{}:{}'.format(host, configuration['port'])
    # Command line configuration overloads all configuration.
    # (The original also rebuilt a gunicorn Config parser here and assigned
    # it over the parameter, but the result was never used; the dead
    # statement has been removed.)
    # NOTE(review): ``unknown_args`` is a list while ``update`` expects a
    # mapping; this only works when the list is empty -- verify callers.
    configuration = update(configuration, vars(args))
    configuration = update(configuration, unknown_args)
    return configuration
def update(configuration, new_options):
    """Overlay *new_options* onto *configuration* (mutated and returned).

    A key is copied when it is unset (or falsy) in *configuration*, or when
    the new value itself is truthy.  Whenever the 'port' key changes, the
    gunicorn 'bind' address is rebuilt to match.
    """
    for key in new_options:
        value = new_options[key]
        if not configuration.get(key) or value:
            configuration[key] = value
            if key == "port":
                # Rebuild 'bind' from its host part.  The previous code
                # sliced off the last 4 characters of 'bind', which only
                # worked for ports that happened to be 4 digits long.
                host = configuration['bind'].rsplit(':', 1)[0]
                configuration['bind'] = '{}:{}'.format(host, configuration['port'])
    return configuration
class StandaloneApplication(BaseApplication):
    """Embedded gunicorn: serve a pre-built WSGI app from a config dict.

    Follows gunicorn's documented "custom application" pattern.
    """
    def __init__(self, app, options = None):
        self.options = options or {}
        self.application = app
        super(StandaloneApplication, self).__init__()
    def load_config(self):
        # Copy every recognized gunicorn setting from the options dict;
        # keys gunicorn does not know are silently skipped.
        for key, value in iteritems(self.options):
            if value is None:
                log.debug('Undefined value for key `{}`.'.format(key))
            if key in self.cfg.settings and value is not None:
                self.cfg.set(key.lower(), value)
    def load(self):
        return self.application
def main(parser = None):
    """Entry point for ``openfisca serve``: build the app, run gunicorn.

    *parser* lets the caller (the openfisca CLI) pass a pre-built argparse
    parser; when invoked directly a fresh one is created.
    """
    if not parser:
        parser = argparse.ArgumentParser()
    parser = define_command_line_options(parser)
    # Defaults; user configuration (file, then command line) overloads them.
    configuration = {
        'port': DEFAULT_PORT,
        'bind': '{}:{}'.format(HOST, DEFAULT_PORT),
        'workers': DEFAULT_WORKERS_NUMBER,
    }
    configuration = read_user_configuration(configuration, parser)
    app = create_app(configuration['country_package'], configuration['extensions'], configuration['tracker_url'], configuration['tracker_idsite'])
    StandaloneApplication(app, configuration).run()
if __name__ == '__main__':
sys.exit(main())
| # -*- coding: utf-8 -*-
import sys
import imp
import os.path
import logging
import argparse
from gunicorn.app.base import BaseApplication
from gunicorn.six import iteritems
from gunicorn import config
from openfisca_core.scripts import add_minimal_tax_benefit_system_arguments
from ..app import create_app
from imp import load_module
"""
Define the `openfisca serve` command line interface.
"""
DEFAULT_PORT = '5000'
HOST = '127.0.0.1'
DEFAULT_WORKERS_NUMBER = '3'
log = logging.getLogger(__name__)
def define_command_line_options(parser):
# Define OpenFisca modules configuration
parser = add_minimal_tax_benefit_system_arguments(parser)
# Define server configuration
parser.add_argument('-p', '--port', action = 'store', default = DEFAULT_PORT, help = "port to serve on", type = int)
parser.add_argument('--tracker_url', action = 'store', help = "tracking service url", type = str)
parser.add_argument('--tracker_idsite', action = 'store', help = "tracking service id site", type = int)
parser.add_argument('-f', '--configuration_file', action = 'store', help = "gunicorn configuration file", type = str)
return parser
def read_user_configuration(default_configuration, command_line_parser):
configuration = default_configuration
args, unknown_args = command_line_parser.parse_known_args()
if args.configuration_file:
# Configuration file overloads default configuration
module_name = os.path.splitext(os.path.basename(args.configuration_file))[0]
module_directory = os.path.dirname(args.configuration_file)
module = imp.load_module(module_name, *imp.find_module(module_name, [module_directory]))
file_configuration = [item for item in dir(module) if not item.startswith("__")]
for key in file_configuration:
value = getattr(module, key)
if value:
configuration[key] = value
if key == "port":
configuration['bind'] = configuration['bind'][:-4] + str(configuration['port'])
# Command line configuration overloads all configuration
command_line_parser = config.Config().parser()
configuration = update(configuration, vars(args))
configuration = update(configuration, unknown_args)
return configuration
def update(configuration, new_options):
for key in new_options:
value = new_options[key]
if not configuration.get(key) or value:
configuration[key] = value
if key == "port":
configuration['bind'] = configuration['bind'][:-4] + str(configuration['port'])
return configuration
class StandaloneApplication(BaseApplication):
def __init__(self, app, options = None):
self.options = options or {}
self.application = app
super(StandaloneApplication, self).__init__()
def load_config(self):
for key, value in iteritems(self.options):
if value is None:
log.debug('Undefined value for key `{}`.'.format(key))
if key in self.cfg.settings and value is not None:
self.cfg.set(key.lower(), value)
def load(self):
return self.application
def main(parser):
command_line_parser = define_command_line_options(parser)
configuration = {
'port': DEFAULT_PORT,
'bind': '{}:{}'.format(HOST, DEFAULT_PORT),
'workers': DEFAULT_WORKERS_NUMBER,
}
configuration = read_user_configuration(configuration, command_line_parser)
app = create_app(configuration['country_package'], configuration['extensions'], configuration['tracker_url'], configuration['tracker_idsite'])
StandaloneApplication(app, configuration).run()
if __name__ == '__main__':
sys.exit(main())
| Python | 0 |
b5b40dc232b04a2cfa75438bb5143ffdb103a57c | split a method | AlphaTwirl/EventReader/ProgressReporter.py | AlphaTwirl/EventReader/ProgressReporter.py | # Tai Sakuma <sakuma@fnal.gov>
import multiprocessing
import time
from ProgressReport import ProgressReport
##____________________________________________________________________________||
class ProgressReporter(object):
    """Rate-limited progress reporting into a queue.

    A ProgressReport is emitted when any of these holds:
      * more than ``interval`` seconds passed since the last report,
      * the number of processed events is a multiple of ``pernevents``,
      * the last event of the component has just been processed.
    """
    def __init__(self, queue, pernevents = 1000):
        self.queue = queue
        self.pernevents = pernevents
        self.interval = 0.02  # minimum seconds between time-based reports
        self._readTime()
    def report(self, event, component):
        """Send a ProgressReport for *event* if one is currently due."""
        if self.needToReport(event, component):
            self._report(event, component)
    def _report(self, event, component):
        # event.iEvent is zero-based, so one more event has completed.
        self.queue.put(ProgressReport(
            name = component.name,
            done = event.iEvent + 1,
            total = event.nEvents,
        ))
        self._readTime()
    def needToReport(self, event, component):
        """Return True when a report should be emitted for *event*."""
        done = event.iEvent + 1  # iEvent starts from 0
        elapsed = self._time() - self.lastTime
        return (elapsed > self.interval
                or done % self.pernevents == 0
                or done == event.nEvents)
    def _time(self):
        return time.time()
    def _readTime(self):
        self.lastTime = self._time()
##____________________________________________________________________________||
| # Tai Sakuma <sakuma@fnal.gov>
import multiprocessing
import time
from ProgressReport import ProgressReport
##____________________________________________________________________________||
class ProgressReporter(object):
def __init__(self, queue, pernevents = 1000):
self.queue = queue
self.pernevents = pernevents
self.interval = 0.02 # [second]
self._readTime()
def report(self, event, component):
if not self.needToReport(event, component): return
done = event.iEvent + 1
report = ProgressReport(name = component.name, done = done, total = event.nEvents)
self.queue.put(report)
self._readTime()
def needToReport(self, event, component):
iEvent = event.iEvent + 1 # add 1 because event.iEvent starts from 0
if self._time() - self.lastTime > self.interval: return True
if iEvent % self.pernevents == 0: return True
if iEvent == event.nEvents: return True
return False
def _time(self): return time.time()
def _readTime(self): self.lastTime = self._time()
##____________________________________________________________________________||
| Python | 0.999953 |
6cc803f68876689629fa2c2bae1413d46a0d2002 | Update different-ways-to-add-parentheses.py | Python/different-ways-to-add-parentheses.py | Python/different-ways-to-add-parentheses.py | # Time: O(4^n / n^(3/2)) ~= Catalan numbers = C(2n, n) - C(2n, n - 1)
# Space: O(n^2 * 4^n / n^(3/2))
#
# Given a string of numbers and operators, return all possible
# results from computing all the different possible ways to
# group numbers and operators. The valid operators are +, - and *.
#
#
# Example 1
# Input: "2-1-1".
#
# ((2-1)-1) = 0
# (2-(1-1)) = 2
# Output: [0, 2]
#
#
# Example 2
# Input: "2*3-4*5"
#
# (2*(3-(4*5))) = -34
# ((2*3)-(4*5)) = -14
# ((2*(3-4))*5) = -10
# (2*((3-4)*5)) = -10
# (((2*3)-4)*5) = 10
# Output: [-34, -14, -10, -10, 10]
#
class Solution:
    # @param {string} input
    # @return {integer[]}
    def diffWaysToCompute(self, input):
        """Return every value obtainable by fully parenthesizing *input*.

        Top-down DP over operand spans: ``nums`` holds the operands,
        ``ops`` the operator callables between them, and ``lookup[l][r]``
        memoizes the result list for nums[l..r].
        """
        # Local imports: this LeetCode-style file has no import section,
        # so the original raised NameError on `re` and `operator`.
        import re
        import operator
        tokens = re.split(r'(\D)', input)
        # list(...) keeps this Python 3 compatible, where map() is lazy
        # and cannot be indexed; on Python 2 it is a harmless copy.
        nums = list(map(int, tokens[::2]))
        ops = list(map({'+': operator.add, '-': operator.sub, '*': operator.mul}.get, tokens[1::2]))
        lookup = [[None for _ in range(len(nums))] for _ in range(len(nums))]

        def diffWaysToComputeRecu(left, right):
            if left == right:
                return [nums[left]]
            if lookup[left][right]:
                return lookup[left][right]
            # Split at every operator i and combine every left-result
            # with every right-result.
            lookup[left][right] = [ops[i](x, y)
                                   for i in range(left, right)
                                   for x in diffWaysToComputeRecu(left, i)
                                   for y in diffWaysToComputeRecu(i + 1, right)]
            return lookup[left][right]

        return diffWaysToComputeRecu(0, len(nums) - 1)
class Solution2:
    # @param {string} input
    # @return {integer[]}
    def diffWaysToCompute(self, input):
        """Same problem, memoized over character index ranges of *input*.

        ``lookup[left][right]`` caches every value of input[left:right].
        """
        # Local import: the file has no import section, so the original
        # raised NameError on `operator`.  `range` replaces the
        # Python-2-only `xrange`.
        import operator
        lookup = [[None for _ in range(len(input) + 1)] for _ in range(len(input) + 1)]
        ops = {'+': operator.add, '-': operator.sub, '*': operator.mul}

        def diffWaysToComputeRecu(left, right):
            if lookup[left][right]:
                return lookup[left][right]
            result = []
            for i in range(left, right):
                if input[i] in "+-*":
                    for x in diffWaysToComputeRecu(left, i):
                        for y in diffWaysToComputeRecu(i + 1, right):
                            result.append(ops[input[i]](x, y))
            if not result:
                # No operator in the slice: it is a plain number.
                result = [int(input[left:right])]
            lookup[left][right] = result
            return lookup[left][right]

        return diffWaysToComputeRecu(0, len(input))
| # Time: O(n * 4^n / n^(3/2)) ~= n * (Catalan numbers) = n * (C(2n, n) - C(2n, n - 1))
# Space: O(n^2 * 4^n / n^(3/2))
#
# Given a string of numbers and operators, return all possible
# results from computing all the different possible ways to
# group numbers and operators. The valid operators are +, - and *.
#
#
# Example 1
# Input: "2-1-1".
#
# ((2-1)-1) = 0
# (2-(1-1)) = 2
# Output: [0, 2]
#
#
# Example 2
# Input: "2*3-4*5"
#
# (2*(3-(4*5))) = -34
# ((2*3)-(4*5)) = -14
# ((2*(3-4))*5) = -10
# (2*((3-4)*5)) = -10
# (((2*3)-4)*5) = 10
# Output: [-34, -14, -10, -10, 10]
#
class Solution:
# @param {string} input
# @return {integer[]}
def diffWaysToCompute(self, input):
tokens = re.split('(\D)', input)
nums = map(int, tokens[::2])
ops = map({'+': operator.add, '-': operator.sub, '*': operator.mul}.get, tokens[1::2])
lookup = [[None for _ in xrange(len(nums))] for _ in xrange(len(nums))]
def diffWaysToComputeRecu(left, right):
if left == right:
return [nums[left]]
if lookup[left][right]:
return lookup[left][right]
lookup[left][right] = [ops[i](x, y)
for i in xrange(left, right)
for x in diffWaysToComputeRecu(left, i)
for y in diffWaysToComputeRecu(i + 1, right)]
return lookup[left][right]
return diffWaysToComputeRecu(0, len(nums) - 1)
class Solution2:
# @param {string} input
# @return {integer[]}
def diffWaysToCompute(self, input):
lookup = [[None for _ in xrange(len(input) + 1)] for _ in xrange(len(input) + 1)]
ops = {'+': operator.add, '-': operator.sub, '*': operator.mul}
def diffWaysToComputeRecu(left, right):
if lookup[left][right]:
return lookup[left][right]
result = []
for i in xrange(left, right):
if input[i] in "+-*":
for x in diffWaysToComputeRecu(left, i):
for y in diffWaysToComputeRecu(i + 1, right):
result.append(ops[input[i]](x, y))
if not result:
result = [int(input[left:right])]
lookup[left][right] = result
return lookup[left][right]
return diffWaysToComputeRecu(0, len(input))
| Python | 0.000014 |
ec013d194e2b26155949bf89a5cd03ef4a013cc5 | Add import unicode on csv_importer | passpie/importers/csv_importer.py | passpie/importers/csv_importer.py | import csv
from passpie.importers import BaseImporter
from passpie._compat import is_python2, unicode
def unicode_csv_reader(utf8_data, dialect=csv.excel, **kwargs):
csv_reader = csv.reader(utf8_data, dialect=dialect, **kwargs)
for row in csv_reader:
if is_python2():
yield [unicode(cell, 'utf-8') for cell in row]
else:
yield [str(cell) for cell in row]
class CSVImporter(BaseImporter):
def match(self, filepath):
"""Dont match this importer"""
return False
def handle(self, filepath, cols):
credentials = []
with open(filepath) as csv_file:
reader = unicode_csv_reader(csv_file)
try:
next(reader)
except StopIteration:
raise ValueError('empty csv file: %s' % filepath)
for row in reader:
credential = {
'name': row[cols['name']],
'login': row[cols.get('login', '')],
'password': row[cols['password']],
'comment': row[cols.get('comment', '')],
}
credentials.append(credential)
return credentials
| import csv
from passpie.importers import BaseImporter
from passpie._compat import is_python2
def unicode_csv_reader(utf8_data, dialect=csv.excel, **kwargs):
csv_reader = csv.reader(utf8_data, dialect=dialect, **kwargs)
for row in csv_reader:
if is_python2():
yield [unicode(cell, 'utf-8') for cell in row]
else:
yield [str(cell) for cell in row]
class CSVImporter(BaseImporter):
def match(self, filepath):
"""Dont match this importer"""
return False
def handle(self, filepath, cols):
credentials = []
with open(filepath) as csv_file:
reader = unicode_csv_reader(csv_file)
try:
next(reader)
except StopIteration:
raise ValueError('empty csv file: %s' % filepath)
for row in reader:
credential = {
'name': row[cols['name']],
'login': row[cols.get('login', '')],
'password': row[cols['password']],
'comment': row[cols.get('comment', '')],
}
credentials.append(credential)
return credentials
| Python | 0.000004 |
8e536e4911ab18a5ac6e2e018fa041425a57a14b | Update serializers.py | website/serializers.py | website/serializers.py | from website.models import Issue, User , UserProfile,Points, Domain
from rest_framework import routers, serializers, viewsets, filters
import django_filters
class UserSerializer(serializers.ModelSerializer):
    """Minimal public representation of a User: id and username only."""
    class Meta:
        model = User
        fields = ('id','username')
class IssueSerializer(serializers.ModelSerializer):
    """All Issue fields, with the reporting user nested read-only."""
    # read_only: the reporter cannot be reassigned through the API.
    user = UserSerializer(read_only=True)
    class Meta:
        model = Issue
        fields = '__all__'
class IssueViewSet(viewsets.ModelViewSet):
    """Issue endpoint; ?search= matches url, description, or reporter id."""
    queryset = Issue.objects.all()
    serializer_class = IssueSerializer
    filter_backends = (filters.SearchFilter,)
    search_fields = ('url', 'description', 'user__id')
    # Allow read and create only; PUT/PATCH/DELETE are disabled.
    http_method_names = ['get', 'post', 'head']
class UserIssueViewSet(viewsets.ModelViewSet):
    """Issue endpoint filtered by reporter; ?search= matches user fields."""
    queryset = Issue.objects.all()
    serializer_class = IssueSerializer
    filter_backends = (filters.SearchFilter,)
    search_fields = ('user__username', 'user__id')
    # Allow read and create only; PUT/PATCH/DELETE are disabled.
    http_method_names = ['get', 'post', 'head']
class UserProfileSerializer(serializers.ModelSerializer):
    """All UserProfile fields, with the owning user nested read-only."""
    user = UserSerializer(read_only=True)
    class Meta:
        model = UserProfile
        fields = '__all__'
class UserProfileViewSet(viewsets.ModelViewSet):
    """Profile endpoint; ?search= matches profile id or user id/username."""
    serializer_class = UserProfileSerializer
    queryset = UserProfile.objects.all()
    filter_backends = (filters.SearchFilter,)
    search_fields = ('id', 'user__id','user__username')
    # Allow read and create only; PUT/PATCH/DELETE are disabled.
    http_method_names = ['get', 'post', 'head']
class DomainSerializer(serializers.ModelSerializer):
    """Full representation of a tracked Domain."""
    class Meta:
        model = Domain
        fields = '__all__'
class DomainViewSet(viewsets.ModelViewSet):
    """Domain endpoint; ?search= matches the domain url or name."""
    serializer_class = DomainSerializer
    queryset = Domain.objects.all()
    filter_backends = (filters.SearchFilter,)
    search_fields = ('url', 'name')
    # Allow read and create only; PUT/PATCH/DELETE are disabled.
    http_method_names = ['get', 'post', 'head']
# Mount every API endpoint on a single DRF router (included from urls.py).
router = routers.DefaultRouter()
router.register(r'issues', IssueViewSet, basename="issues")
router.register(r'userissues', UserIssueViewSet, basename="userissues")
router.register(r'profile', UserProfileViewSet, basename="profile")
router.register(r'domain', DomainViewSet, basename="domain")
| from website.models import Issue, User , UserProfile,Points, Domain
from rest_framework import routers, serializers, viewsets, filters
import django_filters
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id','username')
class IssueSerializer(serializers.ModelSerializer):
user = UserSerializer(read_only=True)
class Meta:
model = Issue
fields = '__all__'
class IssueViewSet(viewsets.ModelViewSet):
queryset = Issue.objects.all()
serializer_class = IssueSerializer
filter_backends = (filters.SearchFilter,)
search_fields = ('url', 'description', 'user__id')
class UserIssueViewSet(viewsets.ModelViewSet):
queryset = Issue.objects.all()
serializer_class = IssueSerializer
filter_backends = (filters.SearchFilter,)
search_fields = ('user__username', 'user__id')
class UserProfileSerializer(serializers.ModelSerializer):
user = UserSerializer(read_only=True)
class Meta:
model = UserProfile
fields = '__all__'
class UserProfileViewSet(viewsets.ModelViewSet):
serializer_class = UserProfileSerializer
queryset = UserProfile.objects.all()
filter_backends = (filters.SearchFilter,)
search_fields = ('id', 'user__id','user__username')
class DomainSerializer(serializers.ModelSerializer):
class Meta:
model = Domain
fields = '__all__'
class DomainViewSet(viewsets.ModelViewSet):
serializer_class = DomainSerializer
queryset = Domain.objects.all()
filter_backends = (filters.SearchFilter,)
search_fields = ('url', 'name')
router = routers.DefaultRouter()
router.register(r'issues', IssueViewSet, basename="issues")
router.register(r'userissues', UserIssueViewSet, basename="userissues")
router.register(r'profile', UserProfileViewSet, basename="profile")
router.register(r'domain', DomainViewSet, basename="domain")
| Python | 0 |
936d16449ae8e40435258f79bbb14f4d47c96f02 | Fix bug in neonlogger using stdout for stderr. | src/opencmiss/neon/core/neonlogger.py | src/opencmiss/neon/core/neonlogger.py | '''
Copyright 2015 University of Auckland
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import sys
import logging
from PySide import QtCore
from opencmiss.zinc.logger import Logger
ENABLE_STD_STREAM_CAPTURE = True
class CustomStream(QtCore.QObject):
_stdout = None
_stderr = None
messageWritten = QtCore.Signal([str, str])
def flush(self):
pass
def fileno(self):
return -1
def write(self, msg, level="INFORMATION"):
if (not self.signalsBlocked()):
self.messageWritten.emit(msg, level)
@staticmethod
def stdout():
if (not CustomStream._stdout):
CustomStream._stdout = CustomStream()
sys.stdout = CustomStream._stdout if ENABLE_STD_STREAM_CAPTURE else sys.stdout
return CustomStream._stdout
@staticmethod
def stderr():
if (not CustomStream._stderr):
CustomStream._stderr = CustomStream()
sys.stderr = CustomStream._stderr if ENABLE_STD_STREAM_CAPTURE else sys.stderr
return CustomStream._stderr
class LogsToWidgetHandler(logging.Handler):
def __init__(self):
logging.Handler.__init__(self)
def emit(self, record):
levelString = record.levelname
record = self.format(record)
if record:
CustomStream.stdout().write('%s\n' % record, levelString)
def setup_custom_logger(name):
formatter = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(module)s - %(message)s')
handler = LogsToWidgetHandler()
handler.setFormatter(formatter)
neonLogger = logging.getLogger(name)
neonLogger.setLevel(logging.DEBUG)
neonLogger.addHandler(handler)
return neonLogger
class NeonLogger(object):
_logger = None
_zincLogger = None
_loggerNotifier = None
@staticmethod
def getLogger():
if (not NeonLogger._logger):
NeonLogger._logger = setup_custom_logger("Neon")
return NeonLogger._logger
@staticmethod
def writeErrorMessage(string):
NeonLogger.getLogger().error(string)
@staticmethod
def writeWarningMessage(string):
NeonLogger.getLogger().warning(string)
@staticmethod
def writeInformationMessage(string):
NeonLogger.getLogger().info(string)
@staticmethod
def loggerCallback(event):
if event.getChangeFlags() == Logger.CHANGE_FLAG_NEW_MESSAGE:
text = event.getMessageText()
if event.getMessageType() == Logger.MESSAGE_TYPE_ERROR:
NeonLogger.writeErrorMessage(text)
elif event.getMessageType() == Logger.MESSAGE_TYPE_WARNING:
NeonLogger.writeWarningMessage(text)
elif event.getMessageType() == Logger.MESSAGE_TYPE_INFORMATION:
NeonLogger.writeInformationMessage(text)
@staticmethod
def setZincContext(zincContext):
if NeonLogger._loggerNotifier:
NeonLogger._loggerNotifier.clearCallback()
NeonLogger._zincLogger = zincContext.getLogger()
NeonLogger._loggerNotifier = NeonLogger._zincLogger.createLoggernotifier()
NeonLogger._loggerNotifier.setCallback(NeonLogger.loggerCallback)
| '''
Copyright 2015 University of Auckland
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import sys
import logging
from PySide import QtCore
from opencmiss.zinc.logger import Logger
ENABLE_STD_STREAM_CAPTURE = False
class CustomStream(QtCore.QObject):
_stdout = None
_stderr = None
_saved_stdout = None
messageWritten = QtCore.Signal([str, str])
def flush(self):
pass
def fileno(self):
return -1
def write(self, msg, level="INFORMATION"):
if (not self.signalsBlocked()):
self.messageWritten.emit(msg, level)
@staticmethod
def stdout():
if (not CustomStream._stdout):
CustomStream._stdout = CustomStream()
CustomStream._saved_stdout = sys.stdout
sys.stdout = CustomStream._stdout if ENABLE_STD_STREAM_CAPTURE else sys.stdout
return CustomStream._stdout
@staticmethod
def stderr():
if (not CustomStream._stderr):
CustomStream._stderr = CustomStream()
sys.stderr = CustomStream._stdout if ENABLE_STD_STREAM_CAPTURE else sys.stderr
return CustomStream._stderr
class LogsToWidgetHandler(logging.Handler):
def __init__(self):
logging.Handler.__init__(self)
def emit(self, record):
levelString = record.levelname
record = self.format(record)
if record:
CustomStream.stdout().write('%s\n' % record, levelString)
def setup_custom_logger(name):
formatter = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(module)s - %(message)s')
handler = LogsToWidgetHandler()
handler.setFormatter(formatter)
neonLogger = logging.getLogger(name)
neonLogger.setLevel(logging.DEBUG)
neonLogger.addHandler(handler)
return neonLogger
class NeonLogger(object):
_logger = None
_zincLogger = None
_loggerNotifier = None
@staticmethod
def getLogger():
if (not NeonLogger._logger):
NeonLogger._logger = setup_custom_logger("Neon")
return NeonLogger._logger
@staticmethod
def writeErrorMessage(string):
NeonLogger.getLogger().error(string)
@staticmethod
def writeWarningMessage(string):
NeonLogger.getLogger().warning(string)
@staticmethod
def writeInformationMessage(string):
NeonLogger.getLogger().info(string)
@staticmethod
def loggerCallback(event):
if event.getChangeFlags() == Logger.CHANGE_FLAG_NEW_MESSAGE:
text = event.getMessageText()
if event.getMessageType() == Logger.MESSAGE_TYPE_ERROR:
NeonLogger.writeErrorMessage(text)
elif event.getMessageType() == Logger.MESSAGE_TYPE_WARNING:
NeonLogger.writeWarningMessage(text)
elif event.getMessageType() == Logger.MESSAGE_TYPE_INFORMATION:
NeonLogger.writeInformationMessage(text)
@staticmethod
def setZincContext(zincContext):
if NeonLogger._loggerNotifier:
NeonLogger._loggerNotifier.clearCallback()
NeonLogger._zincLogger = zincContext.getLogger()
NeonLogger._loggerNotifier = NeonLogger._zincLogger.createLoggernotifier()
NeonLogger._loggerNotifier.setCallback(NeonLogger.loggerCallback)
| Python | 0 |
6f0c05ee4743528550dd083d9290b5be0074ff0e | Add commands args to runner and improve docs in it | runner.py | runner.py | import argparse
import sys
from vsut.unit import CSVFormatter, TableFormatter, Unit
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Runs unit tests.")
parser.add_argument('units', metavar='Unit', type=str, nargs='+')
parser.add_argument(
'--format', help="Default: table; Decides whether to use table or csv for output.", required=False)
args = vars(parser.parse_args())
for unit in args["units"]:
try:
# Get the name of the module.
modName = unit.split(".")[0:-1]
modName = ".".join(modName)
# Get the name of the class.
className = unit.split(".")[-1]
# Import the module.
module = __import__(modName, fromlist=[className])
# Create unit and run it.
unit = getattr(module, className)()
unit.run()
# Format the results and output them.
if args["format"] == "csv":
formatter = CSVFormatter(unit)
else:
formatter = TableFormatter(unit)
print(formatter)
except (ImportError, AttributeError) as e:
print("[Error] Could not import unit: {0}".format(unit))
| import sys
from vsut.unit import CSVFormatter, TableFormatter
if __name__ == "__main__":
for i in range(1, len(sys.argv)):
try:
modName = sys.argv[i].split(".")[0:-1]
modName = ".".join(modName)
className = sys.argv[i].split(".")[-1]
module = __import__(modName, fromlist=[className])
className = getattr(module, className)
unit = className()
unit.run()
formatter = TableFormatter(unit)
print(formatter.format())
except ImportError as e:
#TODO: Handle this import error.
print(e)
| Python | 0 |
553cd68fb5d54be6ecbf3ca93c6d6c6be75afdb5 | Add EveLinkCache to evelink.appengine | evelink/appengine/__init__.py | evelink/appengine/__init__.py | from evelink.appengine.api import AppEngineAPI
from evelink.appengine.api import AppEngineCache
from evelink.appengine.api import AppEngineDatastoreCache
from evelink.appengine.api import EveLinkCache
from evelink.appengine import account
from evelink.appengine import char
from evelink.appengine import corp
from evelink.appengine import eve
from evelink.appengine import map
from evelink.appengine import server
__all__ = [
"AppEngineAPI",
"AppEngineCache",
"AppEngineDatastoreCache",
"EveLinkCache",
"account",
"char",
"corp",
"eve",
"map",
"server",
]
| from evelink.appengine.api import AppEngineAPI
from evelink.appengine.api import AppEngineCache
from evelink.appengine.api import AppEngineDatastoreCache
from evelink.appengine import account
from evelink.appengine import char
from evelink.appengine import corp
from evelink.appengine import eve
from evelink.appengine import map
from evelink.appengine import server
__all__ = [
"AppEngineAPI",
"AppEngineCache",
"AppEngineDatastoreCache",
"account",
"char",
"corp",
"eve",
"map",
"server",
]
| Python | 0.000001 |
68c4f723f5eea2802209862d323825f33a445154 | Fix url id to pk. | eventex/subscriptions/urls.py | eventex/subscriptions/urls.py | from django.urls import path
import eventex.subscriptions.views as s
app_name = 'subscriptions'
urlpatterns = [
path('', s.new, name='new'),
path('<int:pk>/', s.detail, name='detail'),
path('json/donut/', s.paid_list_json, name='paid_list_json'),
path('json/column/', s.paid_column_json, name='paid_column_json'),
path('graphic/', s.graphic, name='graphic'),
]
| from django.urls import path
import eventex.subscriptions.views as s
app_name = 'subscriptions'
urlpatterns = [
path('', s.new, name='new'),
path('<int:id>/', s.detail, name='detail'),
path('json/donut/', s.paid_list_json, name='paid_list_json'),
path('json/column/', s.paid_column_json, name='paid_column_json'),
path('graphic/', s.graphic, name='graphic'),
]
| Python | 0.000001 |
127e5ae02932af67c6157939cff6ab388c89c677 | convert process_attr to a parameter in contructor so extending the class is not needed | scrapy/trunk/scrapy/contrib_exp/link/__init__.py | scrapy/trunk/scrapy/contrib_exp/link/__init__.py | from HTMLParser import HTMLParser
from scrapy.link import Link
from scrapy.utils.python import unique as unique_list
from scrapy.utils.url import safe_url_string, urljoin_rfc as urljoin
class LinkExtractor(HTMLParser):
"""LinkExtractor are used to extract links from web pages. They are
instantiated and later "applied" to a Response using the extract_links
method which must receive a Response object and return a list of Link objects
containing the (absolute) urls to follow, and the links texts.
This is the base LinkExtractor class that provides enough basic
functionality for extracting links to follow, but you could override this
class or create a new one if you need some additional functionality. The
only requisite is that the new (or overrided) class must provide a
extract_links method that receives a Response and returns a list of Link objects.
This LinkExtractor always returns percent-encoded URLs, using the detected encoding
from the response.
The constructor arguments are:
* tag (string or function)
* a tag name which is used to search for links (defaults to "a")
* a function which receives a tag name and returns whether to scan it
* attr (string or function)
* an attribute name which is used to search for links (defaults to "href")
* a function which receives an attribute name and returns whether to scan it
* process (funtion)
* a function wich receives the attribute value before assigning it
* unique - if True the same urls won't be extracted twice, otherwise the
same urls will be extracted multiple times (with potentially different link texts)
"""
def __init__(self, tag="a", attr="href", process=None, unique=False):
HTMLParser.__init__(self)
self.scan_tag = tag if callable(tag) else lambda t: t == tag
self.scan_attr = attr if callable(attr) else lambda a: a == attr
self.process_attr = process if callable(process) else lambda v: v
self.unique = unique
def _extract_links(self, response_text, response_url, response_encoding):
self.reset()
self.feed(response_text)
self.close()
links = unique_list(self.links, key=lambda link: link.url) if self.unique else self.links
ret = []
base_url = self.base_url if self.base_url else response_url
for link in links:
link.url = urljoin(base_url, link.url)
link.url = safe_url_string(link.url, response_encoding)
link.text = link.text.decode(response_encoding)
ret.append(link)
return ret
def extract_links(self, response):
# wrapper needed to allow to work directly with text
return self._extract_links(response.body, response.url,
response.encoding)
def reset(self):
HTMLParser.reset(self)
self.base_url = None
self.current_link = None
self.links = []
def handle_starttag(self, tag, attrs):
if tag == 'base':
self.base_url = dict(attrs).get('href')
if self.scan_tag(tag):
for attr, value in attrs:
if self.scan_attr(attr):
url = self.process_attr(value)
link = Link(url=url)
self.links.append(link)
self.current_link = link
def handle_endtag(self, tag):
self.current_link = None
def handle_data(self, data):
if self.current_link and not self.current_link.text:
self.current_link.text = data.strip()
def matches(self, url):
"""This extractor matches with any url, since
it doesn't contain any patterns"""
return True
| from HTMLParser import HTMLParser
from scrapy.link import Link
from scrapy.utils.python import unique as unique_list
from scrapy.utils.url import safe_url_string, urljoin_rfc as urljoin
class LinkExtractor(HTMLParser):
"""LinkExtractor are used to extract links from web pages. They are
instantiated and later "applied" to a Response using the extract_links
method which must receive a Response object and return a list of Link objects
containing the (absolute) urls to follow, and the links texts.
This is the base LinkExtractor class that provides enough basic
functionality for extracting links to follow, but you could override this
class or create a new one if you need some additional functionality. The
only requisite is that the new (or overrided) class must provide a
extract_links method that receives a Response and returns a list of Link objects.
This LinkExtractor always returns percent-encoded URLs, using the detected encoding
from the response.
The constructor arguments are:
* tag (string or function)
* a tag name which is used to search for links (defaults to "a")
* a function which receives a tag name and returns whether to scan it
* attr (string or function)
* an attribute name which is used to search for links (defaults to "href")
* a function which receives an attribute name and returns whether to scan it
* unique - if True the same urls won't be extracted twice, otherwise the
same urls will be extracted multiple times (with potentially different link texts)
"""
def __init__(self, tag="a", attr="href", unique=False):
HTMLParser.__init__(self)
self.scan_tag = tag if callable(tag) else lambda t: t == tag
self.scan_attr = attr if callable(attr) else lambda a: a == attr
self.unique = unique
def _extract_links(self, response_text, response_url, response_encoding):
self.reset()
self.feed(response_text)
self.close()
links = unique_list(self.links, key=lambda link: link.url) if self.unique else self.links
ret = []
base_url = self.base_url if self.base_url else response_url
for link in links:
link.url = urljoin(base_url, link.url)
link.url = safe_url_string(link.url, response_encoding)
link.text = link.text.decode(response_encoding)
ret.append(link)
return ret
def extract_links(self, response):
# wrapper needed to allow to work directly with text
return self._extract_links(response.body, response.url,
response.encoding)
def reset(self):
HTMLParser.reset(self)
self.base_url = None
self.current_link = None
self.links = []
def handle_starttag(self, tag, attrs):
if tag == 'base':
self.base_url = dict(attrs).get('href')
if self.scan_tag(tag):
for attr, value in attrs:
if self.scan_attr(attr):
url = self.process_attr(value)
link = Link(url=url)
self.links.append(link)
self.current_link = link
def handle_endtag(self, tag):
self.current_link = None
def handle_data(self, data):
if self.current_link and not self.current_link.text:
self.current_link.text = data.strip()
def process_attr(self, value):
"""Hook to process the value of the attribute before asigning
it to the link"""
return value
def matches(self, url):
"""This extractor matches with any url, since
it doesn't contain any patterns"""
return True
| Python | 0.000002 |
ca625e22cb397905f859c826c6507b3977665a51 | Fix import | examples/cifar10_ror.py | examples/cifar10_ror.py | '''
Trains a Residual-of-Residual Network (WRN-40-2) model on the CIFAR-10 Dataset.
Gets a 94.53% accuracy score after 150 epochs.
'''
import numpy as np
import sklearn.metrics as metrics
import keras.callbacks as callbacks
import keras.utils.np_utils as kutils
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.optimizers import Adam
from keras_contrib.applications import ResidualOfResidual
batch_size = 64
nb_epoch = 150
img_rows, img_cols = 32, 32
(trainX, trainY), (testX, testY) = cifar10.load_data()
trainX = trainX.astype('float32')
testX = testX.astype('float32')
trainX /= 255
testX /= 255
tempY = testY
trainY = kutils.to_categorical(trainY)
testY = kutils.to_categorical(testY)
generator = ImageDataGenerator(rotation_range=15,
width_shift_range=5. / 32,
height_shift_range=5. / 32)
generator.fit(trainX, seed=0)
model = ResidualOfResidual(depth=40, width=2, dropout_rate=0.0, weights='None')
optimizer = Adam(lr=1e-3)
model.compile(loss="categorical_crossentropy", optimizer=optimizer, metrics=["acc"])
print("Finished compiling")
model.fit_generator(generator.flow(trainX, trainY, batch_size=batch_size), samples_per_epoch=len(trainX),
nb_epoch=nb_epoch,
callbacks=[callbacks.ModelCheckpoint("weights/RoR-WRN-40-2-Weights.h5", monitor="val_acc",
save_best_only=True, save_weights_only=True)],
validation_data=(testX, testY),
nb_val_samples=testX.shape[0], verbose=2)
scores = model.evaluate(testX, testY, batch_size)
print("Test loss : ", scores[0])
print("Test accuracy : ", scores[1])
| '''
Trains a Residual-of-Residual Network (WRN-40-2) model on the CIFAR-10 Dataset.
Gets a 94.53% accuracy score after 150 epochs.
'''
import numpy as np
import sklearn.metrics as metrics
import keras.callbacks as callbacks
import keras.utils.np_utils as kutils
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.optimizers import Adam
from keras_contrib.applications.ror import ResidualOfResidual
batch_size = 64
nb_epoch = 150
img_rows, img_cols = 32, 32
(trainX, trainY), (testX, testY) = cifar10.load_data()
trainX = trainX.astype('float32')
testX = testX.astype('float32')
trainX /= 255
testX /= 255
tempY = testY
trainY = kutils.to_categorical(trainY)
testY = kutils.to_categorical(testY)
generator = ImageDataGenerator(rotation_range=15,
width_shift_range=5. / 32,
height_shift_range=5. / 32)
generator.fit(trainX, seed=0)
model = ResidualOfResidual(depth=40, width=2, dropout_rate=0.0, weights='None')
optimizer = Adam(lr=1e-3)
model.compile(loss="categorical_crossentropy", optimizer=optimizer, metrics=["acc"])
print("Finished compiling")
model.fit_generator(generator.flow(trainX, trainY, batch_size=batch_size), samples_per_epoch=len(trainX),
nb_epoch=nb_epoch,
callbacks=[callbacks.ModelCheckpoint("weights/RoR-WRN-40-2-Weights.h5", monitor="val_acc",
save_best_only=True, save_weights_only=True)],
validation_data=(testX, testY),
nb_val_samples=testX.shape[0], verbose=2)
scores = model.evaluate(testX, testY, batch_size)
print("Test loss : ", scores[0])
print("Test accuracy : ", scores[1])
| Python | 0 |
d458fb855df77dfb553ee3e95a8201f58aba169e | Increment version number | clippercard/__init__.py | clippercard/__init__.py | """
Copyright (c) 2012-2017 (https://github.com/clippercard/clippercard-python)
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import clippercard.client as client
Session = client.ClipperCardWebSession
__version__ = '0.4.1'
| """
Copyright (c) 2012-2017 (https://github.com/clippercard/clippercard-python)
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import clippercard.client as client
Session = client.ClipperCardWebSession
__version__ = '0.4.0'
| Python | 0.000021 |
6e663d4010f9a79d2816a212e504773a1745a8e6 | Fix project name! | src/txkube/__init__.py | src/txkube/__init__.py | # Copyright Least Authority Enterprises.
# See LICENSE for details.
"""
A Kubernetes client.
"""
__all__ = [
"version",
"IKubernetesClient",
"network_client", "memory_client",
]
from incremental import Version
from ._metadata import version_tuple as _version_tuple
version = Version("txkube", *_version_tuple)
from ._interface import IKubernetes, IKubernetesClient
from ._network import network_kubernetes
from ._memory import memory_kubernetes
| # Copyright Least Authority Enterprises.
# See LICENSE for details.
"""
A Kubernetes client.
"""
__all__ = [
"version",
"IKubernetesClient",
"network_client", "memory_client",
]
from incremental import Version
from ._metadata import version_tuple as _version_tuple
version = Version("pykube", *_version_tuple)
from ._interface import IKubernetes, IKubernetesClient
from ._network import network_kubernetes
from ._memory import memory_kubernetes
| Python | 0 |
15faef8beb415211a04fd6dca976158343d8f77f | add abc to guid, fixed issues | user_profile/models.py | user_profile/models.py | from django.db import models
from django.contrib.auth.models import User
import uuid
# Create your models here.
# using the guid model
from framework.models import GUIDModel
class Profile(GUIDModel):
author = models.ForeignKey(User)
display_name = models.CharField(max_length=55)
def as_dict(self):
return {
"id": self.guid,
# TODO implement host
"host": "",
"displayname" : self.display_name,
"url": self.host + "/author/" + self.guid
}
| from django.db import models
from django.contrib.auth.models import User
import uuid
# Create your models here.
# using the guid model
from framework.models import GUIDModel
class Profile(GUIDModel):
author = models.ForeignKey(User)
display_name = models.CharField(max_length=55)
# guid
guid = models.CharField(max_length=55, default=None)
def as_dict(self):
return {
"id": self.guid,
# TODO implement host
"host": "",
"displayname" : self.display_name,
"url": self.host + "/author/" + self.guid
}
| Python | 0.000231 |
127ad982617c2376c9378d1ef7e50b716a077428 | Replace imp with __import__ | dm_root.py | dm_root.py | #!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#pylint: disable-msg=W0404
#
"""The Device Model root, allowing specific platforms to populate it."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import google3
import dm.catawampus
import dm.management_server
import tr.core
import traceroute
def _RecursiveImport(name):
return __import__(name, fromlist=[''])
class DeviceModelRoot(tr.core.Exporter):
"""A class to hold the device models."""
def __init__(self, loop, platform):
tr.core.Exporter.__init__(self)
if platform:
self.device = _RecursiveImport('platform.%s.device' % platform)
(params, objects) = self.device.PlatformInit(name=platform,
device_model_root=self)
else:
(params, objects) = (list(), list())
self.TraceRoute = traceroute.TraceRoute(loop)
objects.append('TraceRoute')
self.X_CATAWAMPUS_ORG_CATAWAMPUS = dm.catawampus.CatawampusDm()
objects.append('X_CATAWAMPUS-ORG_CATAWAMPUS')
self.Export(params=params, objects=objects)
def get_platform_config(self):
"""Return the platform_config.py object for this platform."""
return self.device.PlatformConfig()
def add_management_server(self, mgmt):
# tr-181 Device.ManagementServer
try:
ms181 = self.GetExport('Device')
ms181.ManagementServer = dm.management_server.ManagementServer181(mgmt)
except (AttributeError, KeyError):
pass # no tr-181 for this platform
# tr-98 InternetGatewayDevice.ManagementServer
try:
ms98 = self.GetExport('InternetGatewayDevice')
ms98.ManagementServer = dm.management_server.ManagementServer98(mgmt)
except (AttributeError, KeyError):
pass # no tr-98 for this platform
| #!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#pylint: disable-msg=W0404
#
"""The Device Model root, allowing specific platforms to populate it."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import imp
import sys
import google3
import dm.catawampus
import dm.management_server
import tr.core
import traceroute
def _RecursiveImport(name):
split = name.split('.')
last = split.pop()
if split:
path = _RecursiveImport('.'.join(split)).__path__
else:
path = sys.path
fileobj, path, description = imp.find_module(last, path)
return imp.load_module(name, fileobj, path, description)
class DeviceModelRoot(tr.core.Exporter):
"""A class to hold the device models."""
def __init__(self, loop, platform):
tr.core.Exporter.__init__(self)
if platform:
self.device = _RecursiveImport('platform.%s.device' % platform)
(params, objects) = self.device.PlatformInit(name=platform,
device_model_root=self)
else:
(params, objects) = (list(), list())
self.TraceRoute = traceroute.TraceRoute(loop)
objects.append('TraceRoute')
self.X_CATAWAMPUS_ORG_CATAWAMPUS = dm.catawampus.CatawampusDm()
objects.append('X_CATAWAMPUS-ORG_CATAWAMPUS')
self.Export(params=params, objects=objects)
def get_platform_config(self):
"""Return the platform_config.py object for this platform."""
return self.device.PlatformConfig()
def add_management_server(self, mgmt):
# tr-181 Device.ManagementServer
try:
ms181 = self.GetExport('Device')
ms181.ManagementServer = dm.management_server.ManagementServer181(mgmt)
except (AttributeError, KeyError):
pass # no tr-181 for this platform
# tr-98 InternetGatewayDevice.ManagementServer
try:
ms98 = self.GetExport('InternetGatewayDevice')
ms98.ManagementServer = dm.management_server.ManagementServer98(mgmt)
except (AttributeError, KeyError):
pass # no tr-98 for this platform
| Python | 0.000617 |
8233f9c312955d56dff2fc80aed71dae6af910be | Check for None repos, in case of bad configuration file | do/main.py | do/main.py | # -*- coding: utf-8 -*-
""" DO!
I can do things thanks to Python, YAML configurations and Docker
NOTE: the command check does nothing
"""
from do.project import project_configuration, apply_variables
from do.gitter import clone, upstream
from do.builds import find_and_build
from do.utils.logs import get_logger
log = get_logger(__name__)
class Application(object):
def __init__(self, args):
self.action = args.get('command')
if self.action is None:
raise AttributeError("Misconfiguration")
else:
# print(f"\n********************\tDO: {self.action}")
print("\n********************\tDO: %s" % self.action)
self.blueprint = args.get('blueprint')
self.current_args = args
self.run()
def read_specs(self):
""" Read project configuration """
self.specs = project_configuration()
self.vars = self.specs \
.get('variables', {}) \
.get('python', {})
self.frontend = self.vars \
.get('frontend', {}) \
.get('enable', False)
log.very_verbose("Frontend is %s" % self.frontend)
def git_submodules(self):
""" Check and/or clone git projects """
initialize = self.action == 'init'
repos = self.vars.get('repos')
if repos is None:
raise AttributeError(
"Invalid configuration: repos section is missing")
core = repos.pop('rapydo')
upstream(
url=core.get('online_url'),
path=core.get('path'),
do=initialize
)
myvars = {'frontend': self.frontend}
for _, repo in repos.items():
# substitute $$ values
repo = apply_variables(repo, myvars)
if repo.pop('if', False):
clone(**repo, do=initialize)
raise NotImplementedError("TO FINISH")
def builds(self):
""" Look up for builds depending on templates """
# FIXME: move here the logic
# TODO: pass the check/init option
find_and_build(
bp=self.blueprint,
frontend=self.frontend,
build=self.current_args.get('force_build'),
)
def run(self):
func = getattr(self, self.action, None)
if func is None:
# log.critical_exit(f"Command not yet implemented: {self.action}")
log.critical_exit("Command not yet implemented: %s" % self.action)
self.read_specs()
self.git_submodules()
self.builds()
# Do what you're supposed to
func()
def check(self):
raise AttributeError("Not completed yet")
def init(self):
raise AttributeError("Not completed yet")
| # -*- coding: utf-8 -*-
""" DO!
I can do things thanks to Python, YAML configurations and Docker
NOTE: the command check does nothing
"""
from do.project import project_configuration, apply_variables
from do.gitter import clone, upstream
from do.builds import find_and_build
from do.utils.logs import get_logger
log = get_logger(__name__)
class Application(object):
def __init__(self, args):
self.action = args.get('command')
if self.action is None:
raise AttributeError("Misconfiguration")
else:
# print(f"\n********************\tDO: {self.action}")
print("\n********************\tDO: %s" % self.action)
self.blueprint = args.get('blueprint')
self.current_args = args
self.run()
def read_specs(self):
""" Read project configuration """
self.specs = project_configuration()
self.vars = self.specs \
.get('variables', {}) \
.get('python', {})
self.frontend = self.vars \
.get('frontend', {}) \
.get('enable', False)
log.very_verbose("Frontend is %s" % self.frontend)
def git_submodules(self):
""" Check and/or clone git projects """
initialize = self.action == 'init'
repos = self.vars.get('repos')
core = repos.pop('rapydo')
upstream(
url=core.get('online_url'),
path=core.get('path'),
do=initialize
)
myvars = {'frontend': self.frontend}
for _, repo in repos.items():
# substitute $$ values
repo = apply_variables(repo, myvars)
if repo.pop('if', False):
clone(**repo, do=initialize)
raise NotImplementedError("TO FINISH")
def builds(self):
""" Look up for builds depending on templates """
# FIXME: move here the logic
# TODO: pass the check/init option
find_and_build(
bp=self.blueprint,
frontend=self.frontend,
build=self.current_args.get('force_build'),
)
def run(self):
func = getattr(self, self.action, None)
if func is None:
# log.critical_exit(f"Command not yet implemented: {self.action}")
log.critical_exit("Command not yet implemented: %s" % self.action)
self.read_specs()
self.git_submodules()
self.builds()
# Do what you're supposed to
func()
def check(self):
raise AttributeError("Not completed yet")
def init(self):
raise AttributeError("Not completed yet")
| Python | 0 |
e784227ae5da242d474bc02209289e1dabd2d3a2 | Test Spectral Reconstruction on Sin Wave | utils/spectral_test.py | utils/spectral_test.py | # Lint as: python3
"""Tests for spectral."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import os
import spectral
class SpectralTest(tf.test.TestCase):
def test_waveform_to_spectogram_shape(self):
waveform = np.random.normal(size=(2**14,))
spectogram = spectral.waveform_2_spectogram(waveform, frame_length=512, frame_step=128)
self.assertEqual((128, 256, 2), spectogram.shape)
def test_waveform_to_magnitude_shape(self):
waveform = np.random.normal(size=(2**14,))
magnitude = spectral.waveform_2_magnitude(waveform, frame_length=512, frame_step=128)
self.assertEqual((128, 256), magnitude.shape)
def test_waveform_to_spectogram_return(self):
waveform = np.sin(np.linspace(0, 4 * np.pi, 2**14))
spectogram = spectral.waveform_2_spectogram(waveform, frame_length=512, frame_step=128)
waveform_hat = spectral.spectogram_2_waveform(spectogram, frame_length=512, frame_step=128)
# Account for extra samples from reverse transform
waveform_hat = waveform[0:len(waveform)]
self.assertAllClose(waveform, waveform_hat)
def test_waveform_to_magnitude_return(self):
waveform = np.sin(np.linspace(0, 4 * np.pi, 2**14))
spectogram = spectral.waveform_2_magnitude(waveform, frame_length=512, frame_step=128)
waveform_hat = spectral.magnitude_2_waveform(spectogram, frame_length=512, frame_step=128)
# Account for extra samples from reverse transform
waveform_hat = waveform[0:len(waveform)]
self.assertAllClose(waveform, waveform_hat)
if __name__ == '__main__':
os.environ["CUDA_VISIBLE_DEVICES"] = ''
tf.test.main()
| # Lint as: python3
"""Tests for spectral."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import os
import spectral
class SpectralTest(tf.test.TestCase):
def test_waveform_to_spectogram_shape(self):
waveform = np.random.normal(size=(2**14,))
spectogram = spectral.waveform_2_spectogram(waveform, frame_length=512, frame_step=128)
self.assertEqual((128, 256, 2), spectogram.shape)
def test_waveform_to_magnitude_shape(self):
waveform = np.random.normal(size=(2**14,))
magnitude = spectral.waveform_2_magnitude(waveform, frame_length=512, frame_step=128)
self.assertEqual((128, 256), magnitude.shape)
def test_waveform_to_spectogram_return(self):
waveform = np.random.normal(size=(2**14,))
spectogram = spectral.waveform_2_spectogram(waveform, frame_length=512, frame_step=128)
waveform_hat = spectral.spectogram_2_waveform(spectogram, frame_length=512, frame_step=128)
# Account for extra samples from reverse transform
waveform_hat = waveform[0:len(waveform)]
self.assertAllClose(waveform, waveform_hat)
def test_waveform_to_magnitude_return(self):
waveform = np.random.normal(size=(2**14,))
spectogram = spectral.waveform_2_magnitude(waveform, frame_length=512, frame_step=128)
waveform_hat = spectral.magnitude_2_waveform(spectogram, frame_length=512, frame_step=128)
# Account for extra samples from reverse transform
waveform_hat = waveform[0:len(waveform)]
self.assertAllClose(waveform, waveform_hat)
if __name__ == '__main__':
os.environ["CUDA_VISIBLE_DEVICES"] = ''
tf.test.main()
| Python | 0 |
05939b0b797780ac1d265c8415f72f1ca44be53d | Modify return tag search data with tag_name | coco/dashboard/views.py | coco/dashboard/views.py | # -*- coding: utf-8 -*-
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from posts.models import Post, Tag
@login_required
def index(request):
context = {'posts': Post.objects.all()}
return render(request, 'dashboard/index.html', context)
@login_required
def tagged_posts(request, tag_name=""):
context = {
'tag': tag_name,
'posts': Post.objects.filter(tags__name=tag_name)
}
return render(request, 'dashboard/search_result.html', context)
| # -*- coding: utf-8 -*-
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from posts.models import Post, Tag
@login_required
def index(request):
context = {'posts': Post.objects.all()}
return render(request, 'dashboard/index.html', context)
@login_required
def tagged_posts(request, tag_name=""):
context = {'posts': Post.objects.filter(tags__name=tag_name)}
return render(request, 'dashboard/search_result.html', context)
| Python | 0.000001 |
d0de5476580b466d7b13cfc7668c267e62cb15f0 | create 32 bit integer var, not 64 (to allow test with NETCDF4_CLASSIC) | examples/mpi_example.py | examples/mpi_example.py | # to run: mpirun -np 4 python mpi_example.py
from mpi4py import MPI
import numpy as np
from netCDF4 import Dataset
rank = MPI.COMM_WORLD.rank # The process ID (integer 0-3 for 4-process run)
nc = Dataset('parallel_test.nc', 'w', parallel=True, comm=MPI.COMM_WORLD,
info=MPI.Info(),format='NETCDF4_CLASSIC')
# below should work also - MPI_COMM_WORLD and MPI_INFO_NULL will be used.
#nc = Dataset('parallel_test.nc', 'w', parallel=True)
d = nc.createDimension('dim',4)
v = nc.createVariable('var', np.int32, 'dim')
v[rank] = rank
# switch to collective mode, rewrite the data.
v.set_collective(True)
v[rank] = rank
nc.close()
# reopen the file read-only, check the data
nc = Dataset('parallel_test.nc', parallel=True, comm=MPI.COMM_WORLD,
info=MPI.Info())
assert rank==nc['var'][rank]
nc.close()
# reopen the file in append mode, modify the data on the last rank.
nc = Dataset('parallel_test.nc', 'a',parallel=True, comm=MPI.COMM_WORLD,
info=MPI.Info())
if rank == 3: v[rank] = 2*rank
nc.close()
# reopen the file read-only again, check the data.
# leave out the comm and info kwargs to check that the defaults
# (MPI_COMM_WORLD and MPI_INFO_NULL) work.
nc = Dataset('parallel_test.nc', parallel=True)
if rank == 3:
assert 2*rank==nc['var'][rank]
else:
assert rank==nc['var'][rank]
nc.close()
| # to run: mpirun -np 4 python mpi_example.py
from mpi4py import MPI
import numpy as np
from netCDF4 import Dataset
rank = MPI.COMM_WORLD.rank # The process ID (integer 0-3 for 4-process run)
nc = Dataset('parallel_test.nc', 'w', parallel=True, comm=MPI.COMM_WORLD,
info=MPI.Info(),format='NETCDF4_CLASSIC')
# below should work also - MPI_COMM_WORLD and MPI_INFO_NULL will be used.
#nc = Dataset('parallel_test.nc', 'w', parallel=True)
d = nc.createDimension('dim',4)
v = nc.createVariable('var', np.int, 'dim')
v[rank] = rank
# switch to collective mode, rewrite the data.
v.set_collective(True)
v[rank] = rank
nc.close()
# reopen the file read-only, check the data
nc = Dataset('parallel_test.nc', parallel=True, comm=MPI.COMM_WORLD,
info=MPI.Info())
assert rank==nc['var'][rank]
nc.close()
# reopen the file in append mode, modify the data on the last rank.
nc = Dataset('parallel_test.nc', 'a',parallel=True, comm=MPI.COMM_WORLD,
info=MPI.Info())
if rank == 3: v[rank] = 2*rank
nc.close()
# reopen the file read-only again, check the data.
# leave out the comm and info kwargs to check that the defaults
# (MPI_COMM_WORLD and MPI_INFO_NULL) work.
nc = Dataset('parallel_test.nc', parallel=True)
if rank == 3:
assert 2*rank==nc['var'][rank]
else:
assert rank==nc['var'][rank]
nc.close()
| Python | 0 |
a53fae5b42e9b33774650e017967b865552870e9 | tag v0.7.4 | unihan_tabular/__about__.py | unihan_tabular/__about__.py | __title__ = 'unihan-tabular'
__package_name__ = 'unihan_tabular'
__description__ = 'Export UNIHAN to Python, Data Package, CSV, JSON and YAML'
__version__ = '0.7.4'
__author__ = 'Tony Narlock'
__email__ = 'cihai@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013-2017 Tony Narlock'
| __title__ = 'unihan-tabular'
__package_name__ = 'unihan_tabular'
__description__ = 'Export UNIHAN to Python, Data Package, CSV, JSON and YAML'
__version__ = '0.7.3'
__author__ = 'Tony Narlock'
__email__ = 'cihai@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013-2017 Tony Narlock'
| Python | 0.000001 |
4420892ad3e8c1797753e7893772e53785efb570 | add logfile handling | updatebot/cmdline/simple.py | updatebot/cmdline/simple.py | #
# Copyright (c) 2008 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
import os
from updatebot import log
from updatebot import config
def validatePlatform(platform, configDir):
validPlatforms = os.listdir(configDir)
if platform not in validPlatforms:
print ('Invalid platform %s... Please select from the following '
'available platforms %s' % (platform, ', '.join(validPlatforms)))
return False
return True
def usage(argv):
print 'usage: %s <platform name> [logfile]' % argv[0]
return 1
def main(argv, workerFunc, configDir='/etc/mirrorball', enableLogging=True):
if len(argv) < 2 or len(argv) > 3:
return usage(argv)
logFile = None
if len(argv) == 3:
logFile = argv[2]
if enableLogging:
log.addRootLogger(logFile=logFile)
platform = argv[1]
if not validatePlatform(platform, configDir):
return 1
cfg = config.UpdateBotConfig()
cfg.read(os.path.join(configDir, platform, 'updatebotrc'))
rc = workerFunc(cfg)
return rc
| #
# Copyright (c) 2008 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
import os
from updatebot import log
from updatebot import config
def validatePlatform(platform, configDir):
validPlatforms = os.listdir(configDir)
if platform not in validPlatforms:
print ('Invalid platform %s... Please select from the following '
'available platforms %s' % (platform, ', '.join(validPlatforms)))
return False
return True
def usage(argv):
print 'usage: %s <platform name>' % argv[0]
return 1
def main(argv, workerFunc, configDir='/etc/mirrorball', enableLogging=True):
if enableLogging:
log.addRootLogger()
if len(argv) != 2:
return usage(argv)
platform = argv[1]
if not validatePlatform(platform, configDir):
return 1
cfg = config.UpdateBotConfig()
cfg.read(os.path.join(configDir, platform, 'updatebotrc'))
rc = workerFunc(cfg)
return rc
| Python | 0.000001 |
4ffa2483021b360eb7460bfcf1d845712806390b | Move motor ports because our retail EV3 brick's port A doesn't work. | app/ev3.py | app/ev3.py | # See http://ev3dev-lang.readthedocs.org/projects/python-ev3dev/en/stable/index.html
# for API details -- specific Sensor/Motor docs http://www.ev3dev.org/docs/
from time import sleep
from ev3dev.auto import *
# Connect two large motors on output ports A and C
lmotor, rmotor = [LargeMotor(address) for address in (OUTPUT_B, OUTPUT_D)]
moving = False
# Connect medium motors on output port B for the camera
cmotor = MediumMotor(OUTPUT_C)
camera_pos = 0
cmotor.reset()
# Check that the motors are actually connected
assert lmotor.connected
assert rmotor.connected
color_sensor = ColorSensor()
touch_sensor = TouchSensor()
gyro = GyroSensor()
def _start():
'''
Start the motors using run_direct() so we can just vary speed
'''
global moving
if not moving:
for motor in (lmotor, rmotor):
motor.duty_cycle_sp = 0
motor.run_direct()
moving = True
def forward(speed=50):
'''
Move the robot forward
'''
_start()
for motor in (lmotor, rmotor):
motor.duty_cycle_sp=speed
def backward(speed=50):
'''
Reverse
'''
_start()
for motor in (lmotor, rmotor):
motor.duty_cycle_sp=-speed
def stop():
'''
Stop the robot
'''
global moving
for motor in (lmotor, rmotor):
motor.stop()
moving = False
def turn_right(speed=40):
'''
Turn while moving forward
'''
_start()
lmotor.duty_cycle_sp=speed
rmotor.duty_cycle_sp=0
def turn_left(speed=40):
'''
Turn while moving forward
'''
_start()
lmotor.duty_cycle_sp=0
rmotor.duty_cycle_sp=speed
def spin_right(speed=50):
'''
Turn on the spot
'''
_start()
lmotor.duty_cycle_sp=speed
rmotor.duty_cycle_sp=-speed
def spin_left(speed=50):
'''
Turn on the spot
'''
_start()
lmotor.duty_cycle_sp=-speed
rmotor.duty_cycle_sp=speed
def speak():
Sound.speak('Excuse me!')
def color():
return color_sensor.value()
def touch():
return touch_sensor.value() == 1
def direction():
return gyro.value() % 360 # degrees, but needs a 0 value
def camera_left():
global camera_pos
camera_pos -= 25
camera_pos = max(-150, camera_pos)
cmotor.run_to_abs_pos(speed_regulation_enabled='on', speed_sp=100, position_sp=camera_pos)
def camera_right():
global camera_pos
camera_pos += 25
camera_pos = min(150, camera_pos)
cmotor.run_to_abs_pos(speed_regulation_enabled='on', speed_sp=100, position_sp=camera_pos)
def camera_forward():
global camera_pos
camera_pos = 0
cmotor.run_to_abs_pos(speed_regulation_enabled='on', speed_sp=100, position_sp=camera_pos)
| # See http://ev3dev-lang.readthedocs.org/projects/python-ev3dev/en/stable/index.html
# for API details -- specific Sensor/Motor docs http://www.ev3dev.org/docs/
from time import sleep
from ev3dev.auto import *
# Connect two large motors on output ports A and C
lmotor, rmotor = [LargeMotor(address) for address in (OUTPUT_A, OUTPUT_C)]
moving = False
# Connect medium motors on output port B for the camera
cmotor = MediumMotor(OUTPUT_B)
camera_pos = 0
cmotor.reset()
# Check that the motors are actually connected
assert lmotor.connected
assert rmotor.connected
color_sensor = ColorSensor()
touch_sensor = TouchSensor()
gyro = GyroSensor()
def _start():
'''
Start the motors using run_direct() so we can just vary speed
'''
global moving
if not moving:
for motor in (lmotor, rmotor):
motor.duty_cycle_sp = 0
motor.run_direct()
moving = True
def forward(speed=50):
'''
Move the robot forward
'''
_start()
for motor in (lmotor, rmotor):
motor.duty_cycle_sp=speed
def backward(speed=50):
'''
Reverse
'''
_start()
for motor in (lmotor, rmotor):
motor.duty_cycle_sp=-speed
def stop():
'''
Stop the robot
'''
global moving
for motor in (lmotor, rmotor):
motor.stop()
moving = False
def turn_right(speed=40):
'''
Turn while moving forward
'''
_start()
lmotor.duty_cycle_sp=speed
rmotor.duty_cycle_sp=0
def turn_left(speed=40):
'''
Turn while moving forward
'''
_start()
lmotor.duty_cycle_sp=0
rmotor.duty_cycle_sp=speed
def spin_right(speed=50):
'''
Turn on the spot
'''
_start()
lmotor.duty_cycle_sp=speed
rmotor.duty_cycle_sp=-speed
def spin_left(speed=50):
'''
Turn on the spot
'''
_start()
lmotor.duty_cycle_sp=-speed
rmotor.duty_cycle_sp=speed
def speak():
Sound.speak('Excuse me!')
def color():
return color_sensor.value()
def touch():
return touch_sensor.value() == 1
def direction():
return gyro.value() % 360 # degrees, but needs a 0 value
def camera_left():
global camera_pos
camera_pos -= 25
camera_pos = max(-150, camera_pos)
cmotor.run_to_abs_pos(speed_regulation_enabled='on', speed_sp=100, position_sp=camera_pos)
def camera_right():
global camera_pos
camera_pos += 25
camera_pos = min(150, camera_pos)
cmotor.run_to_abs_pos(speed_regulation_enabled='on', speed_sp=100, position_sp=camera_pos)
def camera_forward():
global camera_pos
camera_pos = 0
cmotor.run_to_abs_pos(speed_regulation_enabled='on', speed_sp=100, position_sp=camera_pos)
| Python | 0 |
98cb673b358671211a0aa7fed0725dbb732200d0 | Fix edge cases due to artworkUrl100 being missing | coverpy/coverpy.py | coverpy/coverpy.py | import os
import requests
from . import exceptions
class Result:
""" Parse an API result into an object format. """
def __init__(self, item):
""" Call the list parser. """
self.parse(item)
def parse(self, item):
""" Parse the given list into self variables. """
try:
self.artworkThumb = item['artworkUrl100']
except KeyError as e:
# A vital parameter is missing, and magic on our end can't get us out of this error case situation.
# Therefore, we try to save the user from issues (mostly KeyErrors), and stop them from using the public API.
# Just return a NoResultsException, because the data is corrupt on the API's end,
# and the library can't use the results.
# This gets many edge cases in which the API had issues dealing with.
raise exceptions.NoResultsException
# The above should prevent most keyerrors, this one is just guarding. However, if something fails here,
# I can't do much because the API sometimes skips on things and this is _not vital._
self.artist = item['artistName']
self.album = item['collectionName']
self.url = item['url']
# Take some measures to detect whether it is a song or album
if 'kind' in item:
self.type = item['kind'].lower()
elif 'wrapperType' in item:
if item['wrapperType'].lower() == 'track':
self.type = 'song'
elif item['wrapperType'].lower() == 'collection':
self.type = 'album'
elif 'collectionType' in item:
self.type = 'album'
else:
# Assuming edge case of the API
self.type = 'unknown'
if self.type == 'song':
self.name = item['trackName']
elif self.type == 'album':
self.name = item['collectionName']
else:
self.name = 'unknown'
def artwork(self, size = 625):
""" Return the artwork to the thumb URL contained. """
# Replace size because API doesn't hand links to full res. It only gives 60x60 and 100x100.
# However, I found a way to circumvent it.
return self.artworkThumb.replace('100x100bb', "%sx%s" % (size, size))
class CoverPy:
def __init__(self):
""" Initialize CoverPy. Set a base_url. """
self.base_url = "https://itunes.apple.com/search/"
def _get(self, payload, override = False, entities = False):
""" Get a payload using the base_url. General purpose GET interface """
if override:
data = requests.get("%s%s" % (self.base_url, override))
else:
payload['entity'] = "musicArtist,musicTrack,album,mix,song"
payload['media'] = 'music'
data = requests.get(self.base_url, params = payload)
if data.status_code != 200:
raise requests.HTTPError
else:
return data
def _search(self, term, limit = 1):
""" Expose a friendlier internal API for executing searches """
payload = {
'term': term,
'limit': limit
}
req = self._get(payload)
return req
def get_cover(self, term, limit = 1, debug = False):
""" Get an album cover, return a Result object """
search = self._search(term, limit)
parsed = search.json()
if parsed['resultCount'] == 0:
raise exceptions.NoResultsException
result = parsed['results'][0]
result['url'] = search.url
return Result(result)
| import os
import requests
from . import exceptions
class Result:
""" Parse an API result into an object format. """
def __init__(self, item):
""" Call the list parser. """
self.parse(item)
def parse(self, item):
""" Parse the given list into self variables. """
self.artworkThumb = item['artworkUrl100']
self.artist = item['artistName']
self.album = item['collectionName']
self.url = item['url']
# Take some measures to detect whether it is a song or album
if 'kind' in item:
self.type = item['kind'].lower()
elif 'wrapperType' in item:
if item['wrapperType'].lower() == 'track':
self.type = 'song'
elif item['wrapperType'].lower() == 'collection':
self.type = 'album'
elif 'collectionType' in item:
self.type = 'album'
else:
# Assuming edge case of the API
self.type = 'unknown'
if self.type == 'song':
self.name = item['trackName']
elif self.type == 'album':
self.name = item['collectionName']
else:
self.name = 'unknown'
def artwork(self, size = 625):
""" Return the artwork to the thumb URL contained. """
# Replace size because API doesn't hand links to full res. It only gives 60x60 and 100x100.
# However, I found a way to circumvent it.
return self.artworkThumb.replace('100x100bb', "%sx%s" % (size, size))
class CoverPy:
def __init__(self):
""" Initialize CoverPy. Set a base_url. """
self.base_url = "https://itunes.apple.com/search/"
def _get(self, payload, override = False, entities = False):
""" Get a payload using the base_url. General purpose GET interface """
if override:
data = requests.get("%s%s" % (self.base_url, override))
else:
payload['entity'] = "musicArtist,musicTrack,album,mix,song"
payload['media'] = 'music'
data = requests.get(self.base_url, params = payload)
if data.status_code != 200:
raise requests.HTTPError
else:
return data
def _search(self, term, limit = 1):
""" Expose a friendlier internal API for executing searches """
payload = {
'term': term,
'limit': limit
}
req = self._get(payload)
return req
def get_cover(self, term, limit = 1, debug = False):
""" Get an album cover, return a Result object """
search = self._search(term, limit)
parsed = search.json()
if parsed['resultCount'] == 0:
raise exceptions.NoResultsException
result = parsed['results'][0]
result['url'] = search.url
return Result(result)
| Python | 0 |
7f0b530db953698e6e923366be6d0d98033e4afb | add description | prontopull.py | prontopull.py | # -*- coding: utf-8 -*-
'''
Pulls data from pronto cycle share. Combine with cron job to
get data over time
'''
from urllib2 import Request, urlopen
import json
from pandas.io.json import json_normalize
import time
url = "https://secure.prontocycleshare.com/data/stations.json"
request = Request(url)
response = urlopen(request)
data = json.loads(response.read())
df=json_normalize(data['stations'])
timestring = time.strftime("%Y%m%d-%H%M%S")
SAVE_PATH = "../Desktop/pronto/pronto%s.csv" %timestring
df.to_csv(SAVE_PATH, sep = ",")
| # -*- coding: utf-8 -*-
from urllib2 import Request, urlopen
import json
from pandas.io.json import json_normalize
import time
#from datetime import datetime
url = "https://secure.prontocycleshare.com/data/stations.json"
request = Request(url)
response = urlopen(request)
data = json.loads(response.read())
df=json_normalize(data['stations'])
timestring = time.strftime("%Y%m%d-%H%M%S")
SAVE_PATH = "../Desktop/pronto/pronto%s.csv" %timestring
df.to_csv(SAVE_PATH, sep = ",")
| Python | 0.000004 |
d016e9f2620688bc1059977a12df638393c3fff1 | Bump version | lintreview/__init__.py | lintreview/__init__.py | __version__ = '2.1.2'
| __version__ = '2.1.1'
| Python | 0 |
c86e7107d2f9d8079b0010ac100f627f1c34d127 | Update ipc_lista1.2.py | lista1/ipc_lista1.2.py | lista1/ipc_lista1.2.py | #ipc_lista1.2
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um Programa que peça um número e então mostre a mensagem O número informado foi [número].
number = input("Digite um número: ")
print "O número digitado foi ",number
| #ipc_lista1.2
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um Programa que peça um número e então mostre a mensagem O número informado foi [número].
number = input("Digite um número: ")
print "O número digitado foi ",number
| Python | 0.000001 |
85432b9509744eadc47c73a21b49f9ea93172c78 | Update ipc_lista1.8.py | lista1/ipc_lista1.8.py | lista1/ipc_lista1.8.py | #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 161531
| #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615
| Python | 0 |
9cad93eb5f04e9f455cec679089d8c8787ce3b04 | Enable appsembler reporting settings | lms/envs/appsembler.py | lms/envs/appsembler.py | import os
import json
from path import path
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)
CONFIG_ROOT = path('/edx/app/edxapp/') #don't hardcode this in the future
CONFIG_PREFIX = SERVICE_VARIANT + "." if SERVICE_VARIANT else ""
with open(CONFIG_ROOT / CONFIG_PREFIX + 'env.json') as env_file:
ENV_TOKENS = json.load(env_file)
APPSEMBLER_FEATURES = ENV_TOKENS.get('APPSEMBLER_FEATURES', {})
# search APPSEMBLER_FEATURES first, env variables second, fallback to None
GOOGLE_TAG_MANAGER_ID = APPSEMBLER_FEATURES.get('GOOGLE_TAG_MANAGER_ID', os.environ.get('GOOGLE_TAG_MANAGER_ID', None))
INTERCOM_APP_ID = APPSEMBLER_FEATURES.get('INTERCOM_APP_ID', os.environ.get('INTERCOM_APP_ID', ''))
INTERCOM_API_KEY = APPSEMBLER_FEATURES.get('INTERCOM_API_KEY', os.environ.get('INTERCOM_API_KEY', ''))
INTERCOM_USER_EMAIL = APPSEMBLER_FEATURES.get('INTERCOM_USER_EMAIL', os.environ.get('INTERCOM_USER_EMAIL', ''))
if APPSEMBLER_FEATURES.get('ENABLE_APPSEMBLER_REPORTING', False):
from appsembler_reporting.settings import APPSEMBLER_REPORTING
APPSEMBLER_REPORTING.update(APPSEMBLER_FEATURES.get(
'APPSEMBLER_REPORTING', {} ))
| import os
import json
from path import path
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)
CONFIG_ROOT = path('/edx/app/edxapp/') #don't hardcode this in the future
CONFIG_PREFIX = SERVICE_VARIANT + "." if SERVICE_VARIANT else ""
with open(CONFIG_ROOT / CONFIG_PREFIX + 'env.json') as env_file:
ENV_TOKENS = json.load(env_file)
APPSEMBLER_FEATURES = ENV_TOKENS.get('APPSEMBLER_FEATURES', {})
# search APPSEMBLER_FEATURES first, env variables second, fallback to None
GOOGLE_TAG_MANAGER_ID = APPSEMBLER_FEATURES.get('GOOGLE_TAG_MANAGER_ID', os.environ.get('GOOGLE_TAG_MANAGER_ID', None))
INTERCOM_APP_ID = APPSEMBLER_FEATURES.get('INTERCOM_APP_ID', os.environ.get('INTERCOM_APP_ID', ''))
INTERCOM_API_KEY = APPSEMBLER_FEATURES.get('INTERCOM_API_KEY', os.environ.get('INTERCOM_API_KEY', ''))
INTERCOM_USER_EMAIL = APPSEMBLER_FEATURES.get('INTERCOM_USER_EMAIL', os.environ.get('INTERCOM_USER_EMAIL', ''))
| Python | 0.000474 |
4315d028f114ae1005f57d33df964be05b2fb8a6 | use bin/penchy_test_job instead of running it directly | docs/commented_sample_job.py | docs/commented_sample_job.py | # A job description is two part: part 1 introduces the involved elements and
# part 2 joins them in a job
# part 1: introduce the elements
# setup job environment
from penchy.jobs import *
# import the configuration file (if needed)
import config
# define a node
node = NodeConfiguration(
# that is the localhost
'localhost',
# ssh port is 22
22,
# the executing user is the current one
os.environ['USER'],
# we execute in /tmp
'/tmp',
# all jvm are specified relative to /usr/bin
'/usr/bin')
# define a jvm with relative path java
jvm = jvms.JVM('java')
# you can also specify an absolute path:
# jvm = jvms.JVM('/usr/java')
# fuse jvm and node
jconfig = makeJVMNodeConfiguration(jvm, node,
# and give it a decorative name (optional)
name="Simple Example!")
# setup a workload
w = workloads.ScalaBench('dummy')
# and add it the the jvms that should execute it
jvm.workload = w
# setup filter, used in flows
f1 = filters.DacapoHarness()
f2 = filters.Print()
# part 2: form elements to a job
job = Job(
# setup the JVMNodeConfigurations that are included, can be a single one or
# a list of configurations
configurations=jconfig,
# specify the flow of data on clients
client_flow=[
# flow from Scalabench workload to DacapoHarness filter
Edge(w, f1,
# and match filter inputs to workload outputs (here with same name)
[('stderr', 'stderr'),
('exit_code', 'exit_code')]),
# flow from ScalaBench workload to Print filter
Edge(w, f2,
# and feed stderr and exit_code output prefix with 'workload_' to filter
[('stderr', 'workload_stderr'),
('exit_code', 'workload_exit_code')]),
# feed whole output of DacapoHarness filter to print filter (with the name of the output)
Edge(f1, f2)
],
# there is no flow on the server side
server_flow=[],
# jvms will be run twice
invocations = 2
)
| # A job description is two part: part 1 introduces the involved elements and
# part 2 joins them in a job
# part 1: introduce the elements
# setup job environment
from penchy.jobs import *
# define a node
node = NodeConfiguration(
# that is the localhost
'localhost',
# ssh port is 22
22,
# the executing user is the current one
os.environ['USER'],
# we execute in /tmp
'/tmp',
# all jvm are specified relative to /usr/bin
'/usr/bin')
# define a jvm with relative path java
jvm = jvms.JVM('java')
# you can also specify an absolute path:
# jvm = jvms.JVM('/usr/java')
# fuse jvm and node
jconfig = makeJVMNodeConfiguration(jvm, node,
# and give it a decorative name (optional)
name="Simple Example!")
# setup a workload
w = workloads.ScalaBench('dummy')
# and add it the the jvms that should execute it
jvm.workload = w
# setup filter, used in flows
f1 = filters.DacapoHarness()
f2 = filters.Print()
# part 2: form elements to a job
job = Job(
# setup the JVMNodeConfigurations that are included, can be a single one or
# a list of configurations
configurations=jconfig,
# specify the flow of data on clients
client_flow=[
# flow from Scalabench workload to DacapoHarness filter
Edge(w, f1,
# and match filter inputs to workload outputs (here with same name)
[('stderr', 'stderr'),
('exit_code', 'exit_code')]),
# flow from ScalaBench workload to Print filter
Edge(w, f2,
# and feed stderr and exit_code output prefix with 'workload_' to filter
[('stderr', 'workload_stderr'),
('exit_code', 'workload_exit_code')]),
# feed whole output of DacapoHarness filter to print filter (with the name of the output)
Edge(f1, f2)
],
# there is no flow on the server side
server_flow=[],
# jvms will be run twice
invocations = 2
)
# a nice trick: check the job for plausibility if run as ``python <jobname>``
if __name__ == '__main__':
job.check()
| Python | 0 |
927de70d3212c5106846b6f6f6333b93eceacea5 | add python 脚本 | pub-python.py | pub-python.py | # coding=utf8
import paramiko
import datetime
import telnetlib
HOSTS = [
{
'HOST':'hive1_host',
'PORT':9092,
'USER':'root'
},
{
'HOST':'hive2_host',
'PORT':9092,
'USER':'root'
}
]
BASEPATH = '/root/mpush'
class SSH():
def __init__(self):
self.client = None
def connect(self,host,port=22,username='root',password=None):
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.client.connect(host, port, username=username, password=password, timeout=10)
return self
def exe(self,cmd,isprint=True):
if not cmd:
return
stdin, stdout, stderr = self.client.exec_command(cmd)
if isprint:
for std in stdout.readlines():
print std,
return stdin, stdout, stderr
def close(self):
if self.client:
self.client.close()
def showText(s, typ):
if typ == 'RED':
return redText(s)
elif typ == 'GREEN':
return greenText(s)
elif typ == 'YELLOW':
return yellowText(s)
else:
return s
def redText(s):
return "\033[1;31m%s\033[0m" % s
def greenText(s):
return "\033[1;32m%s\033[0m" % s
def yellowText(s):
return "\033[1;33m%s\033[0m" % s
def main():
for item in HOSTS:
ssh = SSH().connect(item['HOST'],item['PORT'],username=item['USER'])
##backup
base = BASEPATH+'/mpush-jar-with-dependency.tar.gz'
to = BASEPATH+'/back/mpush-jar-with-dependency.tar.gz.'+datetime.datetime.now().strftime('%Y%m%d%H%M%S')
ssh.exe('cp %s %s '%(base,to))
print greenText('backup mpush ok')
##telnet remove zk info
#ssh.exe('telent 127.0.0.1 4001')
#ssh.exe('')
## kill process
ssh.exe('ps aux|grep mpush-cs.jar')
## start process
# ssh.exe('')
ssh.close()
if __name__ == "__main__":
main()
| # coding=utf8
import paramiko
import datetime
HOSTS = [
{
'HOST':'hive1_host',
'PORT':9092,
'USER':'root'
},
{
'HOST':'hive2_host',
'PORT':9092,
'USER':'root'
}
]
class SSH():
def __init__(self):
self.client = None
def connect(self,host,port=22,username='root',password=None):
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.client.connect(host, port, username=username, password=password, timeout=10)
return self
def exe(self,cmd,isprint=True):
if not cmd:
return
stdin, stdout, stderr = self.client.exec_command(cmd)
if isprint:
for std in stdout.readlines():
print std,
return stdin, stdout, stderr
def close(self):
if self.client:
self.client.close()
def showText(s, typ):
if typ == 'RED':
return redText(s)
elif typ == 'GREEN':
return greenText(s)
elif typ == 'YELLOW':
return yellowText(s)
else:
return s
def redText(s):
return "\033[1;31m%s\033[0m" % s
def greenText(s):
return "\033[1;32m%s\033[0m" % s
def yellowText(s):
return "\033[1;33m%s\033[0m" % s
def main():
for item in HOSTS:
ssh = SSH().connect(item['HOST'],item['PORT'],username=item['USER'])
##back
base = '/root/mpush/mpush-jar-with-dependency.tar.gz'
to = '/root/mpush/back/mpush-jar-with-dependency.tar.gz.'+datetime.datetime.now().strftime('%Y%m%d%H%M%S')
ssh.exe('cp %s %s '%(base,to))
ssh.close()
if __name__ == "__main__":
main()
| Python | 0.000245 |
9f699f66c1ff14d884157cee358793d715b1e702 | delete print | tests/test_apiserver.py | tests/test_apiserver.py | # -*- coding: utf-8 -*-
"""
tests.apiserver
~~~~~~~~~~~~
Tests cobra.api
:author: 40huo <git@40huo.cn>
:homepage: https://github.com/wufeifei/cobra
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2017 Feei. All rights reserved
"""
# 测试完成需要手动关闭 API server 和扫描进程
# kill -9 $(ps aux|grep test_apiserver.py|awk '{print $2}')
# kill -9 $(ps aux|grep cobra.py|awk '{print $2}')
# 第一次启动 server 测试可能会卡住
import requests
from cobra.api import start
import json
start(host="127.0.0.1", port=5000, debug=True)
def test_add_job():
url = "http://127.0.0.1:5000/api/add"
post_data = {
"key": "your_secret_key",
"target": "https://github.com/wufeifei/grw.git",
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
assert "1001" in re.content
assert "Add scan job successfully" in re.content
assert "scan_id" in re.content
def test_job_status():
url = "http://127.0.0.1:5000/api/status"
post_data = {
"key": "your_secret_key",
"scan_id": 24,
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
assert "1001" in re.content
assert "msg" in re.content
assert "scan_id" in re.content
assert "status" in re.content
assert "report" in re.content
| # -*- coding: utf-8 -*-
"""
tests.apiserver
~~~~~~~~~~~~
Tests cobra.api
:author: 40huo <git@40huo.cn>
:homepage: https://github.com/wufeifei/cobra
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2017 Feei. All rights reserved
"""
# 测试完成需要手动关闭 API server 和扫描进程
# kill -9 $(ps aux|grep test_apiserver.py|awk '{print $2}')
# kill -9 $(ps aux|grep cobra.py|awk '{print $2}')
# 第一次启动 server 测试可能会卡住
import requests
from cobra.api import start
import json
start(host="127.0.0.1", port=5000, debug=True)
def test_add_job():
url = "http://127.0.0.1:5000/api/add"
post_data = {
"key": "your_secret_key",
"target": "https://github.com/wufeifei/grw.git",
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
assert "1001" in re.content
assert "Add scan job successfully" in re.content
assert "scan_id" in re.content
def test_job_status():
url = "http://127.0.0.1:5000/api/status"
post_data = {
"key": "your_secret_key",
"scan_id": 24,
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
print re.content
assert "1001" in re.content
assert "msg" in re.content
assert "scan_id" in re.content
assert "status" in re.content
assert "report" in re.content
| Python | 0.000195 |
17d79c5ec4584ea2f1f8b7fe52b157b3988bb7fc | test gap score | tests/test_gap_score.py | tests/test_gap_score.py | """
Using gap score to determine optimal cluster number
"""
import unittest
from unittest import TestCase
from flaky import flaky
import numpy as np
import scipy
from uncurl import gap_score
class GapScoreTest(TestCase):
def setUp(self):
pass
def test_gap_score(self):
data_mat = scipy.io.loadmat('data/10x_pooled_400.mat')
data = data_mat['data']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
# just test that the score is in a very broad range
self.assertTrue(max_k > 3)
self.assertTrue(max_k < 20)
def test_gap_score_2(self):
data_mat = scipy.io.loadmat('data/GSE60361_dat.mat')
data = data_mat['Dat']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
self.assertTrue(max_k > 3)
self.assertTrue(max_k < 30)
@flaky(max_runs=3)
def test_gap_score_3(self):
data_mat = scipy.io.loadmat('data/SCDE_test.mat')
data = data_mat['dat']
data_tsvd = gap_score.preproc_data(data, gene_subset=True)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
self.assertTrue(max_k < 10)
if __name__ == '__main__':
unittest.main()
| """
Using gap score to determine optimal cluster number
"""
import unittest
from unittest import TestCase
from flaky import flaky
import numpy as np
import scipy
from uncurl import gap_score
class GapScoreTest(TestCase):
def setUp(self):
pass
def test_gap_score(self):
data_mat = scipy.io.loadmat('data/10x_pooled_400.mat')
data = data_mat['data']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
# just test that the score is in a very broad range
self.assertTrue(max_k > 3)
self.assertTrue(max_k < 20)
def test_gap_score_2(self):
data_mat = scipy.io.loadmat('data/GSE60361_dat.mat')
data = data_mat['Dat']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
self.assertTrue(max_k > 3)
self.assertTrue(max_k < 30)
@flaky(max_runs=3)
def test_gap_score_3(self):
data_mat = scipy.io.loadmat('data/SCDE_test.mat')
data = data_mat['dat']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
self.assertTrue(max_k < 10)
if __name__ == '__main__':
unittest.main()
| Python | 0.000004 |
3ce54da38119987c2e23089cca3e14a1664cd0c9 | remove dots at the end of description | python2nix.py | python2nix.py | #!/usr/bin/env python2.7
import sys
import requests
import pip_deps
PACKAGE = """\
{name_only} = pythonPackages.buildPythonPackage rec {{
name = "{name}";
propagatedBuildInputs = [ {inputs} ];
src = fetchurl {{
url = "{url}";
md5 = "{md5}";
}};
meta = with stdenv.lib; {{
description = "{description}";
homepage = {homepage};
license = {license};
}};
}};
"""
LICENSE_MAP = {
'http://www.opensource.org/licenses/mit-license.php': 'licenses.mit',
'MIT': 'licenses.mit',
'PSF': 'licenses.psfl'
}
_missing = object()
def guess_license(info):
l = info['info']['license']
license = LICENSE_MAP.get(l, _missing)
if license is _missing:
sys.stderr.write('WARNING: unknown license (please update LICENSE_MAP): ' + l + '\n')
return 'unknown'
return license
_pip_dependency_cache = {}
def pip_dump_dependencies(name): # memoized version
if name in _pip_dependency_cache:
return _pip_dependency_cache[name]
ret = pip_deps.pip_dump_dependencies(name)
_pip_dependency_cache[name] = ret
return ret
def build_inputs(name):
reqs, vsns = pip_dump_dependencies(name)
def vsn(name):
vsn = vsns.get(name)
if not vsn:
name = name.replace('_', '-') # pypi workaround ?
vsn = vsns.get(name)
if vsn:
vsn = "_" + vsn
return vsn or ''
return [name + vsn(name) for name, specs in reqs[name]]
def package_to_info(package):
url = "https://pypi.python.org/pypi/{}/json".format(package)
r = requests.get(url)
return r.json()
def info_to_expr(info):
name_only = info['info']['name']
version = info['info']['version']
name = name_only + "-" + version
inputs = ' '.join(build_inputs(name_only))
url = None
md5 = None
for url_item in info['urls']:
url_ext = url_item['url']
if url_ext.endswith('zip') or url_ext.endswith('tar.gz'):
url = url_item['url']
md5 = url_item['md5_digest']
break
if url is None:
raise Exception('No download url found :-(')
description = info['info']['description'].split('\n')[0].rstrip('.')
homepage = info['info']['home_page']
license = guess_license(info)
return PACKAGE.format(**locals())
if __name__ == '__main__':
print info_to_expr(package_to_info(sys.argv[1]))
| #!/usr/bin/env python2.7
import sys
import requests
import pip_deps
PACKAGE = """\
{name_only} = pythonPackages.buildPythonPackage rec {{
name = "{name}";
propagatedBuildInputs = [ {inputs} ];
src = fetchurl {{
url = "{url}";
md5 = "{md5}";
}};
meta = with stdenv.lib; {{
description = "{description}";
homepage = {homepage};
license = {license};
}};
}};
"""
LICENSE_MAP = {
'http://www.opensource.org/licenses/mit-license.php': 'licenses.mit',
'MIT': 'licenses.mit',
'PSF': 'licenses.psfl'
}
_missing = object()
def guess_license(info):
l = info['info']['license']
license = LICENSE_MAP.get(l, _missing)
if license is _missing:
sys.stderr.write('WARNING: unknown license (please update LICENSE_MAP): ' + l + '\n')
return 'unknown'
return license
_pip_dependency_cache = {}
def pip_dump_dependencies(name): # memoized version
if name in _pip_dependency_cache:
return _pip_dependency_cache[name]
ret = pip_deps.pip_dump_dependencies(name)
_pip_dependency_cache[name] = ret
return ret
def build_inputs(name):
reqs, vsns = pip_dump_dependencies(name)
def vsn(name):
vsn = vsns.get(name)
if not vsn:
name = name.replace('_', '-') # pypi workaround ?
vsn = vsns.get(name)
if vsn:
vsn = "_" + vsn
return vsn or ''
return [name + vsn(name) for name, specs in reqs[name]]
def package_to_info(package):
url = "https://pypi.python.org/pypi/{}/json".format(package)
r = requests.get(url)
return r.json()
def info_to_expr(info):
name_only = info['info']['name']
version = info['info']['version']
name = name_only + "-" + version
inputs = ' '.join(build_inputs(name_only))
url = None
md5 = None
for url_item in info['urls']:
url_ext = url_item['url']
if url_ext.endswith('zip') or url_ext.endswith('tar.gz'):
url = url_item['url']
md5 = url_item['md5_digest']
break
if url is None:
raise Exception('No download url found :-(')
description = info['info']['description'].split('\n')[0]
homepage = info['info']['home_page']
license = guess_license(info)
return PACKAGE.format(**locals())
if __name__ == '__main__':
print info_to_expr(package_to_info(sys.argv[1]))
| Python | 0.001447 |
3f0ab3d63ad0a602b3332b9c83c742caae47289a | Fix test for invalid queue class | tests/test_lib_queue.py | tests/test_lib_queue.py | """
This file contains the tests for the job queue modules.
In particular, this tests
lib/queue/*.py
"""
from huey import RedisHuey
import mock
from privacyidea.app import create_app
from privacyidea.config import TestingConfig
from privacyidea.lib.error import ServerError
from privacyidea.lib.queue import job, JOB_COLLECTOR, JobCollector, get_job_queue, wrap_job, has_job_queue
from privacyidea.lib.queues.huey_queue import HueyQueue
from privacyidea.lib.queues.base import QueueError
from .base import OverrideConfigTestCase, MyTestCase
class TestSender(object):
""" defined in order to be able to mock the ``send_mail`` function in tests """
def send_mail(*args, **kwargs):
pass
SENDER = TestSender()
@job("test.my_add")
def my_add(a, b):
return a + b
@job("test.my_send_mail")
def my_send_mail(message):
SENDER.send_mail(message)
return 1337
class NoQueueTestCase(OverrideConfigTestCase):
class Config(TestingConfig):
PI_JOB_QUEUE_CLASS = ""
def test_01_no_job_queue(self):
self.assertFalse(has_job_queue())
with self.assertRaises(ServerError):
get_job_queue()
def test_02_collector(self):
self.assertIsInstance(JOB_COLLECTOR, JobCollector)
self.assertDictContainsSubset({
"test.my_add": (my_add, (), {}),
"test.my_send_mail": (my_send_mail, (), {})
}, JOB_COLLECTOR.jobs)
class InvalidQueueTestCase(MyTestCase):
def test_01_create_app_graciously(self):
class Config(TestingConfig):
PI_JOB_QUEUE_CLASS = "obviously.invalid"
with mock.patch.dict("privacyidea.config.config", {"testing": Config}):
app = create_app("testing", "") # we do not throw an exception
class HueyQueueTestCase(OverrideConfigTestCase):
class Config(TestingConfig):
PI_JOB_QUEUE_CLASS = "privacyidea.lib.queues.huey_queue.HueyQueue"
PI_JOB_QUEUE_NAME = "myqueuename"
PI_JOB_QUEUE_ALWAYS_EAGER = True # avoid redis server for testing
def test_01_app_job_queue(self):
queue = get_job_queue()
self.assertIsInstance(queue, HueyQueue)
self.assertEqual(queue.options, {"name": "myqueuename", "always_eager": True})
self.assertTrue({"test.my_add", "test.my_send_mail"}.issubset(set(queue.jobs)))
self.assertIsInstance(queue.huey, RedisHuey)
self.assertEqual(queue.huey.name, "myqueuename")
self.assertFalse(queue.huey.store_none)
with self.assertRaises(QueueError):
queue.register_job("test.my_add", lambda x: x)
def test_03_enqueue_jobs(self):
queue = get_job_queue()
queue.enqueue("test.my_add", (3, 4), {}) # No result is stored
with mock.patch.object(SENDER, 'send_mail') as mock_mail:
queue.enqueue("test.my_send_mail", ("hi",), {})
mock_mail.assert_called_once_with("hi")
with self.assertRaises(QueueError):
queue.enqueue("test.unknown", ("hi",), {})
def test_04_wrap_jobs(self):
wrapped = wrap_job("test.my_send_mail", True)
with mock.patch.object(SENDER, 'send_mail') as mock_mail:
result = wrapped("hi")
mock_mail.assert_called_once_with("hi")
self.assertTrue(result)
with mock.patch.object(SENDER, 'send_mail') as mock_mail:
result = my_send_mail("hi")
mock_mail.assert_called_once_with("hi")
self.assertEqual(result, 1337) | """
This file contains the tests for the job queue modules.
In particular, this tests
lib/queue/*.py
"""
from huey import RedisHuey
import mock
from privacyidea.app import create_app
from privacyidea.config import TestingConfig
from privacyidea.lib.error import ServerError
from privacyidea.lib.queue import job, JOB_COLLECTOR, JobCollector, get_job_queue, wrap_job, has_job_queue
from privacyidea.lib.queues.huey_queue import HueyQueue
from privacyidea.lib.queues.base import QueueError
from .base import OverrideConfigTestCase, MyTestCase
class TestSender(object):
""" defined in order to be able to mock the ``send_mail`` function in tests """
def send_mail(*args, **kwargs):
pass
SENDER = TestSender()
@job("test.my_add")
def my_add(a, b):
return a + b
@job("test.my_send_mail")
def my_send_mail(message):
SENDER.send_mail(message)
return 1337
class NoQueueTestCase(OverrideConfigTestCase):
class Config(TestingConfig):
PI_JOB_QUEUE_CLASS = ""
def test_01_no_job_queue(self):
self.assertFalse(has_job_queue())
with self.assertRaises(ServerError):
get_job_queue()
def test_02_collector(self):
self.assertIsInstance(JOB_COLLECTOR, JobCollector)
self.assertDictContainsSubset({
"test.my_add": (my_add, (), {}),
"test.my_send_mail": (my_send_mail, (), {})
}, JOB_COLLECTOR.jobs)
class InvalidQueueTestCase(MyTestCase):
def test_01_create_app_fails(self):
class Config(TestingConfig):
PI_JOB_QUEUE_CLASS = "obviously.invalid"
with mock.patch.dict("privacyidea.config.config", {"testing": Config}):
with self.assertRaises(ImportError):
create_app("testing", "")
class HueyQueueTestCase(OverrideConfigTestCase):
class Config(TestingConfig):
PI_JOB_QUEUE_CLASS = "privacyidea.lib.queues.huey_queue.HueyQueue"
PI_JOB_QUEUE_NAME = "myqueuename"
PI_JOB_QUEUE_ALWAYS_EAGER = True # avoid redis server for testing
def test_01_app_job_queue(self):
queue = get_job_queue()
self.assertIsInstance(queue, HueyQueue)
self.assertEqual(queue.options, {"name": "myqueuename", "always_eager": True})
self.assertTrue({"test.my_add", "test.my_send_mail"}.issubset(set(queue.jobs)))
self.assertIsInstance(queue.huey, RedisHuey)
self.assertEqual(queue.huey.name, "myqueuename")
self.assertFalse(queue.huey.store_none)
with self.assertRaises(QueueError):
queue.register_job("test.my_add", lambda x: x)
def test_03_enqueue_jobs(self):
queue = get_job_queue()
queue.enqueue("test.my_add", (3, 4), {}) # No result is stored
with mock.patch.object(SENDER, 'send_mail') as mock_mail:
queue.enqueue("test.my_send_mail", ("hi",), {})
mock_mail.assert_called_once_with("hi")
with self.assertRaises(QueueError):
queue.enqueue("test.unknown", ("hi",), {})
def test_04_wrap_jobs(self):
wrapped = wrap_job("test.my_send_mail", True)
with mock.patch.object(SENDER, 'send_mail') as mock_mail:
result = wrapped("hi")
mock_mail.assert_called_once_with("hi")
self.assertTrue(result)
with mock.patch.object(SENDER, 'send_mail') as mock_mail:
result = my_send_mail("hi")
mock_mail.assert_called_once_with("hi")
self.assertEqual(result, 1337) | Python | 0.000001 |
04f7b8aa85bf2bb2c16eb246ee7c9d7ae5fc8cff | check contents | tests/test_roundtrip.py | tests/test_roundtrip.py | import json
import bitjws
def test_encode_decode():
key = bitjws.PrivateKey()
ser = bitjws.sign_serialize(key)
header, payload = bitjws.validate_deserialize(ser)
rawheader, rawpayload = ser.rsplit('.', 1)[0].split('.')
origheader = bitjws.base64url_decode(rawheader.encode('utf8'))
origpayload = bitjws.base64url_decode(rawpayload.encode('utf8'))
assert header['typ'] == 'JWT'
assert header['alg'] == 'CUSTOM-BITCOIN-SIGN'
assert header['kid'] == bitjws.pubkey_to_addr(key.pubkey.serialize())
assert len(header) == 3
assert header == json.loads(origheader.decode('utf8'))
assert isinstance(payload.get('exp', ''), float)
assert payload['aud'] is None
assert len(payload) == 2
assert payload == json.loads(origpayload.decode('utf8'))
def test_audience():
key = bitjws.PrivateKey()
audience = 'https://example.com/api/login'
ser = bitjws.sign_serialize(key, requrl=audience)
header, payload = bitjws.validate_deserialize(ser, requrl=audience)
assert header is not None
assert payload is not None
assert payload['aud'] == audience
| import json
import bitjws
def test_encode_decode():
key = bitjws.PrivateKey()
ser = bitjws.sign_serialize(key)
header, payload = bitjws.validate_deserialize(ser)
rawheader, rawpayload = ser.rsplit('.', 1)[0].split('.')
origheader = bitjws.base64url_decode(rawheader.encode('utf8'))
origpayload = bitjws.base64url_decode(rawpayload.encode('utf8'))
assert header == json.loads(origheader.decode('utf8'))
assert payload == json.loads(origpayload.decode('utf8'))
def test_audience():
key = bitjws.PrivateKey()
ser = bitjws.sign_serialize(key, requrl='https://example.com/api/login')
header, payload = bitjws.validate_deserialize(
ser, requrl='https://example.com/api/login')
assert header is not None
assert payload is not None
| Python | 0 |
3059e2cf76e2e7bfb90c6c03afc5ee372294de94 | use with_setup instead of setUp/tearDown | tests/test_spotifile.py | tests/test_spotifile.py | from nose import with_setup
import os
from os import path
from subprocess import check_call
from sh import ls, cat
mountpoint = '/tmp/spotifile_test_mount'
def fs_mount():
if not path.exists(mountpoint):
os.mkdir(mountpoint)
check_call(['./spotifile', mountpoint])
def fs_unmount():
check_call(['fusermount', '-u', mountpoint])
if path.exists(mountpoint):
os.rmdir(mountpoint)
@with_setup(fs_mount, fs_unmount)
def test_ls():
assert 'connection' in ls(mountpoint)
@with_setup(fs_mount, fs_unmount)
def test_cat_connection():
assert 'logged in' in cat(path.join(mountpoint, 'connection'))
| import unittest
import os
from subprocess import check_call
from sh import ls
mountpoint = '/tmp/spotifile_test_mount'
class SpotifileTestClass(unittest.TestCase):
@classmethod
def setUpClass(cls):
if not os.path.exists(mountpoint):
os.mkdir(mountpoint)
@classmethod
def tearDownClass(cls):
if os.path.exists(mountpoint):
os.rmdir(mountpoint)
def setUp(self):
check_call(['./spotifile', mountpoint])
def tearDown(self):
check_call(['fusermount', '-u', mountpoint])
def test_ls(self):
assert 'connection' in ls(mountpoint)
| Python | 0 |
c74faacfc91c8925ced63abda00e7e097903e0f7 | Remove stray print statements. | tests/test_table_xls.py | tests/test_table_xls.py | #!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateexcel
class TestXLS(agate.AgateTestCase):
def setUp(self):
self.rows = (
(1, 'a', True, '11/4/2015', '11/4/2015 12:22 PM'),
(2, u'👍', False, '11/5/2015', '11/4/2015 12:45 PM'),
(None, 'b', None, None, None)
)
self.column_names = [
'number', 'text', 'boolean', 'date', 'datetime'
]
self.column_types = [
agate.Number(), agate.Text(), agate.Boolean(),
agate.Date(), agate.DateTime()
]
self.table = agate.Table(self.rows, self.column_names, self.column_types)
def test_from_xls(self):
table = agate.Table.from_xls('examples/test.xls')
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_file_like(self):
with open('examples/test.xls', 'rb') as f:
table = agate.Table.from_xls(f)
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_sheet_name(self):
table = agate.Table.from_xls('examples/test_sheets.xls', 'data')
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_sheet_index(self):
table = agate.Table.from_xls('examples/test_sheets.xls', 1)
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_zeros(self):
table = agate.Table.from_xls('examples/test_zeros.xls')
self.assertColumnNames(table, ['ordinal', 'binary', 'all_zero'])
self.assertColumnTypes(table, [agate.Number, agate.Number, agate.Number])
self.assertRows(table, [
[0, 0, 0],
[1, 1, 0],
[2, 1, 0]
])
| #!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateexcel
class TestXLS(agate.AgateTestCase):
def setUp(self):
self.rows = (
(1, 'a', True, '11/4/2015', '11/4/2015 12:22 PM'),
(2, u'👍', False, '11/5/2015', '11/4/2015 12:45 PM'),
(None, 'b', None, None, None)
)
self.column_names = [
'number', 'text', 'boolean', 'date', 'datetime'
]
self.column_types = [
agate.Number(), agate.Text(), agate.Boolean(),
agate.Date(), agate.DateTime()
]
self.table = agate.Table(self.rows, self.column_names, self.column_types)
def test_from_xls(self):
table = agate.Table.from_xls('examples/test.xls')
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_file_like(self):
with open('examples/test.xls', 'rb') as f:
table = agate.Table.from_xls(f)
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_sheet_name(self):
table = agate.Table.from_xls('examples/test_sheets.xls', 'data')
self.table.print_table()
table.print_table()
print('here')
print(self.table.rows[2])
print(table.rows[2])
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_sheet_index(self):
table = agate.Table.from_xls('examples/test_sheets.xls', 1)
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_zeros(self):
table = agate.Table.from_xls('examples/test_zeros.xls')
self.assertColumnNames(table, ['ordinal', 'binary', 'all_zero'])
self.assertColumnTypes(table, [agate.Number, agate.Number, agate.Number])
self.assertRows(table, [
[0, 0, 0],
[1, 1, 0],
[2, 1, 0]
])
| Python | 0.000022 |
7fb5b04bb4054f60cefc79efabcef07979628285 | add directory encoding test in test_conf | tests/unit/test_conf.py | tests/unit/test_conf.py | import os
from twisted.trial import unittest
from lbrynet import conf
class SettingsTest(unittest.TestCase):
def setUp(self):
os.environ['LBRY_TEST'] = 'test_string'
def tearDown(self):
del os.environ['LBRY_TEST']
@staticmethod
def get_mock_config_instance():
settings = {'test': (str, '')}
env = conf.Env(**settings)
return conf.Config({}, settings, environment=env)
def test_envvar_is_read(self):
settings = self.get_mock_config_instance()
self.assertEqual('test_string', settings['test'])
def test_setting_can_be_overridden(self):
settings = self.get_mock_config_instance()
settings['test'] = 'my_override'
self.assertEqual('my_override', settings['test'])
def test_setting_can_be_updated(self):
settings = self.get_mock_config_instance()
settings.update({'test': 'my_update'})
self.assertEqual('my_update', settings['test'])
def test_setting_is_in_dict(self):
settings = self.get_mock_config_instance()
setting_dict = settings.get_current_settings_dict()
self.assertEqual({'test': 'test_string'}, setting_dict)
def test_invalid_setting_raises_exception(self):
settings = self.get_mock_config_instance()
self.assertRaises(AssertionError, settings.set, 'invalid_name', 123)
def test_invalid_data_type_raises_exception(self):
settings = self.get_mock_config_instance()
self.assertIsNone(settings.set('test', 123))
self.assertRaises(AssertionError, settings.set, 'test', 123, ('fake_data_type',))
def test_setting_precedence(self):
settings = self.get_mock_config_instance()
settings.set('test', 'cli_test_string', data_types=(conf.TYPE_CLI,))
self.assertEqual('cli_test_string', settings['test'])
settings.set('test', 'this_should_not_take_precedence', data_types=(conf.TYPE_ENV,))
self.assertEqual('cli_test_string', settings['test'])
settings.set('test', 'runtime_takes_precedence', data_types=(conf.TYPE_RUNTIME,))
self.assertEqual('runtime_takes_precedence', settings['test'])
def test_data_dir(self):
# check if these directories are returned as string and not unicode
# otherwise there will be problems when calling os.path.join on
# unicode directory names with string file names
self.assertEqual(str, type(conf.default_download_directory))
self.assertEqual(str, type(conf.default_data_dir))
self.assertEqual(str, type(conf.default_lbryum_dir))
| import os
from twisted.trial import unittest
from lbrynet import conf
class SettingsTest(unittest.TestCase):
def setUp(self):
os.environ['LBRY_TEST'] = 'test_string'
def tearDown(self):
del os.environ['LBRY_TEST']
@staticmethod
def get_mock_config_instance():
settings = {'test': (str, '')}
env = conf.Env(**settings)
return conf.Config({}, settings, environment=env)
def test_envvar_is_read(self):
settings = self.get_mock_config_instance()
self.assertEqual('test_string', settings['test'])
def test_setting_can_be_overridden(self):
settings = self.get_mock_config_instance()
settings['test'] = 'my_override'
self.assertEqual('my_override', settings['test'])
def test_setting_can_be_updated(self):
settings = self.get_mock_config_instance()
settings.update({'test': 'my_update'})
self.assertEqual('my_update', settings['test'])
def test_setting_is_in_dict(self):
settings = self.get_mock_config_instance()
setting_dict = settings.get_current_settings_dict()
self.assertEqual({'test': 'test_string'}, setting_dict)
def test_invalid_setting_raises_exception(self):
settings = self.get_mock_config_instance()
self.assertRaises(AssertionError, settings.set, 'invalid_name', 123)
def test_invalid_data_type_raises_exception(self):
settings = self.get_mock_config_instance()
self.assertIsNone(settings.set('test', 123))
self.assertRaises(AssertionError, settings.set, 'test', 123, ('fake_data_type',))
def test_setting_precedence(self):
settings = self.get_mock_config_instance()
settings.set('test', 'cli_test_string', data_types=(conf.TYPE_CLI,))
self.assertEqual('cli_test_string', settings['test'])
settings.set('test', 'this_should_not_take_precedence', data_types=(conf.TYPE_ENV,))
self.assertEqual('cli_test_string', settings['test'])
settings.set('test', 'runtime_takes_precedence', data_types=(conf.TYPE_RUNTIME,))
self.assertEqual('runtime_takes_precedence', settings['test'])
| Python | 0.000001 |
c81393a8de27595f61cffc09fa6fa8352bb54b9c | Return a random set of factors | palindrome-products/palindrome_products.py | palindrome-products/palindrome_products.py | import random
from collections import defaultdict
def largest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, max)
def smallest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, min)
def _palindromes(max_factor, min_factor, minmax):
pals = defaultdict(set)
for i in range(min_factor, max_factor+1):
for j in range(min_factor, max_factor+1):
p = i * j
if is_palindrome(p):
pals[p].add(tuple(sorted([i,j])))
value = minmax(pals)
factors = random.choice(list(pals[value]))
return (value, factors)
def is_palindrome(n):
return str(n) == str(n)[::-1]
| from collections import defaultdict
def largest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, max)
def smallest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, min)
def _palindromes(max_factor, min_factor, minmax):
pals = defaultdict(set)
for i in range(min_factor, max_factor+1):
for j in range(min_factor, max_factor+1):
p = i * j
if is_palindrome(p):
pals[p].add(tuple(sorted([i,j])))
value = minmax(pals)
factors = pals[value]
return (value, factors)
def is_palindrome(n):
return str(n) == str(n)[::-1]
| Python | 0.999999 |
a1a261a88667c3066fd9e11e7af4673c1fca1b44 | Add tags! Task name and owner to start. | teuthology/run_tasks.py | teuthology/run_tasks.py | import sys
import logging
from teuthology.sentry import get_client as get_sentry_client
from .config import config as teuth_config
log = logging.getLogger(__name__)
def run_one_task(taskname, **kwargs):
submod = taskname
subtask = 'task'
if '.' in taskname:
(submod, subtask) = taskname.rsplit('.', 1)
parent = __import__('teuthology.task', globals(), locals(), [submod], 0)
mod = getattr(parent, submod)
fn = getattr(mod, subtask)
return fn(**kwargs)
def run_tasks(tasks, ctx):
    """Run each task in ``tasks`` in order, unwinding them in reverse on exit.

    Each element of ``tasks`` must be a single-entry dict mapping a task
    name to its config.  Tasks that behave as context managers are entered
    and pushed on a stack so their ``__exit__`` hooks run in LIFO order.
    Failures are recorded in ``ctx.summary`` and reported to Sentry.
    """
    stack = []
    try:
        for taskdict in tasks:
            try:
                # Exactly one (name, config) pair is expected per dict.
                ((taskname, config),) = taskdict.iteritems()
            except ValueError:
                raise RuntimeError('Invalid task definition: %s' % taskdict)
            log.info('Running task %s...', taskname)
            manager = run_one_task(taskname, ctx=ctx, config=config)
            if hasattr(manager, '__enter__'):
                manager.__enter__()
                stack.append(manager)
    except Exception, e:
        # Record only the first failure reason.
        ctx.summary['success'] = False
        if 'failure_reason' not in ctx.summary:
            ctx.summary['failure_reason'] = str(e)
        log.exception('Saw exception from tasks.')
        sentry = get_sentry_client()
        if sentry:
            # Tag the Sentry event with the failing task and job owner.
            tags = {
                'task': taskname,
                'owner': ctx.owner,
            }
            exc_id = sentry.get_ident(sentry.captureException(tags=tags))
            event_url = "{server}/search?q={id}".format(
                server=teuth_config.sentry_server, id=exc_id)
            log.exception(" Sentry event: %s" % event_url)
            sentry_url_list = ctx.summary.get('sentry_events', [])
            sentry_url_list.append(event_url)
            ctx.summary['sentry_events'] = sentry_url_list
        if ctx.config.get('interactive-on-error'):
            from .task import interactive
            log.warning('Saw failure, going into interactive mode...')
            interactive.task(ctx=ctx, config=None)
    finally:
        try:
            # Unwind every entered manager, newest first, feeding each the
            # pending exception info (if any).
            exc_info = sys.exc_info()
            while stack:
                manager = stack.pop()
                log.debug('Unwinding manager %s', manager)
                try:
                    suppress = manager.__exit__(*exc_info)
                except Exception, e:
                    ctx.summary['success'] = False
                    if 'failure_reason' not in ctx.summary:
                        ctx.summary['failure_reason'] = str(e)
                    log.exception('Manager failed: %s', manager)
                    if exc_info == (None, None, None):
                        # if first failure is in an __exit__, we don't
                        # have exc_info set yet
                        exc_info = sys.exc_info()
                    if ctx.config.get('interactive-on-error'):
                        from .task import interactive
                        log.warning('Saw failure, going into interactive mode...')
                        interactive.task(ctx=ctx, config=None)
                else:
                    if suppress:
                        # Manager handled the exception; clear it.
                        sys.exc_clear()
                        exc_info = (None, None, None)
            if exc_info != (None, None, None):
                log.debug('Exception was not quenched, exiting: %s: %s', exc_info[0].__name__, exc_info[1])
                raise SystemExit(1)
        finally:
            # be careful about cyclic references
            del exc_info
| import sys
import logging
from teuthology.sentry import get_client as get_sentry_client
from .config import config as teuth_config
log = logging.getLogger(__name__)
def run_one_task(taskname, **kwargs):
    """Resolve *taskname* to a callable under ``teuthology.task`` and call it.

    A dotted name ``mod.func`` selects ``func`` in submodule ``mod``;
    otherwise the submodule's ``task`` attribute is used.
    """
    submod = taskname
    subtask = 'task'
    if '.' in taskname:
        (submod, subtask) = taskname.rsplit('.', 1)
    parent = __import__('teuthology.task', globals(), locals(), [submod], 0)
    mod = getattr(parent, submod)
    fn = getattr(mod, subtask)
    return fn(**kwargs)
def run_tasks(tasks, ctx):
    """Run each task in ``tasks`` in order, unwinding them in reverse on exit.

    Each element of ``tasks`` must be a single-entry dict mapping a task
    name to its config.  Tasks that behave as context managers are entered
    and pushed on a stack so their ``__exit__`` hooks run in LIFO order.
    Failures are recorded in ``ctx.summary`` and reported to Sentry.
    """
    stack = []
    try:
        for taskdict in tasks:
            try:
                # Exactly one (name, config) pair is expected per dict.
                ((taskname, config),) = taskdict.iteritems()
            except ValueError:
                raise RuntimeError('Invalid task definition: %s' % taskdict)
            log.info('Running task %s...', taskname)
            manager = run_one_task(taskname, ctx=ctx, config=config)
            if hasattr(manager, '__enter__'):
                manager.__enter__()
                stack.append(manager)
    except Exception, e:
        # Record only the first failure reason.
        ctx.summary['success'] = False
        if 'failure_reason' not in ctx.summary:
            ctx.summary['failure_reason'] = str(e)
        log.exception('Saw exception from tasks.')
        sentry = get_sentry_client()
        if sentry:
            exc_id = sentry.get_ident(sentry.captureException())
            event_url = "{server}/search?q={id}".format(
                server=teuth_config.sentry_server, id=exc_id)
            log.exception(" Sentry event: %s" % event_url)
            sentry_url_list = ctx.summary.get('sentry_events', [])
            sentry_url_list.append(event_url)
            ctx.summary['sentry_events'] = sentry_url_list
        if ctx.config.get('interactive-on-error'):
            from .task import interactive
            log.warning('Saw failure, going into interactive mode...')
            interactive.task(ctx=ctx, config=None)
    finally:
        try:
            # Unwind every entered manager, newest first, feeding each the
            # pending exception info (if any).
            exc_info = sys.exc_info()
            while stack:
                manager = stack.pop()
                log.debug('Unwinding manager %s', manager)
                try:
                    suppress = manager.__exit__(*exc_info)
                except Exception, e:
                    ctx.summary['success'] = False
                    if 'failure_reason' not in ctx.summary:
                        ctx.summary['failure_reason'] = str(e)
                    log.exception('Manager failed: %s', manager)
                    if exc_info == (None, None, None):
                        # if first failure is in an __exit__, we don't
                        # have exc_info set yet
                        exc_info = sys.exc_info()
                    if ctx.config.get('interactive-on-error'):
                        from .task import interactive
                        log.warning('Saw failure, going into interactive mode...')
                        interactive.task(ctx=ctx, config=None)
                else:
                    if suppress:
                        # Manager handled the exception; clear it.
                        sys.exc_clear()
                        exc_info = (None, None, None)
            if exc_info != (None, None, None):
                log.debug('Exception was not quenched, exiting: %s: %s', exc_info[0].__name__, exc_info[1])
                raise SystemExit(1)
        finally:
            # be careful about cyclic references
            del exc_info
| Python | 0 |
45c1446779cbce050573264101b1afe3d7fe42b4 | Update BaseSearchCommand | elasticsearch_django/management/commands/__init__.py | elasticsearch_django/management/commands/__init__.py | # -*- coding: utf-8 -*-
"""Base command for search-related management commands."""
import logging
from django.core.management.base import BaseCommand
from elasticsearch.exceptions import TransportError
logger = logging.getLogger(__name__)
class BaseSearchCommand(BaseCommand):
    """Base class for commands that interact with the search index."""

    description = "Base search command."

    def add_arguments(self, parser):
        """Add default base options of --noinput and indexes."""
        parser.add_argument(
            '--noinput',
            action='store_false',
            dest='interactive',
            default=True,
            help='Do no display user prompts - may affect data.'
        )
        parser.add_argument(
            'indexes',
            nargs='*',
            help="Names of indexes on which to run the command."
        )

    def do_index_command(self, index, interactive):
        """Run a command against a named index; subclasses must override."""
        raise NotImplementedError()

    def handle(self, *args, **options):
        """Run do_index_command on each specified index and log the output."""
        for index in options.pop('indexes'):
            data = {}
            try:
                data = self.do_index_command(index, **options)
            except TransportError as ex:
                # NOTE(review): logger.warn is a deprecated alias of
                # logger.warning.
                logger.warn("ElasticSearch threw an error: %s", ex)
                data = {
                    "index": index,
                    "status": ex.status_code,
                    "reason": ex.info['error']['reason']
                }
            finally:
                # Log the command output (or the error payload) either way.
                logger.info(data)
| # -*- coding: utf-8 -*-
"""Base command for search-related management commands."""
import logging
from django.core.management.base import BaseCommand
from elasticsearch.exceptions import TransportError
logger = logging.getLogger(__name__)
class BaseSearchCommand(BaseCommand):
    """Base class for commands that interact with the search index."""

    description = "Base search command."

    def add_arguments(self, parser):
        """Add default base options of --noinput and indexes."""
        parser.add_argument(
            '--noinput',
            action='store_false',
            dest='interactive',
            default=True,
            help='Do no display user prompts - may affect data.'
        )
        parser.add_argument(
            'indexes',
            nargs='*',
            help="Names of indexes on which to run the command."
        )

    def do_index_command(self, index, interactive):
        """Run a command against a named index; subclasses must override."""
        raise NotImplementedError()

    def handle(self, *args, **options):
        """Run do_index_command on each specified index and log the output."""
        for index in options.pop('indexes'):
            data = {}
            try:
                # NOTE(review): leftover debug output (Python 2 print
                # statement) -- remove before release.
                print "calling do_index_command", index, options
                data = self.do_index_command(index, **options)
            except TransportError as ex:
                logger.warn("ElasticSearch threw an error: %s", ex)
                data = {
                    "index": index,
                    "status": ex.status_code,
                    "reason": ex.info['error']['reason']
                }
            finally:
                # Log the command output (or the error payload) either way.
                logger.info(data)
| Python | 0.000001 |
15a32b91b36c9deba5a4fc1d8c843a5e044b62c3 | remove unnecessary comments and print statements | tdp_core/mapping_table.py | tdp_core/mapping_table.py | import logging
from . import db
import itertools
_log = logging.getLogger(__name__)
class SQLMappingTable(object):
    """ID mapping backed by a SQL query on a configured database engine."""

    def __init__(self, mapping, engine):
        self.from_idtype = mapping.from_idtype
        self.to_idtype = mapping.to_idtype
        self._engine = engine
        self._query = mapping.query
        self._integer_ids = mapping.integer_ids

    def __call__(self, ids):
        """Map each id in *ids* to a (possibly empty) list of target ids."""
        if self._integer_ids:
            # The backing query expects integer keys.
            ids = [int(identifier) for identifier in ids]
        with db.session(self._engine) as session:
            rows = session.execute(self._query, ids=ids)
        # Group the (f, t) result rows by their source id to support
        # one-to-many mappings.
        by_source = {}
        for row in sorted(rows, key=lambda row: row['f']):
            by_source.setdefault(row['f'], []).append(row['t'])
        return [by_source.get(identifier, []) for identifier in ids]
def _discover_mappings():
    """Yield a SQLMappingTable for every mapping declared on any connector."""
    for key, connector in db.configs.connectors.items():
        mappings = connector.mappings
        if not mappings:
            continue
        engine = db.configs.engine(key)
        for mapping in mappings:
            _log.info('registering %s to %s', mapping.from_idtype, mapping.to_idtype)
            yield SQLMappingTable(mapping, engine)
class SQLMappingProvider(object):
    """Iterable provider of (from_idtype, to_idtype, mapper) triples."""

    def __init__(self):
        self._mappings = list(_discover_mappings())

    def __iter__(self):
        triples = ((m.from_idtype, m.to_idtype, m) for m in self._mappings)
        return iter(triples)
def create():
    """Plugin entry point: build the SQL-backed mapping provider."""
    return SQLMappingProvider()
| import logging
from . import db
import itertools
_log = logging.getLogger(__name__)
class SQLMappingTable(object):
    """ID mapping backed by a SQL query on a configured database engine."""

    def __init__(self, mapping, engine):
        self.from_idtype = mapping.from_idtype
        self.to_idtype = mapping.to_idtype
        self._engine = engine
        self._query = mapping.query
        self._integer_ids = mapping.integer_ids

    def __call__(self, ids):
        """Map each id in *ids* to a (possibly empty) list of target ids."""
        # Materialize the input so one-shot iterators can be traversed
        # twice (once for the query, once to order the results).
        # Removed: a leftover debug print(type(ids)) and dead commented code.
        ids = [i for i in ids]
        if self._integer_ids:  # convert to integer ids
            ids = [int(i) for i in ids]
        with db.session(self._engine) as session:
            mapped = session.execute(self._query, ids=ids)
        # handle multi mappings: group the (f, t) rows by their source id
        data = sorted(mapped, key=lambda x: x['f'])
        grouped = {k: [r['t'] for r in g] for k, g in itertools.groupby(data, lambda x: x['f'])}
        return [grouped.get(id, []) for id in ids]
def _discover_mappings():
    """Yield a SQLMappingTable for every mapping declared on any connector."""
    for k, connector in db.configs.connectors.items():
        if not connector.mappings:
            continue
        engine = db.configs.engine(k)
        for mapping in connector.mappings:
            _log.info('registering %s to %s', mapping.from_idtype, mapping.to_idtype)
            yield SQLMappingTable(mapping, engine)
class SQLMappingProvider(object):
    """Iterable provider of (from_idtype, to_idtype, mapper) triples."""

    def __init__(self):
        self._mappings = list(_discover_mappings())

    def __iter__(self):
        return iter(((f.from_idtype, f.to_idtype, f) for f in self._mappings))
def create():
    """Plugin entry point: build the SQL-backed mapping provider."""
    return SQLMappingProvider()
| Python | 0 |
1ce24bd04f4b217e560707bd699bbeb6fe14fe09 | username should be case insensitive | timed/authentication.py | timed/authentication.py | import base64
import functools
import hashlib
import requests
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import SuspiciousOperation
from django.utils.encoding import force_bytes
from mozilla_django_oidc.auth import LOGGER, OIDCAuthenticationBackend
class TimedOIDCAuthenticationBackend(OIDCAuthenticationBackend):
    """OIDC backend with an introspection fallback for confidential clients."""

    def get_introspection(self, access_token, id_token, payload):
        """Return user details dictionary."""
        # HTTP basic credentials of the introspection client.
        basic = base64.b64encode(
            f"{settings.OIDC_RP_INTROSPECT_CLIENT_ID}:{settings.OIDC_RP_INTROSPECT_CLIENT_SECRET}".encode(
                "utf-8"
            )
        ).decode()
        headers = {
            "Authorization": f"Basic {basic}",
            "Content-Type": "application/x-www-form-urlencoded",
        }
        response = requests.post(
            settings.OIDC_OP_INTROSPECT_ENDPOINT,
            verify=settings.OIDC_VERIFY_SSL,
            headers=headers,
            data={"token": access_token},
        )
        response.raise_for_status()
        return response.json()

    def get_userinfo_or_introspection(self, access_token):
        """Return claims via userinfo, falling back to token introspection."""
        try:
            claims = self.cached_request(
                self.get_userinfo, access_token, "auth.userinfo"
            )
        except requests.HTTPError as e:
            # Only 401/403 (and with introspection enabled) triggers the
            # fallback; any other HTTP failure propagates.
            if not (
                e.response.status_code in [401, 403] and settings.OIDC_CHECK_INTROSPECT
            ):
                raise e
            # check introspection if userinfo fails (confidential client)
            claims = self.cached_request(
                self.get_introspection, access_token, "auth.introspection"
            )
            if "client_id" not in claims:
                raise SuspiciousOperation("client_id not present in introspection")
        return claims

    def get_or_create_user(self, access_token, id_token, payload):
        """Verify claims and return user, otherwise raise an Exception."""
        claims = self.get_userinfo_or_introspection(access_token)
        users = self.filter_users_by_claims(claims)
        if len(users) == 1:
            user = users.get()
            self.update_user_from_claims(user, claims)
            return user
        elif settings.OIDC_CREATE_USER:
            return self.create_user(claims)
        else:
            LOGGER.debug(
                "Login failed: No user with username %s found, and "
                "OIDC_CREATE_USER is False",
                self.get_username(claims),
            )
            return None

    def update_user_from_claims(self, user, claims):
        """Sync email and names from the claims onto *user* and save it."""
        user.email = claims.get(settings.OIDC_EMAIL_CLAIM, "")
        user.first_name = claims.get(settings.OIDC_FIRSTNAME_CLAIM, "")
        user.last_name = claims.get(settings.OIDC_LASTNAME_CLAIM, "")
        user.save()

    def filter_users_by_claims(self, claims):
        """Match users by username, case-insensitively."""
        username = self.get_username(claims)
        return self.UserModel.objects.filter(username__iexact=username)

    def cached_request(self, method, token, cache_prefix):
        """Call *method* for *token*, caching the result keyed by token hash."""
        token_hash = hashlib.sha256(force_bytes(token)).hexdigest()
        func = functools.partial(method, token, None, None)
        return cache.get_or_set(
            f"{cache_prefix}.{token_hash}",
            func,
            timeout=settings.OIDC_BEARER_TOKEN_REVALIDATION_TIME,
        )

    def create_user(self, claims):
        """Return object for a newly created user account."""
        username = self.get_username(claims)
        email = claims.get(settings.OIDC_EMAIL_CLAIM, "")
        first_name = claims.get(settings.OIDC_FIRSTNAME_CLAIM, "")
        last_name = claims.get(settings.OIDC_LASTNAME_CLAIM, "")
        return self.UserModel.objects.create(
            username=username, email=email, first_name=first_name, last_name=last_name
        )

    def get_username(self, claims):
        """Extract the configured username claim or raise SuspiciousOperation."""
        try:
            return claims[settings.OIDC_USERNAME_CLAIM]
        except KeyError:
            raise SuspiciousOperation("Couldn't find username claim")
| import base64
import functools
import hashlib
import requests
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import SuspiciousOperation
from django.utils.encoding import force_bytes
from mozilla_django_oidc.auth import LOGGER, OIDCAuthenticationBackend
class TimedOIDCAuthenticationBackend(OIDCAuthenticationBackend):
    """OIDC backend with an introspection fallback for confidential clients."""

    def get_introspection(self, access_token, id_token, payload):
        """Return user details dictionary."""
        # HTTP basic credentials of the introspection client.
        basic = base64.b64encode(
            f"{settings.OIDC_RP_INTROSPECT_CLIENT_ID}:{settings.OIDC_RP_INTROSPECT_CLIENT_SECRET}".encode(
                "utf-8"
            )
        ).decode()
        headers = {
            "Authorization": f"Basic {basic}",
            "Content-Type": "application/x-www-form-urlencoded",
        }
        response = requests.post(
            settings.OIDC_OP_INTROSPECT_ENDPOINT,
            verify=settings.OIDC_VERIFY_SSL,
            headers=headers,
            data={"token": access_token},
        )
        response.raise_for_status()
        return response.json()

    def get_userinfo_or_introspection(self, access_token):
        """Return claims via userinfo, falling back to token introspection."""
        try:
            claims = self.cached_request(
                self.get_userinfo, access_token, "auth.userinfo"
            )
        except requests.HTTPError as e:
            # Only 401/403 (and with introspection enabled) triggers the
            # fallback; any other HTTP failure propagates.
            if not (
                e.response.status_code in [401, 403] and settings.OIDC_CHECK_INTROSPECT
            ):
                raise e
            # check introspection if userinfo fails (confidential client)
            claims = self.cached_request(
                self.get_introspection, access_token, "auth.introspection"
            )
            if "client_id" not in claims:
                raise SuspiciousOperation("client_id not present in introspection")
        return claims

    def get_or_create_user(self, access_token, id_token, payload):
        """Verify claims and return user, otherwise raise an Exception."""
        claims = self.get_userinfo_or_introspection(access_token)
        users = self.filter_users_by_claims(claims)
        if len(users) == 1:
            user = users.get()
            self.update_user_from_claims(user, claims)
            return user
        elif settings.OIDC_CREATE_USER:
            return self.create_user(claims)
        else:
            LOGGER.debug(
                "Login failed: No user with username %s found, and "
                "OIDC_CREATE_USER is False",
                self.get_username(claims),
            )
            return None

    def update_user_from_claims(self, user, claims):
        """Sync email and names from the claims onto *user* and save it."""
        user.email = claims.get(settings.OIDC_EMAIL_CLAIM, "")
        user.first_name = claims.get(settings.OIDC_FIRSTNAME_CLAIM, "")
        user.last_name = claims.get(settings.OIDC_LASTNAME_CLAIM, "")
        user.save()

    def filter_users_by_claims(self, claims):
        """Match users by exact, case-sensitive username."""
        username = self.get_username(claims)
        return self.UserModel.objects.filter(username=username)

    def cached_request(self, method, token, cache_prefix):
        """Call *method* for *token*, caching the result keyed by token hash."""
        token_hash = hashlib.sha256(force_bytes(token)).hexdigest()
        func = functools.partial(method, token, None, None)
        return cache.get_or_set(
            f"{cache_prefix}.{token_hash}",
            func,
            timeout=settings.OIDC_BEARER_TOKEN_REVALIDATION_TIME,
        )

    def create_user(self, claims):
        """Return object for a newly created user account."""
        username = self.get_username(claims)
        email = claims.get(settings.OIDC_EMAIL_CLAIM, "")
        first_name = claims.get(settings.OIDC_FIRSTNAME_CLAIM, "")
        last_name = claims.get(settings.OIDC_LASTNAME_CLAIM, "")
        return self.UserModel.objects.create(
            username=username, email=email, first_name=first_name, last_name=last_name
        )

    def get_username(self, claims):
        """Extract the configured username claim or raise SuspiciousOperation."""
        try:
            return claims[settings.OIDC_USERNAME_CLAIM]
        except KeyError:
            raise SuspiciousOperation("Couldn't find username claim")
| Python | 0.999949 |
f14c5c9e4a3c7d196421ce3d60ec64fdee4749dd | make arguments consistent | src/redditquery/parse.py | src/redditquery/parse.py | #!/usr/bin/python3
import os
import argparse
def parser():
    """Parse arguments from the command line using argparse.

    Returns the configured ``argparse.ArgumentParser``.
    """
    # default directory for reddit files
    default_directory = os.path.join(os.getcwd(), "data")
    parser = argparse.ArgumentParser()
    # obligatory
    parser.add_argument("mode", type = int, help = "execution mode: 1 build index, 2: query using existing index, 3 build and query")
    # conditionally obligatory
    parser.add_argument("--first", "-f", type = str, help = "first year/month")
    parser.add_argument("--last", "-l", type = str, help = "last year/month")
    # optional with defaults
    parser.add_argument("--dir", "-d", type = str, nargs = "?", default = default_directory, help = "directory for data storage")
    parser.add_argument("--num", "-n", type = int, nargs = "?", default = 10, help = "number of results per query")
    parser.add_argument("--cores", "-c", type = int, nargs = "?", default = 1, help = "number of cores to use")
    parser.add_argument("--minfreq", "-m", type = int, nargs = "?", default = 5, help = "minimum term frequency")
    parser.add_argument("--progress", "-p", action = "store_true", help = "report progress")
    return parser | #!/usr/bin/python3
import os
import argparse
def parser():
    """Parse arguments from the command line using argparse.

    Returns the configured ``argparse.ArgumentParser``.
    """
    # default directory for reddit files
    default_directory = os.path.join(os.getcwd(), "data")
    parser = argparse.ArgumentParser()
    # obligatory
    parser.add_argument("mode", type = int, help = "execution mode: 1 build index, 2: query using existing index, 3 build and query")
    # conditionally obligatory
    parser.add_argument("-f", "--first", type = str, help = "first year/month")
    parser.add_argument("-l", "--last", type = str, help = "last year/month")
    # optional with defaults
    parser.add_argument("--dir", "-d", type = str, nargs = "?", default = default_directory, help = "directory for data storage")
    parser.add_argument("--num", "-n", type = int, nargs = "?", default = 10, help = "number of results per query")
    parser.add_argument("--cores", "-c", type = int, nargs = "?", default = 1, help = "number of cores to use")
    parser.add_argument("--minfreq", "-m", type = int, nargs = "?", default = 5, help = "minimum term frequency")
    parser.add_argument("--progress", "-p", action = "store_true", help = "report progress")
    return parser | Python | 0.999824 |
60202e6a4b51fb68045ee1df859c0827f5b770e4 | debug info | src/zeit/content/article/edit/body.py | src/zeit/content/article/edit/body.py | # Copyright (c) 2010 gocept gmbh & co. kg
# See also LICENSE.txt
import gocept.lxml.interfaces
import grokcore.component
import lxml.objectify
import uuid
import z3c.traverser.interfaces
import zeit.content.article.edit.interfaces
import zeit.content.article.interfaces
import zeit.edit.container
import zope.publisher.interfaces
editable_body_name = 'editable-body'
class EditableBody(zeit.edit.container.Base,
                   grokcore.component.MultiAdapter):
    """Edit-container adapter around an article's <body> XML node."""

    grokcore.component.implements(
        zeit.content.article.edit.interfaces.IEditableBody)
    grokcore.component.provides(
        zeit.content.article.edit.interfaces.IEditableBody)
    grokcore.component.adapts(zeit.content.article.interfaces.IArticle,
                              gocept.lxml.interfaces.IObjectified)

    __name__ = editable_body_name

    # XPath locating a child element by its cms:__name__ key.
    # NOTE(review): uses lxml.etree although this module only imports
    # lxml.objectify -- confirm lxml.etree is reachable through it.
    _find_item = lxml.etree.XPath(
        './/*[@cms:__name__ = $name]',
        namespaces=dict(
            cms='http://namespaces.zeit.de/CMS/cp'))

    def _set_default_key(self, xml_node):
        """Return the node's cms:__name__ key, generating a UUID if missing."""
        key = xml_node.get('{http://namespaces.zeit.de/CMS/cp}__name__')
        if not key:
            key = str(uuid.uuid4())
            xml_node.set('{http://namespaces.zeit.de/CMS/cp}__name__',
                         key)
            self._p_changed = True
        return key

    def _get_keys(self, xml_node):
        """Collect keys of editable divisions and their children."""
        # XXX this is much too simple and needs work. and tests.
        result = []
        for didx, division in enumerate(
            xml_node.xpath('division[@type="page"]'), start=1):
            key = self._set_default_key(division)
            if didx > 1:
                # Skip the first division as it isn't editable
                result.append(key)
            for child in division.iterchildren():
                result.append(self._set_default_key(child))
        return result

    def _get_element_type(self, xml_node):
        """The element type is simply the XML tag name."""
        return xml_node.tag

    def _add(self, item):
        """Insert *item*'s XML into the last division, assigning a key."""
        # Add to last division instead of self.xml
        name = item.__name__
        if name:
            if name in self:
                raise zope.container.interfaces.DuplicateIDError(name)
        else:
            name = str(uuid.uuid4())
            item.__name__ = name
        self.xml.division[:][-1].append(item.xml)
        return name

    def _delete(self, key):
        """Remove the item for *key*; a division's children move to the
        previous division first."""
        __traceback_info__ = (key,)
        item = self[key]
        if zeit.content.article.edit.interfaces.IDivision.providedBy(item):
            # Move contained elements to previous devision
            prev = item.xml.getprevious()
            for child in item.xml.iterchildren():
                prev.append(child)
        item.xml.getparent().remove(item.xml)
        return item
@grokcore.component.adapter(zeit.content.article.interfaces.IArticle)
@grokcore.component.implementer(
    zeit.content.article.edit.interfaces.IEditableBody)
def get_editable_body(article):
    """Adapt an article to its editable body; may return None."""
    # NOTE(review): zope.component and zope.security are not among this
    # module's visible imports -- confirm they are available.
    return zope.component.queryMultiAdapter(
        (article,
         zope.security.proxy.removeSecurityProxy(article.xml['body'])),
        zeit.content.article.edit.interfaces.IEditableBody)
class BodyTraverser(object):
    """Pluggable traverser exposing the editable body under its URL name."""

    zope.interface.implements(z3c.traverser.interfaces.IPluggableTraverser)

    def __init__(self, context, request):
        self.context = context
        self.request = request

    def publishTraverse(self, request, name):
        """Return the editable body for the well-known name, else 404."""
        if name != editable_body_name:
            raise zope.publisher.interfaces.NotFound(
                self.context, name, request)
        body = zeit.content.article.edit.interfaces.IEditableBody(
            self.context, None)
        if body is None:
            raise zope.publisher.interfaces.NotFound(
                self.context, name, request)
        return body
| # Copyright (c) 2010 gocept gmbh & co. kg
# See also LICENSE.txt
import gocept.lxml.interfaces
import grokcore.component
import lxml.objectify
import uuid
import z3c.traverser.interfaces
import zeit.content.article.edit.interfaces
import zeit.content.article.interfaces
import zeit.edit.container
import zope.publisher.interfaces
editable_body_name = 'editable-body'
class EditableBody(zeit.edit.container.Base,
                   grokcore.component.MultiAdapter):
    """Edit-container adapter around an article's <body> XML node."""

    grokcore.component.implements(
        zeit.content.article.edit.interfaces.IEditableBody)
    grokcore.component.provides(
        zeit.content.article.edit.interfaces.IEditableBody)
    grokcore.component.adapts(zeit.content.article.interfaces.IArticle,
                              gocept.lxml.interfaces.IObjectified)

    __name__ = editable_body_name

    # XPath locating a child element by its cms:__name__ key.
    # NOTE(review): uses lxml.etree although this module only imports
    # lxml.objectify -- confirm lxml.etree is reachable through it.
    _find_item = lxml.etree.XPath(
        './/*[@cms:__name__ = $name]',
        namespaces=dict(
            cms='http://namespaces.zeit.de/CMS/cp'))

    def _set_default_key(self, xml_node):
        """Return the node's cms:__name__ key, generating a UUID if missing."""
        key = xml_node.get('{http://namespaces.zeit.de/CMS/cp}__name__')
        if not key:
            key = str(uuid.uuid4())
            xml_node.set('{http://namespaces.zeit.de/CMS/cp}__name__',
                         key)
            self._p_changed = True
        return key

    def _get_keys(self, xml_node):
        """Collect keys of editable divisions and their children."""
        # XXX this is much too simple and needs work. and tests.
        result = []
        for didx, division in enumerate(
            xml_node.xpath('division[@type="page"]'), start=1):
            key = self._set_default_key(division)
            if didx > 1:
                # Skip the first division as it isn't editable
                result.append(key)
            for child in division.iterchildren():
                result.append(self._set_default_key(child))
        return result

    def _get_element_type(self, xml_node):
        """The element type is simply the XML tag name."""
        return xml_node.tag

    def _add(self, item):
        """Insert *item*'s XML into the last division, assigning a key."""
        # Add to last division instead of self.xml
        name = item.__name__
        if name:
            if name in self:
                raise zope.container.interfaces.DuplicateIDError(name)
        else:
            name = str(uuid.uuid4())
            item.__name__ = name
        self.xml.division[:][-1].append(item.xml)
        return name

    def _delete(self, key):
        """Remove the item for *key*; a division's children move to the
        previous division first."""
        item = self[key]
        if zeit.content.article.edit.interfaces.IDivision.providedBy(item):
            # Move contained elements to previous devision
            prev = item.xml.getprevious()
            for child in item.xml.iterchildren():
                prev.append(child)
        item.xml.getparent().remove(item.xml)
        return item
@grokcore.component.adapter(zeit.content.article.interfaces.IArticle)
@grokcore.component.implementer(
    zeit.content.article.edit.interfaces.IEditableBody)
def get_editable_body(article):
    """Adapt an article to its editable body; may return None."""
    # NOTE(review): zope.component and zope.security are not among this
    # module's visible imports -- confirm they are available.
    return zope.component.queryMultiAdapter(
        (article,
         zope.security.proxy.removeSecurityProxy(article.xml['body'])),
        zeit.content.article.edit.interfaces.IEditableBody)
class BodyTraverser(object):
    """Pluggable traverser exposing the editable body under its URL name."""

    zope.interface.implements(z3c.traverser.interfaces.IPluggableTraverser)

    def __init__(self, context, request):
        self.context = context
        self.request = request

    def publishTraverse(self, request, name):
        """Return the editable body for the well-known name, else 404."""
        if name == editable_body_name:
            body = zeit.content.article.edit.interfaces.IEditableBody(
                self.context, None)
            if body is not None:
                return body
        raise zope.publisher.interfaces.NotFound(self.context, name, request)
| Python | 0.000001 |
deeb9a1cc773e7af4c539d3f451ab927ecea29ed | Check for uploader | whippersnapper/whippersnapper.py | whippersnapper/whippersnapper.py | #!/usr/bin/env python
import logging
import os
import subprocess
import sys
import time
import yaml
import screenshotter
import uploader
class WhipperSnapper(object):
    """
    Implements all screenshot-related logic.
    """

    def __init__(self):
        # Expect exactly one CLI argument: the config file path.
        if len(sys.argv) != 2:
            self.usage()
            sys.exit(1)
        config_filepath = sys.argv[1]
        self.config = self.load_config(config_filepath)
        self.log_file = self.init_logging()
        self.screenshotter = screenshotter.Screenshotter(self.config)
        # self.uploader only exists when uploads are enabled; main()
        # checks for the attribute before using it.
        if not self.config.get('skip_upload'):
            self.uploader = uploader.Uploader(self.config)

    def main(self):
        """
        Runs through the full screenshot process.
        """
        print """
Whippersnapper is running. To view its log file:
    tail -f %s
To quit, press ^C (ctrl-C).""" % (self.log_file)
        # Loop forever: screenshot, optionally upload/clean up, then sleep.
        while True:
            targets = self.screenshotter.take_screenshots()
            if hasattr(self, 'uploader'):
                self.uploader.upload_screenshots(targets)
            # TODO Image delete code probably doesn't belong here
            if (self.config.get('delete_local_images')):
                [os.remove(path.get('local_filepath')) for path in targets]
            time.sleep(self.config.get('time_between_screenshots'))

    def init_logging(self):
        """
        Create a log file, and attach a basic logger to it.
        """
        log_file = os.path.abspath(os.path.expanduser(self.config.get('log_file')))
        # Create the log file if it does not yet exist
        with open(log_file, 'a+'):
            pass
        logging.basicConfig(filename=log_file,
                            format='%(levelname)s:%(asctime)s %(message)s',
                            level=logging.INFO)
        return log_file

    def load_config(self, config_filepath):
        """
        Load configuration from config.yaml.
        Many options have defaults; use these unless they are overwritten in
        config.yaml. This file includes the urls, css selectors and slugs for
        the targets to screenshot.
        """
        log_file = os.path.abspath(os.path.expanduser(os.path.dirname(
            os.path.abspath(__file__)) + '/../whippersnapper.log'))
        config = {
            'skip_upload': False,
            'aws_bucket': '',
            'aws_subpath': '',
            'aws_access_key': None,
            'aws_secret_key': None,
            'log_file': log_file,
            'delete_local_images': False,
            'time_between_screenshots': 60,
            'override_css_file': None,
            'page_load_delay': 2,
            'wait_for_js_signal': False,
            'failure_timeout': 30,
        }
        required = (
            'targets',
            'local_image_directory',
        )
        raw_config = None
        # NOTE(review): yaml.load without a Loader can execute arbitrary
        # tags; prefer yaml.safe_load for untrusted config files.
        with open(config_filepath) as f:
            raw_config = yaml.load(f)
        for option_name, option_value in raw_config.iteritems():
            config[option_name] = option_value
        # Re-read the required keys so a missing one raises a clear error.
        for option in required:
            try:
                config[option] = raw_config[option]
            except KeyError:
                raise RuntimeError('Config is missing required attribute: %s'
                                   % option)
        return config

    def usage(self):
        config_template_file = os.path.abspath(os.path.expanduser(
            os.path.dirname(os.path.abspath(__file__))
            + '/config_templates/config.yaml.template'))
        # NOTE(review): the docstring below is misplaced -- it follows the
        # first statement, so Python treats it as a bare expression.
        """
        Print usage information.
        """
        print """
USAGE: whippersnapper CONFIG_FILEPATH
To see an example config file:
    cat %s
""" % config_template_file
def launch_new_instance():
    """
    Launch an instance of Whippersnapper.

    Ctrl-C exits cleanly instead of dumping a traceback.
    """
    try:
        s = WhipperSnapper()
        s.main()
    except KeyboardInterrupt:
        # Print a blank line
        print
# Script entry point.
if __name__ == '__main__':
    launch_new_instance()
| #!/usr/bin/env python
import logging
import os
import subprocess
import sys
import time
import yaml
import screenshotter
import uploader
class WhipperSnapper(object):
    """
    Implements all screenshot-related logic.
    """

    def __init__(self):
        # Expect exactly one CLI argument: the config file path.
        if len(sys.argv) != 2:
            self.usage()
            sys.exit(1)
        config_filepath = sys.argv[1]
        self.config = self.load_config(config_filepath)
        self.log_file = self.init_logging()
        self.screenshotter = screenshotter.Screenshotter(self.config)
        if not self.config.get('skip_upload'):
            self.uploader = uploader.Uploader(self.config)

    def main(self):
        """
        Runs through the full screenshot process.
        """
        print """
Whippersnapper is running. To view its log file:
    tail -f %s
To quit, press ^C (ctrl-C).""" % (self.log_file)
        while True:
            targets = self.screenshotter.take_screenshots()
            # NOTE(review): self.uploader is only set in __init__ when
            # skip_upload is false, but it is used unconditionally here --
            # AttributeError when uploads are skipped.
            self.uploader.upload_screenshots(targets)
            # TODO Image delete code probably doesn't belong here
            if (self.config.get('delete_local_images')):
                [os.remove(path.get('local_filepath')) for path in targets]
            time.sleep(self.config.get('time_between_screenshots'))

    def init_logging(self):
        """
        Create a log file, and attach a basic logger to it.
        """
        log_file = os.path.abspath(os.path.expanduser(self.config.get('log_file')))
        # Create the log file if it does not yet exist
        with open(log_file, 'a+'):
            pass
        logging.basicConfig(filename=log_file,
                            format='%(levelname)s:%(asctime)s %(message)s',
                            level=logging.INFO)
        return log_file

    def load_config(self, config_filepath):
        """
        Load configuration from config.yaml.
        Many options have defaults; use these unless they are overwritten in
        config.yaml. This file includes the urls, css selectors and slugs for
        the targets to screenshot.
        """
        log_file = os.path.abspath(os.path.expanduser(os.path.dirname(
            os.path.abspath(__file__)) + '/../whippersnapper.log'))
        config = {
            'skip_upload': False,
            'aws_bucket': '',
            'aws_subpath': '',
            'aws_access_key': None,
            'aws_secret_key': None,
            'log_file': log_file,
            'delete_local_images': False,
            'time_between_screenshots': 60,
            'override_css_file': None,
            'page_load_delay': 2,
            'wait_for_js_signal': False,
            'failure_timeout': 30,
        }
        required = (
            'targets',
            'local_image_directory',
        )
        raw_config = None
        # NOTE(review): yaml.load without a Loader can execute arbitrary
        # tags; prefer yaml.safe_load for untrusted config files.
        with open(config_filepath) as f:
            raw_config = yaml.load(f)
        for option_name, option_value in raw_config.iteritems():
            config[option_name] = option_value
        # Re-read the required keys so a missing one raises a clear error.
        for option in required:
            try:
                config[option] = raw_config[option]
            except KeyError:
                raise RuntimeError('Config is missing required attribute: %s'
                                   % option)
        return config

    def usage(self):
        config_template_file = os.path.abspath(os.path.expanduser(
            os.path.dirname(os.path.abspath(__file__))
            + '/config_templates/config.yaml.template'))
        # NOTE(review): the docstring below is misplaced -- it follows the
        # first statement, so Python treats it as a bare expression.
        """
        Print usage information.
        """
        print """
USAGE: whippersnapper CONFIG_FILEPATH
To see an example config file:
    cat %s
""" % config_template_file
def launch_new_instance():
    """
    Launch an instance of Whippersnapper.

    Ctrl-C exits cleanly instead of dumping a traceback.
    """
    try:
        s = WhipperSnapper()
        s.main()
    except KeyboardInterrupt:
        # Print a blank line
        print
# Script entry point.
if __name__ == '__main__':
    launch_new_instance()
| Python | 0 |
2797797497f4f5ad606764815b334321732bef3b | Rename fibonacci() to fibonacci_recur() | alg_fibonacci.py | alg_fibonacci.py | """Fibonacci series:
0, 1, 1, 2, 3, 5, 8,...
- Fib(0) = 0
- Fib(1) = 1
- Fib(n) = Fib(n - 1) + Fib(n - 2)
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def fibonacci_recur(n):
    """Get nth number of Fibonacci series by recursion.

    Raises ValueError for negative ``n`` (previously a negative ``n`` was
    silently returned as-is, which is not a Fibonacci number).
    """
    if n < 0:
        raise ValueError('n must be non-negative')
    if n <= 1:
        # Base cases: Fib(0) == 0, Fib(1) == 1.
        return n
    return fibonacci_recur(n - 1) + fibonacci_recur(n - 2)
def main():
    """Print the 13th Fibonacci number and how long the recursion took."""
    import time
    n = 13
    start_time = time.time()
    print('{}th number of Fibonacci series by recursion: {}'
          .format(n, fibonacci_recur(n)))
    print('Time: {}'.format(time.time() - start_time))
# Script entry point.
if __name__ == '__main__':
    main()
| """Fibonacci series:
0, 1, 1, 2, 3, 5, 8,...
- Fib(0) = 0
- Fib(1) = 1
- Fib(n) = Fib(n - 1) + Fib(n - 2)
"""
from __future__ import print_function
def fibonacci(n):
    """Get nth number of Fibonacci series by recursion.

    Raises ValueError for negative ``n``; previously a negative ``n``
    fell through to the recursive branch and recursed without end.
    """
    if n < 0:
        raise ValueError('n must be non-negative')
    if n == 0:
        return 0
    elif n == 1 or n == 2:
        return 1
    else:
        return fibonacci(n - 1) + fibonacci(n - 2)
def main():
    """Print the 13th Fibonacci number."""
    import time
    n = 13
    print('{}th number of Fibonacci series: {}'
          .format(n, fibonacci(n)))
# Script entry point.
if __name__ == '__main__':
    main()
| Python | 0.999999 |
0d7dc04a4e0c31924e64f8e2b8ed9da25e2b64ce | Fix PEP8 issues | wikidataeditor/wikidataeditor.py | wikidataeditor/wikidataeditor.py | # encoding=utf8
# @author Dan Michael O. Heggø <danmichaelo@gmail.com>
import requests
import logging
import time
import re
import json
from item import Item
__ver__ = '0.0.1'
logger = logging.getLogger('wikidataeditor')
class Repo:
    def __init__(self, user_agent,
                 api_url='https://www.wikidata.org/w/api.php'):
        """Create an API session identified by *user_agent* against *api_url*."""
        self.session = requests.Session()
        self.session.headers.update({'User-Agent': user_agent})
        self.api_url = api_url
        # Respect https://www.mediawiki.org/wiki/Maxlag
        lps = r'Waiting for [^ ]*: (?P<lag>[0-9.]+) seconds? lagged'
        self.lagpattern = re.compile(lps)
@property
def user_agent(self):
return self.session.headers.get('User-Agent')
def raw_api_call(self, args):
while True:
url = self.api_url
args['format'] = 'json'
args['maxlag'] = 5
# print args
# for k, v in args.iteritems():
# if type(v) == unicode:
# args[k] = v.encode('utf-8')
# else:
# args[k] = v
# data = urllib.urlencode(args)
logger.debug(args)
response = self.session.post(url, data=args)
response = json.loads(response.text)
logger.debug(response)
if 'error' not in response:
return response
code = response['error'].pop('code', 'Unknown')
info = response['error'].pop('info', '')
if code == 'maxlag':
lag = self.lagpattern.search(info)
if lag:
logger.warn('Pausing due to database lag: %s', info)
time.sleep(int(lag.group('lag')))
continue
logger.error("Unknown API error: %s\n%s\nResponse:\n%s",
info,
json.dumps(args, indent="\t"),
json.dumps(response, indent="\t"))
return response
# sys.exit(1)
def login(self, user, pwd):
args = {
'action': 'login',
'lgname': user,
'lgpassword': pwd
}
response = self.raw_api_call(args)
if response['login']['result'] == 'NeedToken':
args['lgtoken'] = response['login']['token']
response = self.raw_api_call(args)
return (response['login']['result'] == 'Success')
def item(self, entity):
return Item(self, entity)
def pageinfo(self, entity):
args = {
'action': 'query',
'prop': 'info',
'intoken': 'edit',
'titles': entity
}
return self.raw_api_call(args)
def get_entities(self, site, page):
args = {
'action': 'wbgetentities',
'sites': site,
'titles': page
}
return self.raw_api_call(args)
def add_entity(self, site, lang, title):
args = {
'new': 'item',
'data': {
'sitelinks': {site: {'site': site, 'title': title}},
'labels': {lang: {'language': lang, 'value': title}}
}
}
logger.info(' Adding entity for %s:%s', site, title)
time.sleep(3)
return self.edit_entity(**args)
def edit_entity(self, data={}, site=None, title=None, new=None,
summary=None):
response = self.pageinfo('DUMMY')
itm = response['query']['pages'].items()[0][1]
edittoken = itm['edittoken']
args = {
'action': 'wbeditentity',
'bot': 1,
'data': json.dumps(data),
'token': edittoken
}
if site:
args['site'] = site
if title:
args['title'] = title
if new:
args['new'] = new
if summary:
args['summary'] = summary
response = self.raw_api_call(args)
return response
| # encoding=utf8
# @author Dan Michael O. Heggø <danmichaelo@gmail.com>
__ver__ = '0.0.1'
import requests
import logging
import time
import re
import json
from item import Item
logger = logging.getLogger('wikidataeditor')
class Repo:
def __init__(self, user_agent,
api_url='https://www.wikidata.org/w/api.php'):
self.session = requests.Session()
self.session.headers.update({'User-Agent': user_agent})
self.api_url = api_url
# Respect https://www.mediawiki.org/wiki/Maxlag
lps = r'Waiting for [^ ]*: (?P<lag>[0-9.]+) seconds? lagged'
self.lagpattern = re.compile(lps)
@property
def user_agent(self):
return self.session.headers.get('User-Agent')
def raw_api_call(self, args):
while True:
url = self.api_url
args['format'] = 'json'
args['maxlag'] = 5
# print args
# for k, v in args.iteritems():
# if type(v) == unicode:
# args[k] = v.encode('utf-8')
# else:
# args[k] = v
# data = urllib.urlencode(args)
logger.debug(args)
response = self.session.post(url, data=args)
response = json.loads(response.text)
logger.debug(response)
if 'error' not in response:
return response
code = response['error'].pop('code', 'Unknown')
info = response['error'].pop('info', '')
if code == 'maxlag':
lag = self.lagpattern.search(info)
if lag:
logger.warn('Pausing due to database lag: %s', info)
time.sleep(int(lag.group('lag')))
continue
logger.error("Unknown API error: %s\n%s\nResponse:\n%s",
info,
json.dumps(args, indent="\t"),
json.dumps(response, indent="\t"))
return response
# sys.exit(1)
def login(self, user, pwd):
args = {
'action': 'login',
'lgname': user,
'lgpassword': pwd
}
response = self.raw_api_call(args)
if response['login']['result'] == 'NeedToken':
args['lgtoken'] = response['login']['token']
response = self.raw_api_call(args)
return (response['login']['result'] == 'Success')
def item(self, entity):
return Item(self, entity)
def pageinfo(self, entity):
args = {
'action': 'query',
'prop': 'info',
'intoken': 'edit',
'titles': entity
}
return self.raw_api_call(args)
def get_entities(self, site, page):
args = {
'action': 'wbgetentities',
'sites': site,
'titles': page
}
return self.raw_api_call(args)
def add_entity(self, site, lang, title):
args = {
'new': 'item',
'data': {
'sitelinks': {site: {'site': site, 'title': title}},
'labels': {lang: {'language': lang, 'value': title}}
}
}
logger.info(' Adding entity for %s:%s', site, title)
time.sleep(3)
return self.edit_entity(**args)
def edit_entity(self, data={}, site=None, title=None, new=None,
summary=None):
response = self.pageinfo('DUMMY')
itm = response['query']['pages'].items()[0][1]
edittoken = itm['edittoken']
args = {
'action': 'wbeditentity',
'bot': 1,
'data': json.dumps(data),
'token': edittoken
}
if site:
args['site'] = site
if title:
args['title'] = title
if new:
args['new'] = new
if summary:
args['summary'] = summary
response = self.raw_api_call(args)
return response
| Python | 0.000001 |
ec1d0b5673ef0eca398715eb1f48f1a99f427cca | Format detect_targets.py | tools/detect_targets.py | tools/detect_targets.py | #! /usr/bin/env python2
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, ROOT)
from tools.options import get_default_options_parser
# Check: Extra modules which are required by core test suite
from tools.utils import check_required_modules
check_required_modules(['prettytable'])
# Imports related to mbed build api
from tools.build_api import mcu_toolchain_matrix
from tools.test_api import get_autodetected_MUTS_list
def main():
"""Entry Point"""
try:
# Parse Options
parser = get_default_options_parser()
parser.add_argument("-S", "--supported-toolchains",
action="store_true",
dest="supported_toolchains",
default=False,
help="Displays supported matrix of"
" targets and toolchains")
parser.add_argument('-f', '--filter',
dest='general_filter_regex',
default=None,
help='Filter targets')
parser.add_argument("-v", "--verbose",
action="store_true",
dest="verbose",
default=False,
help="Verbose diagnostic output")
options = parser.parse_args()
# Only prints matrix of supported toolchains
if options.supported_toolchains:
print mcu_toolchain_matrix(
platform_filter=options.general_filter_regex)
exit(0)
# If auto_detect attribute is present, we assume other auto-detection
# parameters like 'toolchains_filter' are also set.
muts = get_autodetected_MUTS_list()
count = 0
for mut in muts.values():
print ""
print "[mbed] Detected %s, port %s, mounted %s" % \
(mut['mcu'], mut['port'], mut['disk'])
print "[mbed] Supported toolchains for %s" % mut['mcu']
print mcu_toolchain_matrix(platform_filter=r'^'+mut['mcu']+'$')
count += 1
if count == 0:
print "[mbed] No mbed targets where detected on your system."
except KeyboardInterrupt:
print "\n[CTRL+c] exit"
except Exception as exc:
import traceback
traceback.print_exc(file=sys.stdout)
print "[ERROR] %s" % str(exc)
sys.exit(1)
if __name__ == '__main__':
main()
| #! /usr/bin/env python2
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
TEST BUILD & RUN
"""
import sys
import os
import json
ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, ROOT)
from tools.options import get_default_options_parser
# Check: Extra modules which are required by core test suite
from tools.utils import check_required_modules
check_required_modules(['prettytable'])
# Imports related to mbed build api
from tools.build_api import mcu_toolchain_matrix
from tools.test_api import get_autodetected_MUTS_list
if __name__ == '__main__':
try:
# Parse Options
parser = get_default_options_parser()
parser.add_argument("-S", "--supported-toolchains",
action="store_true",
dest="supported_toolchains",
default=False,
help="Displays supported matrix of targets and toolchains")
parser.add_argument('-f', '--filter',
dest='general_filter_regex',
default=None,
help='Filter targets')
parser.add_argument("-v", "--verbose",
action="store_true",
dest="verbose",
default=False,
help="Verbose diagnostic output")
options = parser.parse_args()
# Only prints matrix of supported toolchains
if options.supported_toolchains:
print mcu_toolchain_matrix(platform_filter=options.general_filter_regex)
exit(0)
# If auto_detect attribute is present, we assume other auto-detection
# parameters like 'toolchains_filter' are also set.
MUTs = get_autodetected_MUTS_list()
count = 0
for mut in MUTs.values():
print ""
print "[mbed] Detected %s, port %s, mounted %s" % (mut['mcu'], mut['port'], mut['disk'])
print "[mbed] Supported toolchains for %s" % mut['mcu']
print mcu_toolchain_matrix(platform_filter=r'^'+mut['mcu']+'$')
count += 1
if count == 0:
print "[mbed] No mbed targets where detected on your system."
except KeyboardInterrupt, e:
print "\n[CTRL+c] exit"
except Exception,e:
import traceback
traceback.print_exc(file=sys.stdout)
print "[ERROR] %s" % str(e)
sys.exit(1)
| Python | 0.000002 |
ebbcce590483a5970268db0c59bae0cec81648ad | Add example commands for the User Preferences api | storyboard/api/v1/user_preferences.py | storyboard/api/v1/user_preferences.py | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log
from pecan import abort
from pecan import request
from pecan import rest
from pecan.secure import secure
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from storyboard.api.auth import authorization_checks as checks
from storyboard.api.v1 import validations
from storyboard.common import decorators
import storyboard.db.api.users as user_api
from storyboard.openstack.common.gettextutils import _ # noqa
CONF = cfg.CONF
LOG = log.getLogger(__name__)
class UserPreferencesController(rest.RestController):
validation_post_schema = validations.USER_PREFERENCES_POST_SCHEMA
@decorators.db_exceptions
@secure(checks.authenticated)
@wsme_pecan.wsexpose(wtypes.DictType(wtypes.text, wtypes.text), int)
def get_all(self, user_id):
"""Return all preferences for the current user.
Example::
curl https://my.example.org/api/v1/users/21/preferences \\
-H 'Authorization: Bearer MY_ACCESS_TOKEN'
:param user_id: An ID of the user.
"""
if request.current_user_id != user_id:
abort(403, _("You can't read preferences of other users."))
return
return user_api.user_get_preferences(user_id)
@decorators.db_exceptions
@secure(checks.authenticated)
@wsme_pecan.wsexpose(wtypes.DictType(wtypes.text, wtypes.text), int,
body=wtypes.DictType(wtypes.text, wtypes.text))
def post(self, user_id, body):
"""Allow a user to update their preferences. Note that a user must
explicitly set a preference value to Null/None to have it deleted.
Example::
curl https://my.example.org/api/v1/users/21/preferences \\
-H 'Authorization: Bearer MY_ACCESS_TOKEN' \\
-H 'Content-Type: application/json;charset=UTF-8' \\
--data-binary '{"display_events_tags_added":"false"}'
:param user_id: The ID of the user whose preferences we're updating.
:param body: A dictionary of preference values.
"""
if request.current_user_id != user_id:
abort(403, _("You can't change preferences of other users."))
return user_api.user_update_preferences(user_id, body)
| # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log
from pecan import abort
from pecan import request
from pecan import rest
from pecan.secure import secure
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from storyboard.api.auth import authorization_checks as checks
from storyboard.api.v1 import validations
from storyboard.common import decorators
import storyboard.db.api.users as user_api
from storyboard.openstack.common.gettextutils import _ # noqa
CONF = cfg.CONF
LOG = log.getLogger(__name__)
class UserPreferencesController(rest.RestController):
validation_post_schema = validations.USER_PREFERENCES_POST_SCHEMA
@decorators.db_exceptions
@secure(checks.authenticated)
@wsme_pecan.wsexpose(wtypes.DictType(wtypes.text, wtypes.text), int)
def get_all(self, user_id):
"""Return all preferences for the current user.
:param user_id: An ID of the user.
"""
if request.current_user_id != user_id:
abort(403, _("You can't read preferences of other users."))
return
return user_api.user_get_preferences(user_id)
@decorators.db_exceptions
@secure(checks.authenticated)
@wsme_pecan.wsexpose(wtypes.DictType(wtypes.text, wtypes.text), int,
body=wtypes.DictType(wtypes.text, wtypes.text))
def post(self, user_id, body):
"""Allow a user to update their preferences. Note that a user must
explicitly set a preference value to Null/None to have it deleted.
:param user_id: The ID of the user whose preferences we're updating.
:param body: A dictionary of preference values.
"""
if request.current_user_id != user_id:
abort(403, _("You can't change preferences of other users."))
return user_api.user_update_preferences(user_id, body)
| Python | 0.000002 |
839ff975b9d3cf29acd9c921e1b7c3722290d98a | Use np.nan_to_num instead of 'if x == 0' in _xlog2x | antropy/utils.py | antropy/utils.py | """Helper functions"""
import numpy as np
from numba import jit
from math import log, floor
all = ['_embed', '_linear_regression', '_log_n', '_xlog2x']
def _embed(x, order=3, delay=1):
"""Time-delay embedding.
Parameters
----------
x : 1d-array
Time series, of shape (n_times)
order : int
Embedding dimension (order).
delay : int
Delay.
Returns
-------
embedded : ndarray
Embedded time-series, of shape (n_times - (order - 1) * delay, order)
"""
N = len(x)
if order * delay > N:
raise ValueError("Error: order * delay should be lower than x.size")
if delay < 1:
raise ValueError("Delay has to be at least 1.")
if order < 2:
raise ValueError("Order has to be at least 2.")
Y = np.zeros((order, N - (order - 1) * delay))
for i in range(order):
Y[i] = x[(i * delay):(i * delay + Y.shape[1])]
return Y.T
@jit('UniTuple(float64, 2)(float64[:], float64[:])', nopython=True)
def _linear_regression(x, y):
"""Fast linear regression using Numba.
Parameters
----------
x, y : ndarray, shape (n_times,)
Variables
Returns
-------
slope : float
Slope of 1D least-square regression.
intercept : float
Intercept
"""
n_times = x.size
sx2 = 0
sx = 0
sy = 0
sxy = 0
for j in range(n_times):
sx2 += x[j] ** 2
sx += x[j]
sxy += x[j] * y[j]
sy += y[j]
den = n_times * sx2 - (sx ** 2)
num = n_times * sxy - sx * sy
slope = num / den
intercept = np.mean(y) - slope * np.mean(x)
return slope, intercept
@jit('i8[:](f8, f8, f8)', nopython=True)
def _log_n(min_n, max_n, factor):
"""
Creates a list of integer values by successively multiplying a minimum
value min_n by a factor > 1 until a maximum value max_n is reached.
Used for detrended fluctuation analysis (DFA).
Function taken from the nolds python package
(https://github.com/CSchoel/nolds) by Christopher Scholzel.
Parameters
----------
min_n (float):
minimum value (must be < max_n)
max_n (float):
maximum value (must be > min_n)
factor (float):
factor used to increase min_n (must be > 1)
Returns
-------
list of integers:
min_n, min_n * factor, min_n * factor^2, ... min_n * factor^i < max_n
without duplicates
"""
max_i = int(floor(log(1.0 * max_n / min_n) / log(factor)))
ns = [min_n]
for i in range(max_i + 1):
n = int(floor(min_n * (factor ** i)))
if n > ns[-1]:
ns.append(n)
return np.array(ns, dtype=np.int64)
def _xlog2x(x):
"""Returns x log2 x if x is positive, 0 if x == 0, and np.nan
otherwise. This handles the case when the power spectrum density
takes any zero value.
"""
return np.nan_to_num(x * np.log2(x), nan=0.0)
| """Helper functions"""
import numpy as np
from numba import jit
from math import log, floor
all = ['_embed', '_linear_regression', '_log_n', '_xlog2x']
def _embed(x, order=3, delay=1):
"""Time-delay embedding.
Parameters
----------
x : 1d-array
Time series, of shape (n_times)
order : int
Embedding dimension (order).
delay : int
Delay.
Returns
-------
embedded : ndarray
Embedded time-series, of shape (n_times - (order - 1) * delay, order)
"""
N = len(x)
if order * delay > N:
raise ValueError("Error: order * delay should be lower than x.size")
if delay < 1:
raise ValueError("Delay has to be at least 1.")
if order < 2:
raise ValueError("Order has to be at least 2.")
Y = np.zeros((order, N - (order - 1) * delay))
for i in range(order):
Y[i] = x[(i * delay):(i * delay + Y.shape[1])]
return Y.T
@jit('UniTuple(float64, 2)(float64[:], float64[:])', nopython=True)
def _linear_regression(x, y):
"""Fast linear regression using Numba.
Parameters
----------
x, y : ndarray, shape (n_times,)
Variables
Returns
-------
slope : float
Slope of 1D least-square regression.
intercept : float
Intercept
"""
n_times = x.size
sx2 = 0
sx = 0
sy = 0
sxy = 0
for j in range(n_times):
sx2 += x[j] ** 2
sx += x[j]
sxy += x[j] * y[j]
sy += y[j]
den = n_times * sx2 - (sx ** 2)
num = n_times * sxy - sx * sy
slope = num / den
intercept = np.mean(y) - slope * np.mean(x)
return slope, intercept
@jit('i8[:](f8, f8, f8)', nopython=True)
def _log_n(min_n, max_n, factor):
"""
Creates a list of integer values by successively multiplying a minimum
value min_n by a factor > 1 until a maximum value max_n is reached.
Used for detrended fluctuation analysis (DFA).
Function taken from the nolds python package
(https://github.com/CSchoel/nolds) by Christopher Scholzel.
Parameters
----------
min_n (float):
minimum value (must be < max_n)
max_n (float):
maximum value (must be > min_n)
factor (float):
factor used to increase min_n (must be > 1)
Returns
-------
list of integers:
min_n, min_n * factor, min_n * factor^2, ... min_n * factor^i < max_n
without duplicates
"""
max_i = int(floor(log(1.0 * max_n / min_n) / log(factor)))
ns = [min_n]
for i in range(max_i + 1):
n = int(floor(min_n * (factor ** i)))
if n > ns[-1]:
ns.append(n)
return np.array(ns, dtype=np.int64)
@np.vectorize
def _xlog2x(x):
"""Returns x log2 x if x is positive, 0 if x == 0, and np.nan
otherwise. This handles the case when the power spectrum density
takes any zero value.
"""
return 0.0 if x == 0 else x * np.log2(x)
| Python | 0.000628 |
f0ab4ecbc2e385dd69d644b6f8e4e41cdaa48423 | Add note. | software_engineering/problem_solving/design_patterns/grasp/pattern_pure_fabrication.py | software_engineering/problem_solving/design_patterns/grasp/pattern_pure_fabrication.py | # -*- coding: utf-8 -*-
__author__ = """Chris Tabor (dxdstudio@gmail.com)"""
if __name__ == '__main__':
from os import getcwd
from os import sys
sys.path.append(getcwd())
from helpers.display import Section
from uuid import uuid1
from random import choice
from random import randrange as rr
DEBUG = True if __name__ == '__main__' else False
class MonitorDB:
def __init__(self):
self.store = {}
def __setitem__(self, id, data):
self.store[id] = data
class GridMonitorService:
"""This monitor service acts as an intermediary for handling db and object
related functionality, and can be used to continually add more utilities
that are related to the single entity, but that shouldn't be stored
directly on it.
It can be though of as a service-like layer of indirection:
entity <------> entity_service <------> data-store
"""
def __init__(self, grid):
self.data = MonitorDB()
self.grid = grid
def check_status(self):
for id, light in self.grid.lights.iteritems():
print('Light #{} is currently: {} @ x:{} y:{} z:{}'.format(
id, light.status(), *light.coords))
class Monitor:
def on(self):
self.on = True
def off(self):
self.off = False
def status(self):
return 'ON' if self.on else 'OFF'
class LightMonitor(Monitor):
def __init__(self, coords):
self.coords = coords
# For fun
self.on = choice([True, False])
class LightGrid:
def __init__(self):
self.lights = {}
def __setitem__(self, id, coords):
self.lights[id] = LightMonitor(coords)
if DEBUG:
with Section('GRASP pure fabrication pattern'):
grid = LightGrid()
gridmon = GridMonitorService(grid)
for _ in xrange(10):
grid[uuid1()] = (rr(0, 1000), rr(0, 1000), rr(0, 1000))
gridmon.check_status()
| # -*- coding: utf-8 -*-
__author__ = """Chris Tabor (dxdstudio@gmail.com)"""
if __name__ == '__main__':
from os import getcwd
from os import sys
sys.path.append(getcwd())
from helpers.display import Section
from uuid import uuid1
from random import choice
from random import randrange as rr
DEBUG = True if __name__ == '__main__' else False
class MonitorDB:
def __init__(self):
self.store = {}
def __setitem__(self, id, data):
self.store[id] = data
class GridMonitorService:
"""This monitor service acts as an intermediary for handling db and object
related functionality, and can be used to continually add more utilities
that are related to the single entity, but that shouldn't be stored
directly on it."""
def __init__(self, grid):
self.data = MonitorDB()
self.grid = grid
def check_status(self):
for id, light in self.grid.lights.iteritems():
print('Light #{} is currently: {} @ x:{} y:{} z:{}'.format(
id, light.status(), *light.coords))
class Monitor:
def on(self):
self.on = True
def off(self):
self.off = False
def status(self):
return 'ON' if self.on else 'OFF'
class LightMonitor(Monitor):
def __init__(self, coords):
self.coords = coords
# For fun
self.on = choice([True, False])
class LightGrid:
def __init__(self):
self.lights = {}
def __setitem__(self, id, coords):
self.lights[id] = LightMonitor(coords)
if DEBUG:
with Section('GRASP pure fabrication pattern'):
grid = LightGrid()
gridmon = GridMonitorService(grid)
for _ in xrange(10):
grid[uuid1()] = (rr(0, 1000), rr(0, 1000), rr(0, 1000))
gridmon.check_status()
| Python | 0 |
705e7f1d68e4fb6bf37db623869a2c6d623dd9ae | use a pytest fixture for the CommandManager related tests | sunpy/tests/database/test_commands.py | sunpy/tests/database/test_commands.py | from __future__ import absolute_import
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import pytest
from sunpy.database.commands import AddEntry, RemoveEntry, EditEntry,\
NoSuchEntryError, CommandManager
from sunpy.database.tables import DatabaseEntry
@pytest.fixture
def session():
# always create an in-memory database with its own new table in each test
engine = create_engine('sqlite:///:memory:')
Session = sessionmaker()
DatabaseEntry.metadata.create_all(bind=engine)
return Session(bind=engine)
@pytest.fixture
def command_manager():
return CommandManager()
def test_add_entry(session):
assert not session.new
entry = DatabaseEntry()
AddEntry(session, entry)()
assert len(session.new) == 1
assert entry.id is None
session.commit()
assert not session.new
assert entry.id == 1
def test_add_entry_undo(session):
entry = DatabaseEntry()
cmd = AddEntry(session, entry)
cmd()
assert session.query(DatabaseEntry).count() == 1
assert entry.id == 1
cmd.undo()
assert entry in session.deleted
assert session.query(DatabaseEntry).count() == 0
def test_add_entry_undo_precommit(session):
entry = DatabaseEntry()
cmd = AddEntry(session, entry)
cmd()
cmd.undo()
session.commit()
assert session.query(DatabaseEntry).count() == 0
def test_edit_entry_invalid(session):
with pytest.raises(ValueError):
EditEntry(DatabaseEntry())
def test_edit_entry(session):
entry = DatabaseEntry()
session.add(entry)
session.commit()
assert entry.id == 1
EditEntry(entry, id=42)()
assert entry.id == 42
def test_edit_entry_undo(session):
entry = DatabaseEntry()
session.add(entry)
session.commit()
cmd = EditEntry(entry, id=42)
cmd()
session.commit()
assert entry.id == 42
cmd.undo()
session.commit()
assert entry.id == 1
def test_remove_existing_entry(session):
entry = DatabaseEntry()
session.add(entry)
assert session.query(DatabaseEntry).count() == 1
assert entry.id == 1
RemoveEntry(session, entry)()
assert entry in session.deleted
assert session.query(DatabaseEntry).count() == 0
def test_remove_nonexisting_entry(session):
with pytest.raises(NoSuchEntryError):
RemoveEntry(session, DatabaseEntry())()
def test_remove_entry_undo(session):
entry = DatabaseEntry()
session.add(entry)
cmd = RemoveEntry(session, entry)
session.commit()
cmd()
assert session.query(DatabaseEntry).count() == 0
cmd.undo()
assert session.query(DatabaseEntry).count() == 1
def test_redo_stack_empty_after_call(session, command_manager):
command_manager.do(AddEntry(session, DatabaseEntry()))
command_manager.do(AddEntry(session, DatabaseEntry()))
assert len(command_manager.undo_commands) == 2
session.commit()
command_manager.undo(2)
assert not command_manager.undo_commands
assert len(command_manager.redo_commands) == 2
command_manager.do(AddEntry(session, DatabaseEntry()))
assert not command_manager.redo_commands
| from __future__ import absolute_import
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import pytest
from sunpy.database.commands import AddEntry, RemoveEntry, EditEntry,\
NoSuchEntryError, CommandManager
from sunpy.database.tables import DatabaseEntry
@pytest.fixture
def session():
# always create an in-memory database with its own new table in each test
engine = create_engine('sqlite:///:memory:')
Session = sessionmaker()
DatabaseEntry.metadata.create_all(bind=engine)
return Session(bind=engine)
def test_add_entry(session):
assert not session.new
entry = DatabaseEntry()
AddEntry(session, entry)()
assert len(session.new) == 1
assert entry.id is None
session.commit()
assert not session.new
assert entry.id == 1
def test_add_entry_undo(session):
entry = DatabaseEntry()
cmd = AddEntry(session, entry)
cmd()
assert session.query(DatabaseEntry).count() == 1
assert entry.id == 1
cmd.undo()
assert entry in session.deleted
assert session.query(DatabaseEntry).count() == 0
def test_add_entry_undo_precommit(session):
entry = DatabaseEntry()
cmd = AddEntry(session, entry)
cmd()
cmd.undo()
session.commit()
assert session.query(DatabaseEntry).count() == 0
def test_edit_entry_invalid(session):
with pytest.raises(ValueError):
EditEntry(DatabaseEntry())
def test_edit_entry(session):
entry = DatabaseEntry()
session.add(entry)
session.commit()
assert entry.id == 1
EditEntry(entry, id=42)()
assert entry.id == 42
def test_edit_entry_undo(session):
entry = DatabaseEntry()
session.add(entry)
session.commit()
cmd = EditEntry(entry, id=42)
cmd()
session.commit()
assert entry.id == 42
cmd.undo()
session.commit()
assert entry.id == 1
def test_remove_existing_entry(session):
entry = DatabaseEntry()
session.add(entry)
assert session.query(DatabaseEntry).count() == 1
assert entry.id == 1
RemoveEntry(session, entry)()
assert entry in session.deleted
assert session.query(DatabaseEntry).count() == 0
def test_remove_nonexisting_entry(session):
with pytest.raises(NoSuchEntryError):
RemoveEntry(session, DatabaseEntry())()
def test_remove_entry_undo(session):
entry = DatabaseEntry()
session.add(entry)
cmd = RemoveEntry(session, entry)
session.commit()
cmd()
assert session.query(DatabaseEntry).count() == 0
cmd.undo()
assert session.query(DatabaseEntry).count() == 1
def test_redo_stack_empty_after_call(session):
manager = CommandManager()
manager.do(AddEntry(session, DatabaseEntry()))
manager.do(AddEntry(session, DatabaseEntry()))
assert len(manager.undo_commands) == 2
session.commit()
manager.undo(2)
assert not manager.undo_commands
assert len(manager.redo_commands) == 2
manager.do(AddEntry(session, DatabaseEntry()))
assert not manager.redo_commands
| Python | 0 |
aa278487b4e65da413a217729b852a9c08a090cf | create function headers and change request structure | pagarme/resources/handler_request.py | pagarme/resources/handler_request.py | import requests
TEMPORARY_COMPANY = 'https://api.pagar.me/1/companies/temporary'
def validate_response(pagarme_response):
if pagarme_response.status_code == 200:
return pagarme_response.json()
else:
return error(pagarme_response.json())
def create_temporary_company():
company = requests.post(TEMPORARY_COMPANY)
valid_company = validate_response(company)
return valid_company
KEYS = {}
def authentication_key(api_key=None):
global KEYS
if api_key is None:
company = create_temporary_company()
api_key = company['api_key']['test']
encryption_key = company['encryption_key']['test']
KEYS['api_key'] = api_key
KEYS['encryption_key'] = encryption_key
return KEYS
else:
KEYS['api_key'] = api_key
return KEYS
def post(end_point, data={}):
data['api_key'] = KEYS['api_key']
pagarme_response = requests.post(end_point, json=data, headers=headers())
return validate_response(pagarme_response)
def get(end_point, data = {}):
data['api_key'] = KEYS['api_key']
pagarme_response = requests.get(end_point, json=data, headers=headers())
return validate_response(pagarme_response)
def put(end_point, data = {}):
data['api_key'] = KEYS['api_key']
pagarme_response = requests.put(end_point, json=data, headers=headers())
return validate_response(pagarme_response)
def delete(end_point, data = {}):
data['api_key'] = KEYS['api_key']
pagarme_response = requests.delete(end_point, json=data, headers=headers())
return validate_response(pagarme_response)
def error(data):
erros = data['errors']
return erros
def headers():
_headers = {'content-type': 'application/json'}
return _headers
| import requests
import json
TEMPORARY_COMPANY = 'https://api.pagar.me/1/companies/temporary'
def validate_response(pagarme_response):
if pagarme_response.status_code == 200:
return pagarme_response.json()
else:
return error(pagarme_response.json())
def create_temporary_company():
company = requests.post(TEMPORARY_COMPANY)
valid_company = validate_response(company)
return valid_company
KEYS = {}
def authentication_key(api_key=None):
global KEYS
if api_key is None:
company = create_temporary_company()
api_key = company['api_key']['test']
encryption_key = company['encryption_key']['test']
KEYS['api_key'] = api_key
KEYS['encryption_key'] = encryption_key
return KEYS
else:
KEYS['api_key'] = api_key
return KEYS
def post(end_point, data={}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.post(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def get(end_point, data = {}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.get(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def put(end_point, data = {}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.put(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def delete(end_point, data = {}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.delete(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def error(data):
erros = data['errors']
return erros
| Python | 0 |
ca076bbd397edd87fd1a26ee119ac29622868f03 | Fix test | paystackapi/tests/test_bulkcharge.py | paystackapi/tests/test_bulkcharge.py | import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.bulkcharge import BulkCharge
class TestBulkCharge(BaseTestCase):
@httpretty.activate
def test_initiate_bulk_charge(self):
""" Method for testing the initiation of a bulk charge"""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/bulkcharge"),
content_type='applicationn/json',
body='{"status": true, "message": "Charges have been queued"}',
status=200,
)
response = BulkCharge.initiate_bulk_charge(
bulkcharge=[
{"authorization": "AUTH_n95vpedf", "amount": 2500},
{"authorization": "AUTH_ljdt4e4j", "amount": 1500}
]
)
self.assertTrue(response['status'])
@httpretty.activate
def test_list(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/bulkcharge"),
content_type='application/json',
body ='{"status": true, "message": "Bulk charges retrieved"}',
status=200,
)
response = BulkCharge.list()
self.assertTrue(response['status'])
@httpretty.activate
def test_fetch_bulk_batch(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/bulkcharge/BCH_orj0ttn8vtp80hx"),
content_type='text/json',
body='{"status": true, "message": "Bulk charges retrieved"}',
status=200,
)
response = BulkCharge.fetch_bulk_batch(
id_or_code="BCH_orj0ttn8vtp80hx"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_fetch_charges_batch(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/bulkcharge/BCH_orj0ttn8vtp80hx/charges"),
content_type= 'text/json',
body='{"status": true, "message": "Bulk charge items retrieved"}',
status=200,
)
response = BulkCharge.fetch_charges_batch(
id_or_code="BCH_orj0ttn8vtp80hx"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_pause_bulk_batch(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("bulkcharge/pause/BCH_orj0ttn8vtp80hx"),
content_type='text/json',
body='{"status": true, "message": "Bulk charge batch has been paused"}',
status=201,
)
response = BulkCharge.pause_bulk_batch(
batch_code="BCH_orj0ttn8vtp80hx"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_resume_batch_charges(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("bulkcharge/resume/BCH_orj0ttn8vtp80hx"),
content_type='text/json',
body='{"status": true, "message": "Bulk charge batch has been resumed"}',
status=201,
)
response = BulkCharge.resume_bulk_charge(
batch_code="BCH_orj0ttn8vtp80hx"
)
self.assertTrue(response['status'])
| import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.bulkcharge import BulkCharge
class TestBulkCharge(BaseTestCase):
@httpretty.activate
def test_initiate_bulk_charge(self):
""" Method for testing the initiation of a bulk charge"""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/bulkcharge"),
content_type='applicationn/json',
body='{"status": true, "message": "Charges have been queued"}',
status=200,
)
response = BulkCharge.initiate_bulk_charge(
bulkcharge=[
{"authorization": "AUTH_n95vpedf", "amount": 2500},
{"authorization": "AUTH_ljdt4e4j", "amount": 1500}
]
)
self.assertTrue(response['status'])
@httpretty.activate
def test_list_bulk_charge(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/bulkcharge"),
content_type='application/json',
body = '{"status": true, "message": "Bulk charges retrieved",}',
status=200,
)
response = BulkCharge.list()
self.assertTrue(response['status'])
@httpretty.activate
def test_fetch_bulk_batch(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/bulkcharge/BCH_orj0ttn8vtp80hx/charges"),
content_type='text/json',
body = '{"status": true, "message": "Bulk charges retrieved",}',
status=200,
)
response = BulkCharge.fetch_bulk_batch(
id_or_code= "BCH_orj0ttn8vtp80hx",
)
self.assertTrue(response['status'])
@httpretty.activate
def test_fetch_charges_batch(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/bulkcharge/BCH_180tl7oq7cayggh/charges"),
content_type= 'text/json',
body='{"status": true, "message": "Bulk charge items retrieved",}',
status=200,
)
response = BulkCharge.fetch_charges_batch(
id_or_code="BCH_orj0ttn8vtp80hx"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_pause_bulk_batch(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("bulkcharge/pause/BCH_orj0ttn8vtp80hx"),
content_type='text/json',
body='{""status": true, "message": "Bulk charge batch has been paused""}',
status=201,
)
response = BulkCharge.pause_bulk_batch(
batch_code="BCH_orj0ttn8vtp80hx"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_resume_batch_charges(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("bulkcharge/resume/BCH_orj0ttn8vtp80hx"),
content_type='text/json',
body='{"status": true, "message": "Bulk charge batch has been resumed"}',
status=201,
)
response = BulkCharge.resume_bulk_charge(
batch_code="BCH_orj0ttn8vtp80hx"
)
self.assertTrue(response['status'])
| Python | 0.000004 |
c838bee36ac1e68afd5f00630b98f806289f89c8 | Update fetch_metrics.py | perfmetrics/scripts/fetch_metrics.py | perfmetrics/scripts/fetch_metrics.py | """Executes fio_metrics.py and vm_metrics.py by passing appropriate arguments.
"""
import socket
import sys
import time
from fio import fio_metrics
from vm_metrics import vm_metrics
from gsheet import gsheet
INSTANCE = socket.gethostname()
PERIOD_SEC = 120
# Google sheet worksheets
FIO_WORKSHEET_NAME = 'fio_metrics'
VM_WORKSHEET_NAME = 'vm_metrics'
if __name__ == '__main__':
argv = sys.argv
if len(argv) != 2:
raise TypeError('Incorrect number of arguments.\n'
'Usage: '
'python3 fetch_metrics.py <fio output json filepath>')
fio_metrics_obj = fio_metrics.FioMetrics()
print('Getting fio metrics...')
temp = fio_metrics_obj.get_metrics(argv[1], FIO_WORKSHEET_NAME)
print('Waiting for 250 seconds for metrics to be updated on VM...')
# It takes up to 240 seconds for sampled data to be visible on the VM metrics graph
# So, waiting for 250 seconds to ensure the returned metrics are not empty
time.sleep(250)
vm_metrics_obj = vm_metrics.VmMetrics()
vm_metrics_data = []
# Getting VM metrics for every job
for ind, job in enumerate(temp):
start_time_sec = job[fio_metrics.consts.START_TIME]
end_time_sec = job[fio_metrics.consts.END_TIME]
rw = job[fio_metrics.consts.PARAMS][fio_metrics.consts.RW]
print(f'Getting VM metrics for job at index {ind+1}...')
metrics_data = vm_metrics_obj.fetch_metrics(start_time_sec, end_time_sec, INSTANCE, PERIOD_SEC, rw)
for row in metrics_data:
vm_metrics_data.append(row)
gsheet.write_to_google_sheet(VM_WORKSHEET_NAME, vm_metrics_data)
| """Executes fio_metrics.py and vm_metrics.py by passing appropriate arguments.
"""
import socket
import sys
import time
from fio import fio_metrics
from vm_metrics import vm_metrics
from gsheet import gsheet
INSTANCE = socket.gethostname()
PERIOD = 120
# Google sheet worksheets
FIO_WORKSHEET_NAME = 'fio_metrics'
VM_WORKSHEET_NAME = 'vm_metrics'
if __name__ == '__main__':
argv = sys.argv
if len(argv) != 2:
raise TypeError('Incorrect number of arguments.\n'
'Usage: '
'python3 fetch_metrics.py <fio output json filepath>')
fio_metrics_obj = fio_metrics.FioMetrics()
print('Getting fio metrics...')
temp = fio_metrics_obj.get_metrics(argv[1], FIO_WORKSHEET_NAME)
print('Waiting for 250 seconds for metrics to be updated on VM...')
# It takes up to 240 seconds for sampled data to be visible on the VM metrics graph
# So, waiting for 250 seconds to ensure the returned metrics are not empty
time.sleep(250)
vm_metrics_obj = vm_metrics.VmMetrics()
vm_metrics_data = []
# Getting VM metrics for every job
for ind, job in enumerate(temp):
start_time_sec = job[fio_metrics.consts.START_TIME]
end_time_sec = job[fio_metrics.consts.END_TIME]
rw = job[fio_metrics.consts.PARAMS][fio_metrics.consts.RW]
print(f'Getting VM metrics for job at index {ind+1}...')
metrics_data = vm_metrics_obj.fetch_metrics(start_time_sec, end_time_sec, INSTANCE, PERIOD, rw)
for row in metrics_data:
vm_metrics_data.append(row)
gsheet.write_to_google_sheet(VM_WORKSHEET_NAME, vm_metrics_data)
| Python | 0.000001 |
597ea6bd20c9c1dbca46891d8c2aa12c625da555 | Fix unit tests | Tests/ConsoleWorkerTest.py | Tests/ConsoleWorkerTest.py | from Tank.ConsoleWorker import ConsoleTank
from Tank.Plugins.ConsoleOnline import ConsoleOnlinePlugin
from Tank.Plugins.DataUploader import DataUploaderPlugin
from Tests.ConsoleOnlinePluginTest import FakeConsoleMarkup
from Tests.DataUploaderTest import FakeAPICLient
from Tests.TankTests import FakeOptions
import TankTests
import logging
import time
import unittest
class ConsoleWorkerTestCase(TankTests.TankTestCase):
def setUp(self):
opts = FakeOptions()
opts.no_rc = False
self.foo = ConsoleTank(opts, None)
self.foo.set_baseconfigs_dir('full')
def tearDown(self):
del self.foo
self.foo = None
def test_perform(self):
self.foo.configure()
uploader = self.foo.core.get_plugin_of_type(DataUploaderPlugin)
uploader.api_client = FakeAPICLient()
uploader.api_client.get_results.append('[{"closed":"", "name": "test task"}]')
uploader.api_client.get_results.append('[{"success":1}]')
uploader.api_client.post_results.append('[{"job":' + str(time.time()) + '}]')
for n in range(1, 120):
uploader.api_client.post_results.append('[{"success":1}]')
console = self.foo.core.get_plugin_of_type(ConsoleOnlinePlugin)
console.console_markup = FakeConsoleMarkup()
if self.foo.perform_test() != 0:
raise RuntimeError()
def test_option_override(self):
options = FakeOptions()
options.config = ["config/old-style.conf"]
options.option = ["owner.address=overridden"]
self.foo = ConsoleTank(options, None)
self.foo.configure()
res = self.foo.core.get_option("owner", "address")
logging.debug(res)
self.assertEquals("overridden", res)
def test_option_old_convert(self):
options = FakeOptions()
options.config = ["data/old_to_migrate.conf"]
self.foo = ConsoleTank(options, None)
self.foo.configure()
if __name__ == '__main__':
unittest.main()
| import TankTests
import os
import unittest
from Tank.ConsoleWorker import ConsoleTank
from Tests.TankTests import FakeOptions
from Tank.Plugins.DataUploader import DataUploaderPlugin
from Tests.DataUploaderTest import FakeAPICLient
from Tank.Plugins.ConsoleOnline import ConsoleOnlinePlugin
from Tests.ConsoleOnlinePluginTest import FakeConsoleMarkup
import time
import logging
class ConsoleWorkerTestCase(TankTests.TankTestCase):
def setUp(self):
self.foo = ConsoleTank(FakeOptions(), None)
self.foo.set_baseconfigs_dir('full')
def tearDown(self):
del self.foo
self.foo = None
def test_perform(self):
self.foo.configure()
uploader = self.foo.core.get_plugin_of_type(DataUploaderPlugin)
uploader.api_client = FakeAPICLient()
uploader.api_client.get_results.append('[{"closed":"", "name": "test task"}]')
uploader.api_client.get_results.append('[{"success":1}]')
uploader.api_client.post_results.append('[{"job":' + str(time.time()) + '}]')
for n in range(1, 120):
uploader.api_client.post_results.append('[{"success":1}]')
console = self.foo.core.get_plugin_of_type(ConsoleOnlinePlugin)
console.console_markup = FakeConsoleMarkup()
if self.foo.perform_test() != 0:
raise RuntimeError()
def test_option_override(self):
options = FakeOptions()
options.config = ["config/old-style.conf"]
options.option = ["owner.address=overridden"]
self.foo = ConsoleTank(options, None)
self.foo.configure()
res = self.foo.core.get_option("owner", "address")
logging.debug(res)
self.assertEquals("overridden", res)
def test_option_old_convert(self):
options = FakeOptions()
options.config = ["data/old_to_migrate.conf"]
self.foo = ConsoleTank(options, None)
self.foo.configure()
if __name__ == '__main__':
unittest.main()
| Python | 0.000005 |
8a4d259df272a65f95bacf233dc8654c68f5f54f | add identity coordinate mapping to ToUint8 and ToFloat32 augmentors (#339) | tensorpack/dataflow/imgaug/convert.py | tensorpack/dataflow/imgaug/convert.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: convert.py
from .base import ImageAugmentor
from .meta import MapImage
import numpy as np
import cv2
__all__ = ['ColorSpace', 'Grayscale', 'ToUint8', 'ToFloat32']
class ColorSpace(ImageAugmentor):
""" Convert into another colorspace. """
def __init__(self, mode, keepdims=True):
"""
Args:
mode: opencv colorspace conversion code (e.g., `cv2.COLOR_BGR2HSV`)
keepdims (bool): keep the dimension of image unchanged if opencv
changes it.
"""
self._init(locals())
def _augment(self, img, _):
transf = cv2.cvtColor(img, self.mode)
if self.keepdims:
if len(transf.shape) is not len(img.shape):
transf = transf[..., None]
return transf
class Grayscale(ColorSpace):
""" Convert image to grayscale. """
def __init__(self, keepdims=True, rgb=False):
"""
Args:
keepdims (bool): return image of shape [H, W, 1] instead of [H, W]
rgb (bool): interpret input as RGB instead of the default BGR
"""
mode = cv2.COLOR_RGB2GRAY if rgb else cv2.COLOR_BGR2GRAY
super(Grayscale, self).__init__(mode, keepdims)
class ToUint8(MapImage):
""" Convert image to uint8. Useful to reduce communication overhead. """
def __init__(self):
super(ToUint8, self).__init__(lambda x: np.clip(x, 0, 255).astype(np.uint8), lambda x: x)
class ToFloat32(MapImage):
""" Convert image to float32, may increase quality of the augmentor. """
def __init__(self):
super(ToFloat32, self).__init__(lambda x: x.astype(np.float32), lambda x: x)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: convert.py
from .base import ImageAugmentor
from .meta import MapImage
import numpy as np
import cv2
__all__ = ['ColorSpace', 'Grayscale', 'ToUint8', 'ToFloat32']
class ColorSpace(ImageAugmentor):
""" Convert into another colorspace. """
def __init__(self, mode, keepdims=True):
"""
Args:
mode: opencv colorspace conversion code (e.g., `cv2.COLOR_BGR2HSV`)
keepdims (bool): keep the dimension of image unchanged if opencv
changes it.
"""
self._init(locals())
def _augment(self, img, _):
transf = cv2.cvtColor(img, self.mode)
if self.keepdims:
if len(transf.shape) is not len(img.shape):
transf = transf[..., None]
return transf
class Grayscale(ColorSpace):
""" Convert image to grayscale. """
def __init__(self, keepdims=True, rgb=False):
"""
Args:
keepdims (bool): return image of shape [H, W, 1] instead of [H, W]
rgb (bool): interpret input as RGB instead of the default BGR
"""
mode = cv2.COLOR_RGB2GRAY if rgb else cv2.COLOR_BGR2GRAY
super(Grayscale, self).__init__(mode, keepdims)
class ToUint8(MapImage):
""" Convert image to uint8. Useful to reduce communication overhead. """
def __init__(self):
super(ToUint8, self).__init__(lambda x: np.clip(x, 0, 255).astype(np.uint8))
class ToFloat32(MapImage):
""" Convert image to float32, may increase quality of the augmentor. """
def __init__(self):
super(ToFloat32, self).__init__(lambda x: x.astype(np.float32))
| Python | 0 |
1e7a6b0fbbdb57053d3510b67c95c5d7e2fb6b81 | Enable to display accuracy graph | floppy/report_widget.py | floppy/report_widget.py | from floppy.train_configuration import TrainParamServer
from PyQt5.QtWidgets import QWidget
from PyQt5.QtWidgets import QTabWidget
from PyQt5.QtGui import QPixmap
from PyQt5.QtGui import QPainter
from PyQt5.QtCore import Qt
from PyQt5.QtCore import QPoint
class ReportWidget(QTabWidget):
def __init__(self, *args, **kwargs):
super(ReportWidget, self).__init__(height=210, *args, **kwargs)
self.setStyleSheet('''ReportWidget{background: rgb(55,55,55)}
''')
try:
loss_image = TrainParamServer()['WorkDir'] + "result/loss.png"
except KeyError:
loss_image = "result/loss.png"
self.addTab(GraphWidget(loss_image, parent=self), 'Loss')
try:
acc_image = TrainParamServer()['WorkDir'] + "result/accuracy.png"
except KeyError:
acc_image = "result/accuracy.png"
self.addTab(GraphWidget(acc_image, parent=self), 'Accuracy')
self.resize(200, 200)
class GraphWidget(QWidget):
def __init__(self, image_file, *args, **kwargs):
super(GraphWidget, self).__init__(height=200)
self.setStyleSheet('''ReportWidget{background: rgb(55,55,55)}
''')
self.pixmap = None
self.image_file = image_file
def paintEvent(self, event):
if 'Class' not in TrainParamServer()['TrainMode']:
if 'accuracy' in self.image_file:
return
self.pixmap = QPixmap(self.image_file)
#self.adjustSize()
size = self.size()
painter = QPainter(self)
point = QPoint(0, 0)
scaled_pix = self.pixmap.scaled(size, Qt.KeepAspectRatio,
transformMode=Qt.SmoothTransformation)
# start painting the label from left upper corner
point.setX((size.width() - scaled_pix.width()) / 2)
point.setY((size.height() - scaled_pix.height()) / 2)
painter.drawPixmap(point, scaled_pix)
| from floppy.train_configuration import TrainParamServer
from PyQt5.QtWidgets import QWidget
from PyQt5.QtGui import QPixmap
from PyQt5.QtGui import QPainter
from PyQt5.QtCore import Qt
from PyQt5.QtCore import QPoint
class ReportWidget(QWidget):
def __init__(self, *args, **kwargs):
super(ReportWidget, self).__init__(height=200, *args, **kwargs)
self.setStyleSheet('''ReportWidget{background: rgb(55,55,55)}
''')
try:
loss_image = TrainParamServer()['WorkDir'] + "result/loss.png"
except KeyError:
loss_image = "result/loss.png"
self.pixmap = QPixmap(loss_image)
self.resize(200, 200)
def paintEvent(self, event):
self.adjustSize()
size = self.size()
painter = QPainter(self)
point = QPoint(0, 0)
scaled_pix = self.pixmap.scaled(size, Qt.KeepAspectRatio,
transformMode=Qt.SmoothTransformation)
# start painting the label from left upper corner
point.setX((size.width() - scaled_pix.width()) / 2)
point.setY((size.height() - scaled_pix.height()) / 2)
painter.drawPixmap(point, scaled_pix)
| Python | 0.000001 |
ec82c7d7181803f577adb1a697ed53fbc42476ca | add goliad health check | plugins/bongo/check-goliad-health.py | plugins/bongo/check-goliad-health.py | #!/usr/bin/env python
from optparse import OptionParser
import socket
import sys
import httplib
import json
PASS = 0
WARNING = 1
FAIL = 2
def get_bongo_host(server, app):
try:
con = httplib.HTTPConnection(server, timeout=45)
con.request("GET","/v2/apps/" + app)
data = con.getresponse()
if data.status >= 300:
print "get_bongo_host: Recieved non-2xx response= %s" % (data.status)
sys.exit(FAIL)
json_data = json.loads(data.read())
host = json_data['app']['tasks'][0]['host']
port = json_data['app']['tasks'][0]['ports'][0]
con.close()
return host, port
except Exception, e:
print "%s Exception caught in get_bongo_host" % (e)
sys.exit(FAIL)
def get_status(host, group):
try:
con = httplib.HTTPConnection(host,timeout=45)
con.request("GET","/v1/health/goliad/" + group)
data = con.getresponse()
if data.status >= 300:
print "Recieved non-2xx response= %s in get_status" % (data.status)
sys.exit(FAIL)
json_data = json.loads(data.read())
con.close()
if json_data['status'] == 2:
print "%s" % (json_data['msg'])
sys.exit(FAIL)
elif json_data['status'] == 1:
print "%s" % (json_data['msg'])
sys.exit(WARNING)
else:
print " `%s` is fine" %group
sys.exit(PASS)
except Exception, e:
print "%s Exception caught in get_status" % (e)
sys.exit(FAIL)
if __name__=="__main__":
parser = OptionParser()
parser.add_option("-s", dest="server", action="store", default="localhost:8080", help="Marathon Cluster address with port no")
parser.add_option("-a", dest="app", action="store", default="bongo.useast.prod", help="App Id to retrieve the slave address")
parser.add_option("-c", dest="group", action="store", default="goliad.useast.prod", help="Name of goliad Consumer Group")
(options, args) = parser.parse_args()
host, port = get_bongo_host(options.server, options.app)
if "useast" in host:
host = host.rsplit("prd",1)
consul_host = "%snode.us-east-1.consul:%s" % (host[0], port)
else:
consul_host = "%s:%s" % (host, port)
get_status(consul_host, options.group)
| #!/usr/bin/env python
from optparse import OptionParser
import socket
import sys
import httplib
import json
PASS = 0
WARNING = 1
FAIL = 2
def get_bongo_host(server, app):
try:
con = httplib.HTTPConnection(server, timeout=45)
con.request("GET","/v2/apps/" + app)
data = con.getresponse()
if data.status >= 300:
print "get_bongo_host: Recieved non-2xx response= %s" % (data.status)
sys.exit(FAIL)
json_data = json.loads(data.read())
host = json_data['app']['tasks'][0]['host']
port = json_data['app']['tasks'][0]['ports'][0]
con.close()
return host, port
except Exception, e:
print "%s Exception caught in get_bongo_host" % (e)
sys.exit(FAIL)
def get_status(host, group):
try:
con = httplib.HTTPConnection(host,timeout=45)
con.request("GET","/v1/health/betty/" + group)
data = con.getresponse()
if data.status >= 300:
print "Recieved non-2xx response= %s in get_status" % (data.status)
sys.exit(FAIL)
json_data = json.loads(data.read())
con.close()
if json_data['status'] == 2:
print "%s" % (json_data['msg'])
sys.exit(FAIL)
elif json_data['status'] == 1:
print "%s" % (json_data['msg'])
sys.exit(WARNING)
else:
print " `%s` is fine" %group
sys.exit(PASS)
except Exception, e:
print "%s Exception caught in get_status" % (e)
sys.exit(FAIL)
if __name__=="__main__":
parser = OptionParser()
parser.add_option("-s", dest="server", action="store", default="localhost:8080", help="Marathon Cluster address with port no")
parser.add_option("-a", dest="app", action="store", default="bongo.useast.prod", help="App Id to retrieve the slave address")
parser.add_option("-c", dest="group", action="store", default="betty.useast.prod", help="Name of betty Consumer Group")
(options, args) = parser.parse_args()
host, port = get_bongo_host(options.server, options.app)
if "useast" in host:
host = host.rsplit("prd",1)
consul_host = "%snode.us-east-1.consul:%s" % (host[0], port)
else:
consul_host = "%s:%s" % (host, port)
get_status(consul_host, options.group)
| Python | 0 |
335abb7a4ddeabf9175b522d9336b94b7e32acc0 | Fix incorrect FAIL data. | test/broker/01-connect-anon-denied.py | test/broker/01-connect-anon-denied.py | #!/usr/bin/python
# Test whether an anonymous connection is correctly denied.
import subprocess
import socket
import time
from struct import *
rc = 1
keepalive = 10
connect_packet = pack('!BBH6sBBHH17s', 16, 12+2+17,6,"MQIsdp",3,2,keepalive,17,"connect-anon-test")
connack_packet = pack('!BBBB', 32, 2, 0, 5);
broker = subprocess.Popen(['../../src/mosquitto', '-c', '01-connect-anon-denied.conf'], stderr=subprocess.PIPE)
try:
time.sleep(0.1)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 1888))
sock.send(connect_packet)
connack_recvd = sock.recv(256)
sock.close()
if connack_recvd != connack_packet:
(cmd, rl, resv, rc) = unpack('!BBBB', connack_recvd)
print("FAIL: Expected 32,2,0,5 got " + str(cmd) + "," + str(rl) + "," + str(resv) + "," + str(rc))
else:
rc = 0
finally:
broker.terminate()
exit(rc)
| #!/usr/bin/python
# Test whether an anonymous connection is correctly denied.
import subprocess
import socket
import time
from struct import *
rc = 1
keepalive = 10
connect_packet = pack('!BBH6sBBHH17s', 16, 12+2+17,6,"MQIsdp",3,2,keepalive,17,"connect-anon-test")
connack_packet = pack('!BBBB', 32, 2, 0, 5);
broker = subprocess.Popen(['../../src/mosquitto', '-c', '01-connect-anon-denied.conf'], stderr=subprocess.PIPE)
try:
time.sleep(0.1)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 1888))
sock.send(connect_packet)
connack_recvd = sock.recv(256)
sock.close()
if connack_recvd != connack_packet:
(cmd, rl, resv, rc) = unpack('!BBBB', connack_recvd)
print("FAIL: Expected 32,2,0,0 got " + str(cmd) + "," + str(rl) + "," + str(resv) + "," + str(rc))
else:
rc = 0
finally:
broker.terminate()
exit(rc)
| Python | 0.002419 |
19b6207f6ec2cefa28e79fb10639d1d1f5602d2c | clean up test | app/app/tests.py | app/app/tests.py | import unittest
import transaction
import os
import app
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
from pyramid import testing
from .models import DBSession
DEFAULT_WAIT = 5
SCREEN_DUMP_LOCATION = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'screendumps'
)
class TestMyViewSuccessCondition(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
DBSession.remove()
testing.tearDown()
def test_passing_view(self):
pass
class TestMyViewFailureCondition(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
DBSession.remove()
testing.tearDown()
def test_failing_view(self):
pass
class FunctionalTest(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(DEFAULT_WAIT)
def tearDown(self):
DBSession.remove()
testing.tearDown()
self.browser.quit()
class HomePageTest(FunctionalTest):
def map_move(self, key_move, repeat=1, sleep_time=.5):
"""Move the map with a repeat and sleep"""
map_ = self.browser.find_element_by_id("map")
key_moves = {
'zoom_in': 'self.browser.find_element_by_class_name("leaflet-control-zoom-in").click()',
'zoom_out': 'self.browser.find_element_by_class_name("leaflet-control-zoom-out").click()',
'arrow_down': 'map_.send_keys(Keys.ARROW_DOWN)',
'arrow_right': 'map_.send_keys(Keys.ARROW_RIGHT)',
'arrow_left': 'map_.send_keys(Keys.ARROW_LEFT)',
'arrow_up': 'map_.send_keys(Keys.ARROW_UP)',
}
for _ in range(repeat):
import pdb; pdb.set_trace()
key_moves[key_move]
sleep(sleep_time)
def test_home_page_loads(self):
#Billy sees the landsat.club homepage and rejoices.
self.browser.get('localhost:8000')
self.map_move('zoom_out', repeat=5, sleep_time=1)
self.map_move('arrow_right', repeat=5, sleep_time=.75)
#zoom_in.click()
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.75)
#zoom_in.click()
#sleep(.75)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.75)
#zoom_in.click()
#sleep(5)
#self.browser.find_element_by_class_name('leaflet-control-mapbox-geocoder-toggle').click()
#self.browser.find_element_by_xpath('//*[@id="map"]/div[2]/div[1]/div[2]/div[2]/form/input').send_keys('10010', Keys.RETURN)
#
#
#sleep(.75)
#zoom_out.click()
#sleep(.75)
#zoom_out.click()
#sleep(.75)
#zoom_out.click()
#sleep(.75)
#zoom_out.click()
#
#sleep(600)
#
| import unittest
import transaction
import os
import app
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
from pyramid import testing
from .models import DBSession
DEFAULT_WAIT = 5
SCREEN_DUMP_LOCATION = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'screendumps'
)
class TestMyViewSuccessCondition(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
DBSession.remove()
testing.tearDown()
def test_passing_view(self):
pass
class TestMyViewFailureCondition(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
DBSession.remove()
testing.tearDown()
def test_failing_view(self):
pass
class FunctionalTest(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(DEFAULT_WAIT)
def tearDown(self):
DBSession.remove()
testing.tearDown()
self.browser.quit()
class HomePageTest(FunctionalTest):
def map_move(self, key_move, repeat=1, sleep_time=.5):
"""Move the map with a repeat and sleep"""
map_ = self.browser.find_element_by_id("map")
key_moves = {
'zoom_in': self.browser.find_element_by_class_name("leaflet-control-zoom-in").click(),
'zoom_out': self.browser.find_element_by_class_name("leaflet-control-zoom-out").click(),
'arrow_down': map_.send_keys(Keys.ARROW_DOWN),
'arrow_right': map_.send_keys(Keys.ARROW_RIGHT),
'arrow_left': map_.send_keys(Keys.ARROW_LEFT),
'arrow_up': map_.send_keys(Keys.ARROW_UP),
}
for _ in range(repeat):
key_moves[key_move]
sleep(sleep_time)
def test_home_page_loads(self):
#Billy sees the landsat.club homepage and rejoices.
self.browser.get('localhost:8000')
self.map_move('zoom_in', repeat=5)
self.map_move('arrow_right', repeat=5, sleep_time=.75)
#zoom_in.click()
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.75)
#zoom_in.click()
#sleep(.75)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.75)
#zoom_in.click()
#sleep(5)
#self.browser.find_element_by_class_name('leaflet-control-mapbox-geocoder-toggle').click()
#self.browser.find_element_by_xpath('//*[@id="map"]/div[2]/div[1]/div[2]/div[2]/form/input').send_keys('10010', Keys.RETURN)
#
#
#sleep(.75)
#zoom_out.click()
#sleep(.75)
#zoom_out.click()
#sleep(.75)
#zoom_out.click()
#sleep(.75)
#zoom_out.click()
#
#sleep(600)
#
| Python | 0.000001 |
9de0a05d28c83742224c0e708e80b8add198a8a8 | Add user data export for comments | froide/comments/apps.py | froide/comments/apps.py | import json
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class CommentConfig(AppConfig):
name = 'froide.comments'
verbose_name = _('Comments')
def ready(self):
from froide.account import account_canceled
from froide.account.export import registry
account_canceled.connect(cancel_user)
registry.register(export_user_data)
def cancel_user(sender, user=None, **kwargs):
from .models import FroideComment
if user is None:
return
FroideComment.objects.filter(user=user).update(
user_name='',
user_email='',
user_url=''
)
def export_user_data(user):
from .models import FroideComment
comments = FroideComment.objects.filter(user=user)
if not comments:
return
yield ('comments.json', json.dumps([
{
'submit_date': (
c.submit_date.isoformat() if c.submit_date else None
),
'comment': c.comment,
'is_public': c.is_public,
'is_removed': c.is_removed,
'url': c.get_absolute_url(),
}
for c in comments]).encode('utf-8')
)
| from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class CommentConfig(AppConfig):
name = 'froide.comments'
verbose_name = _('Comments')
def ready(self):
from froide.account import account_canceled
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import FroideComment
if user is None:
return
FroideComment.objects.filter(user=user).update(
user_name='',
user_email='',
user_url=''
)
| Python | 0 |
4a201a37318d5eea1e50e0619580a23f37e2e7da | Fix path for boringssl | libwebsockets.gyp | libwebsockets.gyp | {
'targets': [
{
'target_name': 'libwebsockets',
'type': 'static_library',
'standalone_static_library': 1,
'sources': [
'lib/base64-decode.c',
'lib/handshake.c',
'lib/libwebsockets.c',
'lib/service.c',
'lib/pollfd.c',
'lib/output.c',
'lib/parsers.c',
'lib/context.c',
'lib/sha-1.c',
'lib/alloc.c',
'lib/header.c',
'lib/client.c',
'lib/client-handshake.c',
'lib/client-parser.c',
'lib/ssl.c',
'lib/server.c',
'lib/server-handshake.c',
'lib/extension.c',
'lib/extension-deflate-frame.c',
'lib/extension-deflate-stream.c',
],
'dependencies': [
'<(DEPTH)/third_party/zlib/zlib.gyp:zlib',
'<(peeracle_webrtc_root)/third_party/boringssl/boringssl.gyp:boringssl'
],
'include_dirs': [
'config/<(OS)/<(target_arch)',
],
'direct_dependent_settings': {
'include_dirs': [
'config/<(OS)/<(target_arch)',
],
},
'conditions': [
['OS == "win"', {
'sources': [
'lib/lws-plat-win.c',
],
'msvs_settings': {
'VCCLCompilerTool': {
'WarnAsError': 'true',
'DisableSpecificWarnings': ['4018']
}
}
}, {
'sources': [
'lib/lws-plat-unix.c',
],
}],
],
},
],
}
| {
'targets': [
{
'target_name': 'libwebsockets',
'type': 'static_library',
'standalone_static_library': 1,
'sources': [
'lib/base64-decode.c',
'lib/handshake.c',
'lib/libwebsockets.c',
'lib/service.c',
'lib/pollfd.c',
'lib/output.c',
'lib/parsers.c',
'lib/context.c',
'lib/sha-1.c',
'lib/alloc.c',
'lib/header.c',
'lib/client.c',
'lib/client-handshake.c',
'lib/client-parser.c',
'lib/ssl.c',
'lib/server.c',
'lib/server-handshake.c',
'lib/extension.c',
'lib/extension-deflate-frame.c',
'lib/extension-deflate-stream.c',
],
'dependencies': [
'<(DEPTH)/third_party/zlib/zlib.gyp:zlib',
'<(peeracle_webrtc_root)/chromium/src/third_party/boringssl/boringssl.gyp:boringssl'
],
'include_dirs': [
'config/<(OS)/<(target_arch)',
],
'direct_dependent_settings': {
'include_dirs': [
'config/<(OS)/<(target_arch)',
],
},
'conditions': [
['OS == "win"', {
'sources': [
'lib/lws-plat-win.c',
],
'msvs_settings': {
'VCCLCompilerTool': {
'WarnAsError': 'true',
'DisableSpecificWarnings': ['4018']
}
}
}, {
'sources': [
'lib/lws-plat-unix.c',
],
}],
],
},
],
}
| Python | 0.000013 |
eed4faf3bfe670421e7dc9c3065adbfceef0d2b6 | fix test for heapify | linear_heapify.py | linear_heapify.py | # Building hash in O(n) time and O(1) additional space. Inspired by https://www.youtube.com/watch?v=MiyLo8adrWw
def heapify(a):
for i in range(len(a) // 2, -1, -1):
parent = i
while True:
candidates = [parent, 2 * parent + 1, 2 * parent + 2]
candidates = [e for e in candidates if e < len(a)]
largest = max(candidates, key=lambda e: a[e])
if largest == parent:
break
else:
a[parent], a[largest], parent = a[largest], a[parent], largest
# Test
arr = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
heapify(arr)
print(arr)
| # Building hash in O(n) time and O(1) additional space. Inspired by https://www.youtube.com/watch?v=MiyLo8adrWw
def heapify(a):
for i in range(len(a) // 2, -1, -1):
parent = i
while True:
candidates = [parent, 2 * parent + 1, 2 * parent + 2]
candidates = [e for e in candidates if e < len(a)]
largest = max(candidates, key=lambda e: a[e])
if largest == parent:
break
else:
a[parent], a[largest], parent = a[largest], a[parent], largest
# Test
arr = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
print(heapify(arr))
| Python | 0.000005 |
b113cf82004b608b371d1a249801340f57195587 | add __str__. | linguist/cache.py | linguist/cache.py | # -*- coding: utf-8 -*-
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class CachedTranslation(object):
def __init__(self, **kwargs):
from .models import Translation
self.instances = ['instance', 'translation']
self.fields = Translation._meta.get_all_field_names()
self.fields.remove('id')
attrs = self.fields + self.instances
for attr in attrs:
setattr(self, attr, None)
self.__dict__.update(**kwargs)
self.is_new = True
if self.instance is not None:
self.identifier = self.instance.linguist_identifier
self.object_id = self.instance.pk
if self.translation is not None:
self.is_new = bool(self.translation.pk is None)
for attr in ('language', 'field_name', 'field_value'):
setattr(self, attr, getattr(self.translation, attr))
@property
def attrs(self):
"""
Returns Translation attributes to pass as kwargs for creating or updating objects.
"""
return dict((k, getattr(self, k)) for k in self.fields)
@property
def lookup(self):
"""
Returns Translation lookup to use for filter method.
"""
lookup = {'identifier': self.identifier,
'object_id': self.object_id}
if self.language is not None:
lookup['language'] = self.language
return lookup
@classmethod
def from_object(cls, obj):
"""
Updates values from the given object.
"""
from .models import Translation
fields = Translation._meta.get_all_field_names()
fields.remove('id')
return cls(**dict((field, getattr(obj, field)) for field in fields))
def __str__(self):
return '%s:%s:%s:%s' % (
self.identifier,
self.object_id,
self.field_name,
self.language)
| # -*- coding: utf-8 -*-
class CachedTranslation(object):
def __init__(self, **kwargs):
from .models import Translation
self.instances = ['instance', 'translation']
self.fields = Translation._meta.get_all_field_names()
self.fields.remove('id')
attrs = self.fields + self.instances
for attr in attrs:
setattr(self, attr, None)
self.__dict__.update(**kwargs)
self.is_new = True
if self.instance is not None:
self.identifier = self.instance.linguist_identifier
self.object_id = self.instance.pk
if self.translation is not None:
self.is_new = bool(self.translation.pk is None)
for attr in ('language', 'field_name', 'field_value'):
setattr(self, attr, getattr(self.translation, attr))
@property
def attrs(self):
"""
Returns Translation attributes to pass as kwargs for creating or updating objects.
"""
return dict((k, getattr(self, k)) for k in self.fields)
@property
def lookup(self):
"""
Returns Translation lookup to use for filter method.
"""
lookup = {'identifier': self.identifier,
'object_id': self.object_id}
if self.language is not None:
lookup['language'] = self.language
return lookup
@classmethod
def from_object(cls, obj):
"""
Updates values from the given object.
"""
from .models import Translation
fields = Translation._meta.get_all_field_names()
fields.remove('id')
return cls(**dict((field, getattr(obj, field)) for field in fields))
| Python | 0.000011 |
2466ca9839aaf1b5cfe98312c015a2defea71971 | to 0.1.0 | loris/__init__.py | loris/__init__.py | # __init__.py
__version__ = '0.1.0'
| # __init__.py
__version__ = '0.1.0dev'
| Python | 0.999999 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.