| commit (string, length 40) | subject (string, length 4-1.73k) | repos (string, length 5-127k) | old_file (string, length 2-751) | new_file (string, length 2-751) | new_contents (string, length 1-8.98k) | old_contents (string, length 0-6.59k) | license (string, 13 distinct values) | lang (string, 23 distinct values) |
|---|---|---|---|---|---|---|---|---|
ec0cad36ccffb000111cacdfc22cc17bc8be4e84 | Adjust display threshold when using verbose mode. | jchoude/tractconverter,jchoude/tractconverter,MarcCote/tractconverter,MarcCote/tractconverter | tractconverter/utils.py | tractconverter/utils.py | import os
import logging
from pdb import set_trace as dbg
from tractconverter.formats.tck import TCK
from tractconverter.formats.trk import TRK
from tractconverter.formats.fib import FIB
from tractconverter.formats.vtk import VTK
# Supported format
FORMATS = {"tck": TCK,
"trk": TRK,
"fib": FIB,
"vtk": VTK}
# Input and output extensions.
EXT_ANAT = ".nii|.nii.gz"
def is_supported(filename):
return detect_format(filename) is not None
def detect_format(filename):
if not os.path.isfile(filename):
return FORMATS.get(filename[-3:], None)
for format in FORMATS.values():
if format._check(filename):
return format
return None
def convert(input, output, verbose=False):
from tractconverter.formats.header import Header
nbFibers = 0
fibers = []
display_threshold = 10000 if input.hdr[Header.NB_FIBERS] > 100000 else 1000
for i, f in enumerate(input):
fibers.append(f)
if (i + 1) % 1000 == 0:
output += fibers
fibers = []
if i % display_threshold == 0:
logging.info('(' + str(nbFibers) + "/" + str(input.hdr[Header.NB_FIBERS]) + ' fibers)')
nbFibers += 1
if len(fibers) > 0:
output += fibers
output.close()
logging.info('Done! (' + str(nbFibers) + "/" + str(input.hdr[Header.NB_FIBERS]) + ' fibers)')
def merge(inputs, output, verbose=False):
from tractconverter.formats.header import Header
streamlines = []
for f in inputs:
streamlines += [s for s in f]
output.hdr[Header.NB_FIBERS] = len(streamlines)
output.writeHeader() # Update existing header
output += streamlines
logging.info('Done! (' + str(len(streamlines)) + " streamlines merged.)")
| import os
import logging
from pdb import set_trace as dbg
from tractconverter.formats.tck import TCK
from tractconverter.formats.trk import TRK
from tractconverter.formats.fib import FIB
from tractconverter.formats.vtk import VTK
# Supported format
FORMATS = {"tck": TCK,
"trk": TRK,
"fib": FIB,
"vtk": VTK}
# Input and output extensions.
EXT_ANAT = ".nii|.nii.gz"
def is_supported(filename):
return detect_format(filename) is not None
def detect_format(filename):
if not os.path.isfile(filename):
return FORMATS.get(filename[-3:], None)
for format in FORMATS.values():
if format._check(filename):
return format
return None
def convert(input, output, verbose=False):
from tractconverter.formats.header import Header
nbFibers = 0
fibers = []
for i, f in enumerate(input):
fibers.append(f)
if (i + 1) % 100 == 0:
output += fibers
fibers = []
if i % 1000 == 0:
logging.info('(' + str(nbFibers) + "/" + str(input.hdr[Header.NB_FIBERS]) + ' fibers)')
nbFibers += 1
if len(fibers) > 0:
output += fibers
output.close()
logging.info('Done! (' + str(nbFibers) + "/" + str(input.hdr[Header.NB_FIBERS]) + ' fibers)')
def merge(inputs, output, verbose=False):
from tractconverter.formats.header import Header
streamlines = []
for f in inputs:
streamlines += [s for s in f]
output.hdr[Header.NB_FIBERS] = len(streamlines)
output.writeHeader() # Update existing header
output += streamlines
logging.info('Done! (' + str(len(streamlines)) + " streamlines merged.)")
| bsd-3-clause | Python |
26887135f1d8fcce669874f264b0c5ba4a0ddaad | Set version to 'dev' in master | srthurman/transitland-python-client,transitland/transitland-python-client | transitland/__init__.py | transitland/__init__.py | """
Transitland Python Client.
This library implements an interface for the Transitland Feed Registry, Transitland Datastore, and working with Onestop IDs:
https://github.com/transit-land/onestop-id
Modules:
registry - Feed Registry reader
entities - Transitland entities
geom - Geometry utilities
util - Other utilities
errors - Exceptions
bootstrap - Create Transitland Feed from GTFS URL
fetch - Feed aggregator
"""
__version__ = 'dev' | """
Transitland Python Client.
This library implements an interface for the Transitland Feed Registry, Transitland Datastore, and working with Onestop IDs:
https://github.com/transit-land/onestop-id
Modules:
registry - Feed Registry reader
entities - Transitland entities
geom - Geometry utilities
util - Other utilities
errors - Exceptions
bootstrap - Create Transitland Feed from GTFS URL
fetch - Feed aggregator
"""
__version__ = '0.5.2' | mit | Python |
ad0d43246ab135facdfa865b4c335ca2bb2db491 | write data in bulk and remove the path to the data file - require the arg instead in the script | DOAJ/doaj,DOAJ/doaj,DOAJ/doaj,DOAJ/doaj | portality/migrate/2966_add_oa_start_date_from_backup/add_oa_start_date_from_backup.py | portality/migrate/2966_add_oa_start_date_from_backup/add_oa_start_date_from_backup.py | import csv
from copy import deepcopy
from datetime import datetime
import esprit
from portality.core import es_connection
from portality.models import Journal
from portality.settings import BASE_FILE_PATH
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--data", help="csv with backup data")
args = parser.parse_args()
if not args.data:
#print("Please specify a csv data file path with the -d option")
#parser.print_help()
#exit()
args.data = BASE_FILE_PATH + "/migrate/2966_add_oa_start_date_from_backup/oa_start_out.csv"
try:
f = open(args.data)
except:
print("Could not open file: " + args.data + ". Try again.")
batch = []
batch_size = 1000
csv_reader = csv.reader(f)
header = next(csv_reader)
if header is not None:
for row in csv_reader:
print(row[0])
j = Journal.pull(row[0])
if j is not None and "oa_start" not in j["bibjson"]:
data = j.data
data["bibjson"]["oa_start"] = row[1]
batch.append(data)
if len(batch) >= batch_size:
print(datetime.now(), "writing ", len(batch), "to", "journal")
esprit.raw.bulk(es_connection, batch, idkey="id", type_="journal", bulk_type="index")
batch = []
if len(batch) > 0:
print(datetime.now(), "final result set / writing ", len(batch), "to", "journal")
esprit.raw.bulk(es_connection, batch, idkey="id", type_="journal", bulk_type="index")
f.close()
| import csv
from copy import deepcopy
from portality.models import Journal
from portality.settings import BASE_FILE_PATH
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--out", help="output file path")
args = parser.parse_args()
if not args.out:
print("Please specify an output file path with the -o option")
parser.print_help()
exit()
with open(args.out, "w", encoding="utf-8") as f:
writer = csv.writer(f)
writer.writerow(['Journal ID', 'OA Start Date'])
f = open(BASE_FILE_PATH + "/migrate/2966_add_oa_start_date_from_backup/oa_start_out.csv")
csv_reader = csv.reader(f)
header = next(csv_reader)
if header is not None:
for row in csv_reader:
print(row)
j = Journal.pull(row[0])
if j is not None:
j.bibjson().oa_start = row[1]
j.save()
writer.writerow([j.id, j.bibjson().oa_start])
f.close()
| apache-2.0 | Python |
da84d680ca271fd79027692eac127167c00fcee8 | Fix morning time span to be between 6 and 8 | fjacob21/MAX,fjacob21/MAX,fjacob21/MAX,fjacob21/MAX | service/src/state_scheduler/states/salon_entry_light_state/salon_entry_light_state.py | service/src/state_scheduler/states/salon_entry_light_state/salon_entry_light_state.py | import MAX
import time
import datetime
from .off_state import off_state
from .on_state import on_state
from .prio_off_state import prio_off_state
from .prio_on_state import prio_on_state
class salon_entry_light_state(object):
def __init__(self):
self._off_state = off_state(self)
self._on_state = on_state(self)
self._prio_off_state = prio_off_state(self)
self._prio_on_state = prio_on_state(self)
self.set_state(self._off_state)
@property
def name(self):
return 'Salont entry light state'
@property
def description(self):
return "State machine that control the salon entry light"
def isMorning(self):
now = datetime.datetime.now().time()
isMorning = (datetime.time(6) <= now <= datetime.time(8))
print("Is it morning? {0}".format(isMorning))
return isMorning
def isEvening(self):
now = datetime.datetime.now().time()
isEvening = (datetime.time(18) <= now <= datetime.time(21, 30))
print("Is it evening? {0}".format(isEvening))
return isEvening
def isLightNeeded(self):
isLightNeeded = self.isMorning() or self.isEvening()
print("Is light needed? {0}".format(isLightNeeded))
return isLightNeeded
def set_state(self, state):
self._current_state = state
self._current_state.enter()
def event(self, event, source, params):
#Sprint('Fire {device}.{event}'.format(event=event, device=source.name))
self._current_state.event(event, source, params)
| import MAX
import time
import datetime
from .off_state import off_state
from .on_state import on_state
from .prio_off_state import prio_off_state
from .prio_on_state import prio_on_state
class salon_entry_light_state(object):
def __init__(self):
self._off_state = off_state(self)
self._on_state = on_state(self)
self._prio_off_state = prio_off_state(self)
self._prio_on_state = prio_on_state(self)
self.set_state(self._off_state)
@property
def name(self):
return 'Salont entry light state'
@property
def description(self):
return "State machine that control the salon entry light"
def isMorning(self):
now = datetime.datetime.now().time()
isMorning = (datetime.time(0) <= now <= datetime.time(2))
print("Is it morning? {0}".format(isMorning))
return isMorning
def isEvening(self):
now = datetime.datetime.now().time()
isEvening = (datetime.time(18) <= now <= datetime.time(21, 30))
print("Is it evening? {0}".format(isEvening))
return isEvening
def isLightNeeded(self):
isLightNeeded = self.isMorning() or self.isEvening()
print("Is light needed? {0}".format(isLightNeeded))
return isLightNeeded
def set_state(self, state):
self._current_state = state
self._current_state.enter()
def event(self, event, source, params):
#Sprint('Fire {device}.{event}'.format(event=event, device=source.name))
self._current_state.event(event, source, params)
| mit | Python |
3f43163e0900aa6b830013f2ba3c6c6915f514aa | implement playMovie action | flurischt/omxremote,flurischt/omxremote,flurischt/omxremote | omxremote/backend.py | omxremote/backend.py | import os
import json
import hashlib
from flask import Flask, Response, request, jsonify
from omxremote.dbus_connection import OmxRemote
app = Flask(__name__)
#TODO
SUPPORTED_COMMANDS = ('pause', 'togglesubtitles', 'volumeup', 'volumedown', 'stop')
VIDEO_FILE_EXTENSIONS = ('.avi', '.mkv', '.mp4')
MOVIES_DIR = 'movies_dir/'
#memoize this...
def __find_movie_files():
data = []
for path,dirs,files in os.walk(MOVIES_DIR):
for f in files:
if os.path.splitext(f)[1].lower() in VIDEO_FILE_EXTENSIONS:
absolute = os.path.join(path, f)
hash = hashlib.sha256(absolute.encode('utf-8')).hexdigest()
data.append({'filename' : f, 'hash' : hash, 'absolute' : absolute})
return data
@app.route('/')
def index():
return app.send_static_file('index.html')
@app.route('/api/status')
def status():
progress, duration, playback = OmxRemote().status()
#TODO filename...
data = {'filename' : '', 'progress' : int(progress), 'duration' : int(duration), 'playback' : playback}
return jsonify(data)
@app.route('/api/list')
def list():
data = __find_movie_files()
return Response(json.dumps(data), mimetype='application/json')
@app.route('/api/exec/<string:cmd>', methods=['POST'])
def command(cmd):
assert cmd in SUPPORTED_COMMANDS
OmxRemote().send_command(cmd)
print('got command: ' + cmd)
return jsonify({'status' : 'OK'})
@app.route('/api/changeMovie', methods=['POST'])
def change_movie():
hash = request.form['hash']
data = __find_movie_files()
for f in data: #TODO comparing with every file is obviously stupid...
if f['hash'] == hash:
OmxRemote().playMovie(f['absolute'])
return jsonify({'status' : 'OK'})
return jsonify({'status' : 'FAIL'})
if __name__ == '__main__':
app.run(debug=True)
| import os
import json
import hashlib
from flask import Flask, Response, request, jsonify
from omxremote.dbus_connection import OmxRemote
app = Flask(__name__)
#TODO
SUPPORTED_COMMANDS = ('pause', 'togglesubtitles', 'volumeup', 'volumedown', 'stop')
VIDEO_FILE_EXTENSIONS = ('.avi', '.mkv', '.mp4')
MOVIES_DIR = 'movies_dir/'
@app.route('/')
def index():
return app.send_static_file('index.html')
@app.route('/api/status')
def status():
progress, duration, playback = OmxRemote().status()
#TODO filename...
data = {'filename' : '', 'progress' : int(progress), 'duration' : int(duration), 'playback' : playback}
return jsonify(data)
@app.route('/api/list')
def list():
#TODO memoize os.walk and hashing...
data = []
for path,dirs,files in os.walk(MOVIES_DIR):
for f in files:
if os.path.splitext(f)[1].lower() in VIDEO_FILE_EXTENSIONS:
absolute = os.path.join(path, f)
hash = hashlib.sha256(absolute.encode('utf-8')).hexdigest()
data.append({'filename' : f, 'hash' : hash})
return Response(json.dumps(data), mimetype='application/json')
@app.route('/api/exec/<string:cmd>', methods=['POST'])
def command(cmd):
assert cmd in SUPPORTED_COMMANDS
OmxRemote().send_command(cmd)
print('got command: ' + cmd)
return jsonify({'status' : 'OK'})
@app.route('/api/changeMovie', methods=['POST'])
def change_movie():
print(request.form['hash'])
return jsonify({'status' : 'OK'})
if __name__ == '__main__':
app.run(debug=True)
| bsd-2-clause | Python |
95655e2dc094a72f6b9e25c2c44ac69f5fba6094 | Add search urls on articles | opps/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,opps/opps,opps/opps,YACOWS/opps,jeanmask/opps | opps/article/urls.py | opps/article/urls.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls import patterns, url
from opps.article.views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', OppsList.as_view(), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='open'),
url(r'^(?P<channel__long_slug>[\w//-]+)$', OppsList.as_view(),
name='channel'),
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls import patterns, url
from opps.article.views import OppsDetail, OppsList
urlpatterns = patterns(
'',
url(r'^$', OppsList.as_view(), name='home'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='open'),
url(r'^(?P<channel__long_slug>[\w//-]+)$', OppsList.as_view(),
name='channel'),
)
| mit | Python |
39eeca93f28ee6e452590041dee18ec853c761f2 | Use tf.contrib.rnn.LayerNormBasicLSTMCell | raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten | nn/rnn/cell.py | nn/rnn/cell.py | import tensorflow as tf
from ..flags import FLAGS
from ..util import funcname_scope
@funcname_scope
def ln_lstm(output_size, dropout_prob=FLAGS.dropout_prob):
return tf.contrib.rnn.LayerNormBasicLSTMCell(
output_size,
dropout_keep_prob=1-dropout_prob)
@funcname_scope
def gru(output_size, dropout_prob=FLAGS.dropout_prob):
return _dropout_cell(tf.nn.rnn_cell.GRUCell(output_size), dropout_prob)
def _dropout_cell(cell, dropout_prob):
return tf.nn.rnn_cell.DropoutWrapper(cell, 1 - dropout_prob)
| import tensorflow as tf
from ..assertion import is_sequence
from ..util import funcname_scope, static_shape, static_rank
from ..normalization import layer_normalization
class LNLSTMCell(tf.nn.rnn_cell.RNNCell):
def __init__(self, num_units, forget_bias=1.0, activation=tf.tanh):
self._num_units = num_units
self._forget_bias = forget_bias
self._activation = activation
@property
def state_size(self):
return tf.nn.rnn_cell.LSTMStateTuple(self._num_units, self._num_units)
@property
def output_size(self):
return self._num_units
def __call__(self, inputs, state, scope=None):
with tf.variable_scope(scope or type(self).__name__):
c, h = state
concat = _ln_linear([inputs, h], 4 * self._num_units, True)
# i = input_gate, j = new_input, f = forget_gate, o = output_gate
i, j, f, o = tf.split(1, 4, concat)
new_c = layer_normalization(c * tf.sigmoid(f + self._forget_bias)
+ tf.sigmoid(i) * self._activation(j),
share_variables=True)
new_h = self._activation(new_c) * tf.sigmoid(o)
return new_h, tf.nn.rnn_cell.LSTMStateTuple(new_c, new_h)
@funcname_scope
def ln_lstm_cell(output_embedding_size, dropout_prob):
return _dropout_cell(LNLSTMCell(output_embedding_size), dropout_prob)
@funcname_scope
def gru_cell(output_embedding_size, dropout_prob):
return _dropout_cell(tf.nn.rnn_cell.GRUCell(output_embedding_size),
dropout_prob)
def _dropout_cell(cell, dropout_prob):
return tf.nn.rnn_cell.DropoutWrapper(cell, 1 - dropout_prob)
@funcname_scope
def _ln_linear(inputs, output_size, add_bias=True, initial_bias=0.0):
if not is_sequence(inputs):
inputs = [inputs]
assert is_sequence(inputs)
result = 0.
for index, input_ in enumerate(inputs):
input_size = static_shape(input_)[1]
assert static_rank(input_) == 2 and input_size != None
with tf.variable_scope("weighted_{}".format(index)):
weight = tf.get_variable("Weight{}".format(index),
[input_size, output_size])
result += layer_normalization(tf.matmul(input_, weight),
share_variables=True)
if add_bias:
result += tf.get_variable(
"Bias",
[output_size],
initializer=tf.constant_initializer(initial_bias))
return result
| unlicense | Python |
780bf8ee4bbe8936055ddc0e71042fc7736f0192 | Fix prime declaraion in make_tests | boppreh/ecc,boppreh/ecc | make_tests.py | make_tests.py | #!/bin/env python3
from field import FieldValue
from random import randint
import math
prime = 2**31 -1
n_chunks = math.ceil(math.log2(prime) / 2)
F = lambda i: FieldValue(i, prime)
h = lambda i: hex(i)[2:]
print("""
#include "ecc.c"
int main() {
""")
print('\tNumber p = parse("{}");'.format(h(prime)))
print('\tNumber a = new_number({});'.format(n_chunks))
print('\tNumber b = new_number({});'.format(n_chunks))
print('\tNumber result = new_number(2*{});'.format(n_chunks))
def make_test(a, b, expected, op):
print()
print('\ta = parse("{}");'.format(h(a)))
print('\tb = parse("{}");'.format(h(b)))
print('\t{}(a, b, p, result);'.format(op))
print('\tassert(cmp(result, parse("{}")) == 0);'.format(h(expected)))
make_test(F(0), F(0), F(0), 'addm')
make_test(F(0), F(1), F(1), 'addm')
make_test(F(1), F(0), F(1), 'addm')
make_test(F(1), F(1), F(2), 'addm')
make_test(F(0), F(0), F(0), 'mulm')
make_test(F(0), F(1), F(0), 'mulm')
make_test(F(1), F(0), F(0), 'mulm')
make_test(F(1), F(1), F(1), 'mulm')
def make_op_test(n, function, name):
for i in range(n):
a = F(randint(0, prime))
b = F(randint(0, prime))
make_test(a, b, function(a, b), name)
make_op_test(100, lambda a, b: a + b, 'addm')
make_op_test(100, lambda a, b: a + b, 'subm')
make_op_test(100, lambda a, b: a + b, 'mulm')
make_op_test(100, lambda a, b: a + b, 'divm')
print('}')
| #!/bin/env python3
from field import FieldValue
from random import randint
import math
prime = 2**31 -1
n_chunks = math.ceil(math.log2(prime) / 2)
F = lambda i: FieldValue(i, prime)
h = lambda i: hex(i)[2:]
print("""
#include "ecc.c"
int main() {
""")
print('\tNumber p = parse("{}");'.format(prime))
print('\tNumber a = new_number({});'.format(n_chunks))
print('\tNumber b = new_number({});'.format(n_chunks))
print('\tNumber result = new_number(2*{});'.format(n_chunks))
def make_test(a, b, expected, op):
print()
print('\ta = parse("{}");'.format(h(a)))
print('\tb = parse("{}");'.format(h(b)))
print('\t{}(a, b, p, result);'.format(op))
print('\tassert(cmp(result, parse("{}")) == 0);'.format(h(expected)))
make_test(F(0), F(0), F(0), 'addm')
make_test(F(0), F(1), F(1), 'addm')
make_test(F(1), F(0), F(1), 'addm')
make_test(F(1), F(1), F(2), 'addm')
make_test(F(0), F(0), F(0), 'mulm')
make_test(F(0), F(1), F(0), 'mulm')
make_test(F(1), F(0), F(0), 'mulm')
make_test(F(1), F(1), F(1), 'mulm')
def make_op_test(n, function, name):
for i in range(n):
a = F(randint(0, prime))
b = F(randint(0, prime))
make_test(a, b, function(a, b), name)
make_op_test(100, lambda a, b: a + b, 'addm')
make_op_test(100, lambda a, b: a + b, 'subm')
make_op_test(100, lambda a, b: a + b, 'mulm')
make_op_test(100, lambda a, b: a + b, 'divm')
print('}')
| mit | Python |
ee5bc5d6d212e2bd8e0d60e6024c5180821f6a35 | Format test according to Black | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | tests/integration/services/ticketing/test_creation.py | tests/integration/services/ticketing/test_creation.py | """
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
from pytest import raises
from byceps.services.ticketing import (
event_service,
ticket_creation_service,
ticket_service,
)
def test_create_ticket(admin_app, category, ticket_owner):
ticket = ticket_creation_service.create_ticket(category.id, ticket_owner.id)
assert_created_ticket(ticket, category.id, ticket_owner.id)
# Clean up.
ticket_service.delete_ticket(ticket.id)
@patch(
'byceps.services.ticketing.ticket_creation_service._generate_ticket_code'
)
def test_create_ticket_with_existing_code(
generate_ticket_code_mock, admin_app, category, ticket_owner
):
generate_ticket_code_mock.return_value = 'TAKEN'
existing_ticket = ticket_creation_service.create_ticket(
category.id, ticket_owner.id
)
assert existing_ticket.code == 'TAKEN'
with raises(ticket_creation_service.TicketCreationFailed):
ticket_creation_service.create_ticket(category.id, ticket_owner.id)
# Clean up.
ticket_service.delete_ticket(existing_ticket.id)
def test_create_tickets(admin_app, category, ticket_owner):
quantity = 3
tickets = ticket_creation_service.create_tickets(
category.id, ticket_owner.id, quantity
)
for ticket in tickets:
assert_created_ticket(ticket, category.id, ticket_owner.id)
# Clean up.
for ticket in tickets:
ticket_service.delete_ticket(ticket.id)
def assert_created_ticket(ticket, expected_category_id, expected_owner_id):
assert ticket is not None
assert ticket.created_at is not None
assert ticket.code is not None
assert ticket.bundle_id is None
assert ticket.category_id == expected_category_id
assert ticket.owned_by_id == expected_owner_id
assert ticket.seat_managed_by_id is None
assert ticket.user_managed_by_id is None
assert ticket.occupied_seat_id is None
assert ticket.used_by_id is None
assert not ticket.revoked
assert not ticket.user_checked_in
events = event_service.get_events_for_ticket(ticket.id)
assert len(events) == 0
| """
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
from pytest import raises
from byceps.services.ticketing import (
event_service,
ticket_creation_service,
ticket_service,
)
def test_create_ticket(admin_app, category, ticket_owner):
ticket = ticket_creation_service.create_ticket(category.id, ticket_owner.id)
assert_created_ticket(ticket, category.id, ticket_owner.id)
# Clean up.
ticket_service.delete_ticket(ticket.id)
@patch('byceps.services.ticketing.ticket_creation_service._generate_ticket_code')
def test_create_ticket_with_existing_code(generate_ticket_code_mock, admin_app, category, ticket_owner):
generate_ticket_code_mock.return_value = 'TAKEN'
existing_ticket = ticket_creation_service.create_ticket(category.id, ticket_owner.id)
assert existing_ticket.code == 'TAKEN'
with raises(ticket_creation_service.TicketCreationFailed):
ticket_creation_service.create_ticket(category.id, ticket_owner.id)
# Clean up.
ticket_service.delete_ticket(existing_ticket.id)
def test_create_tickets(admin_app, category, ticket_owner):
quantity = 3
tickets = ticket_creation_service.create_tickets(
category.id, ticket_owner.id, quantity
)
for ticket in tickets:
assert_created_ticket(ticket, category.id, ticket_owner.id)
# Clean up.
for ticket in tickets:
ticket_service.delete_ticket(ticket.id)
def assert_created_ticket(ticket, expected_category_id, expected_owner_id):
assert ticket is not None
assert ticket.created_at is not None
assert ticket.code is not None
assert ticket.bundle_id is None
assert ticket.category_id == expected_category_id
assert ticket.owned_by_id == expected_owner_id
assert ticket.seat_managed_by_id is None
assert ticket.user_managed_by_id is None
assert ticket.occupied_seat_id is None
assert ticket.used_by_id is None
assert not ticket.revoked
assert not ticket.user_checked_in
events = event_service.get_events_for_ticket(ticket.id)
assert len(events) == 0
| bsd-3-clause | Python |
784f58a27c6e010b87ff568fc8c0719e52c037a2 | Update disabled_print_settings_for_custom_print_format.py | rmehta/frappe,StrellaGroup/frappe,mhbu50/frappe,mbauskar/frappe,elba7r/builder,mbauskar/frappe,mhbu50/frappe,elba7r/frameworking,yashodhank/frappe,manassolanki/frappe,rmehta/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,indautgrp/frappe,frappe/frappe,mbauskar/frappe,rohitwaghchaure/frappe,bcornwellmott/frappe,manassolanki/frappe,almeidapaulopt/frappe,tundebabzy/frappe,saurabh6790/frappe,adityahase/frappe,yashodhank/frappe,elba7r/frameworking,vjFaLk/frappe,manassolanki/frappe,neilLasrado/frappe,RicardoJohann/frappe,bcornwellmott/frappe,tundebabzy/frappe,adityahase/frappe,elba7r/builder,yashodhank/frappe,manassolanki/frappe,neilLasrado/frappe,rohitwaghchaure/frappe,frappe/frappe,saurabh6790/frappe,maxtorete/frappe,mbauskar/frappe,elba7r/frameworking,RicardoJohann/frappe,rmehta/frappe,indautgrp/frappe,ESS-LLP/frappe,elba7r/builder,StrellaGroup/frappe,chdecultot/frappe,almeidapaulopt/frappe,elba7r/frameworking,tundebabzy/frappe,maxtorete/frappe,bohlian/frappe,vjFaLk/frappe,elba7r/builder,bcornwellmott/frappe,bohlian/frappe,almeidapaulopt/frappe,ESS-LLP/frappe,paurosello/frappe,RicardoJohann/frappe,adityahase/frappe,rohitwaghchaure/frappe,rmehta/frappe,tmimori/frappe,ESS-LLP/frappe,vjFaLk/frappe,vjFaLk/frappe,mhbu50/frappe,tundebabzy/frappe,indautgrp/frappe,maxtorete/frappe,tmimori/frappe,bohlian/frappe,chdecultot/frappe,neilLasrado/frappe,paurosello/frappe,bcornwellmott/frappe,saurabh6790/frappe,bohlian/frappe,chdecultot/frappe,rohitwaghchaure/frappe,RicardoJohann/frappe,chdecultot/frappe,paurosello/frappe,ESS-LLP/frappe,indautgrp/frappe,paurosello/frappe,mhbu50/frappe,maxtorete/frappe,neilLasrado/frappe,yashodhank/frappe,frappe/frappe,saurabh6790/frappe,adityahase/frappe,tmimori/frappe,tmimori/frappe | frappe/patches/v7_1/disabled_print_settings_for_custom_print_format.py | frappe/patches/v7_1/disabled_print_settings_for_custom_print_format.py | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doctype('Print Format')
frappe.db.sql("""
update
`tabPrint Format`
set
align_labels_left = 0, line_breaks = 0, show_section_headings = 0
where
custom_format = 1
""")
| # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""
update
`tabPrint Format`
set
align_labels_left = 0, line_breaks = 0, show_section_headings = 0
where
custom_format = 1
""") | mit | Python |
4aac0d6d1c5b253a8078316fc8ac5a4d735cd69f | Upgrade to Chrome 38.0.2125.102 | fabien-d/electron,JesselJohn/electron,dkfiresky/electron,lzpfmh/electron,JussMee15/electron,howmuchcomputer/electron,kenmozi/electron,jannishuebl/electron,brave/muon,meowlab/electron,icattlecoder/electron,bwiggs/electron,nekuz0r/electron,jtburke/electron,michaelchiche/electron,kokdemo/electron,posix4e/electron,LadyNaggaga/electron,brave/electron,kokdemo/electron,fireball-x/atom-shell,ervinb/electron,pandoraui/electron,nicholasess/electron,sircharleswatson/electron,saronwei/electron,webmechanicx/electron,faizalpribadi/electron,dongjoon-hyun/electron,stevemao/electron,bruce/electron,jjz/electron,fabien-d/electron,GoooIce/electron,nagyistoce/electron-atom-shell,thingsinjars/electron,webmechanicx/electron,Neron-X5/electron,SufianHassan/electron,mattotodd/electron,Floato/electron,BionicClick/electron,Neron-X5/electron,gabrielPeart/electron,kostia/electron,saronwei/electron,mjaniszew/electron,baiwyc119/electron,JesselJohn/electron,Gerhut/electron,zhakui/electron,ankitaggarwal011/electron,JussMee15/electron,davazp/electron,gstack/infinium-shell,miniak/electron,felixrieseberg/electron,iftekeriba/electron,Gerhut/electron,twolfson/electron,gstack/infinium-shell,jiaz/electron,benweissmann/electron,bwiggs/electron,yan-foto/electron,felixrieseberg/electron,JesselJohn/electron,stevekinney/electron,Gerhut/electron,cos2004/electron,jcblw/electron,benweissmann/electron,brave/muon,minggo/electron,John-Lin/electron,jhen0409/electron,ervinb/electron,seanchas116/electron,yan-foto/electron,hokein/atom-shell,adcentury/electron,medixdev/electron,electron/electron,brenca/electron,bruce/electron,kikong/electron,arturts/electron,edulan/electron,noikiy/electron,iftekeriba/electron,RIAEvangelist/electron,sshiting/electron,jannishuebl/electron,gabriel/electron,thompsonemerson/electron,jlhbaseball15/electron,timruffles/electron,christian-bromann/electron,bpasero/electron,brenca/electron,jsutcodes/electron,synaptek/electron,leethomas/electron,eric-seekas/electron,noikiy/electron,MaxGraey/electron,robinvandernoord/electron,SufianHassan/electron,arturts/electron,natgolov/electron,timruffles/electron,jlord/electron,aliib/electron,kazupon/electron,d-salas/electron,soulteary/electron,christian-bromann/electron,meowlab/electron,sircharleswatson/electron,destan/electron,bpasero/electron,eriser/electron,SufianHassan/electron,anko/electron,deepak1556/atom-shell,synaptek/electron,neutrous/electron,leolujuyi/electron,takashi/electron,bbondy/electron,synaptek/electron,gerhardberger/electron,aliib/electron,Jacobichou/electron,vaginessa/electron,miniak/electron,rsvip/electron,yan-foto/electron,setzer777/electron,joneit/electron,vaginessa/electron,bbondy/electron,yalexx/electron,Jonekee/electron,michaelchiche/electron,timruffles/electron,jsutcodes/electron,shiftkey/electron,etiktin/electron,eric-seekas/electron,gamedevsam/electron,michaelchiche/electron,simongregory/electron,leftstick/electron,simonfork/electron,arturts/electron,aichingm/electron,preco21/electron,benweissmann/electron,systembugtj/electron,ervinb/electron,arturts/electron,gbn972/electron,pombredanne/electron,baiwyc119/electron,GoooIce/electron,noikiy/electron,adcentury/electron,tomashanacek/electron,dongjoon-hyun/electron,hokein/atom-shell,kazupon/electron,mubassirhayat/electron,vaginessa/electron,takashi/electron,eric-seekas/electron,zhakui/electron,matiasinsaurralde/electron,MaxWhere/electron,Floato/electron,jlhbaseball15/electron,tinydew4/electron,mrwizard82d1/electron,miniak/electron,nekuz0r
/electron,jhen0409/electron,deepak1556/atom-shell,miniak/electron,jannishuebl/electron,cqqccqc/electron,rreimann/electron,brave/muon,stevekinney/electron,biblerule/UMCTelnetHub,setzer777/electron,LadyNaggaga/electron,seanchas116/electron,shockone/electron,shaundunne/electron,dkfiresky/electron,deed02392/electron,fomojola/electron,pirafrank/electron,IonicaBizauKitchen/electron,leethomas/electron,tomashanacek/electron,pombredanne/electron,chrisswk/electron,lzpfmh/electron,leolujuyi/electron,systembugtj/electron,aichingm/electron,the-ress/electron,davazp/electron,pandoraui/electron,stevekinney/electron,adcentury/electron,simongregory/electron,nicholasess/electron,natgolov/electron,dahal/electron,Ivshti/electron,eric-seekas/electron,bright-sparks/electron,subblue/electron,subblue/electron,faizalpribadi/electron,subblue/electron,jsutcodes/electron,posix4e/electron,wolfflow/electron,aecca/electron,micalan/electron,mirrh/electron,kostia/electron,simonfork/electron,adamjgray/electron,bobwol/electron,Rokt33r/electron,Faiz7412/electron,posix4e/electron,Neron-X5/electron,webmechanicx/electron,electron/electron,brenca/electron,soulteary/electron,trankmichael/electron,minggo/electron,tinydew4/electron,Jacobichou/electron,egoist/electron,tylergibson/electron,sshiting/electron,matiasinsaurralde/electron,destan/electron,mattdesl/electron,gamedevsam/electron,Jacobichou/electron,cos2004/electron,bright-sparks/electron,BionicClick/electron,GoooIce/electron,deed02392/electron,etiktin/electron,bitemyapp/electron,Andrey-Pavlov/electron,bruce/electron,bitemyapp/electron,cos2004/electron,noikiy/electron,robinvandernoord/electron,shiftkey/electron,systembugtj/electron,Floato/electron,xfstudio/electron,JesselJohn/electron,iftekeriba/electron,joneit/electron,medixdev/electron,maxogden/atom-shell,bright-sparks/electron,setzer777/electron,shiftkey/electron,brave/electron,shennushi/electron,takashi/electron,nicobot/electron,JussMee15/electron,carsonmcdonald/electron,wan-qy/electron,lrlna/electron,mattdesl/electron,roadev/electron,jiaz/electron,smczk/electron,shaundunne/electron,evgenyzinoviev/electron,cqqccqc/electron,nicobot/electron,JussMee15/electron,robinvandernoord/electron,rajatsingla28/electron,jannishuebl/electron,shiftkey/electron,brenca/electron,rajatsingla28/electron,mattotodd/electron,iftekeriba/electron,gbn972/electron,thingsinjars/electron,miniak/electron,soulteary/electron,fffej/electron,xfstudio/electron,fireball-x/atom-shell,mattotodd/electron,simonfork/electron,Faiz7412/electron,jcblw/electron,LadyNaggaga/electron,kazupon/electron,kostia/electron,edulan/electron,jannishuebl/electron,rhencke/electron,d-salas/electron,LadyNaggaga/electron,vaginessa/electron,Faiz7412/electron,John-Lin/electron,michaelchiche/electron,sircharleswatson/electron,thingsinjars/electron,MaxWhere/electron,sircharleswatson/electron,bright-sparks/electron,John-Lin/electron,IonicaBizauKitchen/electron,joaomoreno/atom-shell,ianscrivener/electron,preco21/electron,wan-qy/electron,anko/electron,simongregory/electron,SufianHassan/electron,wan-qy/electron,greyhwndz/electron,astoilkov/electron,joneit/electron,micalan/electron,farmisen/electron,twolfson/electron,leftstick/electron,zhakui/electron,rajatsingla28/electron,leolujuyi/electron,rhencke/electron,BionicClick/electron,bruce/electron,shennushi/electron,fritx/electron,Evercoder/electron,gerhardberger/electron,electron/electron,mrwizard82d1/electron,bpasero/electron,micalan/electron,xiruibing/electron,chrisswk/electron,DivyaKMenon/electron,howmuchcomputer/electron,astoilkov/electron,evg
enyzinoviev/electron,bitemyapp/electron,vHanda/electron,carsonmcdonald/electron,cqqccqc/electron,fritx/electron,christian-bromann/electron,gstack/infinium-shell,sircharleswatson/electron,evgenyzinoviev/electron,leftstick/electron,jacksondc/electron,Faiz7412/electron,thomsonreuters/electron,bwiggs/electron,deepak1556/atom-shell,trigrass2/electron,jiaz/electron,dahal/electron,RIAEvangelist/electron,ianscrivener/electron,bobwol/electron,RIAEvangelist/electron,oiledCode/electron,eric-seekas/electron,faizalpribadi/electron,xiruibing/electron,abhishekgahlot/electron,jiaz/electron,webmechanicx/electron,jjz/electron,joaomoreno/atom-shell,hokein/atom-shell,cos2004/electron,pombredanne/electron,gbn972/electron,destan/electron,benweissmann/electron,oiledCode/electron,kcrt/electron,beni55/electron,the-ress/electron,kcrt/electron,yalexx/electron,dahal/electron,smczk/electron,saronwei/electron,anko/electron,medixdev/electron,twolfson/electron,adcentury/electron,gamedevsam/electron,dahal/electron,bitemyapp/electron,mrwizard82d1/electron,tylergibson/electron,jonatasfreitasv/electron,thompsonemerson/electron,SufianHassan/electron,smczk/electron,fireball-x/atom-shell,aaron-goshine/electron,twolfson/electron,christian-bromann/electron,abhishekgahlot/electron,trigrass2/electron,rsvip/electron,trankmichael/electron,dongjoon-hyun/electron,natgolov/electron,ianscrivener/electron,shaundunne/electron,leolujuyi/electron,leolujuyi/electron,maxogden/atom-shell,mrwizard82d1/electron,jhen0409/electron,gabriel/electron,fomojola/electron,Jonekee/electron,vipulroxx/electron,jtburke/electron,baiwyc119/electron,jannishuebl/electron,the-ress/electron,sshiting/electron,sshiting/electron,christian-bromann/electron,mattdesl/electron,yalexx/electron,gabriel/electron,DivyaKMenon/electron,tonyganch/electron,howmuchcomputer/electron,meowlab/electron,beni55/electron,tomashanacek/electron,fffej/electron,astoilkov/electron,seanchas116/electron,shaundunne/electron,ianscrivener/electron,setzer777/electron,edulan/electron,aaron-goshine/electron,electron/electron,xiruibing/electron,bright-sparks/electron,jtburke/electron,ervinb/electron,thomsonreuters/electron,simongregory/electron,bitemyapp/electron,bbondy/electron,darwin/electron,pombredanne/electron,sky7sea/electron,shockone/electron,d-salas/electron,wolfflow/electron,kenmozi/electron,edulan/electron,RIAEvangelist/electron,ankitaggarwal011/electron,mhkeller/electron,nicobot/electron,jhen0409/electron,roadev/electron,arturts/electron,miniak/electron,hokein/atom-shell,eric-seekas/electron,jlhbaseball15/electron,electron/electron,jsutcodes/electron,chrisswk/electron,mirrh/electron,soulteary/electron,sshiting/electron,Jonekee/electron,abhishekgahlot/electron,JesselJohn/electron,beni55/electron,nagyistoce/electron-atom-shell,JussMee15/electron,sky7sea/electron,renaesop/electron,MaxGraey/electron,arusakov/electron,destan/electron,digideskio/electron,jacksondc/electron,simonfork/electron,JesselJohn/electron,rreimann/electron,jsutcodes/electron,saronwei/electron,joaomoreno/atom-shell,kenmozi/electron,tylergibson/electron,Jonekee/electron,astoilkov/electron,benweissmann/electron,jtburke/electron,mhkeller/electron,brave/muon,tincan24/electron,chrisswk/electron,arusakov/electron,coderhaoxin/electron,mrwizard82d1/electron,kazupon/electron,Ivshti/electron,rreimann/electron,nekuz0r/electron,natgolov/electron,xiruibing/electron,aliib/electron,brave/electron,astoilkov/electron,the-ress/electron,mhkeller/electron,kcrt/electron,vipulroxx/electron,carsonmcdonald/electron,stevemao/electron,bwiggs/electron,
joaomoreno/atom-shell,deepak1556/atom-shell,kcrt/electron,Andrey-Pavlov/electron,mattdesl/electron,GoooIce/electron,RobertJGabriel/electron,jlord/electron,matiasinsaurralde/electron,fomojola/electron,vHanda/electron,jlhbaseball15/electron,abhishekgahlot/electron,cos2004/electron,adamjgray/electron,farmisen/electron,lrlna/electron,darwin/electron,fritx/electron,fritx/electron,edulan/electron,jonatasfreitasv/electron,simonfork/electron,chriskdon/electron,matiasinsaurralde/electron,yalexx/electron,matiasinsaurralde/electron,cqqccqc/electron,jaanus/electron,voidbridge/electron,jtburke/electron,setzer777/electron,eriser/electron,deed02392/electron,icattlecoder/electron,tincan24/electron,etiktin/electron,neutrous/electron,IonicaBizauKitchen/electron,pirafrank/electron,John-Lin/electron,dongjoon-hyun/electron,gbn972/electron,fffej/electron,d-salas/electron,rhencke/electron,jjz/electron,felixrieseberg/electron,rprichard/electron,Faiz7412/electron,aecca/electron,nicobot/electron,seanchas116/electron,bwiggs/electron,aecca/electron,hokein/atom-shell,the-ress/electron,synaptek/electron,BionicClick/electron,tincan24/electron,deed02392/electron,digideskio/electron,ankitaggarwal011/electron,smczk/electron,zhakui/electron,shockone/electron,fireball-x/atom-shell,fabien-d/electron,thomsonreuters/electron,trigrass2/electron,electron/electron,rajatsingla28/electron,arusakov/electron,sircharleswatson/electron,vipulroxx/electron,John-Lin/electron,wan-qy/electron,adamjgray/electron,chriskdon/electron,bobwol/electron,micalan/electron,tonyganch/electron,zhakui/electron,jhen0409/electron,leethomas/electron,deed02392/electron,dahal/electron,greyhwndz/electron,yan-foto/electron,howmuchcomputer/electron,pirafrank/electron,tonyganch/electron,mjaniszew/electron,saronwei/electron,wan-qy/electron,kenmozi/electron,anko/electron,fireball-x/atom-shell,lzpfmh/electron,adamjgray/electron,jcblw/electron,nicholasess/electron,kenmozi/electron,synaptek/electron,preco21/electron,aichingm/electron,fffej/electron,eriser/electron,renaesop/electron,neutrous/electron,medixdev/electron,egoist/electron,iftekeriba/electron,JussMee15/electron,aliib/electron,dahal/electron,vHanda/electron,posix4e/electron,cqqccqc/electron,trankmichael/electron,leethomas/electron,mjaniszew/electron,matiasinsaurralde/electron,saronwei/electron,kostia/electron,beni55/electron,bruce/electron,GoooIce/electron,Evercoder/electron,pombredanne/electron,renaesop/electron,thompsonemerson/electron,minggo/electron,aecca/electron,posix4e/electron,xiruibing/electron,ervinb/electron,rprichard/electron,timruffles/electron,gerhardberger/electron,anko/electron,benweissmann/electron,ankitaggarwal011/electron,renaesop/electron,Rokt33r/electron,tincan24/electron,jacksondc/electron,rajatsingla28/electron,Jacobichou/electron,wolfflow/electron,shennushi/electron,tonyganch/electron,vaginessa/electron,jtburke/electron,vHanda/electron,howmuchcomputer/electron,aliib/electron,adcentury/electron,mattotodd/electron,beni55/electron,maxogden/atom-shell,pombredanne/electron,noikiy/electron,d-salas/electron,Evercoder/electron,fomojola/electron,brave/electron,MaxWhere/electron,aecca/electron,neutrous/electron,jacksondc/electron,simongregory/electron,posix4e/electron,Jacobichou/electron,mirrh/electron,deepak1556/atom-shell,thingsinjars/electron,bpasero/electron,robinvandernoord/electron,soulteary/electron,tincan24/electron,pandoraui/electron,Jacobichou/electron,noikiy/electron,subblue/electron,setzer777/electron,voidbridge/electron,Rokt33r/electron,takashi/electron,biblerule/UMCTelnetHub,twolfson
/electron,leftstick/electron,tomashanacek/electron,baiwyc119/electron,xfstudio/electron,egoist/electron,Zagorakiss/electron,bobwol/electron,Ivshti/electron,jjz/electron,mubassirhayat/electron,jonatasfreitasv/electron,ervinb/electron,kenmozi/electron,digideskio/electron,simongregory/electron,electron/electron,cos2004/electron,wolfflow/electron,jaanus/electron,joaomoreno/atom-shell,farmisen/electron,thomsonreuters/electron,gamedevsam/electron,brave/muon,subblue/electron,egoist/electron,pirafrank/electron,bobwol/electron,davazp/electron,mirrh/electron,icattlecoder/electron,kcrt/electron,Andrey-Pavlov/electron,greyhwndz/electron,pirafrank/electron,bbondy/electron,jlhbaseball15/electron,DivyaKMenon/electron,aaron-goshine/electron,Jonekee/electron,yan-foto/electron,minggo/electron,gerhardberger/electron,roadev/electron,lrlna/electron,gbn972/electron,deed02392/electron,GoooIce/electron,baiwyc119/electron,rhencke/electron,trigrass2/electron,dkfiresky/electron,jaanus/electron,MaxGraey/electron,dongjoon-hyun/electron,vipulroxx/electron,dkfiresky/electron,lrlna/electron,the-ress/electron,takashi/electron,biblerule/UMCTelnetHub,arusakov/electron,mjaniszew/electron,robinvandernoord/electron,dongjoon-hyun/electron,thomsonreuters/electron,stevemao/electron,stevemao/electron,roadev/electron,jaanus/electron,aaron-goshine/electron,tylergibson/electron,gabrielPeart/electron,RobertJGabriel/electron,Ivshti/electron,aichingm/electron,lzpfmh/electron,nekuz0r/electron,kostia/electron,lzpfmh/electron,kokdemo/electron,shennushi/electron,Ivshti/electron,thingsinjars/electron,digideskio/electron,nekuz0r/electron,jsutcodes/electron,seanchas116/electron,ianscrivener/electron,ianscrivener/electron,baiwyc119/electron,tomashanacek/electron,Zagorakiss/electron,bright-sparks/electron,coderhaoxin/electron,icattlecoder/electron,systembugtj/electron,RobertJGabriel/electron,coderhaoxin/electron,twolfson/electron,rhencke/electron,the-ress/electron,farmisen/electron,gstack/infinium-shell,jjz/electron,jhen0409/electron,Evercoder/electron,eriser/electron,leftstick/electron,rhencke/electron,leftstick/electron,kazupon/electron,shockone/electron,etiktin/electron,Andrey-Pavlov/electron,biblerule/UMCTelnetHub,leolujuyi/electron,gabriel/electron,bpasero/electron,evgenyzinoviev/electron,IonicaBizauKitchen/electron,natgolov/electron,stevekinney/electron,mubassirhayat/electron,thingsinjars/electron,eriser/electron,felixrieseberg/electron,coderhaoxin/electron,gabriel/electron,greyhwndz/electron,John-Lin/electron,jlord/electron,jonatasfreitasv/electron,jcblw/electron,gerhardberger/electron,felixrieseberg/electron,robinvandernoord/electron,adamjgray/electron,cqqccqc/electron,nicholasess/electron,IonicaBizauKitchen/electron,chriskdon/electron,rsvip/electron,mhkeller/electron,farmisen/electron,tonyganch/electron,jacksondc/electron,nicholasess/electron,iftekeriba/electron,MaxWhere/electron,greyhwndz/electron,Zagorakiss/electron,renaesop/electron,tomashanacek/electron,Gerhut/electron,meowlab/electron,aaron-goshine/electron,edulan/electron,joaomoreno/atom-shell,aaron-goshine/electron,stevemao/electron,Neron-X5/electron,egoist/electron,synaptek/electron,sshiting/electron,dkfiresky/electron,maxogden/atom-shell,mjaniszew/electron,kikong/electron,eriser/electron,adcentury/electron,nekuz0r/electron,wolfflow/electron,coderhaoxin/electron,jlhbaseball15/electron,jlord/electron,RIAEvangelist/electron,lrlna/electron,takashi/electron,gamedevsam/electron,medixdev/electron,micalan/electron,meowlab/electron,davazp/electron,xfstudio/electron,kikong/electron,preco2
1/electron,trankmichael/electron,nagyistoce/electron-atom-shell,fomojola/electron,digideskio/electron,roadev/electron,neutrous/electron,tincan24/electron,jcblw/electron,yan-foto/electron,adamjgray/electron,rreimann/electron,arusakov/electron,kokdemo/electron,gamedevsam/electron,bbondy/electron,Zagorakiss/electron,coderhaoxin/electron,thompsonemerson/electron,shennushi/electron,Neron-X5/electron,Floato/electron,systembugtj/electron,BionicClick/electron,faizalpribadi/electron,webmechanicx/electron,icattlecoder/electron,etiktin/electron,shiftkey/electron,brave/muon,soulteary/electron,oiledCode/electron,preco21/electron,egoist/electron,destan/electron,ankitaggarwal011/electron,digideskio/electron,abhishekgahlot/electron,xfstudio/electron,fritx/electron,leethomas/electron,shiftkey/electron,fabien-d/electron,brave/electron,brenca/electron,stevekinney/electron,trigrass2/electron,davazp/electron,jonatasfreitasv/electron,mjaniszew/electron,howmuchcomputer/electron,bitemyapp/electron,tylergibson/electron,Andrey-Pavlov/electron,stevekinney/electron,shaundunne/electron,lrlna/electron,kcrt/electron,gabriel/electron,mirrh/electron,neutrous/electron,arusakov/electron,evgenyzinoviev/electron,stevemao/electron,preco21/electron,darwin/electron,mattotodd/electron,maxogden/atom-shell,kokdemo/electron,farmisen/electron,Zagorakiss/electron,xfstudio/electron,jcblw/electron,LadyNaggaga/electron,brenca/electron,fffej/electron,jiaz/electron,simonfork/electron,joneit/electron,mattdesl/electron,Floato/electron,Evercoder/electron,aliib/electron,fffej/electron,kikong/electron,rreimann/electron,sky7sea/electron,Evercoder/electron,shockone/electron,carsonmcdonald/electron,jaanus/electron,biblerule/UMCTelnetHub,sky7sea/electron,chrisswk/electron,sky7sea/electron,medixdev/electron,tonyganch/electron,Jonekee/electron,jjz/electron,icattlecoder/electron,thompsonemerson/electron,MaxGraey/electron,SufianHassan/electron,bobwol/electron,lzpfmh/electron,nicobot/electron,jacksondc/electron,vipulroxx/electron,joneit/electron,RIAEvangelist/electron,minggo/electron,trigrass2/electron,aichingm/electron,michaelchiche/electron,DivyaKMenon/electron,chriskdon/electron,oiledCode/electron,sky7sea/electron,voidbridge/electron,faizalpribadi/electron,seanchas116/electron,biblerule/UMCTelnetHub,rsvip/electron,pandoraui/electron,tylergibson/electron,vaginessa/electron,evgenyzinoviev/electron,meowlab/electron,vipulroxx/electron,bwiggs/electron,gabrielPeart/electron,chriskdon/electron,nicholasess/electron,gstack/infinium-shell,dkfiresky/electron,Floato/electron,astoilkov/electron,jiaz/electron,darwin/electron,aichingm/electron,voidbridge/electron,leethomas/electron,bruce/electron,Zagorakiss/electron,nagyistoce/electron-atom-shell,BionicClick/electron,mubassirhayat/electron,MaxWhere/electron,fomojola/electron,kikong/electron,jlord/electron,DivyaKMenon/electron,fritx/electron,kostia/electron,chriskdon/electron,d-salas/electron,rajatsingla28/electron,Andrey-Pavlov/electron,rprichard/electron,thomsonreuters/electron,greyhwndz/electron,rreimann/electron,subblue/electron,bbondy/electron,mhkeller/electron,mrwizard82d1/electron,nicobot/electron,timruffles/electron,gerhardberger/electron,kazupon/electron,michaelchiche/electron,pandoraui/electron,IonicaBizauKitchen/electron,darwin/electron,smczk/electron,gabrielPeart/electron,Gerhut/electron,shaundunne/electron,nagyistoce/electron-atom-shell,micalan/electron,roadev/electron,rprichard/electron,yalexx/electron,kokdemo/electron,gerhardberger/electron,xiruibing/electron,RobertJGabriel/electron,thompsonemerson/ele
ctron,smczk/electron,davazp/electron,joneit/electron,Neron-X5/electron,carsonmcdonald/electron,oiledCode/electron,ankitaggarwal011/electron,mhkeller/electron,arturts/electron,systembugtj/electron,rsvip/electron,mattdesl/electron,felixrieseberg/electron,LadyNaggaga/electron,fabien-d/electron,Rokt33r/electron,christian-bromann/electron,gabrielPeart/electron,renaesop/electron,bpasero/electron,etiktin/electron,oiledCode/electron,shockone/electron,faizalpribadi/electron,tinydew4/electron,jonatasfreitasv/electron,bpasero/electron,carsonmcdonald/electron,trankmichael/electron,destan/electron,abhishekgahlot/electron,brave/electron,aecca/electron,minggo/electron,mirrh/electron,MaxGraey/electron,wan-qy/electron,shennushi/electron,gabrielPeart/electron,tinydew4/electron,pandoraui/electron,Gerhut/electron,Rokt33r/electron,natgolov/electron,vHanda/electron,beni55/electron,tinydew4/electron,yalexx/electron,zhakui/electron,RobertJGabriel/electron,vHanda/electron,trankmichael/electron,tinydew4/electron,DivyaKMenon/electron,voidbridge/electron,Rokt33r/electron,webmechanicx/electron,wolfflow/electron,jaanus/electron,mubassirhayat/electron,MaxWhere/electron,voidbridge/electron,pirafrank/electron,anko/electron,RobertJGabriel/electron,mattotodd/electron,gbn972/electron | script/lib/config.py | script/lib/config.py | #!/usr/bin/env python
import platform
import sys
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '70fe9677634a8f13fd5c67b8c83c056f2d9c8d46'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| #!/usr/bin/env python
import platform
import sys
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '3245ef802fbf546f1a1d206990aa9d18be6bfbfe'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| mit | Python |
1f2c3057d4fa5ec875c60297184c154ecd0fff39 | Bump reported version string to 1.3.1 | basho-labs/riak-mesos-tools,basho-labs/riak-mesos-tools | riak_mesos/constants.py | riak_mesos/constants.py | #
# Copyright (C) 2016 Basho Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""DCOS Riak Constants"""
version = '1.3.1'
| #
# Copyright (C) 2016 Basho Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""DCOS Riak Constants"""
version = '1.3.0'
| apache-2.0 | Python |
f3ab4f25797e59187807095ebc689616007b8c9e | add more test case | nonlining/SimpleML | Regression/testKnearestRegression.py | Regression/testKnearestRegression.py | #-------------------------------------------------------------------------------
# Name: test Knearest Regression
# Purpose:
#
# Author: Nonlining
#
# Created: 23/03/2017
# Copyright: (c) Nonlining 2017
# Licence: <your licence>
#-------------------------------------------------------------------------------
import numpy as np
import pandas as pd
import KnearestRegression as KNN
dtype_dict = {'bathrooms':float, 'waterfront':int, 'sqft_above':int,
'sqft_living15':float, 'grade':int, 'yr_renovated':int,
'price':float, 'bedrooms':float, 'zipcode':str,
'long':float, 'sqft_lot15':float, 'sqft_living':float,
'floors':float, 'condition':int, 'lat':float, 'date':str,
'sqft_basement':int, 'yr_built':int, 'id':str,
'sqft_lot':int, 'view':int}
feature_list = ['bedrooms',
'bathrooms',
'sqft_living',
'sqft_lot',
'floors',
'waterfront',
'view',
'condition',
'grade',
'sqft_above',
'sqft_basement',
'yr_built',
'yr_renovated',
'lat',
'long',
'sqft_living15',
'sqft_lot15']
def main():
train = pd.read_csv('../Datafiles/kc_house_data_small_train.csv', dtype=dtype_dict)
valid = pd.read_csv('../Datafiles/kc_house_data_validation.csv', dtype=dtype_dict)
test = pd.read_csv('../Datafiles/kc_house_data_small_test.csv', dtype=dtype_dict)
features_train, output_train = KNN.extract(train, feature_list, 'price')
features_test, output_test = KNN.extract(test, feature_list, 'price')
features_valid, output_valid = KNN.extract(valid, feature_list, 'price')
features_train, norms = KNN.normalize_features(features_train) # normalize training set features (columns)
features_test = features_test / norms # normalize test set by training set norms
features_valid = features_valid / norms # normalize validation set by training set norms
print features_test[0]
print features_train[9]
if __name__ == '__main__':
main()
| #-------------------------------------------------------------------------------
# Name: test Knearest Regression
# Purpose:
#
# Author: Nonlining
#
# Created: 23/03/2017
# Copyright: (c) Nonlining 2017
# Licence: <your licence>
#-------------------------------------------------------------------------------
import numpy as np
import pandas as pd
import KnearestRegression as KNN
dtype_dict = {'bathrooms':float, 'waterfront':int, 'sqft_above':int,
'sqft_living15':float, 'grade':int, 'yr_renovated':int,
'price':float, 'bedrooms':float, 'zipcode':str,
'long':float, 'sqft_lot15':float, 'sqft_living':float,
'floors':float, 'condition':int, 'lat':float, 'date':str,
'sqft_basement':int, 'yr_built':int, 'id':str,
'sqft_lot':int, 'view':int}
feature_list = ['bedrooms',
'bathrooms',
'sqft_living',
'sqft_lot',
'floors',
'waterfront',
'view',
'condition',
'grade',
'sqft_above',
'sqft_basement',
'yr_built',
'yr_renovated',
'lat',
'long',
'sqft_living15',
'sqft_lot15']
def main():
train = pd.read_csv('../Datafiles/kc_house_data_small_train.csv', dtype=dtype_dict)
valid = pd.read_csv('../Datafiles/kc_house_data_validation.csv', dtype=dtype_dict)
test = pd.read_csv('../Datafiles/kc_house_data_small_test.csv', dtype=dtype_dict)
features_train, output_train = KNN.extract(train, feature_list, 'price')
features_test, output_test = KNN.extract(test, feature_list, 'price')
features_valid, output_valid = KNN.extract(valid, feature_list, 'price')
if __name__ == '__main__':
main()
| mit | Python |
92b9a27ade884bb84dd06cf3af8295d5270773cc | make last.py and lastz.py interface similar | tanghaibao/jcvi,sgordon007/jcvi_062915 | apps/last.py | apps/last.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Wrapper for LAST program.
<http://last.cbrc.jp>
"""
import sys
from optparse import OptionParser
from jcvi.utils.cbook import depends
from jcvi.apps.base import debug, sh, set_outfile
debug()
@depends
def run_lastdb(infile=None, outfile=None):
outfilebase = outfile.rsplit(".", 1)[0]
cmd = "lastdb -c {0} {1}".format(outfilebase, infile)
sh(cmd)
def main(args):
"""
%prog database.fasta query.fasta
Run LAST by calling LASTDB, LASTAL and LASTEX.
"""
supported_formats = ("tab", "maf", "blast")
p = OptionParser(main.__doc__)
p.add_option("-a", "-A", dest="cpus", default=1, type="int",
help="parallelize job to multiple cpus [default: %default]")
p.add_option("--path", help="specify LAST path")
p.add_option("--format", default="blast", choices=supported_formats,
help="Output format, one of {0} [default: %default]".\
format("|".join(supported_formats)))
p.add_option("--eval", default=False, action="store_true",
help="Use lastex to recalculate E-value [default: %default]")
set_outfile(p)
opts, args = p.parse_args(args)
if len(args) != 2:
sys.exit(not p.print_help())
subject, query = args
if opts.eval and opts.cpus > 1:
        raise Exception, "Option --eval cannot work with multiple threads"
subjectdb = subject.rsplit(".", 1)[0]
querydb = query.rsplit(".", 1)[0]
run_lastdb(infile=subject, outfile=subjectdb + ".prj")
if opts.format == "maf":
cmd = 'echo "##maf version=1"'
sh(cmd)
cmd = "lastal -u 0"
f = supported_formats.index(opts.format)
cmd += " -f {0}".format(f)
cmd += " {0} {1}".format(subjectdb, query)
if opts.eval:
run_lastdb(infile=query, outfile=querydb + ".prj")
cmd += " | lastex {0}.prj {1}.prj -".format(subjectdb, querydb)
sh(cmd)
if __name__ == '__main__':
main(sys.argv[1:])
| #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Wrapper for LAST program.
<http://last.cbrc.jp>
"""
import sys
from optparse import OptionParser
from jcvi.utils.cbook import depends
from jcvi.apps.base import ActionDispatcher, debug, sh, set_outfile
debug()
def main():
actions = (
('last', 'run LAST by calling LASTDB, LASTAL and LASTEX'),
)
p = ActionDispatcher(actions)
p.dispatch(globals())
@depends
def run_lastdb(infile=None, outfile=None):
outfilebase = outfile.rsplit(".", 1)[0]
cmd = "lastdb -c {0} {1}".format(outfilebase, infile)
sh(cmd)
def last(args):
"""
%prog last database.fasta query.fasta
Run LAST by calling LASTDB, LASTAL and LASTEX.
"""
supported_formats = ("tab", "maf")
p = OptionParser(last.__doc__)
p.add_option("--format", default="maf", choices=supported_formats,
help="Output format, one of {0} [default: %default]".\
format("|".join(supported_formats)))
opts, args = p.parse_args(args)
if len(args) != 2:
sys.exit(not p.print_help())
subject, query = args
subjectdb = subject.rsplit(".", 1)[0]
querydb = query.rsplit(".", 1)[0]
run_lastdb(infile=subject, outfile=subjectdb + ".prj")
run_lastdb(infile=query, outfile=querydb + ".prj")
if opts.format == "maf":
cmd = 'echo "##maf version=1"'
sh(cmd)
cmd = "lastal"
if opts.format == "tab":
cmd += " -f 0"
cmd += " {0} {1}".format(subjectdb, query)
cmd += " | lastex {0}.prj {1}.prj -".format(subjectdb, querydb)
sh(cmd)
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
525f604a898967b2f07e53264479b5609173d2b7 | Add a BLT parser test. | cjerdonek/open-rcv,cjerdonek/open-rcv | openrcv/test/test.py | openrcv/test/test.py |
from io import StringIO
from textwrap import dedent
import unittest
from unittest import TestCase
from openrcv.main import BLTParser, ContestInfo
def run_tests():
# TODO: discover all tests.
unittest.main(module=__name__)
class MainTestCase(TestCase):
def test(self):
self.assertEqual(1, 1)
class BLTParserTest(TestCase):
def test_parse_file(self):
blt = dedent("""\
4 2
-3
2 2 0
1 2 4 3 1 0
2 1 3 4 0
3 1 0
2 3 4 0
4 4 1 3 0
0
"Jen"
"Alice"
"Steve"
"Bill"
"My Election"
""")
parser = BLTParser()
with StringIO(blt) as f:
info = parser.parse_file(f)
self.assertEqual(type(info), ContestInfo)
|
import unittest
def run_tests():
# TODO: discover all tests.
unittest.main(module=__name__)
class MainTestCase(unittest.TestCase):
def test(self):
self.assertEqual(1, 1)
| mit | Python |
7428fc66f76a9c25ca924871e91c664a133d362e | update web site and git url for BMI package (#12036) | iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/bmi/package.py | var/spack/repos/builtin/packages/bmi/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Bmi(AutotoolsPackage):
"""a communications framework and network abstraction layer"""
homepage = 'https://xgitlab.cels.anl.gov/sds/bmi'
git = 'https://xgitlab.cels.anl.gov/sds/bmi.git'
version('develop', branch='master')
depends_on('autoconf', type='build')
# need to override 'autoreconf' so we can run BMI's 'prepare' script
def autoreconf(self, spec, prefix):
Executable('./prepare')()
def configure_args(self):
args = ["--enable-shared", "--enable-bmi-only"]
return args
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Bmi(AutotoolsPackage):
"""a communications framework and network abstraction layer"""
homepage = 'http://git.mcs.anl.gov/bmi.git/'
git = 'git://git.mcs.anl.gov/bmi'
version('develop', branch='master')
depends_on('autoconf', type='build')
# need to override 'autoreconf' so we can run BMI's 'prepare' script
def autoreconf(self, spec, prefix):
Executable('./prepare')()
def configure_args(self):
args = ["--enable-shared", "--enable-bmi-only"]
return args
| lgpl-2.1 | Python |
b3fcfc1cb25884f668466816be08a2a68cee256a | Add lz4 1.9.2 (#13155) | LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack | var/spack/repos/builtin/packages/lz4/package.py | var/spack/repos/builtin/packages/lz4/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import sys
class Lz4(MakefilePackage):
"""LZ4 is lossless compression algorithm, providing compression speed
at 400 MB/s per core, scalable with multi-cores CPU. It also features
an extremely fast decoder, with speed in multiple GB/s per core,
typically reaching RAM speed limits on multi-core systems."""
homepage = "http://lz4.github.io/lz4/"
url = "https://github.com/lz4/lz4/archive/v1.9.2.tar.gz"
version('1.9.2', sha256='658ba6191fa44c92280d4aa2c271b0f4fbc0e34d249578dd05e50e76d0e5efcc')
version('1.9.0', sha256='f8b6d5662fa534bd61227d313535721ae41a68c9d84058b7b7d86e143572dcfb')
version('1.8.3', sha256='33af5936ac06536805f9745e0b6d61da606a1f8b4cc5c04dd3cbaca3b9b4fc43')
version('1.8.1.2', sha256='12f3a9e776a923275b2dc78ae138b4967ad6280863b77ff733028ce89b8123f9')
version('1.7.5', sha256='0190cacd63022ccb86f44fa5041dc6c3804407ad61550ca21c382827319e7e7e')
version('1.3.1', sha256='9d4d00614d6b9dec3114b33d1224b6262b99ace24434c53487a0c8fd0b18cfed')
depends_on('valgrind', type='test')
def url_for_version(self, version):
url = "https://github.com/lz4/lz4/archive"
if version > Version('1.3.1'):
return "{0}/v{1}.tar.gz".format(url, version)
else:
return "{0}/r{1}.tar.gz".format(url, version.joined)
def build(self, spec, prefix):
if sys.platform != "darwin":
make('MOREFLAGS=-lrt') # fixes make error on CentOS6
else:
make()
def install(self, spec, prefix):
make('install', 'PREFIX={0}'.format(prefix))
@run_after('install')
def darwin_fix(self):
if sys.platform == 'darwin':
fix_darwin_install_name(self.prefix.lib)
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import sys
class Lz4(Package):
"""LZ4 is lossless compression algorithm, providing compression speed
at 400 MB/s per core, scalable with multi-cores CPU. It also features
an extremely fast decoder, with speed in multiple GB/s per core,
typically reaching RAM speed limits on multi-core systems."""
homepage = "http://lz4.github.io/lz4/"
url = "https://github.com/lz4/lz4/archive/v1.7.5.tar.gz"
version('1.9.0', sha256='f8b6d5662fa534bd61227d313535721ae41a68c9d84058b7b7d86e143572dcfb')
version('1.8.3', sha256='33af5936ac06536805f9745e0b6d61da606a1f8b4cc5c04dd3cbaca3b9b4fc43')
version('1.8.1.2', sha256='12f3a9e776a923275b2dc78ae138b4967ad6280863b77ff733028ce89b8123f9')
version('1.7.5', sha256='0190cacd63022ccb86f44fa5041dc6c3804407ad61550ca21c382827319e7e7e')
version('1.3.1', sha256='9d4d00614d6b9dec3114b33d1224b6262b99ace24434c53487a0c8fd0b18cfed')
depends_on('valgrind', type='test')
def url_for_version(self, version):
url = "https://github.com/lz4/lz4/archive"
if version > Version('1.3.1'):
return "{0}/v{1}.tar.gz".format(url, version)
else:
return "{0}/r{1}.tar.gz".format(url, version.joined)
def install(self, spec, prefix):
if sys.platform != "darwin":
make('MOREFLAGS=-lrt') # fixes make error on CentOS6
else:
make()
if self.run_tests:
make('test') # requires valgrind to be installed
make('install', 'PREFIX={0}'.format(prefix))
@run_after('install')
def darwin_fix(self):
if sys.platform == 'darwin':
fix_darwin_install_name(self.prefix.lib)
| lgpl-2.1 | Python |
cd70f6b89b1fff89c5c6a628c926365be9e3c95f | change scenario and for loop | openego/data_processing | preprocessing/python_scripts/renpass_gis/simple_feedin/results_to_oedb.py | preprocessing/python_scripts/renpass_gis/simple_feedin/results_to_oedb.py | """
Write results of simple_feedin.py into the openEnergy Database.
links:
https://www.shanelynn.ie/select-pandas-dataframe-rows-and-columns-using-iloc-loc-and-ix/
http://www.datacarpentry.org/python-ecology-lesson/02-index-slice-subset/
ToDo:
Insert capacity
move to odeb
update data_processing
"""
__copyright__ = "ZNES"
__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
__url__ = "https://github.com/openego/data_processing/blob/master/LICENSE"
__author__ = "wolfbunke"
import pandas as pd
import psycopg2
from sqlalchemy import create_engine
import numpy as np
from db import conn, readcfg, dbconnect
import os
# Settings
#filename = '2017-08-07_simple_feedin_All_subids_weatherids_ego_weatherYear2011.csv'
filename = 'simple_feedin_test.csv'
conn = conn
# read configuration file
path = os.path.join(os.path.expanduser("~"), '.open_eGo', 'config.ini')
config = readcfg(path=path)
# establish DB connection
section = 'oedb'
conn = dbconnect(section=section, cfg=config)
# load data from csv
time_series = pd.read_csv(filename,skiprows=1)
scenario_name = pd.read_csv(filename,header=None,nrows=1).iloc[0,0]
# prepare DataFrame
columns = ['hour','coastdat_id','sub_id', 'generation_type', 'feedin', 'scenario']
index = np.arange(1,len(time_series)-1)
db_structure = pd.DataFrame(columns=columns, index=index)
time_series.iloc[2,2]
# Insert Data to DB by DataFrame
for k in time_series.columns:
# fill empty DataFrame
m = k.index('_')
n = k.index('_c')
l = k[:m] # coastdat_id
p = k[m+1:n] # substation_id
db_structure['coastdat_id'] = l
db_structure['sub_id'] = p
db_structure['hour'] = index
db_structure['generation_type'] = time_series.iloc[0,0]
db_structure['feedin'] = time_series[k].shift(-1)
db_structure['scenario'] = scenario_name
db_structure = db_structure.reset_index(drop=True)
# write to csv
if not os.path.isfile('result_feedin.csv'):
db_structure.to_csv('result_feedin.csv',header ='columns')
else: # else it exists so append without writing the header
db_structure.to_csv('result_feedin.csv',mode = 'a',header=False)
# write df to database
#db_structure.to_sql('ego_simple_feedin_full', conn, schema='model_draft',
#if_exists='append',index=False)
print('Done!')
#Alter DB tables
| """
Write results of simple_feedin.py into the openEnergy Database.
links:
https://www.shanelynn.ie/select-pandas-dataframe-rows-and-columns-using-iloc-loc-and-ix/
http://www.datacarpentry.org/python-ecology-lesson/02-index-slice-subset/
ToDo:
Insert capacity
move to odeb
update data_processing
"""
__copyright__ = "ZNES"
__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
__url__ = "https://github.com/openego/data_processing/blob/master/LICENSE"
__author__ = "wolfbunke"
import pandas as pd
import psycopg2
from sqlalchemy import create_engine
import numpy as np
from db import conn, readcfg, dbconnect
import os
# Settings
#filename = '2017-08-07_simple_feedin_All_subids_weatherids_ego_weatherYear2011.csv'
filename = 'simple_feedin_test.csv'
conn = conn
# read configuration file
path = os.path.join(os.path.expanduser("~"), '.open_eGo', 'config.ini')
config = readcfg(path=path)
# establish DB connection
section = 'oedb'
conn = dbconnect(section=section, cfg=config)
# load data from csv
time_series = pd.read_csv(filename,skiprows=1)
time_series[[1]]
# prepare DataFrame
columns = ['hour','coastdat_id','sub_id', 'generation_type', 'feedin', 'scenario']
index = np.arange(1,len(time_series)-1)
db_structure = pd.DataFrame(columns=columns, index=index)
db_data = pd.DataFrame(columns=columns,index=index)
# Insert Data to DB by DataFrame
for k in time_series.columns:
# fill empty DataFrame
m = k.index('_')
n = k.index('_c')
l = k[:m] # coastdat_id
p = k[m+1:n] # substation_id
db_structure['coastdat_id'] = l
db_structure['sub_id'] = p
db_structure['hour'] = index
db_structure['generation_type'] = time_series.iloc[0,0]
db_structure['feedin'] = time_series[[1]].shift(-1) # Contoll ts or K !!!
db_structure['scenario'] =
db_structure = db_structure.reset_index(drop=True)
# write df to database
db_structure.to_sql('ego_simple_feedin_full', conn, schema='model_draft',
if_exists='append',index=False)
print('Done!')
| agpl-3.0 | Python |
87846ebce6214b72f2445b929db49e7545ae2be5 | replace 1wire sensor with new one, change the id | teemuhirsikangas/magicaespeculo,teemuhirsikangas/magicaespeculo,teemuhirsikangas/magicaespeculo,teemuhirsikangas/magicaespeculo | scripts/send_temp.py | scripts/send_temp.py | #!/usr/bin/python
import time
import os
import json
import requests
#for adafruit dht22/AM2302 sensor lib
import sys
import Adafruit_DHT
outside_temp=os.path.join("/","mnt","1wire","28.FF1521621603","temperature")
floor_temp=os.path.join("/","mnt","1wire","10.04A794020800","temperature")
room_temp=os.path.join("/","mnt","1wire","10.D9AB94020800","temperature")
def get1wiretemp(file_name):
file_object=open(file_name,'r')
line=file_object.read()
file_object.close()
return round(float(line.lstrip()),1)
try:
out = get1wiretemp(outside_temp)
except IOError, e:
out = "99"
floor = get1wiretemp(floor_temp)
room = get1wiretemp(room_temp)
sensor = Adafruit_DHT.AM2302
pin = 4
humidity, temperature = Adafruit_DHT.read_retry(sensor,pin)
humidity = round(float(humidity),1)
url = 'http://localhost:3333/homeautomation/temperature'
payload = {'floor': floor, 'room': room, 'out': out, 'humid': humidity}
#print (payload)
headers = {'content-type': 'application/json'}
r = requests.post(url, data = json.dumps(payload), headers = headers)
| #!/usr/bin/python
import time
import os
import json
import requests
#for adafruit dht22/AM2302 sensor lib
import sys
import Adafruit_DHT
outside_temp=os.path.join("/","mnt","1wire","10.E1B894020800","temperature")
floor_temp=os.path.join("/","mnt","1wire","10.04A794020800","temperature")
room_temp=os.path.join("/","mnt","1wire","10.D9AB94020800","temperature")
def get1wiretemp(file_name):
file_object=open(file_name,'r')
line=file_object.read()
file_object.close()
return round(float(line.lstrip()),1)
out = get1wiretemp(outside_temp)
floor = get1wiretemp(floor_temp)
room = get1wiretemp(room_temp)
sensor = Adafruit_DHT.AM2302
pin = 4
humidity, temperature = Adafruit_DHT.read_retry(sensor,pin)
humidity = round(float(humidity),1)
url = 'http://localhost:3333/homeautomation/temperature'
payload = {'floor': floor, 'room': room, 'out': out, 'humid': humidity}
#print (payload)
headers = {'content-type': 'application/json'}
r = requests.post(url, data = json.dumps(payload), headers = headers)
| mit | Python |
e4990f65c08db3dbdf2dce0056d2b0b5102b3ee5 | Use relative imports | MBARIMike/biofloat,biofloat/biofloat,biofloat/biofloat,MBARIMike/oxyfloat,MBARIMike/biofloat,MBARIMike/oxyfloat | oxyfloat/__init__.py | oxyfloat/__init__.py | __all__ = ['exceptions', 'OxyFloat']
from .OxyFloat import OxyFloat
from .exceptions import RequiredVariableNotPresent, OpenDAPServerError
| __all__ = ['exceptions', 'OxyFloat']
from oxyfloat.OxyFloat import OxyFloat
from oxyfloat.exceptions import RequiredVariableNotPresent, OpenDAPServerError
| mit | Python |
ea972c89cd7abe4fdb772ce359dd9acd83817242 | Add http_transport and websocket_transport methods | devicehive/devicehive-python | tests/test.py | tests/test.py | from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
| from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
| apache-2.0 | Python |
49d9f8374769a5febc20edaa09ed7be02e5c6b25 | Fix test | noxiouz/docker-registry-driver-elliptics,noxiouz/docker-registry-driver-elliptics | tests/test.py | tests/test.py | # -*- coding: utf-8 -*-
import logging
from docker_registry import testing
logger = logging.getLogger(__name__)
class TestQuery(testing.Query):
def __init__(self):
self.scheme = 'elliptics'
class TestDriver(testing.Driver):
def __init__(self):
self.scheme = 'elliptics'
self.path = ''
self.config = testing.Config({"elliptics_nodes": "1.1.2.3:1025:2 10.0.0.2:1025:10"})
| # -*- coding: utf-8 -*-
import logging
from docker_registry import testing
logger = logging.getLogger(__name__)
class TestQuery(testing.Query):
def __init__(self):
self.scheme = 'elliptics'
class TestDriver(testing.Driver):
def __init__(self):
self.scheme = 'elliptics'
self.path = ''
self.config = testing.Config({})
| apache-2.0 | Python |
2717f5ba22ccc0308b4805658c8ebe9077c20352 | add a str-to-int helper | alfredodeza/ceph-doctor | ceph_medic/util/__init__.py | ceph_medic/util/__init__.py |
def str_to_int(string):
"""
Parses a string number into an integer, optionally converting to a float
and rounding down.
Some LVM values may come with a comma instead of a dot to define decimals.
This function normalizes a comma into a dot
"""
error_msg = "Unable to convert to integer: '%s'" % str(string)
try:
integer = float(string.replace(',', '.'))
except AttributeError:
# this might be a integer already, so try to use it, otherwise raise
# the original exception
if isinstance(string, (int, float)):
integer = string
else:
raise
return int(integer)
| mit | Python | |
885e41eebe513cc237c9473069f61a609d2eb9b4 | Bump version number. | tkem/mopidy-mobile,tkem/mopidy-mobile,tkem/mopidy-mobile | mopidy_mobile/__init__.py | mopidy_mobile/__init__.py | from __future__ import unicode_literals
import os
from mopidy import config, ext
__version__ = '1.8.1'
class Extension(ext.Extension):
dist_name = 'Mopidy-Mobile'
ext_name = 'mobile'
version = __version__
def get_config_schema(self):
schema = super(Extension, self).get_config_schema()
schema['title'] = config.String()
schema['ws_url'] = config.String(optional=True)
return schema
def get_default_config(self):
return config.read(os.path.join(os.path.dirname(__file__), 'ext.conf'))
def setup(self, registry):
registry.add('http:app', {'name': 'mobile', 'factory': self.factory})
def factory(self, config, core):
from tornado.web import RedirectHandler
from .web import IndexHandler, StaticHandler
path = os.path.join(os.path.dirname(__file__), 'www')
return [
(r'/', RedirectHandler, {'url': 'index.html'}),
(r'/(.*\.html)', IndexHandler, {'config': config, 'path': path}),
(r'/(.*\.json)', IndexHandler, {'config': config, 'path': path}),
(r'/(.*)', StaticHandler, {'path': path})
]
| from __future__ import unicode_literals
import os
from mopidy import config, ext
__version__ = '1.8.0'
class Extension(ext.Extension):
dist_name = 'Mopidy-Mobile'
ext_name = 'mobile'
version = __version__
def get_config_schema(self):
schema = super(Extension, self).get_config_schema()
schema['title'] = config.String()
schema['ws_url'] = config.String(optional=True)
return schema
def get_default_config(self):
return config.read(os.path.join(os.path.dirname(__file__), 'ext.conf'))
def setup(self, registry):
registry.add('http:app', {'name': 'mobile', 'factory': self.factory})
def factory(self, config, core):
from tornado.web import RedirectHandler
from .web import IndexHandler, StaticHandler
path = os.path.join(os.path.dirname(__file__), 'www')
return [
(r'/', RedirectHandler, {'url': 'index.html'}),
(r'/(.*\.html)', IndexHandler, {'config': config, 'path': path}),
(r'/(.*\.json)', IndexHandler, {'config': config, 'path': path}),
(r'/(.*)', StaticHandler, {'path': path})
]
| apache-2.0 | Python |
74c20f976060f2056c91edcde810ccc3e8dc3f14 | add scene_doc to subscene iteration | gopro/gopro-lib-node.gl,gopro/gopro-lib-node.gl,gopro/gopro-lib-node.gl,gopro/gopro-lib-node.gl | tests/serialize.py | tests/serialize.py | #!/usr/bin/env python
import os
import os.path as op
from pynodegl_utils.com import query_subproc, query_inplace
def serialize(dirname, subproc=False):
module_pkg = 'pynodegl_utils.examples'
if subproc:
ret = query_subproc(query='list', pkg=module_pkg)
else:
ret = query_inplace(query='list', pkg=module_pkg)
assert 'error' not in ret
scenes = ret['scenes']
if not op.exists(dirname):
os.makedirs(dirname)
for module_name, sub_scenes in scenes:
for scene_name, scene_doc, widgets_specs in sub_scenes:
cfg = {
'pkg': module_pkg,
'scene': (module_name, scene_name),
}
if subproc:
ret = query_subproc(query='scene', **cfg)
else:
ret = query_inplace(query='scene', **cfg)
assert 'error' not in ret
fname = op.join(dirname, '%s_%s.ngl' % (module_name, scene_name))
print(fname)
open(fname, 'w').write(ret['scene'])
if __name__ == '__main__':
import sys
serialize(op.join(sys.argv[1]))
| #!/usr/bin/env python
import os
import os.path as op
from pynodegl_utils.com import query_subproc, query_inplace
def serialize(dirname, subproc=False):
module_pkg = 'pynodegl_utils.examples'
if subproc:
ret = query_subproc(query='list', pkg=module_pkg)
else:
ret = query_inplace(query='list', pkg=module_pkg)
assert 'error' not in ret
scenes = ret['scenes']
if not op.exists(dirname):
os.makedirs(dirname)
for module_name, sub_scenes in scenes:
for scene_name, widgets_specs in sub_scenes:
cfg = {
'pkg': module_pkg,
'scene': (module_name, scene_name),
}
if subproc:
ret = query_subproc(query='scene', **cfg)
else:
ret = query_inplace(query='scene', **cfg)
assert 'error' not in ret
fname = op.join(dirname, '%s_%s.ngl' % (module_name, scene_name))
print(fname)
open(fname, 'w').write(ret['scene'])
if __name__ == '__main__':
import sys
serialize(op.join(sys.argv[1]))
| apache-2.0 | Python |
b1e8ee5d8578ce18484e278d4b7bf3b654ce96de | use compatible python3 syntax | Sevalecan/paintown,Sevalecan/paintown,boyjimeking/paintown,Sevalecan/paintown,boyjimeking/paintown,boyjimeking/paintown,Sevalecan/paintown,boyjimeking/paintown,Sevalecan/paintown,boyjimeking/paintown,boyjimeking/paintown,boyjimeking/paintown,boyjimeking/paintown,Sevalecan/paintown,Sevalecan/paintown,Sevalecan/paintown | misc/cores.py | misc/cores.py | # http://codeliberates.blogspot.com/2008/05/detecting-cpuscores-in-python.html
def detectCPUs():
import os
"""
Detects the number of CPUs on a system. Cribbed from pp.
"""
# Linux, Unix and MacOS:
if hasattr(os, "sysconf"):
if os.sysconf_names.has_key("SC_NPROCESSORS_ONLN"):
# Linux & Unix:
ncpus = os.sysconf("SC_NPROCESSORS_ONLN")
if isinstance(ncpus, int) and ncpus > 0:
return ncpus
else: # OSX:
return int(os.popen2("sysctl -n hw.ncpu")[1].read())
# Windows:
if os.environ.has_key("NUMBER_OF_PROCESSORS"):
ncpus = int(os.environ["NUMBER_OF_PROCESSORS"]);
if ncpus > 0:
return ncpus
return 1 # Default
def usingIcecream():
import os
try:
return int(os.environ['USE_ICECC']) == 1
except KeyError:
return False
# if using the icecc distributed compiler stuff just set cores to 5
if usingIcecream():
print("5")
else:
print(detectCPUs())
| # http://codeliberates.blogspot.com/2008/05/detecting-cpuscores-in-python.html
def detectCPUs():
import os
"""
Detects the number of CPUs on a system. Cribbed from pp.
"""
# Linux, Unix and MacOS:
if hasattr(os, "sysconf"):
if os.sysconf_names.has_key("SC_NPROCESSORS_ONLN"):
# Linux & Unix:
ncpus = os.sysconf("SC_NPROCESSORS_ONLN")
if isinstance(ncpus, int) and ncpus > 0:
return ncpus
else: # OSX:
return int(os.popen2("sysctl -n hw.ncpu")[1].read())
# Windows:
if os.environ.has_key("NUMBER_OF_PROCESSORS"):
ncpus = int(os.environ["NUMBER_OF_PROCESSORS"]);
if ncpus > 0:
return ncpus
return 1 # Default
def usingIcecream():
import os
try:
return int(os.environ['USE_ICECC']) == 1
except KeyError:
return False
# if using the icecc distributed compiler stuff just set cores to 5
if usingIcecream():
print "5"
else:
print detectCPUs()
| bsd-3-clause | Python |
def8eb0844e95d82b25c0a79c3be42bfaacb100c | add sphinx documentation | rflamary/POT,aje/POT,rflamary/POT,aje/POT | ot/__init__.py | ot/__init__.py |
# Python Optimal Transport toolbox
import utils
import datasets
import plot
# Ot functions
from emd import emd
from bregman import sinkhorn
from utils import dist,dots
__all__ = ["emd","sinkhorn","utils",'datasets','plot','dist','dots']
|
# Python Optimal Transport toolbox
import utils
import datasets
import plot
# Ot functions
from emd import emd
from bregman import sinkhorn
from utils import dist,dots
__all__ = ["emd","sinkhorn","utils",'datasets','plot','dist','dot']
| mit | Python |
96d6dea52b4ed0f793364e8641ffbb452a23c599 | Extend tests use cases | syci/partner-contact,syci/partner-contact | partner_ref_unique/tests/test_res_partner_ref.py | partner_ref_unique/tests/test_res_partner_ref.py | # Copyright 2017 Tecnativa - Vicent Cubells
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo.tests import common
from odoo.exceptions import ValidationError
class TestResPartnerRefUnique(common.SavepointCase):
@classmethod
def setUpClass(cls):
super(TestResPartnerRefUnique, cls).setUpClass()
cls.company = cls.env.ref('base.main_company')
cls.partner_obj = cls.env['res.partner']
cls.partner1 = cls.partner_obj.create({
'name': 'Partner1',
})
cls.partner2 = cls.partner_obj.create({
'name': 'Partner2',
})
def test_check_ref(self):
# Test can create/modify partners with same ref
self.company.partner_ref_unique = 'none'
self.partner1.ref = 'same_ref'
self.partner2.ref = 'same_ref'
self.assertEqual(self.partner1.ref, self.partner2.ref)
# Here there shouldn't be any problem
self.partner_obj.create({
'name': 'other',
'ref': 'same_ref',
})
self.partner2.ref = False
# Test can't create/modify partner with same ref
self.company.partner_ref_unique = 'all'
with self.assertRaises(ValidationError):
self.partner2.ref = 'same_ref'
with self.assertRaises(ValidationError):
self.partner_obj.create({
'name': 'other',
'ref': 'same_ref',
})
# Test can't create/modify companies with same ref
self.company.partner_ref_unique = 'companies'
self.partner2.ref = 'same_ref'
self.assertEqual(self.partner1.ref, self.partner2.ref)
self.partner2.ref = False
self.partner1.is_company = True
self.partner2.is_company = True
with self.assertRaises(ValidationError):
self.partner2.ref = 'same_ref'
with self.assertRaises(ValidationError):
self.partner_obj.create({
'is_company': True,
'name': 'other',
'ref': 'same_ref',
})
# Here there shouldn't be any problem
self.partner_obj.create({
'is_company': False,
'name': 'other',
'ref': 'same_ref',
})
| # Copyright 2017 Tecnativa - Vicent Cubells
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo.tests import common
from odoo.exceptions import ValidationError
class TestResPartnerRefUnique(common.SavepointCase):
@classmethod
def setUpClass(cls):
super(TestResPartnerRefUnique, cls).setUpClass()
cls.company = cls.env.ref('base.main_company')
partner_obj = cls.env['res.partner']
cls.partner1 = partner_obj.create({
'name': 'Partner1',
})
cls.partner2 = partner_obj.create({
'name': 'Partner2',
})
def test_check_ref(self):
# Test can create/modify partners with same ref
self.company.partner_ref_unique = 'none'
self.partner1.ref = 'same_ref'
self.partner2.ref = 'same_ref'
self.assertEqual(self.partner1.ref, self.partner2.ref)
self.partner2.ref = False
# Test can't create/modify partner with same ref
self.company.partner_ref_unique = 'all'
with self.assertRaises(ValidationError):
self.partner2.ref = 'same_ref'
# Test can't create/modify companies with same ref
self.company.partner_ref_unique = 'companies'
self.partner2.ref = 'same_ref'
self.assertEqual(self.partner1.ref, self.partner2.ref)
self.partner2.ref = False
self.partner1.is_company = True
self.partner2.is_company = True
with self.assertRaises(ValidationError):
self.partner2.ref = 'same_ref'
| agpl-3.0 | Python |
550c7b5b36c7f8c35eb31c3a32887c59d3519f84 | Update pytest for new deepclean function (1 fewer input, reverse scan) | NREL/bifacial_radiance,NREL/bifacial_radiance,NREL/bifacial_radiance,NREL/bifacial_radiance | tests/test_load.py | tests/test_load.py | # -*- coding: utf-8 -*-
"""
Created 2/19/19
@author: cdeline
Using pytest to create unit tests for load.py
to run unit tests, run pytest from the command line in the bifacial_radiance directory
"""
import bifacial_radiance
import os, pytest
# try navigating to tests directory so tests run from here.
try:
os.chdir('tests')
except:
pass
MET_FILENAME = 'USA_CO_Boulder.724699_TMY2.epw'
TEST_FILE = os.path.join('results','test_01_01_10.csv')
# test load function on a dummy csv file in the /tests/ directory
def test_save_load_pickle():
# quick save and re-load the entire RadianceObj as a pickle
demo = bifacial_radiance.RadianceObj(name = 'test')
demo.save('save.pickle')
demo2 = bifacial_radiance.load.loadRadianceObj('save.pickle')
assert demo2.name == 'test'
def test_load_trackerdict():
# example of saving and loading files in /results/ for 1-axis hourly workflow.
# this requires some pre-saved files in
demo = bifacial_radiance.RadianceObj(name = 'test')
demo.readEPW(MET_FILENAME)
trackerdict = demo.set1axis(cumulativesky = False)
demo.loadtrackerdict(trackerdict,fileprefix = 'test_')
assert demo.Wm2Front[0] == pytest.approx(166.3, abs = 0.01)
def test_cleanResult():
# example of setting NaN's when the scan intersects undesired material
# test_01_01_10.csv has some ground and sky references
import numpy as np
resultsDF = bifacial_radiance.load.read1Result(TEST_FILE)
cleanedDF = bifacial_radiance.load.cleanResult(resultsDF)
assert np.isnan(cleanedDF.Wm2Front.loc[4])
def test_read1Result():
# example of loading file in /results/
# this requires one pre-saved files in
resultsDict=bifacial_radiance.load.read1Result(TEST_FILE)
assert resultsDict['rearMat'][0] == 'a10.3.a0.PVmodule.2310'
def test_deepcleanResult():
# example of loading file in /results/
# this requires one pre-saved files in
resultfile=os.path.join("results", "test_2UP_torque_tube_hex_4020.csv")
resultsDict=bifacial_radiance.load.read1Result(resultfile)
Frontresults, Backresults=bifacial_radiance.load.deepcleanResult(resultsDict, 110, 2, automatic=True)
assert len(Frontresults) == 110
assert Backresults[54] == pytest.approx(245.3929333333333, rel = 0.01)
| # -*- coding: utf-8 -*-
"""
Created 2/19/19
@author: cdeline
Using pytest to create unit tests for load.py
to run unit tests, run pytest from the command line in the bifacial_radiance directory
"""
import bifacial_radiance
import os, pytest
# try navigating to tests directory so tests run from here.
try:
os.chdir('tests')
except:
pass
MET_FILENAME = 'USA_CO_Boulder.724699_TMY2.epw'
TEST_FILE = os.path.join('results','test_01_01_10.csv')
# test load function on a dummy csv file in the /tests/ directory
def test_save_load_pickle():
# quick save and re-load the entire RadianceObj as a pickle
demo = bifacial_radiance.RadianceObj(name = 'test')
demo.save('save.pickle')
demo2 = bifacial_radiance.load.loadRadianceObj('save.pickle')
assert demo2.name == 'test'
def test_load_trackerdict():
# example of saving and loading files in /results/ for 1-axis hourly workflow.
# this requires some pre-saved files in
demo = bifacial_radiance.RadianceObj(name = 'test')
demo.readEPW(MET_FILENAME)
trackerdict = demo.set1axis(cumulativesky = False)
demo.loadtrackerdict(trackerdict,fileprefix = 'test_')
assert demo.Wm2Front[0] == pytest.approx(166.3, abs = 0.01)
def test_cleanResult():
# example of setting NaN's when the scan intersects undesired material
# test_01_01_10.csv has some ground and sky references
import numpy as np
resultsDF = bifacial_radiance.load.read1Result(TEST_FILE)
cleanedDF = bifacial_radiance.load.cleanResult(resultsDF)
assert np.isnan(cleanedDF.Wm2Front.loc[4])
def test_read1Result():
# example of loading file in /results/
# this requires one pre-saved files in
resultsDict=bifacial_radiance.load.read1Result(TEST_FILE)
assert resultsDict['rearMat'][0] == 'a10.3.a0.PVmodule.2310'
def test_deepcleanResult():
# example of loading file in /results/
# this requires one pre-saved files in
resultfile=os.path.join("results", "test_2UP_torque_tube_hex_4020.csv")
resultsDict=bifacial_radiance.load.read1Result(resultfile)
Frontresults, Backresults=bifacial_radiance.load.deepcleanResult(resultsDict, 110, 2, 270, automatic=True)
assert len(Frontresults) == 110
assert Backresults[55] == pytest.approx(245.3929333333333, rel = 0.01)
| bsd-3-clause | Python |
edea6cec717d2bbfd0dc078a447f2bd2a7e90054 | Add test for when local is empty and s3 is populated | MichaelAquilina/s3backup,MichaelAquilina/s3backup | tests/test_main.py | tests/test_main.py | # -*- coding: utf-8 -*-
import gzip
import json
import os
import shutil
import tempfile
import boto3
from faker import Faker
import moto
from s3backup import main
from s3backup.local_sync_client import LocalSyncClient, traverse
from s3backup.s3_sync_client import S3SyncClient
fake = Faker()
def setup_local_sync_client(target_folder, file_names=None):
if file_names is None:
file_names = [fake.file_name(category='text') for _ in range(20)]
for key in file_names:
object_path = os.path.join(target_folder, key)
with open(object_path, 'w') as fp:
fp.write(fake.text())
return LocalSyncClient(target_folder)
class TestPerformSync(object):
def setup_method(self):
self.target_folder = tempfile.mkdtemp()
def teardown_method(self):
shutil.rmtree(self.target_folder)
@moto.mock_s3
def test_perform_sync_empty_local(self):
client = boto3.client('s3')
client.create_bucket(Bucket='testbucket')
index = {
'foo/bar': {'timestamp': 2000},
'skeleton/gloves.txt': {'timestamp': 2000},
'hello.txt': {'timestamp': 2000},
}
for key in index.keys():
client.put_object(Bucket='testbucket', Key='mybackup/{}'.format(key), Body=fake.text())
client.put_object(
Bucket='testbucket',
Key='mybackup/.syncindex.json.gz',
Body=gzip.compress(json.dumps(index).encode('utf-8')),
)
s3_client = S3SyncClient(client, 'testbucket', 'mybackup/')
local_client = setup_local_sync_client(self.target_folder, file_names=[])
main.perform_sync(s3_client, local_client)
object_list = client.list_objects(Bucket='testbucket', Prefix='mybackup/')
actual_s3_keys = set(obj['Key'].lstrip('mybackup/') for obj in object_list['Contents'])
actual_s3_keys.remove('.syncindex.json.gz')
actual_local_keys = set(traverse(self.target_folder))
assert actual_local_keys == actual_s3_keys
@moto.mock_s3
def test_perform_sync_empty_s3(self):
local_client = setup_local_sync_client(self.target_folder, file_names=['foo', 'bar'])
client = boto3.client('s3')
client.create_bucket(Bucket='testbucket')
s3_client = S3SyncClient(client, 'testbucket', 'mybackup/')
main.perform_sync(s3_client, local_client)
object_list = client.list_objects(Bucket='testbucket', Prefix='mybackup/')
actual_keys = set(obj['Key'] for obj in object_list['Contents'])
expected_keys = {'mybackup/foo', 'mybackup/bar', 'mybackup/.syncindex.json.gz'}
assert actual_keys == expected_keys
| # -*- coding: utf-8 -*-
import os
import shutil
import tempfile
import boto3
from faker import Faker
import moto
from s3backup import main
from s3backup.local_sync_client import LocalSyncClient
from s3backup.s3_sync_client import S3SyncClient
fake = Faker()
def setup_local_sync_client(target_folder, file_names=None):
if file_names is None:
file_names = [fake.file_name(category='text') for _ in range(20)]
for key in file_names:
object_path = os.path.join(target_folder, key)
with open(object_path, 'w') as fp:
fp.write(fake.text())
return LocalSyncClient(target_folder)
class TestPerformSync(object):
def setup_method(self):
self.target_folder = tempfile.mkdtemp()
def teardown_method(self):
shutil.rmtree(self.target_folder)
@moto.mock_s3
def test_perform_sync(self):
local_client = setup_local_sync_client(self.target_folder, file_names=['foo', 'bar'])
client = boto3.client('s3')
client.create_bucket(Bucket='testbucket')
s3_client = S3SyncClient(client, 'testbucket', 'mybackup/')
main.perform_sync(s3_client, local_client)
object_list = client.list_objects(Bucket='testbucket', Prefix='mybackup/')
actual_keys = set(obj['Key'] for obj in object_list['Contents'])
expected_keys = {'mybackup/foo', 'mybackup/bar', 'mybackup/.syncindex.json.gz'}
assert actual_keys == expected_keys
| mit | Python |
99a445163450b8ae43c8241cb8dde41a95092292 | test only on python 3 | tiagoantao/pygenomics | tests/test_plot.py | tests/test_plot.py | # -*- coding: utf-8 -*-
import six
def test_basic_chart():
if six.PY2:
return
from os import path
import tempfile
import matplotlib.pyplot as plt
plt.switch_backend('Agg')
from genomics.plot import GridGenomePlot
from genomics.organism import genome_db
ggp = GridGenomePlot(genome_db['Ag'], 2)
with tempfile.TemporaryDirectory() as tmp:
fname = '%s/test.png' % tmp
ggp.fig.savefig(fname)
assert path.isfile(fname)
| # -*- coding: utf-8 -*-
def test_basic_chart():
from os import path
import tempfile
import matplotlib.pyplot as plt
plt.switch_backend('Agg')
from genomics.plot import GridGenomePlot
from genomics.organism import genome_db
ggp = GridGenomePlot(genome_db['Ag'], 2)
with tempfile.TemporaryDirectory() as tmp:
fname = '%s/test.png' % tmp
ggp.fig.savefig(fname)
assert path.isfile(fname)
| agpl-3.0 | Python |
f874ab3c48926fa04558f1a862bb3de66934bce4 | rename test as test_url_to_all_params and remove parameter path as it was always the same value | lektor/lektor,lektor/lektor,lektor/lektor,lektor/lektor | tests/test_urls.py | tests/test_urls.py | import pytest
def test_cleanup_path():
from lektor.utils import cleanup_path
assert cleanup_path('/') == '/'
assert cleanup_path('/foo') == '/foo'
assert cleanup_path('/foo/') == '/foo'
assert cleanup_path('/////foo/') == '/foo'
assert cleanup_path('/////foo////') == '/foo'
assert cleanup_path('/////foo/.///') == '/foo'
assert cleanup_path('/////foo/..///') == '/foo'
assert cleanup_path('/foo/./bar/') == '/foo/bar'
assert cleanup_path('/foo/../bar/') == '/foo/bar'
def test_basic_url_to_with_alts(pad):
wolf_en = pad.get('/projects/wolf', alt='en')
slave_en = pad.get('/projects/slave', alt='en')
wolf_de = pad.get('/projects/wolf', alt='de')
slave_de = pad.get('/projects/slave', alt='de')
assert wolf_en.url_to(slave_en) == '../../projects/slave/'
assert wolf_de.url_to(slave_de) == '../../../de/projects/sklave/'
assert slave_en.url_to(slave_de) == '../../de/projects/sklave/'
assert slave_de.url_to(slave_en) == '../../../projects/slave/'
@pytest.mark.parametrize("alt, absolute, external, base_url, expected", [
("de", None, None, None, '../../de/projects/sklave/'),
("de", True, None, None, '/de/projects/sklave/'),
("de", True, True, None, '/de/projects/sklave/'), #
("de", True, True, '/content/', '/de/projects/sklave/'),#
("de", None, True, None, '/projects/slave1/de/projects/sklave/'),
("de", None, True, '/content/', '/projects/slave1/de/projects/sklave/'),#
("de", None, None, '/content/', '../de/projects/sklave/'),
(None, True, None, None, '/projects/slave/'),
(None, True, True, None, '/projects/slave/'),#
(None, True, True, '/content/', '/projects/slave/'),
(None, True, None, '/content/', '/projects/slave/'),
(None, None, True, None, '/projects/slave1/projects/slave/'),
(None, None, True, '/content/', '/projects/slave1/projects/slave/'),
(None, None, None, '/content/', '../projects/slave/'),
])
def test_url_to_all_params(pad, alt, absolute, external, base_url, expected):
if external:
pad.db.config.base_url = "/projects/slave1/"
wolf_en = pad.get('/projects/wolf')
assert wolf_en.url_to("/projects/slave/", alt, absolute, external, base_url) == expected
| import pytest
def test_cleanup_path():
from lektor.utils import cleanup_path
assert cleanup_path('/') == '/'
assert cleanup_path('/foo') == '/foo'
assert cleanup_path('/foo/') == '/foo'
assert cleanup_path('/////foo/') == '/foo'
assert cleanup_path('/////foo////') == '/foo'
assert cleanup_path('/////foo/.///') == '/foo'
assert cleanup_path('/////foo/..///') == '/foo'
assert cleanup_path('/foo/./bar/') == '/foo/bar'
assert cleanup_path('/foo/../bar/') == '/foo/bar'
def test_basic_url_to_with_alts(pad):
wolf_en = pad.get('/projects/wolf', alt='en')
slave_en = pad.get('/projects/slave', alt='en')
wolf_de = pad.get('/projects/wolf', alt='de')
slave_de = pad.get('/projects/slave', alt='de')
assert wolf_en.url_to(slave_en) == '../../projects/slave/'
assert wolf_de.url_to(slave_de) == '../../../de/projects/sklave/'
assert slave_en.url_to(slave_de) == '../../de/projects/sklave/'
assert slave_de.url_to(slave_en) == '../../../projects/slave/'
@pytest.mark.parametrize("path, alt, absolute, external, base_url, expected", [
("/projects/slave/", "de", None, None, None, '../../de/projects/sklave/'),
("/projects/slave/", "de", True, None, None, '/de/projects/sklave/'),
("/projects/slave/", "de", True, True, None, '/de/projects/sklave/'), #
("/projects/slave/", "de", True, True, '/content/', '/de/projects/sklave/'),#
("/projects/slave/", "de", None, True, None, '/projects/slave1/de/projects/sklave/'),
("/projects/slave/", "de", None, True, '/content/', '/projects/slave1/de/projects/sklave/'),#
("/projects/slave/", "de", None, None, '/content/', '../de/projects/sklave/'),
("/projects/slave/", None, True, None, None, '/projects/slave/'),
("/projects/slave/", None, True, True, None, '/projects/slave/'),#
("/projects/slave/", None, True, True, '/content/', '/projects/slave/'),
("/projects/slave/", None, True, None, '/content/', '/projects/slave/'),
("/projects/slave/", None, None, True, None, '/projects/slave1/projects/slave/'),
("/projects/slave/", None, None, True, '/content/', '/projects/slave1/projects/slave/'),
("/projects/slave/", None, None, None, '/content/', '../projects/slave/'),
])
def test_param(pad, path, alt, absolute, external, base_url, expected):
if external:
pad.db.config.base_url = "/projects/slave1/"
wolf_en = pad.get('/projects/wolf')
assert wolf_en.url_to(path, alt, absolute, external, base_url) == expected
| bsd-3-clause | Python |
2fec479006e7349f692f3a978f359b3619d7713e | Create drop target widget | thomasleese/tvnamer,tomleese/tvnamer | tvnamer/gui/__init__.py | tvnamer/gui/__init__.py | import sys
from PySide import QtCore, QtGui
class DirectoryDropWidget(QtGui.QLabel):
def __init__(self):
super().__init__()
self.setAcceptDrops(True)
self.setStyleSheet("background-color:black;")
self.setText("Hello.")
def dragEnterEvent(self, e):
if e.mimeData().hasUrls() and len(e.mimeData().urls()) == 1 and \
all(map(QtCore.QUrl.isLocalFile, e.mimeData().urls())):
e.accept()
else:
e.ignore()
def dropEvent(self, e):
url = e.mimeData().urls()[0]
print(url)
class MainWindow(QtGui.QWidget):
def __init__(self):
super().__init__()
self.setWindowTitle("TVNamer")
self.setMinimumSize(480, 480)
layout = QtGui.QGridLayout()
layout.addWidget(DirectoryDropWidget(), 0, 0)
layout.setColumnStretch(0, 1)
layout.setRowStretch(0, 1)
self.setLayout(layout)
self.show()
def main():
app = QtGui.QApplication(sys.argv)
window = MainWindow()
sys.exit(app.exec_())
| from PySide import QtCore, QtGui
class MainWindow(QtGui.QWidget):
pass
def main():
app = QtGui.QApplication(sys.argv)
window = MainWindow()
sys.exit(app.exec_())
| mit | Python |
68e62b56c836665e0446e2335000ae9a029302f7 | Sort DefinitionContainersModel by category and name | onitake/Uranium,onitake/Uranium | UM/Settings/Models/DefinitionContainersModel.py | UM/Settings/Models/DefinitionContainersModel.py | from UM.Qt.ListModel import ListModel
from PyQt5.QtCore import pyqtProperty, Qt, pyqtSignal
from UM.Settings.ContainerRegistry import ContainerRegistry
from UM.Settings.DefinitionContainer import DefinitionContainer
## Model that holds definition containers. By setting the filter property the definitions held by this model can be
# changed.
class DefinitionContainersModel(ListModel):
NameRole = Qt.UserRole + 1 # Human readable name (string)
IdRole = Qt.UserRole + 2 # Unique ID of Definition
CategoryRole = Qt.UserRole + 3 # Category of definition / machine. (string)
ManufacturerRole = Qt.UserRole + 4 # Manufacturer of definition / machine. (string)
def __init__(self, parent = None):
super().__init__(parent)
self.addRoleName(self.NameRole, "name")
self.addRoleName(self.IdRole, "id")
self.addRoleName(self.CategoryRole, "category")
self.addRoleName(self.ManufacturerRole, "manufacturer")
self._definition_containers = []
# Listen to changes
ContainerRegistry.getInstance().containerAdded.connect(self._onContainerChanged)
ContainerRegistry.getInstance().containerRemoved.connect(self._onContainerChanged)
self._filter_dict = {}
self._update()
## Handler for container change events from registry
def _onContainerChanged(self, container):
# We only need to update when the changed container is a DefinitionContainer.
if isinstance(container, DefinitionContainer):
self._update()
## Private convenience function to reset & repopulate the model.
def _update(self):
self.clear()
self._definition_containers = ContainerRegistry.getInstance().findDefinitionContainers(**self._filter_dict)
for container in self._definition_containers:
item = { # Prepare an item for insertion.
"name": container.getName(),
"id": container.getId(),
"category": container.getMetaDataEntry("category", ""),
"manufacturer": container.getMetaDataEntry("manufacturer", "")
}
self.appendItem(item)
self.sort(lambda k: (k["category"].lower(), k["name"].lower()))
## Set the filter of this model based on a string.
# \param filter_dict Dictionary to do the filtering by.
def setFilter(self, filter_dict):
self._filter_dict = filter_dict
self._update()
filterChanged = pyqtSignal()
@pyqtProperty("QVariantMap", fset = setFilter, notify = filterChanged)
def filter(self):
return self._filter_dict
| from UM.Qt.ListModel import ListModel
from PyQt5.QtCore import pyqtProperty, Qt, pyqtSignal
from UM.Settings.ContainerRegistry import ContainerRegistry
from UM.Settings.DefinitionContainer import DefinitionContainer
## Model that holds definition containers. By setting the filter property the definitions held by this model can be
# changed.
class DefinitionContainersModel(ListModel):
NameRole = Qt.UserRole + 1 # Human readable name (string)
ManufacturerRole = Qt.UserRole + 2 # Manufacturer of definition / machine. (string)
IdRole = Qt.UserRole + 3 # Unique ID of Definition
def __init__(self, parent = None):
super().__init__(parent)
self.addRoleName(self.NameRole, "name")
self.addRoleName(self.ManufacturerRole, "manufacturer")
self.addRoleName(self.IdRole, "id")
self._definition_containers = []
# Listen to changes
ContainerRegistry.getInstance().containerAdded.connect(self._onContainerChanged)
ContainerRegistry.getInstance().containerRemoved.connect(self._onContainerChanged)
self._filter_dict = {}
self._update()
## Handler for container change events from registry
def _onContainerChanged(self, container):
# We only need to update when the changed container is a DefinitionContainer.
if isinstance(container, DefinitionContainer):
self._update()
## Private convenience function to reset & repopulate the model.
def _update(self):
self.clear()
self._definition_containers = ContainerRegistry.getInstance().findDefinitionContainers(**self._filter_dict)
for container in self._definition_containers:
# Use insertion sort to keep the list sorted by manufacturer.
# The view doesn't support sorting.
item = { # Prepare an item for insertion.
"name": container.getName(),
"manufacturer": container.getMetaDataEntry("manufacturer", ""),
"id": container.getId()
}
manufacturer = item["manufacturer"]
for i, other_item in enumerate(self.items):
if manufacturer < other_item["manufacturer"]: # Need to insert it before this one.
self.insertItem(i, item)
break
else: # No existing manufacturer was greater, so put it at the end.
self.appendItem(item)
self.sort(lambda k: k["name"])
## Set the filter of this model based on a string.
# \param filter_dict Dictionary to do the filtering by.
def setFilter(self, filter_dict):
self._filter_dict = filter_dict
self._update()
filterChanged = pyqtSignal()
@pyqtProperty("QVariantMap", fset = setFilter, notify = filterChanged)
def filter(self):
return self._filter_dict
| agpl-3.0 | Python |
216b935f30baad28d6ff3fcc85c5d8e4acf2c84e | Bump version to 0.2.0 | sjperkins/tfopgen | tfopgen/version.py | tfopgen/version.py | # Do not edit this file, pipeline versioning is governed by git tags
__version__="0.2.0" | # Do not edit this file, pipeline versioning is governed by git tags
__version__="0.1.0" | mit | Python |
42a4a8b4480bc481e0467ae7ee46c60400d63f77 | Create tmp directory if it doesn't exist | kmklr72/LMMS-Theme-Installer | theme-installer.py | theme-installer.py | #!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# Create tmp directory if it doesn't exist
if not os.path.exists(os.path.join(os.getcwd(), 'tmp')):
os.mkdir(os.path.join(os.getcwd(), 'tmp'))
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_()) | #!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_()) | lgpl-2.1 | Python |
f6b6d988d03927484f7e02da90e970eb17f93ae5 | Update models | hs-hannover/salt-observer,hs-hannover/salt-observer,hs-hannover/salt-observer | salt_observer/models.py | salt_observer/models.py | from django.db import models
import json
class Network(models.Model):
''' Representation of an Network '''
net = models.CharField(max_length=15)
subnet_mask = models.CharField(max_length=15)
class Minion(models.Model):
''' Representation of a Server in Salt '''
fqdn = models.CharField(max_length=255)
grains = models.TextField()
@property
def get_grains(self):
return json.loads(self.grains)
def __str__(self):
return self.fqdn
| from django.db import models
class Network(models.Model):
''' Representation of an Network '''
net = models.CharField(max_length=15)
subnet_mask = models.CharField(max_length=15)
class Minion(models.Model):
''' Representation of a Server in Salt '''
name = models.CharField(max_length=255)
| mit | Python |
4b40c8207c8fc4003f5381299e7485a8e7ac04f9 | Update for release | lucius-feng/tg2,lucius-feng/tg2 | tg/release.py | tg/release.py | """TurboGears project related information"""
version = "2.3.2"
description = "Next generation TurboGears"
long_description="""
TurboGears brings together a best of breed python tools
to create a flexible, full featured, and easy to use web
framework.
TurboGears 2 provides an integrated and well tested set of tools for
everything you need to build dynamic, database driven applications.
It provides a full range of tools for front end javascript
development, back database development and everything in between:
* dynamic javascript powered widgets (ToscaWidgets2)
* automatic JSON generation from your controllers
* powerful, designer friendly XHTML based templating (Genshi)
* object or route based URL dispatching
* powerful Object Relational Mappers (SQLAlchemy)
The latest development version is available in the
`TurboGears Git repositories`_.
.. _TurboGears Git repositories:
https://github.com/TurboGears
"""
url="http://www.turbogears.org/"
author= "Mark Ramm, Christopher Perkins, Jonathan LaCour, Rick Copland, Alberto Valverde, Michael Pedersen, Alessandro Molina, and the TurboGears community"
email = "mark.ramm@gmail.com, alberto@toscat.net, m.pedersen@icelus.org, amol@turbogears.org"
copyright = """Copyright 2005-2014 Kevin Dangoor,
Alberto Valverde, Mark Ramm, Christopher Perkins and contributors"""
license = "MIT"
| """TurboGears project related information"""
version = "2.3.2"
description = "Next generation TurboGears"
long_description="""
TurboGears brings together a best of breed python tools
to create a flexible, full featured, and easy to use web
framework.
TurboGears 2 provides an integrated and well tested set of tools for
everything you need to build dynamic, database driven applications.
It provides a full range of tools for front end javascript
development, back database development and everything in between:
* dynamic javascript powered widgets (ToscaWidgets)
* automatic JSON generation from your controllers
* powerful, designer friendly XHTML based templating (Genshi)
* object or route based URL dispatching
* powerful Object Relational Mappers (SQLAlchemy)
The latest development version is available in the
`TurboGears Git repositories`_.
.. _TurboGears Git repositories:
https://github.com/TurboGears
"""
url="http://www.turbogears.org/"
author= "Mark Ramm, Christopher Perkins, Jonathan LaCour, Rick Copland, Alberto Valverde, Michael Pedersen, Alessandro Molina, and the TurboGears community"
email = "mark.ramm@gmail.com, alberto@toscat.net, m.pedersen@icelus.org, amol@turbogears.org"
copyright = """Copyright 2005-2013 Kevin Dangoor,
Alberto Valverde, Mark Ramm, Christopher Perkins and contributors"""
license = "MIT"
| mit | Python |
a8af5dd775731b05ebc92e0bdd7509736c9daa74 | Make progress.py test a bit nicer. | grandquista/rethinkdb,captainpete/rethinkdb,AntouanK/rethinkdb,sbusso/rethinkdb,AntouanK/rethinkdb,wojons/rethinkdb,greyhwndz/rethinkdb,gdi2290/rethinkdb,robertjpayne/rethinkdb,sebadiaz/rethinkdb,dparnell/rethinkdb,dparnell/rethinkdb,niieani/rethinkdb,spblightadv/rethinkdb,ayumilong/rethinkdb,Wilbeibi/rethinkdb,gavioto/rethinkdb,jesseditson/rethinkdb,Wilbeibi/rethinkdb,AntouanK/rethinkdb,scripni/rethinkdb,greyhwndz/rethinkdb,bpradipt/rethinkdb,lenstr/rethinkdb,robertjpayne/rethinkdb,ayumilong/rethinkdb,losywee/rethinkdb,mbroadst/rethinkdb,Wilbeibi/rethinkdb,bpradipt/rethinkdb,AtnNn/rethinkdb,AntouanK/rethinkdb,losywee/rethinkdb,ajose01/rethinkdb,Qinusty/rethinkdb,Qinusty/rethinkdb,lenstr/rethinkdb,nviennot/rethinkdb,niieani/rethinkdb,grandquista/rethinkdb,ayumilong/rethinkdb,4talesa/rethinkdb,jfriedly/rethinkdb,sebadiaz/rethinkdb,spblightadv/rethinkdb,dparnell/rethinkdb,losywee/rethinkdb,wujf/rethinkdb,Qinusty/rethinkdb,RubenKelevra/rethinkdb,spblightadv/rethinkdb,nviennot/rethinkdb,jfriedly/rethinkdb,tempbottle/rethinkdb,robertjpayne/rethinkdb,marshall007/rethinkdb,robertjpayne/rethinkdb,wojons/rethinkdb,yaolinz/rethinkdb,losywee/rethinkdb,ajose01/rethinkdb,eliangidoni/rethinkdb,yaolinz/rethinkdb,JackieXie168/rethinkdb,rrampage/rethinkdb,robertjpayne/rethinkdb,tempbottle/rethinkdb,victorbriz/rethinkdb,jmptrader/rethinkdb,grandquista/rethinkdb,greyhwndz/rethinkdb,mquandalle/rethinkdb,greyhwndz/rethinkdb,wujf/rethinkdb,AntouanK/rethinkdb,pap/rethinkdb,wujf/rethinkdb,jesseditson/rethinkdb,eliangidoni/rethinkdb,yakovenkodenis/rethinkdb,eliangidoni/rethinkdb,jesseditson/rethinkdb,jfriedly/rethinkdb,rrampage/rethinkdb,urandu/rethinkdb,ajose01/rethinkdb,bpradipt/rethinkdb,bpradipt/rethinkdb,victorbriz/rethinkdb,mcanthony/rethinkdb,jmptrader/rethinkdb,dparnell/rethinkdb,Wilbeibi/rethinkdb,lenstr/rethinkdb,4talesa/rethinkdb,yaolinz/rethinkdb,urandu/rethinkdb,catroot/rethinkdb,captainpete/rethinkdb,mquandalle/rethinkdb,captainpete/rethinkdb,urandu/rethinkdb,nviennot/rethinkdb,JackieXie168/rethinkdb,alash3al/rethinkdb,spblightadv/rethinkdb,robertjpayne/rethinkdb,bchavez/rethinkdb,4talesa/rethinkdb,nviennot/rethinkdb,KSanthanam/rethinkdb,elkingtonmcb/rethinkdb,victorbriz/rethinkdb,gdi2290/rethinkdb,victorbriz/rethinkdb,catroot/rethinkdb,elkingtonmcb/rethinkdb,alash3al/rethinkdb,bchavez/rethinkdb,tempbottle/rethinkdb,pap/rethinkdb,losywee/rethinkdb,matthaywardwebdesign/rethinkdb,niieani/rethinkdb,wojons/rethinkdb,ajose01/rethinkdb,wojons/rethinkdb,RubenKelevra/rethinkdb,rrampage/rethinkdb,yaolinz/rethinkdb,captainpete/rethinkdb,JackieXie168/rethinkdb,Wilbeibi/rethinkdb,tempbottle/rethinkdb,JackieXie168/rethinkdb,wojons/rethinkdb,JackieXie168/rethinkdb,robertjpayne/rethinkdb,sebadiaz/rethinkdb,wkennington/rethinkdb,tempbottle/rethinkdb,urandu/rethinkdb,marshall007/rethinkdb,RubenKelevra/rethinkdb,Wilbeibi/rethinkdb,spblightadv/rethinkdb,gdi2290/rethinkdb,rrampage/rethinkdb,AtnNn/rethinkdb,Qinusty/rethinkdb,catroot/rethinkdb,wujf/rethinkdb,ayumilong/rethinkdb,mcanthony/rethinkdb,wkennington/rethinkdb,gdi2290/rethinkdb,elkingtonmcb/rethinkdb,yaolinz/rethinkdb,AtnNn/rethinkdb,jmptrader/rethinkdb,sbusso/rethinkdb,ajose01/rethinkdb,4talesa/rethinkdb,scripni/rethinkdb,alash3al/rethinkdb,alash3al/rethinkdb,RubenKelevra/rethinkdb,eliangidoni/rethinkdb,sontek/rethinkdb,AntouanK/rethinkdb,robertjpayne/rethinkdb,KSanthanam/rethinkdb,scripni/rethinkdb,alash3al/rethinkdb,jmptrader/rethinkdb,alash3al/rethinkdb,wkennington/rethinkdb,dparne
ll/rethinkdb,pap/rethinkdb,wkennington/rethinkdb,ayumilong/rethinkdb,AntouanK/rethinkdb,jfriedly/rethinkdb,catroot/rethinkdb,gavioto/rethinkdb,marshall007/rethinkdb,AntouanK/rethinkdb,marshall007/rethinkdb,jesseditson/rethinkdb,victorbriz/rethinkdb,jmptrader/rethinkdb,RubenKelevra/rethinkdb,matthaywardwebdesign/rethinkdb,wkennington/rethinkdb,victorbriz/rethinkdb,mbroadst/rethinkdb,elkingtonmcb/rethinkdb,urandu/rethinkdb,sontek/rethinkdb,ayumilong/rethinkdb,jfriedly/rethinkdb,mquandalle/rethinkdb,alash3al/rethinkdb,scripni/rethinkdb,grandquista/rethinkdb,lenstr/rethinkdb,eliangidoni/rethinkdb,captainpete/rethinkdb,bchavez/rethinkdb,AtnNn/rethinkdb,eliangidoni/rethinkdb,sbusso/rethinkdb,mquandalle/rethinkdb,niieani/rethinkdb,mquandalle/rethinkdb,catroot/rethinkdb,mcanthony/rethinkdb,bpradipt/rethinkdb,jfriedly/rethinkdb,AtnNn/rethinkdb,sontek/rethinkdb,KSanthanam/rethinkdb,jmptrader/rethinkdb,matthaywardwebdesign/rethinkdb,matthaywardwebdesign/rethinkdb,ajose01/rethinkdb,wojons/rethinkdb,losywee/rethinkdb,losywee/rethinkdb,rrampage/rethinkdb,mbroadst/rethinkdb,dparnell/rethinkdb,gavioto/rethinkdb,KSanthanam/rethinkdb,pap/rethinkdb,spblightadv/rethinkdb,bchavez/rethinkdb,gdi2290/rethinkdb,losywee/rethinkdb,KSanthanam/rethinkdb,bpradipt/rethinkdb,bchavez/rethinkdb,marshall007/rethinkdb,gavioto/rethinkdb,pap/rethinkdb,marshall007/rethinkdb,captainpete/rethinkdb,jfriedly/rethinkdb,elkingtonmcb/rethinkdb,mcanthony/rethinkdb,mbroadst/rethinkdb,spblightadv/rethinkdb,jesseditson/rethinkdb,rrampage/rethinkdb,jesseditson/rethinkdb,eliangidoni/rethinkdb,marshall007/rethinkdb,victorbriz/rethinkdb,sbusso/rethinkdb,eliangidoni/rethinkdb,wkennington/rethinkdb,matthaywardwebdesign/rethinkdb,bpradipt/rethinkdb,captainpete/rethinkdb,JackieXie168/rethinkdb,niieani/rethinkdb,bchavez/rethinkdb,mbroadst/rethinkdb,niieani/rethinkdb,jesseditson/rethinkdb,ayumilong/rethinkdb,ajose01/rethinkdb,nviennot/rethinkdb,gavioto/rethinkdb,KSanthanam/rethinkdb,mquandalle/rethinkdb,nviennot/rethinkdb,ajose01/rethinkdb,sontek/rethinkdb,mquandalle/rethinkdb,mbroadst/rethinkdb,gavioto/rethinkdb,wujf/rethinkdb,KSanthanam/rethinkdb,yaolinz/rethinkdb,niieani/rethinkdb,gavioto/rethinkdb,yaolinz/rethinkdb,lenstr/rethinkdb,sontek/rethinkdb,greyhwndz/rethinkdb,lenstr/rethinkdb,victorbriz/rethinkdb,Wilbeibi/rethinkdb,sbusso/rethinkdb,Qinusty/rethinkdb,catroot/rethinkdb,wkennington/rethinkdb,AtnNn/rethinkdb,bpradipt/rethinkdb,urandu/rethinkdb,scripni/rethinkdb,catroot/rethinkdb,eliangidoni/rethinkdb,marshall007/rethinkdb,scripni/rethinkdb,nviennot/rethinkdb,ayumilong/rethinkdb,catroot/rethinkdb,JackieXie168/rethinkdb,Qinusty/rethinkdb,sebadiaz/rethinkdb,niieani/rethinkdb,tempbottle/rethinkdb,dparnell/rethinkdb,dparnell/rethinkdb,RubenKelevra/rethinkdb,lenstr/rethinkdb,KSanthanam/rethinkdb,wojons/rethinkdb,jmptrader/rethinkdb,JackieXie168/rethinkdb,captainpete/rethinkdb,yaolinz/rethinkdb,matthaywardwebdesign/rethinkdb,wkennington/rethinkdb,mquandalle/rethinkdb,tempbottle/rethinkdb,sbusso/rethinkdb,matthaywardwebdesign/rethinkdb,pap/rethinkdb,spblightadv/rethinkdb,4talesa/rethinkdb,alash3al/rethinkdb,greyhwndz/rethinkdb,lenstr/rethinkdb,pap/rethinkdb,jesseditson/rethinkdb,nviennot/rethinkdb,gavioto/rethinkdb,Wilbeibi/rethinkdb,elkingtonmcb/rethinkdb,bchavez/rethinkdb,sebadiaz/rethinkdb,robertjpayne/rethinkdb,yakovenkodenis/rethinkdb,4talesa/rethinkdb,RubenKelevra/rethinkdb,sebadiaz/rethinkdb,wojons/rethinkdb,wujf/rethinkdb,mbroadst/rethinkdb,sontek/rethinkdb,bchavez/rethinkdb,mcanthony/rethinkdb,grandquista/rethinkdb,mcanthony/rethinkdb,matt
haywardwebdesign/rethinkdb,mcanthony/rethinkdb,yakovenkodenis/rethinkdb,sontek/rethinkdb,gdi2290/rethinkdb,AtnNn/rethinkdb,mbroadst/rethinkdb,yakovenkodenis/rethinkdb,sebadiaz/rethinkdb,greyhwndz/rethinkdb,jmptrader/rethinkdb,mbroadst/rethinkdb,sbusso/rethinkdb,Qinusty/rethinkdb,AtnNn/rethinkdb,grandquista/rethinkdb,wujf/rethinkdb,elkingtonmcb/rethinkdb,urandu/rethinkdb,yakovenkodenis/rethinkdb,Qinusty/rethinkdb,scripni/rethinkdb,RubenKelevra/rethinkdb,jfriedly/rethinkdb,rrampage/rethinkdb,greyhwndz/rethinkdb,scripni/rethinkdb,bchavez/rethinkdb,4talesa/rethinkdb,4talesa/rethinkdb,sbusso/rethinkdb,rrampage/rethinkdb,sebadiaz/rethinkdb,sontek/rethinkdb,tempbottle/rethinkdb,grandquista/rethinkdb,JackieXie168/rethinkdb,pap/rethinkdb,yakovenkodenis/rethinkdb,bpradipt/rethinkdb,urandu/rethinkdb,elkingtonmcb/rethinkdb,yakovenkodenis/rethinkdb,yakovenkodenis/rethinkdb,mcanthony/rethinkdb,grandquista/rethinkdb,gdi2290/rethinkdb,dparnell/rethinkdb,grandquista/rethinkdb,Qinusty/rethinkdb | test/interface/progress.py | test/interface/progress.py | #!/usr/bin/env python
import sys, os, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')))
import driver, http_admin, scenario_common
from memcached_workload_common import MemcacheConnection
from vcoptparse import *
op = OptParser()
scenario_common.prepare_option_parser_mode_flags(op)
opts = op.parse(sys.argv)
with driver.Metacluster() as metacluster:
    cluster = driver.Cluster(metacluster)
    executable_path, command_prefix, serve_options = scenario_common.parse_mode_flags(opts)
    print "Starting cluster..."
    processes = [
        driver.Process(cluster, driver.Files(metacluster, executable_path = executable_path, command_prefix = command_prefix),
            log_path = "serve-output-%d" % i,
            executable_path = executable_path, command_prefix = command_prefix, extra_options = serve_options)
        for i in xrange(2)]
    for process in processes:
        process.wait_until_started_up()
    print "Creating namespace..."
    http = http_admin.ClusterAccess([("localhost", p.http_port) for p in processes])
    dc = http.add_datacenter()
    for machine_id in http.machines:
        http.move_server_to_datacenter(machine_id, dc)
    ns = http.add_namespace(protocol = "memcached", primary = dc)
    time.sleep(10)
    host, port = driver.get_namespace_host(ns.port, processes)
    print "Performing test queries..."
    with MemcacheConnection(host, port) as mc:
        for i in range(10000):
            if (i + 1) % 100 == 0:
                print i + 1,
                sys.stdout.flush()
            mc.set(str(i) * 10, str(i)*20)
        print
    print "Done with test queries."
    print "Adding a replica."
    http.set_namespace_affinities(ns, {dc : 1})
    time.sleep(1)
    print "Checking backfill progress... ",
    progress = http.get_progress()
    for machine_id, temp1 in progress.iteritems():
        for namespace_id, temp2 in temp1.iteritems():
            for activity_id, temp3 in temp2.iteritems():
                for region, progress_val in temp3.iteritems():
                    assert(progress_val[0] != "Timeout")
                    assert(progress_val[0][0] <= progress_val[0][1])
    print "OK."
    print "Shutting down cluster."
    cluster.check_and_stop()
| #!/usr/bin/env python
import sys, os, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')))
import driver, http_admin, scenario_common
from memcached_workload_common import MemcacheConnection
from vcoptparse import *
op = OptParser()
scenario_common.prepare_option_parser_mode_flags(op)
opts = op.parse(sys.argv)
with driver.Metacluster() as metacluster:
    cluster = driver.Cluster(metacluster)
    executable_path, command_prefix, serve_options = scenario_common.parse_mode_flags(opts)
    print "Starting cluster..."
    processes = [
        driver.Process(cluster, driver.Files(metacluster, executable_path = executable_path, command_prefix = command_prefix),
            executable_path = executable_path, command_prefix = command_prefix, extra_options = serve_options)
        for i in xrange(2)]
    for process in processes:
        process.wait_until_started_up()
    print "Creating namespace..."
    http = http_admin.ClusterAccess([("localhost", p.http_port) for p in processes])
    dc = http.add_datacenter()
    for machine_id in http.machines:
        http.move_server_to_datacenter(machine_id, dc)
    ns = http.add_namespace(protocol = "memcached", primary = dc)
    time.sleep(10)
    host, port = driver.get_namespace_host(ns.port, processes)
    with MemcacheConnection(host, port) as mc:
        for i in range(10000):
            if (i + 1) % 100 == 0:
                print i + 1,
                sys.stdout.flush()
            mc.set(str(i) * 10, str(i)*20)
        print
    http.set_namespace_affinities(ns, {dc : 1})
    time.sleep(1)
    progress = http.get_progress()
    for machine_id, temp1 in progress.iteritems():
        for namespace_id, temp2 in temp1.iteritems():
            for activity_id, temp3 in temp2.iteritems():
                for region, progress_val in temp3.iteritems():
                    assert(progress_val[0] != "Timeout")
                    assert(progress_val[0][0] <= progress_val[0][1])
    cluster.check_and_stop()
| apache-2.0 | Python |
ab8756c6a1664a44fd7a41ebe0af5bc0445738c8 | remove unused import | darren-wang/ks3,darren-wang/ks3 | keystone/common/sql/migrate_repo/versions/017_add_enabled_to_policy.py | keystone/common/sql/migrate_repo/versions/017_add_enabled_to_policy.py | import sqlalchemy as sql
_POLICY_TABLE_NAME = 'policy'
def upgrade(migrate_engine):
    meta = sql.MetaData()
    meta.bind = migrate_engine
    policy_table = sql.Table(_POLICY_TABLE_NAME, meta, autoload=True)
    # adding new columns
    policy_enabled = sql.Column('enabled', sql.Boolean, nullable=False, default=False)
    policy_table.create_column(policy_enabled)
| import sqlalchemy as sql
import migrate
_POLICY_TABLE_NAME = 'policy'
def upgrade(migrate_engine):
    meta = sql.MetaData()
    meta.bind = migrate_engine
    policy_table = sql.Table(_POLICY_TABLE_NAME, meta, autoload=True)
    # adding new columns
    policy_enabled = sql.Column('enabled', sql.Boolean, nullable=False, default=False)
    policy_table.create_column(policy_enabled)
| apache-2.0 | Python |
2e2104a33aacb6d23c4eab6ae47231992fcf0497 | raise exception | uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw | myuw/views/api/finance.py | myuw/views/api/finance.py | import logging
import traceback
from myuw.dao.finance import get_account_balances_for_current_user
from myuw.dao.notice import get_tuition_due_date
from myuw.dao.pws import is_student
from myuw.logger.timer import Timer
from myuw.logger.logresp import (
log_data_not_found_response, log_msg, log_success_response)
from myuw.views.api import ProtectedAPI
from myuw.views.error import data_not_found, handle_exception
logger = logging.getLogger(__name__)
class Finance(ProtectedAPI):
"""
Performs actions on resource at /api/v1/finance/.
"""
def get(self, request, *args, **kwargs):
"""
GET returns 200 with the student account balances
of the current user
"""
timer = Timer()
try:
if not is_student(request):
log_msg(logger, timer, "Not a student, abort!")
return data_not_found()
balances = get_account_balances_for_current_user(request)
date = get_tuition_due_date(request)
response = balances.json_data()
response['tuition_due'] = str(date)
log_success_response(logger, timer)
return self.json_response(response)
except KeyError:
return handle_exception(logger, timer, traceback)
except Exception:
raise
| import logging
import traceback
from myuw.dao.finance import get_account_balances_for_current_user
from myuw.dao.notice import get_tuition_due_date
from myuw.dao.pws import is_student
from myuw.logger.timer import Timer
from myuw.logger.logresp import (
log_data_not_found_response, log_msg, log_success_response)
from myuw.views.api import ProtectedAPI
from myuw.views.error import data_not_found, handle_exception
logger = logging.getLogger(__name__)
class Finance(ProtectedAPI):
"""
Performs actions on resource at /api/v1/finance/.
"""
def get(self, request, *args, **kwargs):
"""
GET returns 200 with the student account balances
of the current user
"""
timer = Timer()
try:
if not is_student(request):
log_msg(logger, timer, "Not a student, abort!")
return data_not_found()
balances = get_account_balances_for_current_user(request)
date = get_tuition_due_date(request)
response = balances.json_data()
response['tuition_due'] = str(date)
log_success_response(logger, timer)
return self.json_response(response)
except KeyError:
return handle_exception(logger, timer, traceback)
| apache-2.0 | Python |
5174fe64a076de6ef5afc73278d457157eca8d69 | Update transpositionTest: fixed imports and PEP8 formatting | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/CrackingCodesWithPython/Chapter09/transpositionTest.py | books/CrackingCodesWithPython/Chapter09/transpositionTest.py | # Transposition Cipher Test
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import random, sys
from books.CrackingCodesWithPython.Chapter07.transpositionEncrypt import encryptMessage
from books.CrackingCodesWithPython.Chapter08.transpositionDecrypt import decryptMessage
def main():
    random.seed(42) # Set the random "seed" to a static value.
    for i in range(20): # Run 20 tests.
        # Generate random messages to test.
        # The message will have a random length:
        message = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' * random.randint(4, 40)
        # Convert the message string to a list to shuffle it:
        message = list(message)
        random.shuffle(message)
        message = ''.join(message) # Convert the list back to a string.
        print('Test #%s: %s..."' % (i + 1, message[:50]))
        # Check all possible keys for each message:
        for key in range(1, int(len(message)/2)):
            encrypted = encryptMessage(key, message)
            decrypted = decryptMessage(key, encrypted)
            # If the decryption doesn't match the original message, display
            # an error message and quit:
            if message != decrypted:
                print('Mismatch with key %s and message %s.' % (key, message))
                print('Decrypted as: ' + decrypted)
                sys.exit()
    print('Transposition cipher test passed.')
# If transpositionTest.py is run (instead of imported as a module) call
# the main() function:
if __name__ == '__main__':
    main()
| # Transposition Cipher Test
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import random, sys, transpositionEncrypt, transpositionDecrypt
def main():
    random.seed(42) # Set the random "seed" to a static value.
    for i in range(20): # Run 20 tests.
        # Generate random messages to test.
        # The message will have a random length:
        message = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' * random.randint(4, 40)
        # Convert the message string to a list to shuffle it:
        message = list(message)
        random.shuffle(message)
        message = ''.join(message) # Convert the list back to a string.
        print('Test #%s: %s..."' % (i + 1, message[:50]))
        # Check all possible keys for each message:
        for key in range(1, int(len(message)/2)):
            encrypted = transpositionEncrypt.encryptMessage(key, message)
            decrypted = transpositionDecrypt.decryptMessage(key, encrypted)
            # If the decryption doesn't match the original message, display
            # an error message and quit:
            if message != decrypted:
                print('Mismatch with key %s and message %s.' % (key, message))
                print('Decrypted as: ' + decrypted)
                sys.exit()
    print('Transposition cipher test passed.')
# If transpositionTest.py is run (instead of imported as a module) call
# the main() function:
if __name__ == '__main__':
    main()
| mit | Python |
e9ec3a7ea22540c10b6db3e78fe2c5299e127119 | update importer management command to prep for multiple importers | texastribune/the-dp,texastribune/the-dp,texastribune/the-dp,texastribune/the-dp | tx_highered/management/commands/tx_highered_import.py | tx_highered/management/commands/tx_highered_import.py | import os
from django.core.management.base import BaseCommand
from tx_highered.scripts.import_customreport import generic
class Command(BaseCommand):
    args = '(ipeds|thecb) <file file ...>'
    help = "Import Data"
    def handle(self, importer_type, *args, **options):
        # TODO handle THECB data
        if importer_type == 'ipeds':
            for path in args:
                if os.path.isfile(path):
                    generic(path)
| import os
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Import Data"
def handle(self, *args, **options):
# TODO handle THECB data
from tx_highered.scripts.import_customreport import generic
for path in args:
if os.path.isfile(path):
generic(path)
| apache-2.0 | Python |
d97b3b49cb916182550ada3e8e3384a29236a41a | Fix mongo queue test | giserh/grab,lorien/grab,pombredanne/grab-1,alihalabyah/grab,raybuhr/grab,maurobaraldi/grab,codevlabs/grab,kevinlondon/grab,giserh/grab,kevinlondon/grab,subeax/grab,DDShadoww/grab,codevlabs/grab,shaunstanislaus/grab,raybuhr/grab,subeax/grab,lorien/grab,liorvh/grab,SpaceAppsXploration/grab,istinspring/grab,huiyi1990/grab,liorvh/grab,SpaceAppsXploration/grab,shaunstanislaus/grab,huiyi1990/grab,DDShadoww/grab,subeax/grab,alihalabyah/grab,maurobaraldi/grab,pombredanne/grab-1,istinspring/grab | test/spider_mongo_queue.py | test/spider_mongo_queue.py | # coding: utf-8
from random import shuffle
from unittest import TestCase, main
from grab.spider import Spider, Task
from util import FakeServerThread, RESPONSE, SLEEP, BASE_URL
class TestSpider(TestCase):
    TASKS_COUNT = 10
    def setUp(self):
        FakeServerThread().start()
    def test_spider(self):
        class SimpleSpider(Spider):
            def prepare(self):
                self.tasks = [index for index in xrange(TestSpider.TASKS_COUNT)]
                shuffle(self.tasks)
                self.tasks_order = []
            def setup_queue(self, backend='mongo', database='queue_test', **kwargs):
                super(SimpleSpider, self).setup_queue(backend, database, **kwargs)
            def task_generator(self):
                for index in self.tasks:
                    yield Task(
                        name='foo',
                        url=BASE_URL,
                        priority=index,
                        index=index,
                        first=True
                    )
            def task_foo(self, grab, task):
                self.tasks_order.append(task.index)
                if task.get('first', False):
                    yield Task(
                        name=task.name,
                        priority=TestSpider.TASKS_COUNT + task.priority,
                        grab=grab,
                        index=task.index
                    )
        RESPONSE['get'] = 'Hello spider!'
        SLEEP['get'] = 0
        sp = SimpleSpider(thread_number=TestSpider.TASKS_COUNT * 2)
        sp.run()
        self.assertEqual(range(TestSpider.TASKS_COUNT) * 2, sp.tasks_order)
if __name__ == '__main__':
    main()
| # coding: utf-8
from random import shuffle
from unittest import TestCase, main
from grab.spider import Spider, Task
from util import FakeServerThread, RESPONSE, SLEEP, BASE_URL
class TestSpider(TestCase):
    TASKS_COUNT = 10
    def setUp(self):
        FakeServerThread().start()
    def test_spider(self):
        class SimpleSpider(Spider):
            def prepare(self):
                self.tasks = [index for index in xrange(TestSpider.TASKS_COUNT)]
                shuffle(self.tasks)
                self.tasks_order = []
            def setup_queue(self, backend='mongo', database='queue_test', **kwargs):
                super(SimpleSpider, self).setup_queue(backend, database, **kwargs)
            def task_generator(self):
                for index in self.tasks:
                    yield Task(
                        name='foo',
                        url=BASE_URL,
                        priority=index,
                        index=index,
                        first=True
                    )
            def task_foo(self, grab, task):
                self.tasks_order.append(task.index)
                if task.first:
                    yield Task(
                        name=task.name,
                        priority=TestSpider.TASKS_COUNT + task.priority,
                        grab=grab,
                        index=task.index
                    )
        RESPONSE['get'] = 'Hello spider!'
        SLEEP['get'] = 0
        sp = SimpleSpider(thread_number=TestSpider.TASKS_COUNT * 2)
        sp.run()
        self.assertEqual(range(TestSpider.TASKS_COUNT) * 2, sp.tasks_order)
if __name__ == '__main__':
    main()
| mit | Python |
dd40b392b73ddc1bcf88d932418b4f891bcc6a89 | Allow star imports from twine | pypa/twine | twine/__init__.py | twine/__init__.py | # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
| # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
| apache-2.0 | Python |
4988d4138994a392c2c558794d62af3b7c5ec2d8 | Add GPIO aliases to ZCU104 base overlay to match the Pynq-Z1/2 | Xilinx/PYNQ,yunqu/PYNQ,schelleg/PYNQ,Xilinx/PYNQ,yunqu/PYNQ,cathalmccabe/PYNQ,Xilinx/PYNQ,cathalmccabe/PYNQ,cathalmccabe/PYNQ,Xilinx/PYNQ,schelleg/PYNQ,cathalmccabe/PYNQ,schelleg/PYNQ,cathalmccabe/PYNQ,yunqu/PYNQ,cathalmccabe/PYNQ,Xilinx/PYNQ,schelleg/PYNQ,yunqu/PYNQ,yunqu/PYNQ,schelleg/PYNQ | boards/ZCU104/base/base.py | boards/ZCU104/base/base.py | import pynq
import pynq.lib
import time
from pynq.lib.video.clocks import *
from pynq import MMIO
class BaseOverlay(pynq.Overlay):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.is_loaded():
            self.iop_pmod0.mbtype = "Pmod"
            self.iop_pmod1.mbtype = "Pmod"
            self.PMOD0 = self.iop_pmod0.mb_info
            self.PMOD1 = self.iop_pmod1.mb_info
            self.PMODA = self.PMOD0
            self.PMODB = self.PMOD1
            self.leds = self.gpio_leds.channel1
            self.leds.setdirection('out')
            self.leds.setlength(4)
            self.buttons = self.gpio_btns.channel1
            self.buttons.setdirection('in')
            self.buttons.setlength(4)
            self.switches = self.gpio_sws.channel1
            self.switches.setdirection('in')
            self.switches.setlength(4)
    def download(self):
        super().download()
        self._init_clocks()
    def _init_clocks(self):
        # Wait for AXI reset to de-assert
        time.sleep(0.2)
        # Deassert HDMI clock reset
        self.reset_control.channel1[0].write(1)
        # Wait 200 ms for the clock to come out of reset
        time.sleep(0.2)
        self.video.phy.vid_phy_controller.initialize()
        self.video.hdmi_in.frontend.set_phy(
            self.video.phy.vid_phy_controller)
        self.video.hdmi_out.frontend.set_phy(
            self.video.phy.vid_phy_controller)
        dp159 = DP159(self.fmch_axi_iic, 0x5C)
        idt = IDT_8T49N24(self.fmch_axi_iic, 0x6C)
        self.video.hdmi_out.frontend.clocks = [dp159, idt]
| import pynq
import pynq.lib
import time
from pynq.lib.video.clocks import *
from pynq import MMIO
class BaseOverlay(pynq.Overlay):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.is_loaded():
            self.iop_pmod0.mbtype = "Pmod"
            self.iop_pmod1.mbtype = "Pmod"
            self.PMOD0 = self.iop_pmod0.mb_info
            self.PMOD1 = self.iop_pmod1.mb_info
            self.PMODA = self.PMOD0
            self.PMODB = self.PMOD1
    def download(self):
        super().download()
        self._init_clocks()
    def _init_clocks(self):
        # Wait for AXI reset to de-assert
        time.sleep(0.2)
        # Deassert HDMI clock reset
        self.reset_control.channel1[0].write(1)
        # Wait 200 ms for the clock to come out of reset
        time.sleep(0.2)
        self.video.phy.vid_phy_controller.initialize()
        self.video.hdmi_in.frontend.set_phy(
            self.video.phy.vid_phy_controller)
        self.video.hdmi_out.frontend.set_phy(
            self.video.phy.vid_phy_controller)
        dp159 = DP159(self.fmch_axi_iic, 0x5C)
        idt = IDT_8T49N24(self.fmch_axi_iic, 0x6C)
        self.video.hdmi_out.frontend.clocks = [dp159, idt]
| bsd-3-clause | Python |
cddd4487c388cf5f010a39e36df1286568882135 | FIX CS-782 | CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland | wordpress_connector/models/request.py | wordpress_connector/models/request.py | ##############################################################################
#
# Copyright (C) 2019 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Christopher Meier <dev@c-meier.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import yaml
import logging
from odoo import api, fields, models
from odoo.tools import html2plaintext
_logger = logging.getLogger(__name__)
class CrmWordpressRequest(models.Model):
    _inherit = "crm.claim"
    front_matter = fields.Text()
    # -------------------------------------------------------
    # Mail gateway
    # -------------------------------------------------------
    # Parse the new message to check if it comes from Wordpress and if so parse
    # it and fill the new object.
    @api.multi
    def message_new(self, msg, custom_values=None):
        if custom_values is None:
            custom_values = {}
        defaults = {}
        # Read YAML front matter
        yaml_delim = "---" + "\n"
        desc = html2plaintext(msg.get("body")) if msg.get("body") else ""
        if desc.startswith(yaml_delim):
            parts = desc[len(yaml_delim):].split("\n" + yaml_delim, 1)
            if len(parts) == 2: # Does contain a front matter.
                defaults["front_matter"] = parts[0]
                desc = parts[1]
                try:
                    front_matter = yaml.load(parts[0])
                except yaml.YAMLError:
                    _logger.warning("Could not parse the front matter.", exc_info=True)
                else:
                    self._parse_front_matter(front_matter, defaults)
        defaults["description"] = "<pre>{}</pre>".format(desc)
        defaults.update(custom_values)
        return super().message_new(msg, custom_values=defaults)
    def _parse_front_matter(self, fm, values):
        # Match the partner
        match_obj = self.env["res.partner.match.wp"]
        if "title" in fm:
            fm["title"] = match_obj.match_title(fm["title"])
        if "lang" in fm:
            fm["lang"] = match_obj.match_lang(fm["lang"])
        partner = match_obj.match_partner_to_infos(fm, options={"skip_create": True})
        if partner:
            values["partner_id"] = partner.id
| ##############################################################################
#
# Copyright (C) 2019 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Christopher Meier <dev@c-meier.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import yaml
import logging
from odoo import api, fields, models
from odoo.tools import html2plaintext
_logger = logging.getLogger(__name__)
class CrmWordpressRequest(models.Model):
    _inherit = "crm.claim"
    front_matter = fields.Text()
    # -------------------------------------------------------
    # Mail gateway
    # -------------------------------------------------------
    # Parse the new message to check if it comes from Wordpress and if so parse
    # it and fill the new object.
    @api.multi
    def message_new(self, msg, custom_values=None):
        if custom_values is None:
            custom_values = {}
        defaults = {}
        # Read YAML front matter
        yaml_delim = "---" + "\n"
        desc = html2plaintext(msg.get("body")) if msg.get("body") else ""
        if desc.startswith(yaml_delim):
            parts = desc[len(yaml_delim):].split("\n" + yaml_delim, 1)
            if len(parts) == 2: # Does contain a front matter.
                defaults["front_matter"] = parts[0]
                desc = parts[1]
                try:
                    front_matter = yaml.load(parts[0])
                except yaml.YAMLError:
                    _logger.warning("Could not parse the front matter.", exc_info=True)
                else:
                    self._parse_front_matter(front_matter, defaults)
        defaults["description"] = "<pre>{}</pre>".format(desc)
        defaults.update(custom_values)
        return super().message_new(msg, custom_values=defaults)
    def _parse_front_matter(self, fm, values):
        # Match the partner
        match_obj = self.env["res.partner.match.wp"]
        if "title" in fm:
            fm["title"] = match_obj.match_title(fm["title"])
        if "lang" in fm:
            fm["lang"] = match_obj.match_lang(fm["lang"])
        partner = match_obj.match_partner_to_infos(fm, options={"skip_create": True})
        values["partner_id"] = partner.id
| agpl-3.0 | Python |
10e4f30cbc4327b6ee51a600bba4304261c27ca0 | Update release script | adferrand/docker-letsencrypt-dns | utils/create_release.py | utils/create_release.py | import subprocess
from distutils.version import StrictVersion
def main():
    git_clean = subprocess.check_output(
        "git status --porcelain", universal_newlines=True
    ).strip()
    if git_clean:
        raise RuntimeError("Error, git workspace is not clean: \n{0}".format(git_clean))
    current_version = subprocess.check_output(
        "poetry version", shell=True, universal_newlines=True
    ).replace("dnsrobocert ", "")
    current_version = StrictVersion(current_version)
    print("Current version is: {0}".format(current_version))
    print("Please insert new version:")
    new_version = str(input())
    new_version = StrictVersion(new_version)
    if new_version <= current_version:
        raise RuntimeError(
            "Error new version is below current version: {0} < {1}".format(
                new_version, current_version
            )
        )
    try:
        subprocess.check_call("poetry version {0}".format(new_version), shell=True)
        subprocess.check_call("poetry run isort -rc src test utils", shell=True)
        subprocess.check_call("poetry run black src test utils", shell=True)
        subprocess.check_call('git commit -a -m "Version {0}"'.format(new_version))
        subprocess.check_call("git tag v{0}".format(new_version))
    except subprocess.CalledProcessError as e:
        print("Error detected, cleaning state.")
        subprocess.call("git tag -d v{0}".format(new_version))
        subprocess.check_call("git reset --hard")
        raise e
if __name__ == "__main__":
    main()
| import subprocess
import sys
from distutils.version import StrictVersion
def main():
    git_clean = subprocess.check_output("git status --porcelain", universal_newlines=True).strip()
    # if git_clean:
    # raise RuntimeError("Error, git workspace is not clean: \n{0}".format(git_clean))
    current_version = subprocess.check_output(
        "poetry version", shell=True, universal_newlines=True
    ).replace("dnsrobocert ", "")
    current_version = StrictVersion(current_version)
    print("Current version is: {0}".format(current_version))
    print("Please insert new version:")
    new_version = str(input())
    new_version = StrictVersion(new_version)
    if new_version <= current_version:
        raise RuntimeError(
            "Error new version is below current version: {0} < {1}".format(
                new_version, current_version
            )
        )
    try:
        subprocess.check_call("poetry version {0}".format(new_version), shell=True)
        subprocess.check_call("poetry run isort src test utils", shell=True)
        subprocess.check_call("poetry run black src test utils", shell=True)
        subprocess.check_call("poetry run mypy src", shell=True)
        subprocess.check_call("poetry run pytest test", shell=True)
    except subprocess.CalledProcessError:
        subprocess.check_call("git reset --hard")
if __name__ == "__main__":
    main()
| mit | Python |
49f8a47dfc06484a4d9d767a10c07c47fe5e76d3 | Fix synthax error | bird-house/flyingpigeon | flyingpigeon/processes/__init__.py | flyingpigeon/processes/__init__.py | from .wps_subset_countries import ClippingProcess
from .wps_subset_continents import ClipcontinentProcess
from .wps_subset_regionseurope import ClipregionseuropeProcess
from .wps_pointinspection import PointinspectionProcess
from .wps_landseamask import LandseamaskProcess
from .wps_climatefactsheet import FactsheetProcess
from .wps_fetch import FetchProcess
from .wps_indices_percentiledays import IndicespercentiledaysProcess
from .wps_indices_single import IndicessingleProcess
from .wps_robustness import RobustnessProcess
from .wps_plot_timeseries import PlottimeseriesProcess
from .wps_sdm_gbiffetch import GBIFfetchProcess
from .wps_sdm_getindices import SDMgetindicesProcess
from .wps_sdm_csv import SDMcsvProcess
from .wps_sdm_csvindices import SDMcsvindicesProcess
from .wps_sdm_allinone import SDMallinoneProcess
from .wps_weatherregimes_reanalyse import WeatherregimesreanalyseProcess
from .wps_weatherregimes_projection import WeatherregimesprojectionProcess
from .wps_weatherregimes_model import WeatherregimesmodelProcess
from .wps_analogs_reanalyse import AnalogsreanalyseProcess
from .wps_analogs_model import AnalogsmodelProcess
from .wps_analogs_compare import AnalogscompareProcess
from .wps_analogs_viewer import AnalogsviewerProcess
from .wps_segetalflora import SegetalfloraProcess
from .wps_spatial_analog import SpatialAnalogProcess
from .wps_map_spatial_analog import MapSpatialAnalogProcess
from .wps_subset import SubsetProcess
from .wps_averager import AveragerProcess
from .wps_subset_WFS import SubsetWFSProcess
from .wps_averager_WFS import AveragerWFSProcess
from .wps_ouranos_pub_indicators import OuranosPublicIndicatorProcess
processes = [
ClippingProcess(),
ClipcontinentProcess(),
ClipregionseuropeProcess(),
PointinspectionProcess(),
FactsheetProcess(),
FetchProcess(),
LandseamaskProcess(),
IndicespercentiledaysProcess(),
IndicessingleProcess(),
GBIFfetchProcess(),
SDMgetindicesProcess(),
# SDMcsvProcess(),
# SDMcsvindicesProcess(),
# SDMallinoneProcess(),
WeatherregimesreanalyseProcess(),
WeatherregimesprojectionProcess(),
WeatherregimesmodelProcess(),
AnalogsreanalyseProcess(),
# AnalogsmodelProcess(),
# AnalogscompareProcess(),
AnalogsviewerProcess(),
RobustnessProcess(),
PlottimeseriesProcess(),
SegetalfloraProcess(),
SpatialAnalogProcess(),
MapSpatialAnalogProcess(),
SubsetProcess(),
AveragerProcess(),
SubsetWFSProcess(),
AveragerWFSProcess(),
OuranosPublicIndicatorProcess(),
]
"""
pywps3 processes:
# climate for impact processes
"wps_c4i_simple_indice",
# processes under development
# "wps_eobs2cordex",
# TODO: c4i processes with multiple input sources
# "wps_c4i_multivar_indice",
# "wps_c4i_percentile_indice",
# "wps_c4i_compound_indice",
"""
| from .wps_subset_countries import ClippingProcess
from .wps_subset_continents import ClipcontinentProcess
from .wps_subset_regionseurope import ClipregionseuropeProcess
from .wps_pointinspection import PointinspectionProcess
from .wps_landseamask import LandseamaskProcess
from .wps_climatefactsheet import FactsheetProcess
from .wps_fetch import FetchProcess
from .wps_indices_percentiledays import IndicespercentiledaysProcess
from .wps_indices_single import IndicessingleProcess
from .wps_robustness import RobustnessProcess
from .wps_plot_timeseries import PlottimeseriesProcess
from .wps_sdm_gbiffetch import GBIFfetchProcess
from .wps_sdm_getindices import SDMgetindicesProcess
from .wps_sdm_csv import SDMcsvProcess
from .wps_sdm_csvindices import SDMcsvindicesProcess
from .wps_sdm_allinone import SDMallinoneProcess
from .wps_weatherregimes_reanalyse import WeatherregimesreanalyseProcess
from .wps_weatherregimes_projection import WeatherregimesprojectionProcess
from .wps_weatherregimes_model import WeatherregimesmodelProcess
from .wps_analogs_reanalyse import AnalogsreanalyseProcess
from .wps_analogs_model import AnalogsmodelProcess
from .wps_analogs_compare import AnalogscompareProcess
from .wps_analogs_viewer import AnalogsviewerProcess
from .wps_segetalflora import SegetalfloraProcess
from .wps_spatial_analog import SpatialAnalogProcess
from .wps_map_spatial_analog import MapSpatialAnalogProcess
from .wps_subset import SubsetProcess
from .wps_averager import AveragerProcess
from .wps_subset_WFS import SubsetWFSProcess
from .wps_averager_WFS import AveragerWFSProcess
from .wps_ouranos_pub_indicators import OuranosPublicIndicatorProcess
processes = [
ClippingProcess(),
ClipcontinentProcess(),
ClipregionseuropeProcess(),
PointinspectionProcess(),
FactsheetProcess(),
FetchProcess(),
LandseamaskProcess(),
IndicespercentiledaysProcess(),
IndicessingleProcess(),
GBIFfetchProcess(),
SDMgetindicesProcess(),
# SDMcsvProcess(),
# SDMcsvindicesProcess(),
# SDMallinoneProcess(),
WeatherregimesreanalyseProcess(),
WeatherregimesprojectionProcess(),
WeatherregimesmodelProcess(),
AnalogsreanalyseProcess(),
# AnalogsmodelProcess(),
# AnalogscompareProcess(),
AnalogsviewerProcess(),
RobustnessProcess(),
PlottimeseriesProcess(),
SegetalfloraProcess(),
SpatialAnalogProcess(),
MapSpatialAnalogProcess(),
SubsetProcess(),
AveragerProcess()
SubsetWFSProcess(),
AveragerWFSProcess(),
OuranosPublicIndicatorProcess(),
]
"""
pywps3 processes:
# climate for impact processes
"wps_c4i_simple_indice",
# processes under development
# "wps_eobs2cordex",
# TODO: c4i processes with multiple input sources
# "wps_c4i_multivar_indice",
# "wps_c4i_percentile_indice",
# "wps_c4i_compound_indice",
"""
| apache-2.0 | Python |
c29354ac1ef5805659ad20a30b9f78122f54012e | Include the active SDK version in --version output. | pebble/pebble-tool,pebble/pebble-tool,pebble/pebble-tool,gregoiresage/pebble-tool,gregoiresage/pebble-tool,pebble/pebble-tool,gregoiresage/pebble-tool,gregoiresage/pebble-tool | pebble_tool/__init__.py | pebble_tool/__init__.py | from __future__ import absolute_import, print_function
__author__ = 'katharine'
import atexit
import argparse
import logging
import sys
import requests.packages.urllib3 as urllib3
from .commands.base import register_children
from .commands.sdk import build, create
from .commands import (install, logs, screenshot, timeline, emucontrol, ping, account, repl,
transcription_server, data_logging)
from .commands.sdk import manage, analyse_size, convert, emulator
from .exceptions import ToolError
from .sdk import sdk_version
from .util.analytics import wait_for_analytics, analytics_prompt
from .util.config import config
from .util.updates import wait_for_update_checks
from .version import __version__, __version_info__
def run_tool(args=None):
    urllib3.disable_warnings() # sigh. :(
    logging.basicConfig()
    analytics_prompt()
    parser = argparse.ArgumentParser(description="Pebble Tool", prog="pebble")
    version_string = "Pebble Tool v{}".format(__version__)
    if sdk_version() is not None:
        version_string += " (active SDK: v{})".format(sdk_version())
    parser.add_argument("--version", action="version", version=version_string)
    register_children(parser)
    args = parser.parse_args(args)
    if not hasattr(args, 'func'):
        parser.error("no subcommand specified.")
    try:
        args.func(args)
    except ToolError as e:
        parser.exit(message=str(e)+"\n", status=1)
        sys.exit(1)
@atexit.register
def wait_for_cleanup():
    import time
    now = time.time()
    wait_for_analytics(2)
    wait_for_update_checks(2)
    logging.info("Spent %f seconds waiting for analytics.", time.time() - now)
    config.save()
| from __future__ import absolute_import, print_function
__author__ = 'katharine'
import atexit
import argparse
import logging
import sys
import requests.packages.urllib3 as urllib3
from .commands.base import register_children
from .commands.sdk import build, create
from .commands import (install, logs, screenshot, timeline, emucontrol, ping, account, repl,
transcription_server, data_logging)
from .commands.sdk import manage, analyse_size, convert, emulator
from .exceptions import ToolError
from .sdk import sdk_version
from .util.analytics import wait_for_analytics, analytics_prompt
from .util.config import config
from .util.updates import wait_for_update_checks
from .version import __version__, __version_info__
def run_tool(args=None):
    urllib3.disable_warnings() # sigh. :(
    logging.basicConfig()
    analytics_prompt()
    parser = argparse.ArgumentParser(description="Pebble Tool", prog="pebble")
    parser.add_argument("--version", action="version", version="Pebble Tool v{}".format(__version__))
    register_children(parser)
    args = parser.parse_args(args)
    if not hasattr(args, 'func'):
        parser.error("no subcommand specified.")
    try:
        args.func(args)
    except ToolError as e:
        parser.exit(message=str(e)+"\n", status=1)
        sys.exit(1)
@atexit.register
def wait_for_cleanup():
    import time
    now = time.time()
    wait_for_analytics(2)
    wait_for_update_checks(2)
    logging.info("Spent %f seconds waiting for analytics.", time.time() - now)
    config.save()
| mit | Python |
3548e134f760b85b2b76d5cf3c8c3462f3885852 | update URLs (#5355) | iulian787/spack,tmerrick1/spack,skosukhin/spack,lgarren/spack,tmerrick1/spack,tmerrick1/spack,mfherbst/spack,mfherbst/spack,krafczyk/spack,tmerrick1/spack,krafczyk/spack,matthiasdiener/spack,iulian787/spack,EmreAtes/spack,skosukhin/spack,TheTimmy/spack,LLNL/spack,LLNL/spack,krafczyk/spack,LLNL/spack,matthiasdiener/spack,mfherbst/spack,skosukhin/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,LLNL/spack,lgarren/spack,matthiasdiener/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,TheTimmy/spack,iulian787/spack,skosukhin/spack,lgarren/spack,mfherbst/spack,tmerrick1/spack,matthiasdiener/spack,iulian787/spack,EmreAtes/spack,TheTimmy/spack,iulian787/spack,skosukhin/spack,EmreAtes/spack,TheTimmy/spack,LLNL/spack,TheTimmy/spack,lgarren/spack,lgarren/spack | var/spack/repos/builtin/packages/libsodium/package.py | var/spack/repos/builtin/packages/libsodium/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Libsodium(AutotoolsPackage):
"""Sodium is a modern, easy-to-use software library for encryption,
decryption, signatures, password hashing and more."""
homepage = "https://download.libsodium.org/doc/"
url = "https://download.libsodium.org/libsodium/releases/libsodium-1.0.13.tar.gz"
list_url = "https://download.libsodium.org/libsodium/releases/old"
version('1.0.13', 'f38aac160a4bd05f06f743863e54e499')
version('1.0.12', 'c308e3faa724b630b86cc0aaf887a5d4')
version('1.0.11', 'b58928d035064b2a46fb564937b83540')
version('1.0.10', 'ea89dcbbda0b2b6ff6a1c476231870dd')
version('1.0.3', 'b3bcc98e34d3250f55ae196822307fab')
version('1.0.2', 'dc40eb23e293448c6fc908757738003f')
version('1.0.1', '9a221b49fba7281ceaaf5e278d0f4430')
version('1.0.0', '3093dabe4e038d09f0d150cef064b2f7')
version('0.7.1', 'c224fe3923d1dcfe418c65c8a7246316')
def url_for_version(self, version):
url = 'https://download.libsodium.org/libsodium/releases/'
if version < Version('1.0.4'):
url += 'old/unsupported/'
elif version < Version('1.0.12'):
url += 'old/'
return url + 'libsodium-{0}.tar.gz'.format(version)
| ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Libsodium(AutotoolsPackage):
"""Sodium is a modern, easy-to-use software library for encryption,
decryption, signatures, password hashing and more."""
homepage = "https://download.libsodium.org/doc/"
url = "https://download.libsodium.org/libsodium/releases/libsodium-1.0.11.tar.gz"
list_url = "https://download.libsodium.org/libsodium/releases/old"
version('1.0.11', 'b58928d035064b2a46fb564937b83540')
version('1.0.10', 'ea89dcbbda0b2b6ff6a1c476231870dd')
version('1.0.3', 'b3bcc98e34d3250f55ae196822307fab')
version('1.0.2', 'dc40eb23e293448c6fc908757738003f')
version('1.0.1', '9a221b49fba7281ceaaf5e278d0f4430')
version('1.0.0', '3093dabe4e038d09f0d150cef064b2f7')
version('0.7.1', 'c224fe3923d1dcfe418c65c8a7246316')
def url_for_version(self, version):
url = 'https://download.libsodium.org/libsodium/releases/'
if version < Version('1.0.4'):
url += 'old/'
return url + 'libsodium-{0}.tar.gz'.format(version)
| lgpl-2.1 | Python |
fe728a9741be7eb1cfa440bf5d9c9b008255f3d7 | Add py-future (#19195) | iulian787/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/py-future/package.py | var/spack/repos/builtin/packages/py-future/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyFuture(PythonPackage):
"""Clean single-source support for Python 3 and 2"""
homepage = "https://python-future.org/"
url = "https://pypi.io/packages/source/f/future/future-0.18.2.tar.gz"
version('0.18.2', sha256='b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d')
version('0.17.1', sha256='67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8')
version('0.17.0', sha256='eb6d4df04f1fb538c99f69c9a28b255d1ee4e825d479b9c62fc38c0cf38065a4')
version('0.16.0', sha256='e39ced1ab767b5936646cedba8bcce582398233d6a627067d4c6a454c90cfedb')
version('0.15.2', sha256='3d3b193f20ca62ba7d8782589922878820d0a023b885882deec830adbf639b97')
depends_on('py-setuptools', type='build')
depends_on('py-importlib', type=('build', 'run'), when='^python@:2.6')
depends_on('py-argparse', type=('build', 'run'), when='^python@:2.6')
| # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyFuture(PythonPackage):
"""Clean single-source support for Python 3 and 2"""
homepage = "https://python-future.org/"
url = "https://pypi.io/packages/source/f/future/future-0.16.0.tar.gz"
version('0.17.1', sha256='67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8')
version('0.17.0', sha256='eb6d4df04f1fb538c99f69c9a28b255d1ee4e825d479b9c62fc38c0cf38065a4')
version('0.16.0', sha256='e39ced1ab767b5936646cedba8bcce582398233d6a627067d4c6a454c90cfedb')
version('0.15.2', sha256='3d3b193f20ca62ba7d8782589922878820d0a023b885882deec830adbf639b97')
depends_on('py-setuptools', type='build')
depends_on('py-importlib', type=('build', 'run'), when='^python@:2.6')
depends_on('py-argparse', type=('build', 'run'), when='^python@:2.6')
| lgpl-2.1 | Python |
0c84f6dd314ea62019356b09363f98118a4da776 | Use built-in IP address functionality to unmap IPv4 addresses | Heufneutje/txircd | txircd/factory.py | txircd/factory.py | from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
from typing import Union
def unmapIPv4(ip: str) -> Union["IPv4Address", "IPv6Address"]:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
addr = ip_address(ip)
if addr.ipv4_mapped is None:
return addr
return addr.ipv4_mapped
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False) | from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
import re
ipv4MappedAddr = re.compile("::ffff:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})")
def unmapIPv4(ip: str) -> str:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
mapped = ipv4MappedAddr.match(ip)
if mapped:
return mapped.group(1)
return ip
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False) | bsd-3-clause | Python |
d496568ff9615dbb69bb9d4edf971231232ff438 | add version 1.5 (#26731) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-gevent/package.py | var/spack/repos/builtin/packages/py-gevent/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyGevent(PythonPackage):
"""gevent is a coroutine-based Python networking library."""
homepage = "https://www.gevent.org"
pypi = "gevent/gevent-1.3a2.tar.gz"
version('1.5.0', sha256='b2814258e3b3fb32786bb73af271ad31f51e1ac01f33b37426b66cb8491b4c29')
version('1.3a2', sha256='f7ab82697111ea233c7beeadf5240f669dfad9c4bbc89a3ec80a49e2c48a65bd')
depends_on('py-setuptools@24.2:', type='build', when='@:1.4')
depends_on('py-setuptools@40.8:', type='build', when='@1.5:')
depends_on('py-cython@0.27:', type='build', when='@:1.4')
depends_on('py-cython@0.29.14:', type='build', when='@1.5:')
depends_on('py-cffi@1.4:', type=('build', 'run'), when='@:1.4')
depends_on('py-cffi@1.12.2:', type=('build', 'run'), when='@1.5:')
depends_on('py-greenlet@0.4.13:', type=('build', 'run'), when='@:1.4')
depends_on('py-greenlet@0.4.14:', type=('build', 'run'), when='@1.5:')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'), when='@1.5:')
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyGevent(PythonPackage):
"""gevent is a coroutine-based Python networking library."""
homepage = "https://www.gevent.org"
pypi = "gevent/gevent-1.3a2.tar.gz"
version('1.3a2', sha256='f7ab82697111ea233c7beeadf5240f669dfad9c4bbc89a3ec80a49e2c48a65bd')
depends_on('py-setuptools@24.2:', type='build')
depends_on('py-cython@0.27:', type='build')
depends_on('py-cffi@1.4.0:', type=('build', 'run'))
depends_on('py-greenlet@0.4.13:', type=('build', 'run'))
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
| lgpl-2.1 | Python |
58472d21c46cfe4a24e31a5553019731479e1e0b | Update ssh_dispatcher.py | nitzmahone/netmiko,ivandgreat/netmiko,shamanu4/netmiko,jumpojoy/netmiko,isidroamv/netmiko,isponline/netmiko,jinesh-patel/netmiko,nvoron23/netmiko,shsingh/netmiko,fooelisa/netmiko,rdezavalia/netmiko,shsingh/netmiko,mileswdavis/netmiko,isidroamv/netmiko,rdezavalia/netmiko,fooelisa/netmiko,mileswdavis/netmiko,mzbenami/netmiko,rumo/netmiko,jinesh-patel/netmiko,shamanu4/netmiko,nitzmahone/netmiko,enzzzy/netmiko,mzbenami/netmiko,rumo/netmiko,brutus333/netmiko,isponline/netmiko,ktbyers/netmiko,jumpojoy/netmiko,brutus333/netmiko,ivandgreat/netmiko,ktbyers/netmiko,MikeOfNoTrades/netmiko,enzzzy/netmiko,MikeOfNoTrades/netmiko | netmiko/ssh_dispatcher.py | netmiko/ssh_dispatcher.py | from cisco import CiscoIosSSH
from cisco import CiscoAsaSSH
from cisco import CiscoNxosSSH
from cisco import CiscoXrSSH
from arista import AristaSSH
from hp import HPProcurveSSH
CLASS_MAPPER = {
    'cisco_ios' : CiscoIosSSH,
    'cisco_xe' : CiscoIosSSH,
    'cisco_asa' : CiscoAsaSSH,
    'cisco_nxos' : CiscoNxosSSH,
    'cisco_xr' : CiscoXrSSH,
    'arista_eos' : AristaSSH,
    'hp_procurve' : HPProcurveSSH,
    'f5_ltm_ssh' : F5LtmSSH,
}
def ssh_dispatcher(device_type):
    '''
    Select the class to be instantiated based on vendor/platform
    '''
    return CLASS_MAPPER[device_type]
| from cisco import CiscoIosSSH
from cisco import CiscoAsaSSH
from cisco import CiscoNxosSSH
from cisco import CiscoXrSSH
from arista import AristaSSH
from hp import HPProcurveSSH
CLASS_MAPPER = {
    'cisco_ios' : CiscoIosSSH,
    'cisco_xe' : CiscoIosSSH,
    'cisco_asa' : CiscoAsaSSH,
    'cisco_nxos' : CiscoNxosSSH,
    'cisco_xr' : CiscoXrSSH,
    'arista_eos' : AristaSSH,
    'hp_procurve' : HPProcurveSSH,
}
def ssh_dispatcher(device_type):
    '''
    Select the class to be instantiated based on vendor/platform
    '''
    return CLASS_MAPPER[device_type]
| mit | Python |
46214b0caa1772b405d76c370882eb259579dca5 | Set r-chipseq to bioconductor format (#24315) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/r-chipseq/package.py | var/spack/repos/builtin/packages/r-chipseq/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RChipseq(RPackage):
"""A package for analyzing chipseq data
Tools for helping process short read data for chipseq experiments"""
homepage = "https://bioconductor.org/packages/release/bioc/html/chipseq.html"
git = "https://git.bioconductor.org/packages/chipseq"
maintainers = ['dorton21']
version('1.40.0', commit='84bcbc0b7ad732730b5989a308f1624a6a358df1')
depends_on('r@2.10:', type=('build', 'run'))
depends_on('r-biocgenerics@0.1.0:', type=('build', 'run'))
depends_on('r-s4vectors@0.17.25:', type=('build', 'run'))
depends_on('r-iranges@2.13.12:', type=('build', 'run'))
depends_on('r-genomicranges@1.31.8:', type=('build', 'run'))
depends_on('r-shortread', type=('build', 'run'))
depends_on('r-lattice', type=('build', 'run'))
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RChipseq(RPackage):
"""A package for analyzing chipseq data"""
homepage = "https://bioconductor.org/packages/release/bioc/html/chipseq.html"
url = "https://bioconductor.org/packages/release/bioc/src/contrib/chipseq_1.40.0.tar.gz"
maintainers = ['dorton21']
version('1.40.0', sha256='5b48721a9eae6ebaf57a57af13f76eb887925ea1a02906abeb6f67a588c0ff8a')
depends_on('r@2.10:', type=('build', 'run'))
depends_on('r-biocgenerics@0.1.0:', type=('build', 'run'))
depends_on('r-s4vectors@0.17.25:', type=('build', 'run'))
depends_on('r-iranges@2.13.12:', type=('build', 'run'))
depends_on('r-genomicranges@1.31.8:', type=('build', 'run'))
depends_on('r-shortread', type=('build', 'run'))
depends_on('r-lattice', type=('build', 'run'))
| lgpl-2.1 | Python |
cf83a6abc6709fadfe8dfa44408462e79bd8c1ff | Enable apache logging for uweb_info. | edelooff/newWeb,edelooff/newWeb | uweb_info/www/router.py | uweb_info/www/router.py | #!/usr/bin/python
"""An uweb info page and testcase."""
# Custom modules
from underdark.libs import uweb
from underdark.libs.uweb.uweb_info import pages
__author__ = 'Elmer de Looff <elmer@underdark.nl>'
__version__ = '0.2'
CONFIG = 'example.conf'
PACKAGE = 'uweb_info'
# PAGE_CLASS is the constant that defines the class that should handle requests
# from clients. The method to call is defined by the ROUTES constant below.
PAGE_CLASS = pages.PageMaker
# This router uses the constant `ROUTES` to provide a request router for the
# uWeb Handler. `ROUTES` is an iterable consisting of 2-tuples, each of which
# defines a regular expression and a method name. The regular expressions are
# tested in order, and must match the whole URL that is requested.
# If a match is found, traversal stops and the method name corresponding the
# regex is looked up on the provided `PAGE_CLASS`. This method is then used to
# generate a response.
#
# Any capture groups defined in the regular expressions of the `ROUTES` will
# be provided as arguments on the methods they call to.
ROUTES = (
('/static/(.*)', 'Static'),
('/(broken.*)', 'FourOhFour'),
('/haltandcatchfire', 'MakeFail'),
('/text', 'Text'),
('/redirect/(.*)', 'Redirect'),
('/OpenIDLogin', '_OpenIdInitiate'),
('/OpenIDValidate', '_OpenIdValidate'),
('/(.*)', 'Index'))
uweb.ServerSetup(apache_logging=True)
| #!/usr/bin/python
"""An uweb info page and testcase."""
# Custom modules
from underdark.libs import uweb
from underdark.libs.uweb.uweb_info import pages
__author__ = 'Elmer de Looff <elmer@underdark.nl>'
__version__ = '0.2'
CONFIG = 'example.conf'
PACKAGE = 'uweb_info'
# PAGE_CLASS is the constant that defines the class that should handle requests
# from clients. The method to call is defined by the ROUTES constant below.
PAGE_CLASS = pages.PageMaker
# This router uses the constant `ROUTES` to provide a request router for the
# uWeb Handler. `ROUTES` is an iterable consisting of 2-tuples, each of which
# defines a regular expression and a method name. The regular expressions are
# tested in order, and must match the whole URL that is requested.
# If a match is found, traversal stops and the method name corresponding the
# regex is looked up on the provided `PAGE_CLASS`. This method is then used to
# generate a response.
#
# Any capture groups defined in the regular expressions of the `ROUTES` will
# be provided as arguments on the methods they call to.
ROUTES = (
('/static/(.*)', 'Static'),
('/(broken.*)', 'FourOhFour'),
('/haltandcatchfire', 'MakeFail'),
('/text', 'Text'),
('/redirect/(.*)', 'Redirect'),
('/OpenIDLogin', '_OpenIdInitiate'),
('/OpenIDValidate', '_OpenIdValidate'),
('/(.*)', 'Index'))
uweb.ServerSetup(apache_logging=False)
| isc | Python |
bee5ed1d9815a4c4291179d0de3ec54fe467b219 | Save sessions in JSON format instead of pickle. | shaurz/devo | project.py | project.py | import os
import json
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = json.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = json.dumps(project.session, indent=2)
fileutil.atomic_write_file(project.filename, data)
| import os, cPickle as pickle
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = pickle.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = pickle.dumps(project.session, pickle.HIGHEST_PROTOCOL)
fileutil.atomic_write_file(project.filename, data)
| mit | Python |
5b5ca9a25df66cef52d3e302a9bb135884f3981a | Update qt_translations.py | bowscoin/bowscoin,bowscoin/bowscoin,bowscoin/bowscoin,bowscoin/bowscoin,bowscoin/bowscoin | contrib/qt_translations.py | contrib/qt_translations.py | #!/usr/bin/env python
# Helpful little script that spits out a comma-separated list of
# language codes for Qt icons that should be included
# in binary bitcoin distributions
import glob
import os
import re
import sys
if len(sys.argv) != 3:
sys.exit("Usage: %s $QTDIR/translations $BOWSCOINDIR/src/qt/locale"%sys.argv[0])
d1 = sys.argv[1]
d2 = sys.argv[2]
l1 = set([ re.search(r'qt_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d1, 'qt_*.qm')) ])
l2 = set([ re.search(r'bitcoin_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d2, 'bitcoin_*.qm')) ])
print ",".join(sorted(l1.intersection(l2)))
| #!/usr/bin/env python
# Helpful little script that spits out a comma-separated list of
# language codes for Qt icons that should be included
# in binary bitcoin distributions
import glob
import os
import re
import sys
if len(sys.argv) != 3:
sys.exit("Usage: %s $QTDIR/translations $BITCOINDIR/src/qt/locale"%sys.argv[0])
d1 = sys.argv[1]
d2 = sys.argv[2]
l1 = set([ re.search(r'qt_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d1, 'qt_*.qm')) ])
l2 = set([ re.search(r'bitcoin_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d2, 'bitcoin_*.qm')) ])
print ",".join(sorted(l1.intersection(l2)))
| mit | Python |
225a5987f4cdf494770321f4f37641902b0422d9 | add help command to help message | Zumium/boxes | boxes/handlers/help_msg.py | boxes/handlers/help_msg.py | #Copyright (C) 2016 Zumium martin007323@gmail.com
#
#
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
from boxes import handlerBase
class HelpHandler(handlerBase.BaseHandler):
def __init__(self):
super().__init__()
def handle(self):
helpMessage='''
Copyright (C) 2016 Zumium martin007323@gmail.com
Version 1.0.2
usage: boxes COMMAND [OPTIONAL ARGUMENTS]
boxes create BOX_NAME
create a new box named BOX_NAME
boxes drop BOX_NAME
delete the box named BOX_NAME
boxes list-boxes
list all unpacked boxes
boxes list-arch
list all packed boxes
boxes list-file BOX_NAME
list all files in the box named BOX_NAME
boxes list
list all of both packed and unpacked boxes
boxes link BOX_NAME[:FILE_NAME] [PATH]
link BOX_NAME[:FILE_NAME] to the given path. The current working directory is the default
boxes unlink BOX_NAME[:FILE_NAME]
delete all links of BOX_NAME[:FILE_NAME]
boxes archive BOX_NAME
pack the box named BOX_NAME
boxes unarchive BOX_NAME
unpack the box named BOX_NAME
boxes path BOX_NAME
print out the path of box named BOX_NAME
boxes import BOX_ARCHIVE_FILE_PATH
import from an outer file into a packed box
boxes export BOX_NAME [PATH]
export a box to an outer file at the given directory. The current working directory is the default
boxes add BOX_NAME FILE_NAME
copy given file into the given box
boxes del BOX_NAME:FILE_NAME
delete the given file that is in the box
boxes fresh [BOX_NAME[:FILE_NAME]]
check links and delete those that are missing in record
boxes help
show this help message
This software is released under Apache License Version 2.0 (http://www.apache.org/licenses/)
'''
print(helpMessage)
| #Copyright (C) 2016 Zumium martin007323@gmail.com
#
#
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
from boxes import handlerBase
class HelpHandler(handlerBase.BaseHandler):
def __init__(self):
super().__init__()
def handle(self):
helpMessage='''
Copyright (C) 2016 Zumium martin007323@gmail.com
Version 1.0.2
usage: boxes COMMAND [OPTIONAL ARGUMENTS]
boxes create BOX_NAME
create a new box named BOX_NAME
boxes drop BOX_NAME
delete the box named BOX_NAME
boxes list-boxes
list all unpacked boxes
boxes list-arch
list all packed boxes
boxes list-file BOX_NAME
list all files in the box named BOX_NAME
boxes list
list all of both packed and unpacked boxes
boxes link BOX_NAME[:FILE_NAME] [PATH]
link BOX_NAME[:FILE_NAME] to the given path.Current working directory is as default
boxes unlink BOX_NAME[:FILE_NAME]
delete all links of BOX_NAME[:FILE_NAME]
boxes archive BOX_NAME
pack the box named BOX_NAME
boxes unarchive BOX_NAME
unpack the box named BOX_NAME
boxes path BOX_NAME
print out the path of box named BOX_NAME
boxes import BOX_ARCHIVE_FILE_PATH
import from an outer file into a packed box
boxes export BOX_NAME [PATH]
export a box to an outer file at given directory.Current working directory is as default
boxes add BOX_NAME FILE_NAME
copy given file into the given box
boxes del BOX_NAME:FILE_NAME
delete the given file that is in the box
boxes fresh [BOX_NAME[:FILE_NAME]]
check links and delete those that are missing in record
This software is released under Apache License Version 2.0 (http://www.apache.org/licenses/)
'''
print(helpMessage)
| apache-2.0 | Python |
14c7d797c3b327aa4e55efe832c536d14bee16fc | Update pelican config for the tag cloud | fretboardfreak/escadrille,fretboardfreak/escadrille,fretboardfreak/escadrille | pelicanconf.py | pelicanconf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
import time
AUTHOR = u'Curtis Sand'
ABOUT_AUTHOR_URL = "pages/about-me.html"
SITENAME = u'Quizzical Silicon'
SITESUBTITLE=u'A charge carrier is not a wallet.'
SITEURL = 'http://curtissand.com/cs'
SITETHUMBNAIL_URL = SITEURL + '/images/site_thumb.jpg'
SITETHUMBNAIL_ALTTEXT = "Mah Guitjo!"
FOOTER_TEXT='Powered by pain and suffering, reading, beer and the sun.'
TIMEZONE = 'America/Edmonton'
DEFAULT_DATE_FORMAT = '%Y-%m-%d %H:%M'
ABOUT_SITE_URL = "pages/about-the-site.html"
SITE_LOGS = ["blog", "fret"]
LOG_PATH = "pages/%s-log.html"
LAST_UPDATED = time.strftime(DEFAULT_DATE_FORMAT)
WITH_FUTURE_DATES = True
DEFAULT_LANG = u'en'
THEME = 'theme/bootstrap'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
TYPOGRIFY = True
# Menu
DISPLAY_PAGES_ON_MENU = False
DISPLAY_CATEGORIES_ON_MENU = True
MENUITEMS = (('links for later', SITEURL+'/pages/links-for-later.html'),
('songs to learn', SITEURL+'/pages/songs-to-learn.html'),
('what\'s interesting?', SITEURL+'/pages/whats-interesting.html'))
# LINKS go in the sidebar
LINKS = (('All Tags', 'http://curtissand.com/cs/tags.html'),
('Archives', 'http://curtissand.com/cs/archives.html'),
)
# FOOTER_LINKS go in the footer
FOOTER_LINKS = (('About this Site', SITEURL+'/pages/about-the-site.html'),
('Pelican', "http://getpelican.com/"),
('Twitter Bootstrap', 'http://twitter.github.com/bootstrap/'),
('Magnific Popup', 'http://dimsemenov.com/plugins/magnific-popup/'),
('Tag Cloud', 'http://addywaddy.github.io/jquery.tagcloud.js/'),
)
# Social widget
#SOCIAL = (('You can add links in your config file', '#'),
# ('Another social link', '#'),)
DEFAULT_PAGINATION = 5
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
PDF_GENERATOR = False
# static paths will be copied without parsing their contents
STATIC_PATHS = ['images', 'style' ]
# tag cloud
TAG_CLOUD_SAYING = "Random Tags"
TAG_CLOUD_STEPS = 10
TAG_CLOUD_MAX_ITEMS = 15
TAG_CLOUD_SIZE = 15
JINJA_EXTENSIONS = ['jinja2.ext.loopcontrols']
| #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
import time
AUTHOR = u'Curtis Sand'
ABOUT_AUTHOR_URL = "pages/about-me.html"
SITENAME = u'Quizzical Silicon'
SITESUBTITLE=u'A charge carrier is not a wallet.'
SITEURL = 'http://curtissand.com/cs'
SITETHUMBNAIL_URL = SITEURL + '/images/site_thumb.jpg'
SITETHUMBNAIL_ALTTEXT = "Mah Guitjo!"
FOOTER_TEXT='Powered by pain and suffering, reading, beer and the sun.'
TIMEZONE = 'America/Edmonton'
DEFAULT_DATE_FORMAT = '%Y-%m-%d %H:%M'
ABOUT_SITE_URL = "pages/about-the-site.html"
SITE_LOGS = ["blog", "fret"]
LOG_PATH = "pages/%s-log.html"
LAST_UPDATED = time.strftime(DEFAULT_DATE_FORMAT)
WITH_FUTURE_DATES = True
DEFAULT_LANG = u'en'
THEME = 'theme/bootstrap'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
TYPOGRIFY = True
# Menu
DISPLAY_PAGES_ON_MENU = False
DISPLAY_CATEGORIES_ON_MENU = True
MENUITEMS = (('links for later', SITEURL+'/pages/links-for-later.html'),
('songs to learn', SITEURL+'/pages/songs-to-learn.html'),
('what\'s interesting?', SITEURL+'/pages/whats-interesting.html'))
# LINKS go in the sidebar
LINKS = (('tags', 'http://curtissand.com/cs/tags.html'),
('archives', 'http://curtissand.com/cs/archives.html'),
)
# FOOTER_LINKS go in the footer
FOOTER_LINKS = (('About this Site', SITEURL+'/pages/about-the-site.html'),
('Pelican', "http://getpelican.com/"),
('Twitter Bootstrap', 'http://twitter.github.com/bootstrap/'),
('Magnific Popup', 'http://dimsemenov.com/plugins/magnific-popup/'),
)
# Social widget
#SOCIAL = (('You can add links in your config file', '#'),
# ('Another social link', '#'),)
DEFAULT_PAGINATION = 5
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
PDF_GENERATOR = False
# static paths will be copied without parsing their contents
STATIC_PATHS = ['images', 'style' ]
# tag cloud
TAG_CLOUD_SAYING = "Random Tags"
TAG_CLOUD_STEPS = 4
TAG_CLOUD_MAX_ITEMS = 100
TAG_CLOUD_SIZE = 5
JINJA_EXTENSIONS = ['jinja2.ext.loopcontrols']
| apache-2.0 | Python |
9344f45bdd9677ffe120bd3b9d96620fe8a9255e | fix pelican_dynamic name | fly/burrito.sh,bsdlp/burrito.sh,fly/burrito.sh,bsdlp/burrito.sh | pelicanconf.py | pelicanconf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'jchen'
AUTHOR_FULLNAME = u'Jon Chen'
SITENAME = u'burrito'
SITEURL = 'http://burrito.sh'
SITETAGLINE = 'Excelling at mediocrity.'
TIMEZONE = 'Etc/UTC'
DEFAULT_LANG = u'en'
# theme stuff
THEME = './theme'
# plugins
PLUGIN_PATHS = ['./plugins', './plugins.d']
PLUGINS = ['assets', 'summary', 'thumbnailer', 'pelican_dynamic']
# gravatar email
AUTHOR_EMAIL = 'dabestmayne@burrito.sh'
# social
TWITTER_USERNAME = 's_jchen'
GOOGLE_ANALYTICS = 'UA-47876445-1'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
DEFAULT_PAGINATION = 4
DISPLAY_CATEGORIES_ON_MENU = False
DISPLAY_MENUITEMS_ON_MENU = False
DISPLAY_NAVBAR = False
DISPLAY_PAGES_ON_MENU = True
PAGE_URL = '{slug}.html'
PAGE_SAVE_AS = '{slug}.html'
OUTPUT_RETENTION = ("keybase.txt")
DEFAULT_DATE_FORMAT = ('%Y-%m-%d')
# Uncomment following line if you want document-relative URLs when developing
RELATIVE_URLS = True
# add paths to pelican
STATIC_PATHS = ['img']
# Thumbnailer plugin options
IMAGE_PATH = 'img'
THUMBNAIL_DIR = 'thumbs'
THUMBNAIL_SIZES = {
'thumbnail_square': '317',
'thumbnail_wide': '635x?',
}
# markdown extensions for syntax highlighting
MD_EXTENSIONS = ['codehilite(css_class=highlight, linenums=False)','extra']
WEBASSETS = True
ARTICLE_URL = 'posts/{date:%Y}{date:%m}{date:%d}/{slug}/'
ARTICLE_SAVE_AS = 'posts/{date:%Y}{date:%m}{date:%d}/{slug}/index.html'
| #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'jchen'
AUTHOR_FULLNAME = u'Jon Chen'
SITENAME = u'burrito'
SITEURL = 'http://burrito.sh'
SITETAGLINE = 'Excelling at mediocrity.'
TIMEZONE = 'Etc/UTC'
DEFAULT_LANG = u'en'
# theme stuff
THEME = './theme'
# plugins
PLUGIN_PATHS = ['./plugins', './plugins.d']
PLUGINS = ['assets', 'summary', 'thumbnailer', 'pelican-dynamic']
# gravatar email
AUTHOR_EMAIL = 'dabestmayne@burrito.sh'
# social
TWITTER_USERNAME = 's_jchen'
GOOGLE_ANALYTICS = 'UA-47876445-1'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
DEFAULT_PAGINATION = 4
DISPLAY_CATEGORIES_ON_MENU = False
DISPLAY_MENUITEMS_ON_MENU = False
DISPLAY_NAVBAR = False
DISPLAY_PAGES_ON_MENU = True
PAGE_URL = '{slug}.html'
PAGE_SAVE_AS = '{slug}.html'
OUTPUT_RETENTION = ("keybase.txt")
DEFAULT_DATE_FORMAT = ('%Y-%m-%d')
# Uncomment following line if you want document-relative URLs when developing
RELATIVE_URLS = True
# add paths to pelican
STATIC_PATHS = ['img']
# Thumbnailer plugin options
IMAGE_PATH = 'img'
THUMBNAIL_DIR = 'thumbs'
THUMBNAIL_SIZES = {
'thumbnail_square': '317',
'thumbnail_wide': '635x?',
}
# markdown extensions for syntax highlighting
MD_EXTENSIONS = ['codehilite(css_class=highlight, linenums=False)','extra']
WEBASSETS = True
ARTICLE_URL = 'posts/{date:%Y}{date:%m}{date:%d}/{slug}/'
ARTICLE_SAVE_AS = 'posts/{date:%Y}{date:%m}{date:%d}/{slug}/index.html'
| bsd-3-clause | Python |
b8dfd90bfa5b20dbfbc056ea5b461879bccfe6f8 | Improve smbio.util.pandas documentation. | brenns10/smbio | smbio/util/pandas.py | smbio/util/pandas.py | """Utility functions for pandas."""
def dataframe_append(dataframe, rowdict):
"""
Shortcut method for appending a row to a DataFrame.
:param pandas.DataFrame dataframe: The DataFrame to append to.
:param dict rowdict: A dictionary containing each column's value.
"""
newrow = len(dataframe)
dataframe.loc[newrow] = 0 # init with 0's
for k, v in rowdict.items():
dataframe.loc[newrow, k] = v
| """Utility functions for pandas."""
def dataframe_append(dataframe, rowdict):
"""
Shortcut method for appending a row to a DataFrame.
:param dataframe: The DataFrame to append to.
:param rowdict: A dictionary containing each column's value.
"""
newrow = len(dataframe)
dataframe.loc[newrow] = 0 # init with 0's
for k, v in rowdict.items():
dataframe.loc[newrow, k] = v
| mit | Python |
5f070174ea67966767b0ef9cf9ad6dcff24f245e | update copyright info | jdstemmler/jdstemmler.github.io,jdstemmler/jdstemmler.github.io | pelicanconf.py | pelicanconf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
import os
import datetime
AUTHOR = u'Jayson Stemmler'
SITENAME = u'Jayson Stemmler'
SITEURL = ''
COPYRIGHT_NAME = "Jayson Stemmler"
COPYRIGHT_YEAR = datetime.datetime.today().strftime('%Y')
# THEME_DIR = os.path.join(os.getenv("HOME"), 'Documents/Blogging/pelican-themes')
# THEME = os.path.join(THEME_DIR, 'Flex')
THEME = 'themes/Flex'
USE_FOLDER_AS_CATEGORY = True
PATH = 'content'
PAGE_PATHS = ['pages']
ARTICLE_PATHS = ['articles']
TIMEZONE = 'America/Los_Angeles'
DEFAULT_LANG = u'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
STATIC_PATHS = ['images', 'extra/CNAME']
EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
ARTICLE_URL = 'posts/{date:%Y}/{date:%b}/{slug}'
ARTICLE_SAVE_AS = 'posts/{date:%Y}/{date:%b}/{slug}.html'
PAGE_URL = 'pages/{slug}'
PAGE_SAVE_AS = 'pages/{slug}.html'
YEAR_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/index.html'
MONTH_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/{date:%b}/index.html'
## THEME OPTIONS
MAIN_MENU = True
SITELOGO = '/images/profile.png'
LINKS = (('Resume', 'https://represent.io/jdstemmler'),)
SOCIAL = (('linkedin', 'https://linkedin.com/in/jdstemmler/en'),
('github', 'https://github.com/jdstemmler'))
MENUITEMS = (('Archives', '/archives.html'),
('Categories', '/categories.html'),
('Tags', '/tags.html'),)
BROWSER_COLOR = '#333'
ROBOTS = 'index, follow'
| #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
import os
AUTHOR = u'Jayson Stemmler'
SITENAME = u'Jayson Stemmler'
SITEURL = ''
THEME_DIR = os.path.join(os.getenv("HOME"), 'Documents/Blogging/pelican-themes')
THEME = os.path.join(THEME_DIR, 'Flex')
USE_FOLDER_AS_CATEGORY = True
PATH = 'content'
PAGE_PATHS = ['pages']
ARTICLE_PATHS = ['articles']
TIMEZONE = 'America/Los_Angeles'
DEFAULT_LANG = u'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
STATIC_PATHS = ['images', 'extra/CNAME']
EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
ARTICLE_URL = 'posts/{date:%Y}/{date:%b}/{slug}'
ARTICLE_SAVE_AS = 'posts/{date:%Y}/{date:%b}/{slug}.html'
PAGE_URL = 'pages/{slug}'
PAGE_SAVE_AS = 'pages/{slug}.html'
YEAR_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/index.html'
MONTH_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/{date:%b}/index.html'
## THEME OPTIONS
MAIN_MENU = True
SITELOGO = '/images/profile.png'
LINKS = (('Resume', 'https://represent.io/jdstemmler'),)
SOCIAL = (('linkedin', 'https://linkedin.com/in/jdstemmler/en'),
('github', 'https://github.com/jdstemmler'))
MENUITEMS = (('Archives', '/archives.html'),
('Categories', '/categories.html'),
('Tags', '/tags.html'),)
BROWSER_COLOR = '#333'
ROBOTS = 'index, follow'
| mit | Python |
81e35a4cd97f34420b9b9bf0b59f245783df457d | Solve sum of multiples | rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism | python/sum-of-multiples/sum_of_multiples.py | python/sum-of-multiples/sum_of_multiples.py | def sum_of_multiples(limit, factors):
return sum(all_multiples(limit, factors))
def all_multiples(limit, factors):
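# Collect the multiples of every factor in a single set so values divisible by more than one factor are only counted once.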
multiples = set()
for factor in factors:
multiples = multiples.union(get_multiples(limit, factor))
return multiples
def get_multiples(limit, factor):
if factor == 0:
return []
multiples = set()
for i in range(0, limit):
if i % factor == 0:
multiples.add(i)
return multiples
| def sum_of_multiples():
pass
| mit | Python |
454b1718bf7daa6e224f911c9f807c3f4ed78981 | Set use_egg_info to True in freeze | osupython/pip2 | pip2/commands/freeze.py | pip2/commands/freeze.py | """
Returns a dictionary containing all installed packages.
return: A dictionary, key is package name value is a dictionary with
information about package.
"""
from pip2.compat import packaging
def freeze():
results = list(packaging.database.get_distributions(use_egg_info=True))
installed = dict()
for dist in results:
installed[dist.name] = dict()
installed[dist.name]['version'] = dist.version
return installed
| """
Returns a dictionary containing all installed packages.
return: A dictionary, key is package name value is a dictionary with
information about package.
"""
from pip2.compat import packaging
def freeze():
results = list(packaging.database.get_distributions())
installed = dict()
for dist in results:
installed[dist.name] = dict()
installed[dist.name]['version'] = dist.version
return installed
| mit | Python |
764b0125967fac2146e5b8cfb1e5d147bec862a8 | Add social links | glasslion/zha-beta,glasslion/zha-beta,glasslion/zha,glasslion/zha,glasslion/zha-beta,glasslion/zha | pelicanconf.py | pelicanconf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Leonardo Zhou'
SITENAME = u'\u4e91\u7ffc\u56fe\u5357'
SITEURL = ''
TIMEZONE = 'Asia/Shanghai'
DEFAULT_LANG = u'zh'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
# Blogroll
LINKS = (
('Python.org', 'http://python.org/'),
)
# Social widget
SOCIAL = (
('twitter', 'https://twitter.com/glasslion'),
('github', 'https://github.com/glasslion'),
('google-plus', 'https://google.com/+LeonardoZhou'),
('envelope', 'mailto:glasslion@gmail.com'),
)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
RELATIVE_URLS = True
# Static content
STATIC_PATHS = ['images', 'extra/CNAME',]
EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
# Url
ARTICLE_URL = '{slug}/'
ARTICLE_SAVE_AS = '{slug}/index.html'
# Custom theme
THEME = 'themes/BT3-Flat-4zha'
# BT3-Flat-4zha settings
TEMPLATE_PAGES = {'blog.html': 'blog.html'}
DIRECT_TEMPLATES = ('index', 'tags', 'categories', 'archives', 'blog-index', 'blog')
PAGINATED_DIRECT_TEMPLATES = ('blog-index',)
POST_LIMIT = 10 | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Leonardo Zhou'
SITENAME = u'\u4e91\u7ffc\u56fe\u5357'
SITEURL = ''
TIMEZONE = 'Asia/Shanghai'
DEFAULT_LANG = u'zh'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
RELATIVE_URLS = True
# Static content
STATIC_PATHS = ['images', 'extra/CNAME',]
EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
# Url
ARTICLE_URL = '{slug}/'
ARTICLE_SAVE_AS = '{slug}/index.html'
# Custom theme
THEME = 'themes/BT3-Flat-4zha'
# BT3-Flat-4zha settings
TEMPLATE_PAGES = {'blog.html': 'blog.html'}
DIRECT_TEMPLATES = ('index', 'tags', 'categories', 'archives', 'blog-index', 'blog')
PAGINATED_DIRECT_TEMPLATES = ('blog-index',)
POST_LIMIT = 10
# Social
GITHUB_URL = 'https://github.com/glasslion' | cc0-1.0 | Python |
507f90417338ffd3221723544bb68c680fbca1b8 | Add is in tree and fix is_in_tree | jeremykid/FunAlgorithm,jeremykid/FunAlgorithm,jeremykid/FunAlgorithm,jeremykid/FunAlgorithm | python_practice/graph/binary_search_tree.py | python_practice/graph/binary_search_tree.py |
class binary_search_tree(object):
def __init__(self, value):
self.value = value
self.right_node = None
self.left_node = None
def __str__(self):
print self.left_node
print self.value
print self.right_node
def insert_node(self, insert_value):
if insert_value == self.value:
return False
if insert_value > self.value:
if self.right_node == None:
self.right_node = binary_search_tree(insert_value)
else:
self.right_node.insert_node(insert_value)
else:
if self.left_node == None:
self.left_node = binary_search_tree(insert_value)
else:
self.left_node.insert_node(insert_value)
return True
def get_node_count(self):
return_count = 1
if self.right_node != None:
return_count += self.right_node.get_node_count()
if self.left_node != None:
return_count += self.left_node.get_node_count()
return return_count
def get_height(self):
return_height = 1
return_right_height = 0
return_left_height = 0
if self.right_node != None:
return_right_height += self.right_node.get_height()
if self.left_node != None:
return_left_height += self.left_node.get_height()
return_height += max(return_right_height,return_left_height)
return return_height
def get_min(self):
if self.left_node == None:
return self.value
else:
return self.left_node.get_min()
def get_max(self):
if self.right_node == None:
return self.value
else:
return self.right_node.get_max()
def is_binary_search_tree(self):
result = False
if self.left_node.value < self.left_node.get_max():
return result
else:
return self.left_node.is_binary_search_tree()
if self.right_node.value > self.right_node.get_min():
return result
else:
return self.right_node.is_binary_search_tree()
def is_in_tree(self, val):
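# Standard BST lookup: descend left when val is smaller than the current node's value, right when it is larger.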
if self.value == val:
return True
elif self.value > val:
if self.left_node == None:
return False
else:
return self.left_node.is_in_tree(val)
else:
if self.right_node == None:
return False
else:
return self.right_node.is_in_tree(val)
|
class binary_search_tree(object):
def __init__(self, value):
self.value = value
self.right_node = None
self.left_node = None
def __str__(self):
print self.left_node
print self.value
print self.right_node
def insert_node(self, insert_value):
if insert_value == self.value:
return false
if insert_value > self.value:
if self.right_node == None:
self.right_node = binary_search_tree(insert_value)
else:
self.right_node.insert_node(insert_value)
else:
if self.left_node == None:
self.left_node = binary_search_tree(insert_value)
else:
self.left_node.insert_node(insert_value)
return true
def get_node_count(self):
return_count = 1
if self.right_node != None:
return_count += self.right_node.get_node_count()
if self.left_node != None:
return_count += self.left_node.get_node_count()
return return_count
def get_height(self):
return_height = 1
return_right_height = 0
return_left_height = 0
if self.right_node != None:
return_right_height += self.right_node.get_height()
if self.left_node != None:
return_left_height += self.left_node.get_height()
return_height += max(return_right_height,return_left_height)
def get_min(self):
if self.left_node == None:
return self.value
else:
return self.left_node.get_min()
def get_max(self):
if self.right_node == None:
return self.value
else:
return self.right_node.get_max()
def is_binary_search_tree(self):
result = false
if self.left_node.value < self.left_node.get_max():
return result
else:
result = (result and self.left_node.is_binary_search_tree())
if self.right_node.value > self.right_node.get_min():
return result
else:
result = (result and self.left_node.is_binary_search_tree())
return result
def is_in_tree(self):
#todo
return true
| mit | Python |
6cda967d56a3cdf672576260af4d1e0218771855 | replace DummyDocument by DummyDocument{Odt, Weasyprint} | Anaethelion/django-mapentity,Anaethelion/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity,Anaethelion/django-mapentity,makinacorpus/django-mapentity | mapentity/tests/views.py | mapentity/tests/views.py | from mapentity import views as mapentity_views
from .models import DummyModel
class DummyList(mapentity_views.MapEntityList):
model = DummyModel
class DummyLayer(mapentity_views.MapEntityLayer):
model = DummyModel
class DummyJsonList(mapentity_views.MapEntityJsonList, DummyList):
pass
class DummyFormat(mapentity_views.MapEntityFormat):
model = DummyModel
class DummyDocumentOdt(mapentity_views.MapEntityDocumentOdt):
model = DummyModel
class DummyDocumentWeasyprint(mapentity_views.MapEntityDocumentWeasyprint):
model = DummyModel
class DummyDetail(mapentity_views.MapEntityDetail):
model = DummyModel
class DummyCreate(mapentity_views.MapEntityCreate):
model = DummyModel
class DummyUpdate(mapentity_views.MapEntityUpdate):
model = DummyModel
class DummyDelete(mapentity_views.MapEntityDelete):
model = DummyModel
| from mapentity import views as mapentity_views
from .models import DummyModel
class DummyList(mapentity_views.MapEntityList):
model = DummyModel
class DummyLayer(mapentity_views.MapEntityLayer):
model = DummyModel
class DummyJsonList(mapentity_views.MapEntityJsonList, DummyList):
pass
class DummyFormat(mapentity_views.MapEntityFormat):
model = DummyModel
class DummyDocument(mapentity_views.MapEntityDocument):
model = DummyModel
class DummyDetail(mapentity_views.MapEntityDetail):
model = DummyModel
class DummyCreate(mapentity_views.MapEntityCreate):
model = DummyModel
class DummyUpdate(mapentity_views.MapEntityUpdate):
model = DummyModel
class DummyDelete(mapentity_views.MapEntityDelete):
model = DummyModel
| bsd-3-clause | Python |
cbc9c3d075f8128e25aa4707998b89eeeff903a8 | Add check for mainConnection being None, which will set it | ollien/Timpani,ollien/Timpani,ollien/Timpani | py/blog.py | py/blog.py | import database
import configmanager
def getMainConnection():
return database.ConnectionManager.getConnection("main")
mainConnection = getMainConnection()
def getPosts(connection = mainConnection):
global mainConnection
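# The shared connection may not have been available at import time; create it lazily on first use.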
if connection == mainConnection and mainConnection == None:
mainConnection = getMainConnection()
connection = mainConnection
posts = {} #Will be a dict formatted as such {postId: {post: $POST_OBJECT_FROM_DATABASE, tags: [$TAGS_FROM_DATABASE]}}
postsAndTags = connection.session.query(database.tables.Post, database.tables.Tag).outerjoin(database.tables.TagRelation, database.tables.Tag)
for result in postsAndTags:
post, tag = result
if post.id in posts.keys():
posts[post.id]["tags"].append(tag)
else:
posts[post.id] = {"post": post, "tags": []}
if tag != None:
posts[post.id]["tags"].append(tag)
return posts
def addPost(title, body, time_posted, author, tags, connection = mainConnection):
global mainConnection
if connection == mainConnection and mainConnection == None:
print("setting...")
mainConnection = getMainConnection()
connection = mainConnection
if type(tags) == str:
tags = tags.split(" ")
post = database.tables.Post(title = title, body = body, time_posted = time_posted, author = author)
connection.session.add(post)
connection.session.flush()
for tag in tags:
tag = database.tables.Tag(name = tag)
connection.session.add(tag)
connection.session.flush()
relation = database.tables.TagRelation(post_id = post.id, tag_id = tag.id)
connection.session.add(relation)
connection.session.commit()
| import database
import configmanager
mainConnection = database.ConnectionManager.getConnection("main")
def getPosts(connection = mainConnection):
posts = {} #Will be a dict formatted as such {postId: {post: $POST_OBJECT_FROM_DATABASE, tags: [$TAGS_FROM_DATABASE]}}
postsAndTags = connection.session.query(database.tables.Post, database.tables.Tag).outerjoin(database.tables.TagRelation, database.tables.Tag)
for result in postsAndTags:
post, tag = result
if post.id in posts.keys():
posts[post.id]["tags"].append(tag)
else:
posts[post.id] = {"post": post, "tags": []}
if tag != None:
posts[post.id]["tags"].append(tag)
return posts
def addPost(title, body, time_posted, author, tags, connection = mainConnection):
if type(tags) == str:
tags = tags.split(" ")
post = database.tables.Post(title = title, body = body, time_posted = time_posted, author = author)
connection.session.add(post)
connection.session.flush()
for tag in tags:
tag = database.tables.Tag(name = tag)
connection.session.add(tag)
connection.session.flush()
relation = database.tables.TagRelation(post_id = post.id, tag_id = tag.id)
connection.session.add(relation)
connection.session.commit()
| mit | Python |
3aeaea75bd5b1db66e6dea9e5c1f41f3f9c6ed9e | Enable rudimentary logging to stderr in the Python start script. | NCI-Cloud/reporting-api,NCI-Cloud/reporting-api,NeCTAR-RC/reporting-api,NeCTAR-RC/reporting-api | bin/start.py | bin/start.py | #!/usr/bin/python
import sys, os
from paste.deploy import loadapp, loadserver
from paste import httpserver
import logging
if __name__ == '__main__':
logging.basicConfig()
realfile = os.path.realpath(__file__)
realdir = os.path.dirname(realfile)
pardir = os.path.realpath(os.path.join(realdir, os.pardir))
confdir = os.path.join(pardir, 'conf')
paste_config = os.path.join(confdir, 'paste.config')
sys.path.append(pardir)
reporting_app = loadapp('config:' + paste_config)
server = loadserver('config:' + paste_config)
server(reporting_app)
| #!/usr/bin/python
import sys, os
from paste.deploy import loadapp, loadserver
from paste import httpserver
if __name__ == '__main__':
realfile = os.path.realpath(__file__)
realdir = os.path.dirname(realfile)
pardir = os.path.realpath(os.path.join(realdir, os.pardir))
confdir = os.path.join(pardir, 'conf')
paste_config = os.path.join(confdir, 'paste.config')
sys.path.append(pardir)
reporting_app = loadapp('config:' + paste_config)
server = loadserver('config:' + paste_config)
server(reporting_app)
| apache-2.0 | Python |
a587d48694690957934a159bad98cacd3f012a6a | Change contextlib import to handle the new location in Python 3. | danielsamuels/cms,jamesfoley/cms,jamesfoley/cms,jamesfoley/cms,dan-gamble/cms,danielsamuels/cms,dan-gamble/cms,lewiscollard/cms,jamesfoley/cms,dan-gamble/cms,lewiscollard/cms,danielsamuels/cms,lewiscollard/cms | cms/tests/test_externals.py | cms/tests/test_externals.py | from django.test import TestCase
from ..externals import External
try:
from contextlib import GeneratorContextManager
except ImportError:
from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
| from django.test import TestCase
from ..externals import External
from contextlib import GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
| bsd-3-clause | Python |
9d57f85837d577e733cf1a45e560fc763a62be1f | Implement a custom exception type for undefined variable errors | dajose/cookiecutter,dajose/cookiecutter,Springerle/cookiecutter,pjbull/cookiecutter,audreyr/cookiecutter,audreyr/cookiecutter,stevepiercy/cookiecutter,stevepiercy/cookiecutter,terryjbates/cookiecutter,michaeljoseph/cookiecutter,luzfcb/cookiecutter,hackebrot/cookiecutter,pjbull/cookiecutter,hackebrot/cookiecutter,Springerle/cookiecutter,luzfcb/cookiecutter,michaeljoseph/cookiecutter,terryjbates/cookiecutter,willingc/cookiecutter,willingc/cookiecutter | cookiecutter/exceptions.py | cookiecutter/exceptions.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.exceptions
-----------------------
All exceptions used in the Cookiecutter code base are defined here.
"""
class CookiecutterException(Exception):
"""
Base exception class. All Cookiecutter-specific exceptions should subclass
this class.
"""
class NonTemplatedInputDirException(CookiecutterException):
"""
Raised when a project's input dir is not templated.
The name of the input directory should always contain a string that is
rendered to something else, so that input_dir != output_dir.
"""
class UnknownTemplateDirException(CookiecutterException):
"""
Raised when Cookiecutter cannot determine which directory is the project
template, e.g. more than one dir appears to be a template dir.
"""
class MissingProjectDir(CookiecutterException):
"""
Raised during cleanup when remove_repo() can't find a generated project
directory inside of a repo.
"""
class ConfigDoesNotExistException(CookiecutterException):
"""
Raised when get_config() is passed a path to a config file, but no file
is found at that path.
"""
class InvalidConfiguration(CookiecutterException):
"""
Raised if the global configuration file is not valid YAML or is
badly constructed.
"""
class UnknownRepoType(CookiecutterException):
"""
Raised if a repo's type cannot be determined.
"""
class VCSNotInstalled(CookiecutterException):
"""
Raised if the version control system (git or hg) is not installed.
"""
class ContextDecodingException(CookiecutterException):
"""
Raised when a project's JSON context file can not be decoded.
"""
class OutputDirExistsException(CookiecutterException):
"""
Raised when the output directory of the project exists already.
"""
class InvalidModeException(CookiecutterException):
"""
Raised when cookiecutter is called with both `no_input==True` and
`replay==True` at the same time.
"""
class FailedHookException(CookiecutterException):
"""
Raised when a hook script fails
"""
class UndefinedVariableInTemplate(CookiecutterException):
"""Raised when a template uses a variable which is not defined in the
context.
"""
def __init__(self, message, error, context):
self.message = message
self.error = error
self.context = context
def __str__(self):
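# format(**locals()) passes `self` in as a keyword, so the template can reach attributes like {self.error.message}.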
return (
"{self.message}. "
"Error message: {self.error.message}. "
"Context: {self.context}"
).format(**locals())
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.exceptions
-----------------------
All exceptions used in the Cookiecutter code base are defined here.
"""
class CookiecutterException(Exception):
"""
Base exception class. All Cookiecutter-specific exceptions should subclass
this class.
"""
class NonTemplatedInputDirException(CookiecutterException):
"""
Raised when a project's input dir is not templated.
The name of the input directory should always contain a string that is
rendered to something else, so that input_dir != output_dir.
"""
class UnknownTemplateDirException(CookiecutterException):
"""
Raised when Cookiecutter cannot determine which directory is the project
template, e.g. more than one dir appears to be a template dir.
"""
class MissingProjectDir(CookiecutterException):
"""
Raised during cleanup when remove_repo() can't find a generated project
directory inside of a repo.
"""
class ConfigDoesNotExistException(CookiecutterException):
"""
Raised when get_config() is passed a path to a config file, but no file
is found at that path.
"""
class InvalidConfiguration(CookiecutterException):
"""
Raised if the global configuration file is not valid YAML or is
badly constructed.
"""
class UnknownRepoType(CookiecutterException):
"""
Raised if a repo's type cannot be determined.
"""
class VCSNotInstalled(CookiecutterException):
"""
Raised if the version control system (git or hg) is not installed.
"""
class ContextDecodingException(CookiecutterException):
"""
Raised when a project's JSON context file can not be decoded.
"""
class OutputDirExistsException(CookiecutterException):
"""
Raised when the output directory of the project exists already.
"""
class InvalidModeException(CookiecutterException):
"""
Raised when cookiecutter is called with both `no_input==True` and
`replay==True` at the same time.
"""
class FailedHookException(CookiecutterException):
"""
Raised when a hook script fails
"""
| bsd-3-clause | Python |
278069a0637f7f329ceaff0975e3b95d609a7b9f | Improve the command line interface | cosmoscope/cosmoscope | cosmoscope/cli.py | cosmoscope/cli.py | # -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
from .core.server import launch
@click.command()
@click.option('--server-address', default="tcp://127.0.0.1:4242", help="Server IP address.")
@click.option('--publisher-address', default="tcp://127.0.0.1:4243", help="Publisher IP address.")
def main(server_address=None, publisher_address=None):
"""Console interface for the cosmoscope server."""
launch(server_address, publisher_address)
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
| # -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for cosmoscope."""
click.echo("Replace this message by putting your code into "
"cosmoscope.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
| mit | Python |
780002f5df7278776a26bb65b4b0f3dce84537a1 | Allow unit tests to run on iOS. (filesystem restriction) | cropleyb/pentai,cropleyb/pentai,cropleyb/pentai | pentai/db/test_db.py | pentai/db/test_db.py | import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
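# `initialised` only exists after the first successful call, so touching it may raise NameError on the first run.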
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
# Use kivy's user_data_dir so we're guaranteed write access
os.environ['KIVY_NO_CONSOLELOG'] = ''
from kivy.app import App
a = App()
d = a.user_data_dir
z_m.set_db(os.path.join(d, "test.db"))
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
| import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
z_m.set_db("test.db")
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler('test.log')
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
| mit | Python |
c7dbbfff233f5b04cc1f99120a0f602daa8683a7 | load c functions on demand | lazka/pgi,lazka/pgi | pgi/gitypes/_util.py | pgi/gitypes/_util.py | # Copyright 2012 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
from ctypes import CDLL
_so_mapping = {
"glib-2.0": "libglib-2.0.so.0",
"gobject-2.0": "libgobject-2.0.so.0",
"girepository-1.0": "libgirepository-1.0.so.1",
}
load = lambda name: CDLL(_so_mapping[name])
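# Data descriptor that resolves the C symbol lazily: the first attribute access sets argtypes/restype and caches the bound function on the owner class.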
class _CMethod(object):
def __init__(self, *args):
self.args = args
def __get__(self, instance, owner):
lib, name, prefix, ret, args, wrap = self.args
func = getattr(lib, prefix + name)
func.argtypes = args
func.restype = ret
if wrap:
setattr(owner, name, (lambda f: lambda *x: f(*x))(func))
return getattr(instance, name)
setattr(owner, name, func)
return func
def wrap_class(lib, base, ptr, prefix, methods):
for name, ret, args in methods:
if args and args[0] == ptr:
setattr(ptr, name, _CMethod(lib, name, prefix, ret, args, True))
else:
setattr(base, name, _CMethod(lib, name, prefix, ret, args, False))
| # Copyright 2012 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
_dll_cache = {}
_so_mapping = {
"glib-2.0": "libglib-2.0.so.0",
"gobject-2.0": "libgobject-2.0.so.0",
"girepository-1.0": "libgirepository-1.0.so.1",
}
def load(name):
global _dll_cache, _so_mapping
if name not in _dll_cache:
from ctypes import CDLL
_dll_cache[name] = CDLL(_so_mapping[name])
return _dll_cache[name]
count = 0
def _debug(f, name, base):
def _add(*args):
global count
count += 1
print count, base.__name__ + "." + name
return f(*args)
return _add
def wrap_class(lib, base, ptr, prefix, methods):
for name, ret, args in methods:
func = getattr(lib, prefix + name)
func.argtypes = args
func.restype = ret
if args and args[0] == ptr:
add_self = lambda f: lambda *args: f(*args)
setattr(ptr, name, add_self(func))
#setattr(ptr, name, _debug(func, name, base))
else:
setattr(base, name, func)
| lgpl-2.1 | Python |
c423a1198eb94866062fa88cc6e7bf0e83543291 | bump version to 2.2.6 | ababic/wagtailmodeladmin,rkhleics/wagtailmodeladmin,rkhleics/wagtailmodeladmin,ababic/wagtailmodeladmin | wagtailmodeladmin/__init__.py | wagtailmodeladmin/__init__.py | __version__ = '2.2.6'
| __version__ = '2.2.5'
| mit | Python |
c026de1bb6275dfe92cba55c2ec05ddf05656411 | update thread local set / get for bootstrap version | qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq | corehq/apps/style/utils.py | corehq/apps/style/utils.py | import threading
_thread_local = threading.local()
BOOTSTRAP_2 = 'bootstrap-2'
BOOTSTRAP_3 = 'bootstrap-3'
def get_bootstrap_version():
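# Fall back to Bootstrap 2 when no version has been set for the current thread.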
try:
bootstrap_version = _thread_local.BOOTSTRAP_VERSION
except AttributeError:
bootstrap_version = BOOTSTRAP_2
return bootstrap_version
def set_bootstrap_version3():
_thread_local.BOOTSTRAP_VERSION = BOOTSTRAP_3
def set_bootstrap_version2():
_thread_local.BOOTSTRAP_VERSION = BOOTSTRAP_2
| from thread import _local
BOOTSTRAP_2 = 'bootstrap-2'
BOOTSTRAP_3 = 'bootstrap-3'
def get_bootstrap_version():
bootstrap_version = _local.BOOTSTRAP_VERSION
if bootstrap_version is None:
bootstrap_version = BOOTSTRAP_2
return bootstrap_version
def set_bootstrap_version3():
_local.BOOTSTRAP_VERSION = BOOTSTRAP_3
def set_bootstrap_version2():
_local.BOOTSTRAP_VERSION = BOOTSTRAP_2
| bsd-3-clause | Python |
14adb95b1ade833ccbf1b3dbe7359f08c7f02923 | Update digis.py | anodos-ru/catalog,anodos-ru/catalog,anodos-ru/catalog | updaters/digis.py | updaters/digis.py | from datetime import date
from datetime import datetime
from catalog.models import Updater
from catalog.models import Distributor
from catalog.models import Stock
from catalog.models import Currency
from catalog.models import Unit
from catalog.models import CategorySynonym
from catalog.models import VendorSynonym
from catalog.models import Category
from catalog.models import Vendor
from catalog.models import Product
from catalog.models import Party
from catalog.models import PriceType
from catalog.models import Price
class Runner:
name = 'Digis'
alias = 'digis'
def __init__(self):
# Поставщик
self.distributor = Distributor.objects.take(alias = self.alias, name = self.name)
# Загрузчик
self.updater = Updater.objects.take(alias = self.alias, name = self.name, distributor = self.distributor)
# Склад
self.stock = Stock.objects.take(
alias = self.alias + '-stock',
name = self.name+': склад',
delivery_time_min = 3,
delivery_time_max = 10,
distributor = self.distributor)
Party.objects.clear(stock = self.stock)
# Транзит
self.transit = Stock.objects.take(
alias = self.alias + '-transit',
name = self.name + ': транзит',
delivery_time_min = 10,
delivery_time_max = 40,
distributor = self.distributor)
Party.objects.clear(stock=self.transit)
# Единица измерения
self.default_unit = Unit.objects.take(alias = 'pcs', name = 'шт.')
# Типы цен
self.rp = PriceType.objects.take(alias = 'RP', name = 'Розничная цена')
self.dp = PriceType.objects.take(alias = 'DP', name = 'Диллерская цена')
# Валюты
self.rub = Currency.objects.take(alias = 'RUB', name = 'р.', full_name = 'Российский рубль', rate = 1, quantity = 1)
self.usd = Currency.objects.take(alias = 'USD', name = '$', full_name = 'US Dollar', rate = 60, quantity = 1)
self.eur = Currency.objects.take(alias = 'EUR', name = 'EUR', full_name = 'Евро', rate = 80, quantity = 1)
| from datetime import date
from datetime import datetime
from catalog.models import Updater
from catalog.models import Distributor
from catalog.models import Stock
from catalog.models import Currency
from catalog.models import Unit
from catalog.models import CategorySynonym
from catalog.models import VendorSynonym
from catalog.models import Category
from catalog.models import Vendor
from catalog.models import Product
from catalog.models import Party
from catalog.models import PriceType
from catalog.models import Price
class Runner:
def __init__(self):
# Инициируем переменные
self.name = 'Treolan'
self.alias = 'treolan'
self.message = ''
# Получаем необходимые объекты
self.distributor = Distributor.objects.take(alias=self.alias, name=self.name)
self.updater = Updater.objects.take(alias=self.alias, name=self.name, distributor=self.distributor)
self.stock = Stock.objects.take(alias=self.alias+'-stock', name=self.name+': склад', delivery_time_min = 3, delivery_time_max = 10, distributor=self.distributor)
self.transit = Stock.objects.take(alias=self.alias+'-transit', name=self.name+': транзит', delivery_time_min = 10, delivery_time_max = 40, distributor=self.distributor)
self.default_unit = Unit.objects.take(alias='pcs', name='шт.')
self.rp = PriceType.objects.take(alias='RP', name='Розничная цена')
self.dp = PriceType.objects.take(alias='DP', name='Диллерская цена')
self.rub = Currency.objects.take(alias='RUB', name='р.', full_name='Российский рубль', rate=1, quantity=1)
self.usd = Currency.objects.take(alias='USD', name='$', full_name='US Dollar', rate=60, quantity=1)
self.eur = Currency.objects.take(alias='EUR', name='EUR', full_name='Евро', rate=80, quantity=1)
# Удаляем неактуальные партии
Party.objects.clear(stock=self.stock)
Party.objects.clear(stock=self.transit)
# Используемые ссылки
self.url_login = ''
self.url_price = ''
| mit | Python |
4efa9c87264eabb6712f4fb787ab0de42be18de6 | Move places urlpatterns to Django 2.0 preferred method | evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca | places/urls.py | places/urls.py | from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
| from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
| mit | Python |
4a540bb86aa9e1ef45358adc453f636f177efcfb | Remove print | bugsnag/bugsnag-python,overplumbum/bugsnag-python,overplumbum/bugsnag-python,bugsnag/bugsnag-python | bugsnag/django/__init__.py | bugsnag/django/__init__.py | from django.conf import settings
import bugsnag
def configure():
# Ignore django 404s by default
bugsnag.configuration.ignore_classes.append("django.http.Http404")
# Import Bugsnag settings from settings.py
django_bugsnag_settings = getattr(settings, 'BUGSNAG', {})
bugsnag.configure(**django_bugsnag_settings)
print bugsnag.configuration.api_key | from django.conf import settings
import bugsnag
def configure():
# Ignore django 404s by default
print "Configuring!"
bugsnag.configuration.ignore_classes.append("django.http.Http404")
# Import Bugsnag settings from settings.py
django_bugsnag_settings = getattr(settings, 'BUGSNAG', {})
bugsnag.configure(**django_bugsnag_settings)
print bugsnag.configuration.api_key | mit | Python |
ce0a013ad70362f6bdd52165d5b6c61a1bc82ba2 | Update bubble_sort.py (#535) | TheAlgorithms/Python | sorts/bubble_sort.py | sorts/bubble_sort.py | from __future__ import print_function
def bubble_sort(collection):
"""Pure implementation of bubble sort algorithm in Python
:param collection: some mutable ordered collection with heterogeneous
comparable items inside
:return: the same collection ordered by ascending
Examples:
>>> bubble_sort([0, 5, 3, 2, 2])
[0, 2, 2, 3, 5]
>>> bubble_sort([])
[]
>>> bubble_sort([-2, -5, -45])
[-45, -5, -2]
>>> bubble_sort([-23,0,6,-4,34])
[-23, -4, 0, 6, 34]
"""
length = len(collection)
for i in range(length-1):
swapped = False
for j in range(length-1-i):
if collection[j] > collection[j+1]:
swapped = True
collection[j], collection[j+1] = collection[j+1], collection[j]
if not swapped: break # Stop iteration if the collection is sorted.
return collection
if __name__ == '__main__':
try:
raw_input # Python 2
except NameError:
raw_input = input # Python 3
user_input = raw_input('Enter numbers separated by a comma:').strip()
unsorted = [int(item) for item in user_input.split(',')]
print(*bubble_sort(unsorted), sep=',')
| from __future__ import print_function
def bubble_sort(collection):
"""Pure implementation of bubble sort algorithm in Python
:param collection: some mutable ordered collection with heterogeneous
comparable items inside
:return: the same collection ordered by ascending
Examples:
>>> bubble_sort([0, 5, 3, 2, 2])
[0, 2, 2, 3, 5]
>>> bubble_sort([])
[]
>>> bubble_sort([-2, -5, -45])
[-45, -5, -2]
"""
length = len(collection)
for i in range(length-1):
swapped = False
for j in range(length-1-i):
if collection[j] > collection[j+1]:
swapped = True
collection[j], collection[j+1] = collection[j+1], collection[j]
if not swapped: break # Stop iteration if the collection is sorted.
return collection
if __name__ == '__main__':
try:
raw_input # Python 2
except NameError:
raw_input = input # Python 3
user_input = raw_input('Enter numbers separated by a comma:').strip()
unsorted = [int(item) for item in user_input.split(',')]
print(*bubble_sort(unsorted), sep=',')
| mit | Python |
00aa6bd37a6a134ea44be7d2073ecac3148495a5 | support for yaml added | josuebrunel/yahoo-oauth | yahoo_oauth/utils.py | yahoo_oauth/utils.py | from __future__ import absolute_import, unicode_literals
import json
import yaml
from rauth import OAuth1Service, OAuth2Service
services = {
'oauth1': dict(
SERVICE = OAuth1Service,
REQUEST_TOKEN_URL = "https://api.login.yahoo.com/oauth/v2/get_request_token",
ACCESS_TOKEN_URL = "https://api.login.yahoo.com/oauth/v2/get_token",
AUTHORIZE_TOKEN_URL = "https://api.login.yahoo.com/oauth/v2/request_auth"
),
'oauth2': dict(
SERVICE = OAuth2Service,
AUTHORIZE_TOKEN_URL = "https://api.login.yahoo.com/oauth2/request_auth",
ACCESS_TOKEN_URL = "https://api.login.yahoo.com/oauth2/get_token"
)
}
CALLBACK_URI = 'oob'
def json_write_data(json_data, filename):
"""Write json data into a file
"""
with open(filename, 'w') as fp:
json.dump(json_data, fp, indent=4, sort_keys=True, ensure_ascii=False)
return True
return False
def json_get_data(filename):
"""Get data from json file
"""
with open(filename) as fp:
json_data = json.load(fp)
return json_data
return False
def yaml_get_data(filename):
"""Get data from .yml file
"""
with open(filename, 'rb') as fd:
yaml_data = yaml.load(fd)
return yaml_data
return False
def yaml_write_data(yaml_data, filename):
"""Write data into a .yml file
"""
with open(filename, 'w') as fd:
yaml.dump(yaml_data, fd, default_flow_style=False)
return True
return False
| from __future__ import absolute_import, unicode_literals
import json
from rauth import OAuth1Service, OAuth2Service
services = {
'oauth1': dict(
SERVICE = OAuth1Service,
REQUEST_TOKEN_URL = "https://api.login.yahoo.com/oauth/v2/get_request_token",
ACCESS_TOKEN_URL = "https://api.login.yahoo.com/oauth/v2/get_token",
AUTHORIZE_TOKEN_URL = "https://api.login.yahoo.com/oauth/v2/request_auth"
),
'oauth2': dict(
SERVICE = OAuth2Service,
AUTHORIZE_TOKEN_URL = "https://api.login.yahoo.com/oauth2/request_auth",
ACCESS_TOKEN_URL = "https://api.login.yahoo.com/oauth2/get_token"
)
}
CALLBACK_URI = 'oob'
def json_write_data(json_data, filename):
"""Write json data into a file
"""
with open(filename, 'w') as fp:
json.dump(json_data, fp, indent=4, sort_keys=True, ensure_ascii=False)
return True
return False
def json_get_data(filename):
"""Get data from json file
"""
with open(filename) as fp:
json_data = json.load(fp)
return json_data
return False
| mit | Python |
c3426ba34635c13c6d7aad95acf4507b97b0b256 | update website | it-projects-llc/pos-addons,it-projects-llc/pos-addons,it-projects-llc/pos-addons | pos_mobile_restaurant/__manifest__.py | pos_mobile_restaurant/__manifest__.py | {
"name": """POS Mobile UI for Waiters""",
"summary": """Your Restaurant in the Mobile Version""",
"category": "Point of Sale",
"live_test_url": "http://apps.it-projects.info/shop/product/pos-mobile-ui?version=11.0",
"images": ["images/pos_mobile_restaurant.png"],
"version": "12.0.1.2.6",
"application": False,
"author": "IT-Projects LLC, Dinar Gabbasov",
"support": "apps@it-projects.info",
"website": "https://apps.odoo.com/apps/modules/12.0/pos_mobile_restaurant/",
"license": "LGPL-3",
"price": 100.00,
"currency": "EUR",
"depends": [
"pos_restaurant_base",
"pos_mobile",
],
"external_dependencies": {"python": [], "bin": []},
"data": [
"views/pos_mobile_restaurant_template.xml",
"views/pos_mobile_restaurant_view.xml",
],
"qweb": [
"static/src/xml/pos.xml",
],
"demo": [
],
"post_load": None,
"pre_init_hook": None,
"post_init_hook": None,
"auto_install": True,
"installable": True,
}
| {
"name": """POS Mobile UI for Waiters""",
"summary": """Your Restaurant in the Mobile Version""",
"category": "Point of Sale",
"live_test_url": "http://apps.it-projects.info/shop/product/pos-mobile-ui?version=11.0",
"images": ["images/pos_mobile_restaurant.png"],
"version": "12.0.1.2.6",
"application": False,
"author": "IT-Projects LLC, Dinar Gabbasov",
"support": "apps@it-projects.info",
"website": "https://it-projects.info/team/GabbasovDinar",
"license": "LGPL-3",
"price": 100.00,
"currency": "EUR",
"depends": [
"pos_restaurant_base",
"pos_mobile",
],
"external_dependencies": {"python": [], "bin": []},
"data": [
"views/pos_mobile_restaurant_template.xml",
"views/pos_mobile_restaurant_view.xml",
],
"qweb": [
"static/src/xml/pos.xml",
],
"demo": [
],
"post_load": None,
"pre_init_hook": None,
"post_init_hook": None,
"auto_install": True,
"installable": True,
}
| mit | Python |
baf36a6b7bfbe149eeefde774be2b01d4b2f7167 | Add cookie sharing | andychase/codebook,andychase/codebook | codebook/settingslive.py | codebook/settingslive.py | import dj_database_url
from .settings import *
DEBUG = False
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static asset configuration
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Parse database configuration from $DATABASE_URL
DATABASES['default'] = dj_database_url.config()
DATABASES['default']['CONN_MAX_AGE'] = 500
DATABASE_POOL_ARGS = {
'max_overflow': 10,
'pool_size': 5,
'recycle': 300
}
STATIC_ROOT = 'static'
static_path = os.path.join(BASE_DIR, 'static')
if not os.path.isdir(static_path):
os.mkdir(static_path)
STATICFILES_DIRS = (
static_path,
)
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
# Set session cookie to work on subdomains
SESSION_COOKIE_DOMAIN = '.snc.io'
# Use cached templates
if not DEBUG:
del TEMPLATES[0]['APP_DIRS']
TEMPLATES[0]['DIRS'] = []
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
])
]
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_USE_TLS = True
EMAIL_HOST = os.environ.get('MAILGUN_SMTP_SERVER')
EMAIL_HOST_USER = os.environ.get('MAILGUN_SMTP_LOGIN')
EMAIL_HOST_PASSWORD = os.environ.get('MAILGUN_SMTP_PASSWORD')
EMAIL_PORT = os.environ.get('MAILGUN_SMTP_PORT')
| import dj_database_url
from .settings import *
DEBUG = False
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static asset configuration
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Parse database configuration from $DATABASE_URL
DATABASES['default'] = dj_database_url.config()
DATABASES['default']['CONN_MAX_AGE'] = 500
DATABASE_POOL_ARGS = {
'max_overflow': 10,
'pool_size': 5,
'recycle': 300
}
STATIC_ROOT = 'static'
static_path = os.path.join(BASE_DIR, 'static')
if not os.path.isdir(static_path):
os.mkdir(static_path)
STATICFILES_DIRS = (
static_path,
)
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
# Use cached templates
if not DEBUG:
del TEMPLATES[0]['APP_DIRS']
TEMPLATES[0]['DIRS'] = []
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
])
]
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_USE_TLS = True
EMAIL_HOST = os.environ.get('MAILGUN_SMTP_SERVER')
EMAIL_HOST_USER = os.environ.get('MAILGUN_SMTP_LOGIN')
EMAIL_HOST_PASSWORD = os.environ.get('MAILGUN_SMTP_PASSWORD')
EMAIL_PORT = os.environ.get('MAILGUN_SMTP_PORT')
| mit | Python |
5c2e19150e4de1d7f5e7f43061223f5b0bce1008 | clean up clm sampling a bit | xiamike/stanford-ctc,xiamike/stanford-ctc,amaas/stanford-ctc,xiamike/stanford-ctc,amaas/stanford-ctc,amaas/stanford-ctc,xiamike/stanford-ctc,amaas/stanford-ctc,xiamike/stanford-ctc,amaas/stanford-ctc,amaas/stanford-ctc,xiamike/stanford-ctc | ctc_fast/clm/sample_clm.py | ctc_fast/clm/sample_clm.py | import numpy as np
from srilm import LM
from decoder_config import LM_ARPA_FILE, SPACE, LM_ORDER
'''
Sample text from character language model
'''
def sample_continuation(s, lm, order, alpha=1.0):
# Higher alpha -> more and more like most likely sequence
n = lm.vocab.max_interned()
probs = np.empty(n, dtype=np.float64)
for k in range(1, n + 1):
# NOTE Assumes log10
probs[k-1] = (10 ** lm.logprob_strings(lm.vocab.extern(k), s[0:5])) ** alpha
probs = probs / np.sum(probs)
c = np.random.choice(range(1, n + 1), p=probs)
c = lm.vocab.extern(c)
return [c]
if __name__ == '__main__':
print 'Loading LM...'
lm = LM(LM_ARPA_FILE)
print 'Done.'
SAMPLE_LENGTH = 100
ALPHA = 1.0
for j in range(5):
# NOTE List is in reverse
sample_string = ['<s>']
for k in range(SAMPLE_LENGTH):
# Work in reverse
sample_string = sample_continuation(sample_string, lm, LM_ORDER, alpha=ALPHA) + sample_string
# Don't sample after </s>, get gibberish
if sample_string[0] == '</s>':
break
s = [c if c != SPACE else ' ' for c in sample_string]
print ''.join(s[::-1])
| import numpy as np
from srilm import LM
from decoder_config import LM_ARPA_FILE, SPACE
'''
Sample text from character language model
'''
def sample_continuation(s, lm, order):
n = lm.vocab.max_interned()
probs = np.empty(n, dtype=np.float64)
for k in range(1, n + 1):
probs[k-1] = 10 ** lm.logprob_strings(lm.vocab.extern(k), s[0:5])
probs = probs / np.sum(probs)
c = np.random.choice(range(1, n + 1), p=probs)
c = lm.vocab.extern(c)
return [c]
if __name__ == '__main__':
#clm = LM(LM_ARPA_FILE)
lm = LM('/scail/data/group/deeplearning/u/zxie/biglm/lms/biglm.5g.arpa')
ORDER = 5
SAMPLE_LENGTH = 100
for j in range(5):
sample_string = ['<s>']
for k in range(SAMPLE_LENGTH):
# Work in reverse
sample_string = sample_continuation(sample_string, lm, ORDER) + sample_string
s = [c if c != SPACE else ' ' for c in sample_string]
print ''.join(s[::-1])
| apache-2.0 | Python |
75e41b1b6e07bd14e1fd042d42b2450942fecad1 | Add in simple OtherNames admin | texas/tx_people,texas/tx_people | tx_people/admin.py | tx_people/admin.py | from django.contrib import admin
from django.db.models import Count
from django.utils.translation import ugettext_lazy as _
from . import models
class ParentOrganizationFilter(admin.SimpleListFilter):
title = _('Parent Organization')
parameter_name = 'parent'
def lookups(self, request, model_admin):
return list(models.Organization.objects
.annotate(children_count=Count('children'))
.filter(children_count__gt=1)
.values_list('pk', 'name')) + [('none', 'No Parent', ), ]
def queryset(self, request, queryset):
value = self.value()
if value == 'none':
return queryset.filter(parent_id__isnull=True)
elif value:
return queryset.filter(parent__id=value)
return queryset
class ContactDetailAdmin(admin.ModelAdmin):
raw_id_fields = ('sources', )
class IdentifierAdmin(admin.ModelAdmin):
list_display = ('scheme', 'identifier', )
list_display_links = ('identifier', )
list_filter = ('scheme', )
search_fields = ('identifier', 'people__name', )
class LinkAdmin(admin.ModelAdmin):
list_display = ('url', 'note', )
search_fields = ('url', 'note', )
class MembershipAdmin(admin.ModelAdmin):
list_display = ('person', 'organization', 'post', )
list_filter = ('organization', )
raw_id_fields = ('links', 'sources', )
search_fields = ('person__name', 'organization__name', 'post__label', )
class OrganizationAdmin(admin.ModelAdmin):
list_display = ('name', 'parent', )
list_filter = (ParentOrganizationFilter, )
raw_id_fields = ('identifiers', 'contact_details', 'links', 'sources', )
search_fields = ('name', )
class OtherNameAdmin(admin.ModelAdmin):
list_display = ('name', 'start_date', 'end_date', )
search_fields = ('name', )
class PeopleAdmin(admin.ModelAdmin):
raw_id_fields = ('identifiers', 'contact_details', 'links', 'sources', )
search_fields = ('name', 'email', )
class PostAdmin(admin.ModelAdmin):
list_display = ('label', 'organization', )
search_fields = ('label', 'organization__name', )
class SourceAdmin(admin.ModelAdmin):
search_fields = ('link', )
admin.site.register(models.ContactDetail, ContactDetailAdmin)
admin.site.register(models.Identifier, IdentifierAdmin)
admin.site.register(models.Link, LinkAdmin)
admin.site.register(models.Membership, MembershipAdmin)
admin.site.register(models.Organization, OrganizationAdmin)
admin.site.register(models.OtherNames, OtherNameAdmin)
admin.site.register(models.Person, PeopleAdmin)
admin.site.register(models.Post, PostAdmin)
admin.site.register(models.Source, SourceAdmin)
| from django.contrib import admin
from django.db.models import Count
from django.utils.translation import ugettext_lazy as _
from . import models
class ParentOrganizationFilter(admin.SimpleListFilter):
title = _('Parent Organization')
parameter_name = 'parent'
def lookups(self, request, model_admin):
return list(models.Organization.objects
.annotate(children_count=Count('children'))
.filter(children_count__gt=1)
.values_list('pk', 'name')) + [('none', 'No Parent', ), ]
def queryset(self, request, queryset):
value = self.value()
if value == 'none':
return queryset.filter(parent_id__isnull=True)
elif value:
return queryset.filter(parent__id=value)
return queryset
class ContactDetailAdmin(admin.ModelAdmin):
raw_id_fields = ('sources', )
class IdentifierAdmin(admin.ModelAdmin):
list_display = ('scheme', 'identifier', )
list_display_links = ('identifier', )
list_filter = ('scheme', )
search_fields = ('identifier', 'people__name', )
class LinkAdmin(admin.ModelAdmin):
list_display = ('url', 'note', )
search_fields = ('url', 'note', )
class MembershipAdmin(admin.ModelAdmin):
list_display = ('person', 'organization', 'post', )
list_filter = ('organization', )
raw_id_fields = ('links', 'sources', )
search_fields = ('person__name', 'organization__name', 'post__label', )
class OrganizationAdmin(admin.ModelAdmin):
list_display = ('name', 'parent', )
list_filter = (ParentOrganizationFilter, )
raw_id_fields = ('identifiers', 'contact_details', 'links', 'sources', )
search_fields = ('name', )
class PeopleAdmin(admin.ModelAdmin):
raw_id_fields = ('identifiers', 'contact_details', 'links', 'sources', )
search_fields = ('name', 'email', )
class PostAdmin(admin.ModelAdmin):
list_display = ('label', 'organization', )
search_fields = ('label', 'organization__name', )
class SourceAdmin(admin.ModelAdmin):
search_fields = ('link', )
admin.site.register(models.ContactDetail, ContactDetailAdmin)
admin.site.register(models.Identifier, IdentifierAdmin)
admin.site.register(models.Link, LinkAdmin)
admin.site.register(models.Membership, MembershipAdmin)
admin.site.register(models.Organization, OrganizationAdmin)
admin.site.register(models.Person, PeopleAdmin)
admin.site.register(models.Post, PostAdmin)
admin.site.register(models.Source, SourceAdmin)
| apache-2.0 | Python |
a9dc245f99e5c29f3b11cadc77dcfa0f44274b74 | Add login_required decorator to protected sites | c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend | ctfbackend/backend/urls.py | ctfbackend/backend/urls.py | from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
from django.contrib.auth.decorators import login_required
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$',
auth_views.logout,
{'next_page': '/'},
name='auth_logout'),
url(r'^accounts/',
include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$',
views.HomeView.as_view(),
name='home'),
url(r'^submit$',
login_required(views.SubmitView.as_view()),
name='submit'),
url(r'^scores$',
views.ScoreboardView.as_view(),
name='scores'),
url(r'^chals$',
login_required(views.ChallengesView.as_view()),
name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy',
login_required(views.ChallengesView.as_view()),
name='buy_hint'),
url(r'^stats$',
views.StatisticsView.as_view(),
name='stats'),
]
| from django.conf.urls import url, include
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$', auth_views.logout, {'next_page': '/'}, name='auth_logout'),
url(r'^accounts/', include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^submit$', views.SubmitView.as_view(), name='submit'),
url(r'^scores$', views.ScoreboardView.as_view(), name='scores'),
url(r'^chals$', views.ChallengesView.as_view(), name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy', views.ChallengesView.as_view(), name='buy_hint'),
url(r'^stats$', views.StatisticsView.as_view(), name='stats'),
]
| agpl-3.0 | Python |
c621495de901918c433eee7d7f4517c85df9c09b | change default | freeman-lab/pim | pim/commands/init.py | pim/commands/init.py | import os
import click
import getpass
from clint.textui import puts, colored, indent, prompt, validators
from ..utils import write, retrieve
@click.command('init', short_help='initialize a project', options_metavar='<options>')
@click.option('--force/--no-force', default=False, help='Force overwrite existing files.')
def init(force):
"""
Initialize a folder with typical contents for a python module.
"""
puts('\nThis utility will help you set up a new python module for publishing on PyPi!\n')
puts('After answering a few questions, it will create a few files.')
puts('\nPress ^C at any time to bail!\n')
d = {}
d['name'] = prompt.query('name:', default=os.getcwd().split('/')[-1])
d['version'] = prompt.query('version:', default='1.0.0')
gitauthor = retrieve(['git', 'config', '--get', 'user.username'], default=getpass.getuser())
gitemail = retrieve(['git', 'config', '--get', 'user.email'], default=getpass.getuser() + '@gmail.com')
d['author'] = prompt.query('author:', default=gitauthor)
d['email'] = prompt.query('email:', default=gitemail)
gitrepo = 'https://github.com/' + d['author'] + '/' + d['name']
d['repository'] = prompt.query('repository:', default=gitrepo, validators=[])
d['readme'] = prompt.query('readme:', default='README.md')
d['license'] = prompt.query('license:', default='MIT')
d['entry'] = prompt.query('entry point:', default='main.py')
d['description'] = prompt.query('description:', default='', validators=[])
puts('\nReady to create the following files:')
with indent(4, quote=' -'):
puts('setup.py')
puts('setup.cfg')
puts('MANIFEST.in')
puts(d['name'] + '/' + '__init__.py')
puts(d['name'] + '/' + d['entry'] + '.py')
puts('requirements.txt')
finalize = prompt.yn('\nSound like a plan?', default='y')
if finalize:
write('requirements.txt')
write('setup.py', fields=d)
write('setup.cfg', fields=d, stringify=False)
write('MANIFEST.in', fields=d, stringify=False)
write('__init__.py', fields=d, folder=d['name'])
write(d['entry'], folder=d['name']) | import os
import click
import getpass
from clint.textui import puts, colored, indent, prompt, validators
from ..utils import write, retrieve
@click.command('init', short_help='initialize a project', options_metavar='<options>')
@click.option('--force/--no-force', default=False, help='Force overwrite existing files.')
def init(force):
"""
Initialize a folder with typical contents for a python module.
"""
puts('\nThis utility will help you set up a new python module for publishing on PyPi!\n')
puts('After answering a few questions, it will create a few files.')
puts('\nPress ^C at any time to bail!\n')
d = {}
d['name'] = prompt.query('name:', default=os.getcwd().split('/')[-1])
d['version'] = prompt.query('version:', default='1.0.0')
gitauthor = retrieve(['git', 'config', '--get', 'user.username'], default=getpass.getuser())
gitemail = retrieve(['git', 'config', '--get', 'user.email'], default=getpass.getuser() + '@gmail.com')
d['author'] = prompt.query('author:', default=gitauthor)
d['email'] = prompt.query('email:', default=gitemail)
gitrepo = 'https://github.com/' + d['author'] + '/' + d['name']
d['repository'] = prompt.query('repository:', default=gitrepo, validators=[])
d['readme'] = prompt.query('readme:', default='README.md')
d['license'] = prompt.query('license:', default='MIT')
d['entry'] = prompt.query('entry point:', default=d['name'] + '.py')
d['description'] = prompt.query('description:', default='', validators=[])
puts('\nReady to create the following files:')
with indent(4, quote=' -'):
puts('setup.py')
puts('setup.cfg')
puts('MANIFEST.in')
puts(d['name'] + '/' + '__init__.py')
puts(d['name'] + '/' + d['entry'] + '.py')
puts('requirements.txt')
finalize = prompt.yn('\nSound like a plan?', default='y')
if finalize:
write('requirements.txt')
write('setup.py', fields=d)
write('setup.cfg', fields=d, stringify=False)
write('MANIFEST.in', fields=d, stringify=False)
write('__init__.py', fields=d, folder=d['name'])
write(d['entry'], folder=d['name']) | mit | Python |
a31459a3612e616e21d1d484f6757b29cdb6a21c | Add comments to the demo views. | vetional/django-socketio,Solution4Future/django-socketio,clarkperkins/django-socketio,freylis/django-socketio,clarkperkins/django-socketio,DESHRAJ/django-socketio,stephenmcd/django-socketio,pekermert/django-socketio,clarkperkins/django-socketio,vetional/django-socketio,vetional/django-socketio,DESHRAJ/django-socketio,stephenmcd/django-socketio,stephenmcd/django-socketio,freylis/django-socketio,pekermert/django-socketio,Solution4Future/django-socketio,kostyll/django-socketio,DESHRAJ/django-socketio,pekermert/django-socketio,Solution4Future/django-socketio,freylis/django-socketio,kostyll/django-socketio,kostyll/django-socketio | django_socketio/example_project/chat/views.py | django_socketio/example_project/chat/views.py |
from django.shortcuts import get_object_or_404, render, redirect
from django.utils.html import strip_tags
from django_socketio import events
from chat.models import ChatRoom, ChatUser
@events.on_message(channel="^room-")
def message(request, socket, context, message):
"""
Event handler for a room receiving a message. First validates a
joining user's name and sends them the list of users.
"""
message = message[0]
room = get_object_or_404(ChatRoom, id=message["room"])
if message["action"] == "start":
user, created = room.users.get_or_create(name=strip_tags(message["name"]))
if not created:
socket.send({"action": "in-use"})
else:
context["user"] = user
users = [u.name for u in room.users.exclude(id=user.id)]
socket.send({"action": "started", "users": users})
user.session = socket.session.session_id
user.save()
joined = {"action": "join", "name": user.name, "id": user.id}
socket.send(joined)
socket.broadcast_channel(joined)
else:
try:
user = context["user"]
except KeyError:
return
if message["action"] == "message":
message["message"] = strip_tags(message["message"])
message["name"] = user.name
socket.send(message)
socket.broadcast_channel(message)
@events.on_finish(channel="^room-")
def finish(request, socket, context):
"""
Event handler for a socket session ending in a room. Broadcast
the user leaving and delete them from the DB.
"""
try:
user = context["user"]
except KeyError:
return
socket.broadcast_channel({"action": "leave", "name": user.name, "id": user.id})
user.delete()
def rooms(request, template="rooms.html"):
"""
Homepage - lists all rooms.
"""
context = {"rooms": ChatRoom.objects.all()}
return render(request, template, context)
def room(request, slug, template="room.html"):
"""
Show a room.
"""
context = {"room": get_object_or_404(ChatRoom, slug=slug)}
return render(request, template, context)
def create(request):
"""
Handles post from the "Add room" form on the homepage, and
redirects to the new room.
"""
name = request.POST.get("name")
if name:
room, created = ChatRoom.objects.get_or_create(name=name)
return redirect(room)
return redirect(rooms)
|
from django.shortcuts import get_object_or_404, render, redirect
from django.utils.html import strip_tags
from django_socketio import events
from chat.models import ChatRoom, ChatUser
@events.on_message(channel="^room-")
def message(request, socket, context, message):
message = message[0]
room = get_object_or_404(ChatRoom, id=message["room"])
if message["action"] == "start":
user, created = room.users.get_or_create(name=strip_tags(message["name"]))
if not created:
socket.send({"action": "in-use"})
else:
context["user"] = user
users = [u.name for u in room.users.exclude(id=user.id)]
socket.send({"action": "started", "users": users})
user.session = socket.session.session_id
user.save()
joined = {"action": "join", "name": user.name, "id": user.id}
socket.send(joined)
socket.broadcast_channel(joined)
else:
try:
user = context["user"]
except KeyError:
return
if message["action"] == "message":
message["message"] = strip_tags(message["message"])
message["name"] = user.name
socket.send(message)
socket.broadcast_channel(message)
@events.on_finish(channel="^room-")
def finish(request, socket, context):
try:
user = context["user"]
except KeyError:
return
socket.broadcast_channel({"action": "leave", "name": user.name, "id": user.id})
user.delete()
def rooms(request, template="rooms.html"):
context = {"rooms": ChatRoom.objects.all()}
return render(request, template, context)
def room(request, slug, template="room.html"):
context = {"room": get_object_or_404(ChatRoom, slug=slug)}
return render(request, template, context)
def create(request):
name = request.POST.get("name")
if name:
room, created = ChatRoom.objects.get_or_create(name=name)
return redirect(room)
return redirect(rooms)
| bsd-2-clause | Python |
2f535837dfc026f3804b684541e257fba2a9a66b | Fix typo. | mono/bockbuild,BansheeMediaPlayer/bockbuild,bl8/bockbuild,mono/bockbuild,bl8/bockbuild,BansheeMediaPlayer/bockbuild,bl8/bockbuild,BansheeMediaPlayer/bockbuild | profiles/mono-mac-release/packages.py | profiles/mono-mac-release/packages.py | import os
from bockbuild.darwinprofile import DarwinProfile
class MonoReleasePackages:
def __init__(self):
# Toolchain
self.packages.extend([
# 'autoconf.py',
# 'automake.py',
# 'libtool.py',
'xz.py',
'tar.py',
'gettext.py',
'pkg-config.py'
])
# # Base Libraries
self.packages.extend([
'libpng.py',
'libjpeg.py',
'libtiff.py',
'libgif.py',
'libxml2.py',
'freetype.py',
'fontconfig.py',
'pixman.py',
'cairo.py',
'libffi.py',
'glib.py',
'pango.py',
'atk.py',
'intltool.py',
'gdk-pixbuf.py',
'gtk+.py',
'libglade.py',
'sqlite.py',
'expat.py',
'ige-mac-integration.py'
])
# # Theme
self.packages.extend([
'librsvg.py',
'hicolor-icon-theme.py',
'gtk-engines.py',
# 'gtk-quartz-engine.py'
])
# Mono
self.packages.extend([
'mono-llvm.py',
'mono-master.py',
'libgdiplus.py',
'xsp.py',
'gtk-sharp-2.12-release.py',
'boo.py',
# 'nant.py',
'ironlangs.py',
'fsharp.py',
'mono-addins.py',
'mono-basic.py',
])
self.packages = [os.path.join('..', '..', 'packages', p) for p in self.packages]
| import os
from bockbuild.darwinprofile import DarwinProfile
class MonoReleasePackages:
def __init__(self):
# Toolchain
self.packages.extend([
# 'autoconf.py',
# 'automake.py',
# 'libtool.py',
'xz.py',
'tar.py',
'gettext.py',
'pkg-config.py'
])
# # Base Libraries
self.packages.extend([
'libpng.py',
'libjpeg.py',
'libtiff.py',
'libgif.py',
'libxml2.py',
'freetype.py',
'fontconfig.py',
'pixman.py',
'cairo.py',
'libffi.pg',
'glib.py',
'pango.py',
'atk.py',
'intltool.py',
'gdk-pixbuf.py',
'gtk+.py',
'libglade.py',
'sqlite.py',
'expat.py',
'ige-mac-integration.py'
])
# # Theme
self.packages.extend([
'librsvg.py',
'hicolor-icon-theme.py',
'gtk-engines.py',
# 'gtk-quartz-engine.py'
])
# Mono
self.packages.extend([
'mono-llvm.py',
'mono-master.py',
'libgdiplus.py',
'xsp.py',
'gtk-sharp-2.12-release.py',
'boo.py',
# 'nant.py',
'ironlangs.py',
'fsharp.py',
'mono-addins.py',
'mono-basic.py',
])
self.packages = [os.path.join('..', '..', 'packages', p) for p in self.packages]
| mit | Python |
dfbb93a132d9165e53478b579db92fa0b4cf02ce | Update compiler test infrastructure | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | tests/compiler/__init__.py | tests/compiler/__init__.py | import itertools
from thinglang import pipeline
from thinglang.compiler.opcodes import OpcodeCallInternal
from thinglang.lexer.values.identifier import Identifier
from thinglang.symbols.symbol_mapper import SymbolMapper
from thinglang.utils.source_context import SourceContext
SELF_ID, A_ID, B_ID, INST_ID, LST_ID, A_INST, B_INST, C_INST, IMPLICIT_ITERATOR_ID, IMPLICIT_ITERATION_ID = range(10)
VAL1_ID, VAL2_ID, INNER_ID = 0, 1, 2
INNER1_ID, INNER2_ID = 0, 1
STATIC_START = 6
PROGRAM, CONTAINER, A_THING, B_THING, C_THING = range(5)
TEMPLATE = """
thing Program
has number val1
has number val2
has Container inner
setup
number a = 0
number b = 0
Program inst = Program()
list<number> lst = [0, 1, 2]
A a_inst = A()
B b_inst = B(42)
C c_inst = C()
{}
does action with number n
Console.print("action")
thing Container
has number inner1
has number inner2
thing A
has number a1
thing B extends A
has number b1
setup with number b1
self.b1 = b1
thing C extends B
has number c1
"""
def compile_base(code='', thing_id=0, method_id=0):
context = pipeline.compile(SourceContext.wrap(TEMPLATE.format(code)))
entry = context.methods[(thing_id, method_id)]
return entry[1].instructions
TRIM_START = len(compile_base()) - 1
def compile_snippet(code):
instructions = compile_base(code)[TRIM_START:-1] # Pop off boilerplate and the return instruction
print('Bytecode result: {}'.format(instructions))
return instructions
def internal_call(target):
symbols = SymbolMapper()
return OpcodeCallInternal.from_reference(symbols.resolve_named([Identifier(x) for x in target.split('.')]))
| from thinglang import pipeline
from thinglang.compiler.opcodes import OpcodeCallInternal
from thinglang.lexer.values.identifier import Identifier
from thinglang.symbols.symbol_mapper import SymbolMapper
from thinglang.utils.source_context import SourceContext
SELF_ID, A_ID, B_ID, INST_ID, LST_ID, IMPLICIT_ITERATOR_ID, IMPLICIT_ITERATION_ID = 0, 1, 2, 3, 4, 5, 6
VAL1_ID, VAL2_ID, INNER_ID = 0, 1, 2
INNER1_ID, INNER2_ID = 0, 1
STATIC_START = 5
TEMPLATE = """
thing Program
has number val1
has number val2
has Container inner
setup
number a = 0
number b = 0
Program inst = Program()
list<number> lst = [0, 1, 2]
{}
does action with number n
Console.print("action")
thing Container
has number inner1
has number inner2
"""
def compile_base(code):
context = pipeline.compile(SourceContext.wrap(TEMPLATE.format(code)))
entry = context.methods[(0, 0)]
return entry[1].instructions
TRIM_START = len(compile_base('')) - 1
def compile_local(code):
return compile_base(code)[TRIM_START:-1] # Pop off boilerplate and the return instruction
def internal_call(target):
symbols = SymbolMapper()
return OpcodeCallInternal.from_reference(symbols.resolve_named([Identifier(x) for x in target.split('.')]))
| mit | Python |
1a2de64b2f9828dcdc7d345808251e8979cea23b | Change test tasks | GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek | geotrek/common/tests/test_tasks.py | geotrek/common/tests/test_tasks.py | # -*- encoding: utf-8 -*-
from django.test import TestCase
from geotrek.common.tasks import import_datas
from geotrek.common.models import FileType
class TasksTest(TestCase):
def setUp(self):
self.filetype = FileType.objects.create(type=u"Photographie")
def test_import_exceptions(self):
self.assertRaises(
ImportError, import_datas, filename='bombadil', name='haricot', module='toto')
def test_import_message_exception(self):
self.assertRaisesMessage(
ImportError,
"Failed to import parser class 'haricot' from module 'toto'",
import_datas,
filename='bombadil',
name='haricot',
module='toto'
)
| # -*- encoding: utf-8 -*-
from django.test import TestCase
from geotrek.common.tasks import import_datas
from geotrek.common.models import FileType
class TasksTest(TestCase):
def setUp(self):
self.filetype = FileType.objects.create(type=u"Photographie")
def test_import_exceptions(self):
self.assertRaises(
ImportError, import_datas, filename='bombadil', class_name='haricot', module_name='toto')
def test_import_message_exception(self):
self.assertRaisesMessage(
ImportError,
"Failed to import parser class 'haricot' from module 'toto'",
import_datas,
filename='bombadil',
class_name='haricot',
module_name='toto'
)
| bsd-2-clause | Python |
5372c246587c6097ba42c846d3965a78273ca555 | update price | it-projects-llc/misc-addons,it-projects-llc/misc-addons,it-projects-llc/misc-addons | web_debranding/__openerp__.py | web_debranding/__openerp__.py | {
'name': "Backend debranding",
'version': '1.0.5',
'author': 'IT-Projects LLC, Ivan Yelizariev',
'license': 'GPL-3',
'category': 'Debranding',
'website': 'https://twitter.com/yelizariev',
'price': 150.00,
'currency': 'EUR',
'depends': ['web', 'share', 'disable_openerp_online', 'mail_delete_sent_by_footer'],
'data': [
'security/web_debranding_security.xml',
'data.xml',
'views.xml',
'js.xml',
'pre_install.yml',
],
'qweb': [
'static/src/xml/database_manager.xml',
],
'auto_install': False,
'installable': True
}
| {
'name': "Backend debranding",
'version': '1.0.5',
'author': 'IT-Projects LLC, Ivan Yelizariev',
'license': 'GPL-3',
'category': 'Debranding',
'website': 'https://twitter.com/yelizariev',
'price': 90.00,
'currency': 'EUR',
'depends': ['web', 'share', 'disable_openerp_online', 'mail_delete_sent_by_footer'],
'data': [
'security/web_debranding_security.xml',
'data.xml',
'views.xml',
'js.xml',
'pre_install.yml',
],
'qweb': [
'static/src/xml/database_manager.xml',
],
'auto_install': False,
'installable': True
}
| mit | Python |
defdf2220804ca492ec889c9f4b6eff9ff56eefc | Correct import statement after renaming test_lists.py to testcases | Baaaaam/cyBaM,Baaaaam/cyBaM,jlittell/cycamore,cyclus/cycaless,gonuke/cycamore,Baaaaam/cyBaM,Baaaaam/cycamore,rwcarlsen/cycamore,rwcarlsen/cycamore,Baaaaam/cycamore,rwcarlsen/cycamore,Baaaaam/cyCLASS,gonuke/cycamore,rwcarlsen/cycamore,Baaaaam/cyCLASS,cyclus/cycaless,jlittell/cycamore,Baaaaam/cyBaM,jlittell/cycamore,gonuke/cycamore,jlittell/cycamore,gonuke/cycamore,Baaaaam/cycamore | tests/create_references.py | tests/create_references.py | #! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from testcases import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
| #! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from tests_list import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
| bsd-3-clause | Python |
f89751b4f4b0f091561e614e0392dcd8b08ecf53 | Update pip package version | PAIR-code/lit,pair-code/lit,pair-code/lit,pair-code/lit,PAIR-code/lit,pair-code/lit,PAIR-code/lit,PAIR-code/lit,PAIR-code/lit,pair-code/lit | pip_package/setup.py | pip_package/setup.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""lit-nlp pip package configuration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
REQUIRED_PACKAGES = [
"absl-py",
"attrs",
"numpy",
"scipy",
"pandas",
"portpicker",
"scikit-learn",
"sacrebleu",
"umap-learn",
"Werkzeug",
"ml-collections",
]
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="lit-nlp",
version="0.3",
description="Language Interpretability Tool.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/pair-code/lit",
author="Google Inc.",
packages=setuptools.find_packages(),
license="Apache 2.0",
install_requires=REQUIRED_PACKAGES,
package_data={
"lit_nlp": [
"client/build/*", "client/build/default/*",
"client/build/default/static/*"
]
},
)
| # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""lit-nlp pip package configuration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
REQUIRED_PACKAGES = [
"absl-py",
"attrs",
"numpy",
"scipy",
"pandas",
"portpicker",
"scikit-learn",
"sacrebleu",
"umap-learn",
"Werkzeug",
"ml-collections",
]
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="lit-nlp",
version="0.2",
description="Language Interpretability Tool.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/pair-code/lit",
author="Google Inc.",
packages=setuptools.find_packages(),
license="Apache 2.0",
install_requires=REQUIRED_PACKAGES,
package_data={
"lit_nlp": [
"client/build/*", "client/build/default/*",
"client/build/default/static/*"
]
},
)
| apache-2.0 | Python |
3d5e798eafb3f33151de8032d14f1732030a9244 | Add quick todo so i don't forget | napalm-automation/napalm-nxos,spotify/napalm,spotify/napalm,napalm-automation/napalm | utils/__init__.py | utils/__init__.py | # TODO move utils folder inside napalm | apache-2.0 | Python | |
9cedca2bfc045f596945de33e1a9076150022396 | Add docstrings | BeatButton/beattie,BeatButton/beattie-bot | utils/aioutils.py | utils/aioutils.py | import functools
import aiofiles
import aitertools
aopen = functools.partial(aiofiles.open, encoding='utf-8')
async def areader(aiofile):
"""An async csv reader."""
async for line in aiofile:
yield [val.strip() for val in line.split(',')]
async def make_batches(iterable, size):
"""Make batches of size from iterable. This would be equivalent to slices
from 0 to size, then size to size * 2, etc, of a list of the iterable."""
iterator = await aitertools.aiter(iterable)
async for first in iterator:
yield aitertools.chain([first], aitertools.islice(iterator, size - 1))
| import functools
import aiofiles
import aitertools
aopen = functools.partial(aiofiles.open, encoding='utf-8')
async def areader(aiofile):
async for line in aiofile:
yield [val.strip() for val in line.split(',')]
async def make_batches(iterable, size):
iterator = await aitertools.aiter(iterable)
async for first in iterator:
yield aitertools.chain([first], aitertools.islice(iterator, size - 1))
| mit | Python |
c194696644c2caa40048e5508cf8624a65e7c9b8 | add 1 second to sleep in core take method for opera | 2gis/dali | common/core/dali_core.py | common/core/dali_core.py | import json
import sys
import time
from selenium.webdriver import Remote
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
from compare.pixel_diff import diff
from supplement.scripts import Scripts
class DaliRemote(Remote):
def __init__(self, remoteUrl, capabilities):
### @todo refactor me
self.command_executor = RemoteConnection(remoteUrl)
self.error_handler = ErrorHandler()
self._is_remote = True
self.start_client()
self.session_id = capabilities["webdriver.remote.sessionid"]
self.capabilities = capabilities
class DaliCore(object):
def init(self, remoteUrl, capabilities):
self.remote = DaliRemote(remoteUrl, json.loads(capabilities))
self.resolution = "default"
def resize(self, resolution):
self.resolution = resolution
w, h = tuple(resolution.split('x'))
self.remote.set_window_size(int(w), int(h))
### @todo need to add some waiting mechanism, local resize is too fast
time.sleep(1)
def take(self, save_path, options):
### @todo research and remove sleeps
time.sleep(2)
for key in options.substitute.keys():
elements = self.remote.find_elements_by_css_selector(key)
for element in elements:
script = "arguments[0].innerHTML='%s'" % options.substitute[key]
self.remote.execute_script(script, element)
for selector in options.hide_elements:
self.remote.execute_script(Scripts.hide_elements % selector)
if options.disable_animation:
self.remote.execute_script(Scripts.disable_animation)
filename = "%s/dali-%s-%s.png" % (save_path, time.time(), self.resolution)
self.remote.get_screenshot_as_file(filename)
return filename
@staticmethod
def compare(image1_path, image2_path, result_path):
return diff(image1_path, image2_path, result_path)
@staticmethod
def stop():
### @todo graceful shutdown
sys.exit(0)
| import json
import sys
import time
from selenium.webdriver import Remote
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
from compare.pixel_diff import diff
from supplement.scripts import Scripts
class DaliRemote(Remote):
def __init__(self, remoteUrl, capabilities):
### @todo refactor me
self.command_executor = RemoteConnection(remoteUrl)
self.error_handler = ErrorHandler()
self._is_remote = True
self.start_client()
self.session_id = capabilities["webdriver.remote.sessionid"]
self.capabilities = capabilities
class DaliCore(object):
def init(self, remoteUrl, capabilities):
self.remote = DaliRemote(remoteUrl, json.loads(capabilities))
self.resolution = "default"
def resize(self, resolution):
self.resolution = resolution
w, h = tuple(resolution.split('x'))
self.remote.set_window_size(int(w), int(h))
### @todo need to add some waiting mechanism, local resize is too fast
time.sleep(1)
def take(self, save_path, options):
### @todo research and remove sleeps
### @todo more common options
time.sleep(1)
for key in options.substitute.keys():
elements = self.remote.find_elements_by_css_selector(key)
for element in elements:
script = "arguments[0].innerHTML='%s'" % options.substitute[key]
self.remote.execute_script(script, element)
for selector in options.hide_elements:
self.remote.execute_script(Scripts.hide_elements % selector)
if options.disable_animation:
self.remote.execute_script(Scripts.disable_animation)
filename = "%s/dali-%s-%s.png" % (save_path, time.time(), self.resolution)
self.remote.get_screenshot_as_file(filename)
return filename
@staticmethod
def compare(image1_path, image2_path, result_path):
return diff(image1_path, image2_path, result_path)
@staticmethod
def stop():
### @todo graceful shutdown
sys.exit(0)
| mit | Python |
c2e0b640a5cb26626e914a0621e94f672304161e | remove irrelevant docs from pallet | agrc/raster,agrc/raster,agrc/raster | scripts/RasterPallet.py | scripts/RasterPallet.py | #!/usr/bin/env python
# * coding: utf8 *
'''
RasterPallet.py
A module that contains a forklift pallet definition for the Raster app.
'''
from json import loads
from os.path import dirname, join, realpath, basename
import arcpy
from forklift.models import Pallet
import raster_secrets as secrets
current_folder = dirname(realpath(__file__))
layers_json_file = join(current_folder, 'layers.json')
class RasterPallet(Pallet):
def build(self, configuration):
#: this is so that crates with sources that are not in 26912 will not choke on reprojecting
self.geographic_transformation = None
self.indices = join(self.staging_rack, 'indicies.gdb')
self.sgid = join(self.garage, 'SGID10 as INDICES.sde')
self.copy_data = [self.indices]
self.arcgis_services = [('Raster', 'MapServer')]
self.log.info('adding crates for extent feature classes in SGID')
self.add_crates([
"Aerial_Photography_Extents",
"AutoCorrelated_DEM_Extents",
"Contour_Line_Extents",
"USGS_DEM_Extents",
"LiDAR_Extents",
"DRG_Extents"
], {'source_workspace': self.sgid, 'destination_workspace': self.indices})
self.log.info('adding crates for all all layers from layer.json')
with open(layers_json_file) as file:
layers = loads(file.read())
arcpy.env.workspace = secrets.SHARE
for source_gdb in arcpy.ListWorkspaces(workspace_type='FileGDB'):
self.log.debug(source_gdb)
arcpy.env.workspace = source_gdb
destination_gdb = join(self.staging_rack, basename(source_gdb))
crate_layers = [name for name in arcpy.ListFeatureClasses() if name in layers]
if len(crate_layers) > 0:
self.add_crates(crate_layers, {'source_workspace': source_gdb, 'destination_workspace': destination_gdb})
self.copy_data.append(destination_gdb)
| #!/usr/bin/env python
# * coding: utf8 *
'''
RasterPallet.py
A module that contains a forklift pallet definition for the Raster app.
It updates all data in the raster.gdb database.
The DRG.gdb database is managed manually in staging.
'''
from json import loads
from os.path import dirname, join, realpath, basename
import arcpy
from forklift.models import Pallet
import raster_secrets as secrets
current_folder = dirname(realpath(__file__))
layers_json_file = join(current_folder, 'layers.json')
class RasterPallet(Pallet):
def build(self, configuration):
#: this is so that crates with sources that are not in 26912 will not choke on reprojecting
self.geographic_transformation = None
self.indices = join(self.staging_rack, 'indicies.gdb')
self.sgid = join(self.garage, 'SGID10 as INDICES.sde')
self.copy_data = [self.indices]
self.arcgis_services = [('Raster', 'MapServer')]
self.log.info('adding crates for extent feature classes in SGID')
self.add_crates([
"Aerial_Photography_Extents",
"AutoCorrelated_DEM_Extents",
"Contour_Line_Extents",
"USGS_DEM_Extents",
"LiDAR_Extents",
"DRG_Extents"
], {'source_workspace': self.sgid, 'destination_workspace': self.indices})
self.log.info('adding crates for all all layers from layer.json')
with open(layers_json_file) as file:
layers = loads(file.read())
arcpy.env.workspace = secrets.SHARE
for source_gdb in arcpy.ListWorkspaces(workspace_type='FileGDB'):
self.log.debug(source_gdb)
arcpy.env.workspace = source_gdb
destination_gdb = join(self.staging_rack, basename(source_gdb))
crate_layers = [name for name in arcpy.ListFeatureClasses() if name in layers]
if len(crate_layers) > 0:
self.add_crates(crate_layers, {'source_workspace': source_gdb, 'destination_workspace': destination_gdb})
self.copy_data.append(destination_gdb)
| mit | Python |