commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
69db18ef0f6898ac3b1709988a12e96a88383530 | allow update of view | ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo | addons/hr_timesheet_attendance/report/hr_timesheet_attendance_report.py | addons/hr_timesheet_attendance/report/hr_timesheet_attendance_report.py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, tools
class TimesheetAttendance(models.Model):
_name = 'hr.timesheet.attendance.report'
_auto = False
user_id = fields.Many2one('res.users')
date = fields.Date()
total_timesheet = fields.Float()
total_attendance = fields.Float()
total_difference = fields.Float()
@api.model_cr
def init(self):
tools.drop_view_if_exists(self.env.cr, self._table)
self._cr.execute("""CREATE OR REPLACE VIEW %s AS (
SELECT
max(id) AS id,
t.user_id,
t.date,
coalesce(sum(t.attendance), 0) AS total_attendance,
coalesce(sum(t.timesheet), 0) AS total_timesheet,
coalesce(sum(t.attendance), 0) - coalesce(sum(t.timesheet), 0) as total_difference
FROM (
SELECT
-hr_attendance.id AS id,
resource_resource.user_id AS user_id,
hr_attendance.worked_hours AS attendance,
NULL AS timesheet,
date_trunc('day', hr_attendance.check_in) AS date
FROM hr_attendance
LEFT JOIN hr_employee ON hr_employee.id = hr_attendance.employee_id
LEFT JOIN resource_resource on resource_resource.id = hr_employee.resource_id
UNION ALL
SELECT
ts.id AS id,
ts.user_id AS user_id,
NULL AS attendance,
ts.unit_amount AS timesheet,
date_trunc('day', ts.date) AS date
FROM account_analytic_line AS ts
WHERE ts.project_id IS NOT NULL
) AS t
GROUP BY t.user_id, t.date
ORDER BY t.date
)
""" % self._table)
| # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class TimesheetAttendance(models.Model):
_name = 'hr.timesheet.attendance.report'
_auto = False
user_id = fields.Many2one('res.users')
date = fields.Date()
total_timesheet = fields.Float()
total_attendance = fields.Float()
total_difference = fields.Float()
@api.model_cr
def init(self):
self._cr.execute("""CREATE OR REPLACE VIEW %s AS (
SELECT
max(id) AS id,
t.user_id,
t.date,
coalesce(sum(t.attendance), 0) AS total_attendance,
coalesce(sum(t.timesheet), 0) AS total_timesheet,
coalesce(sum(t.attendance), 0) - coalesce(sum(t.timesheet), 0) as total_difference
FROM (
SELECT
-hr_attendance.id AS id,
resource_resource.user_id AS user_id,
hr_attendance.worked_hours AS attendance,
NULL AS timesheet,
date_trunc('day', hr_attendance.check_in) AS date
FROM hr_attendance
LEFT JOIN hr_employee ON hr_employee.id = hr_attendance.employee_id
LEFT JOIN resource_resource on resource_resource.id = hr_employee.resource_id
UNION ALL
SELECT
ts.id AS id,
ts.user_id AS user_id,
NULL AS attendance,
ts.unit_amount AS timesheet,
date_trunc('day', ts.date) AS date
FROM account_analytic_line AS ts
WHERE ts.project_id IS NOT NULL
) AS t
GROUP BY t.user_id, t.date
ORDER BY t.date
)
""" % self._table)
| agpl-3.0 | Python |
584dcd7fa2703914ace833d9ab82a93da0d1f31b | Update P02_char_pic_grid: working solution | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/AutomateTheBoringStuffWithPython/Chapter04/PracticeProjects/P02_char_pic_grid.py | books/AutomateTheBoringStuffWithPython/Chapter04/PracticeProjects/P02_char_pic_grid.py | # Character Picture Grid: This program converts a matrix to an image
#
# Say you have a list of lists where each value in the inner lists is a one-character string, like this:
# grid = [['.', '.', '.', '.', '.', '.'],
# ['.', 'O', 'O', '.', '.', '.'],
# ['O', 'O', 'O', 'O', '.', '.'],
# ['O', 'O', 'O', 'O', 'O', '.'],
# ['.', 'O', 'O', 'O', 'O', 'O'],
# ['O', 'O', 'O', 'O', 'O', '.'],
# ['O', 'O', 'O', 'O', '.', '.'],
# ['.', 'O', 'O', '.', '.', '.'],
# ['.', '.', '.', '.', '.', '.']]
#
# You can think of grid[x][y] as being the character at the x- and y-coordinates of a “picture” drawn
# with text characters. The (0, 0) origin will be in the upper-left corner, the x-coordinates increase
# going right, and the y-coordinates increase going down.
#
# Copy the previous grid value, and write code that uses it to print the image.
# ..OO.OO..
# .OOOOOOO.
# .OOOOOOO.
# ..OOOOO..
# ...OOO...
# ....O....
def matrix_to_pic(matrix):
new_matrix = zip(*matrix) # Convert rows to columns in new matrix
for item in new_matrix:
print(''.join(item))
return None
def main():
grid = [['.', '.', '.', '.', '.', '.'],
['.', 'O', 'O', '.', '.', '.'],
['O', 'O', 'O', 'O', '.', '.'],
['O', 'O', 'O', 'O', 'O', '.'],
['.', 'O', 'O', 'O', 'O', 'O'],
['O', 'O', 'O', 'O', 'O', '.'],
['O', 'O', 'O', 'O', '.', '.'],
['.', 'O', 'O', '.', '.', '.'],
['.', '.', '.', '.', '.', '.']]
matrix_to_pic(grid)
if __name__ == "__main__":
main()
| # Character Picture Grid: This program converts a matrix to an image
#
# Say you have a list of lists where each value in the inner lists is a one-character string, like this:
# grid = [['.', '.', '.', '.', '.', '.'],
# ['.', 'O', 'O', '.', '.', '.'],
# ['O', 'O', 'O', 'O', '.', '.'],
# ['O', 'O', 'O', 'O', 'O', '.'],
# ['.', 'O', 'O', 'O', 'O', 'O'],
# ['O', 'O', 'O', 'O', 'O', '.'],
# ['O', 'O', 'O', 'O', '.', '.'],
# ['.', 'O', 'O', '.', '.', '.'],
# ['.', '.', '.', '.', '.', '.']]
#
# You can think of grid[x][y] as being the character at the x- and y-coordinates of a “picture” drawn
# with text characters. The (0, 0) origin will be in the upper-left corner, the x-coordinates increase
# going right, and the y-coordinates increase going down.
#
# Copy the previous grid value, and write code that uses it to print the image.
# ..OO.OO..
# .OOOOOOO.
# .OOOOOOO.
# ..OOOOO..
# ...OOO...
# ....O....
| mit | Python |
ecafd4eab0aca93b17de8f895e402fb917131da6 | Format output test_nodemap | kewitz/master,kewitz/master,kewitz/master | test_nodemap.py | test_nodemap.py | # -*- coding: utf-8 -*-
"""
Created on Mon Jun 1 16:46:00 2015
@author: leo
"""
from numpy import *
import NScheme as ns
import matplotlib.pyplot as plt
path = """./res/"""
split = ns.split
m = 1
print "DOF\tCUDA\tTime\tIter.\tErr."
for c in [(c, f) for f in ['teste1_1.msh', 'teste1_2.msh', 'teste1_3.msh', 'teste1_4.msh', 'teste1_5.msh'] for c in [False, True]]:
del m
cuda, fi = c
m = ns.Mesh(file=path + fi, verbose=False)
bound = {1: 100.0, 2: 66.0, 3: 33.0, 4: 0.0}
for n in m.nodesOnLine([1, 2, 3, 4]):
n.calc = False
if cuda:
vc = v
v, i, b = m.run(cuda=cuda, boundary=bound, R=0, errmin=1E-6, kmax=1000000)
e = 0.0
if cuda:
e = max(abs(vc - v))
print "%i\t%s\t%.4fs\t%i\t%.4E" % (len(m.nodes), cuda, b[0], i, e) | # -*- coding: utf-8 -*-
"""
Created on Mon Jun 1 16:46:00 2015
@author: leo
"""
from numpy import *
import NScheme as ns
import matplotlib.pyplot as plt
path = """./res/"""
split = ns.split
m = 1
print "DOF\tCUDA\tTime\t\tIterations\tErr."
for c in [(c, f) for f in ['teste1_1.msh', 'teste1_2.msh', 'teste1_3.msh', 'teste1_4.msh', 'teste1_5.msh'] for c in [False, True]]:
del m
cuda, fi = c
m = ns.Mesh(file=path + fi, verbose=False)
bound = {1: 100.0, 2: 66.0, 3: 33.0, 4: 0.0}
for n in m.nodesOnLine([1, 2, 3, 4]):
n.calc = False
if cuda:
vc = v
v, i, b = m.run(cuda=cuda, boundary=bound, R=0, errmin=1E-6, kmax=1000000)
e = 0.0
if cuda:
e = max(abs(vc - v))
print "%i\t%s\t%fs\t%i\t%f" % (len(m.nodes), cuda, b[0], i, e) | mit | Python |
d0271db1adbae453d8461a26c055387fac0da174 | Add eval ratio kwarg to fix melody rnn dataset script. (#1347) | jesseengel/magenta,magenta/magenta,magenta/magenta,adarob/magenta,jesseengel/magenta,adarob/magenta | magenta/models/melody_rnn/melody_rnn_create_dataset.py | magenta/models/melody_rnn/melody_rnn_create_dataset.py | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create a dataset of SequenceExamples from NoteSequence protos.
This script will extract melodies from NoteSequence protos and save them to
TensorFlow's SequenceExample protos for input to the melody RNN models.
"""
import os
import tensorflow as tf
from magenta.models.melody_rnn import melody_rnn_config_flags
from magenta.models.melody_rnn import melody_rnn_pipeline
from magenta.pipelines import pipeline
flags = tf.app.flags
FLAGS = tf.app.flags.FLAGS
flags.DEFINE_string(
'input', None,
'TFRecord to read NoteSequence protos from.')
flags.DEFINE_string(
'output_dir', None,
'Directory to write training and eval TFRecord files. The TFRecord files '
'are populated with SequenceExample protos.')
flags.DEFINE_float(
'eval_ratio', 0.1,
'Fraction of input to set aside for eval set. Partition is randomly '
'selected.')
flags.DEFINE_string(
'log', 'INFO',
'The threshold for what messages will be logged DEBUG, INFO, WARN, ERROR, '
'or FATAL.')
def main(unused_argv):
tf.logging.set_verbosity(FLAGS.log)
config = melody_rnn_config_flags.config_from_flags()
pipeline_instance = melody_rnn_pipeline.get_pipeline(
config, eval_ratio=FLAGS.eval_ratio)
FLAGS.input = os.path.expanduser(FLAGS.input)
FLAGS.output_dir = os.path.expanduser(FLAGS.output_dir)
pipeline.run_pipeline_serial(
pipeline_instance,
pipeline.tf_record_iterator(FLAGS.input, pipeline_instance.input_type),
FLAGS.output_dir)
def console_entry_point():
tf.app.run(main)
if __name__ == '__main__':
console_entry_point()
| # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create a dataset of SequenceExamples from NoteSequence protos.
This script will extract melodies from NoteSequence protos and save them to
TensorFlow's SequenceExample protos for input to the melody RNN models.
"""
import os
import tensorflow as tf
from magenta.models.melody_rnn import melody_rnn_config_flags
from magenta.models.melody_rnn import melody_rnn_pipeline
from magenta.pipelines import pipeline
flags = tf.app.flags
FLAGS = tf.app.flags.FLAGS
flags.DEFINE_string(
'input', None,
'TFRecord to read NoteSequence protos from.')
flags.DEFINE_string(
'output_dir', None,
'Directory to write training and eval TFRecord files. The TFRecord files '
'are populated with SequenceExample protos.')
flags.DEFINE_float(
'eval_ratio', 0.1,
'Fraction of input to set aside for eval set. Partition is randomly '
'selected.')
flags.DEFINE_string(
'log', 'INFO',
'The threshold for what messages will be logged DEBUG, INFO, WARN, ERROR, '
'or FATAL.')
def main(unused_argv):
tf.logging.set_verbosity(FLAGS.log)
config = melody_rnn_config_flags.config_from_flags()
pipeline_instance = melody_rnn_pipeline.get_pipeline(
config, FLAGS.eval_ratio)
FLAGS.input = os.path.expanduser(FLAGS.input)
FLAGS.output_dir = os.path.expanduser(FLAGS.output_dir)
pipeline.run_pipeline_serial(
pipeline_instance,
pipeline.tf_record_iterator(FLAGS.input, pipeline_instance.input_type),
FLAGS.output_dir)
def console_entry_point():
tf.app.run(main)
if __name__ == '__main__':
console_entry_point()
| apache-2.0 | Python |
1591ab302effbaa2abd6d0d07b75567930fa032e | Update extend schema test | dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq | corehq/ex-submodules/couchexport/tests/test_extend_schema.py | corehq/ex-submodules/couchexport/tests/test_extend_schema.py | from django.test import SimpleTestCase
from couchexport.schema import extend_schema
class ExtendSchemaTest(SimpleTestCase):
def setUp(self):
pass
def test_simple_schema(self):
"""
Testing the initial generation of the schema
"""
schema = {
'question1': 'string',
'question2': {
'inner1': 'string'
}
}
new_schema = extend_schema(None, schema)
self.assertEquals(new_schema, schema, 'Initial schema should just be what is sent in')
def test_reconcile_repeat_group(self):
schema = {
'question1': 'string',
'question2': {
'inner1': 'string'
}
}
# Once a repeat group has been discovered, is should convert previous schema to be a list
schema_repeat_group = {
'question1': 'string',
'question2': [{
'inner1': 'string'
}]
}
new_schema = extend_schema(schema, schema_repeat_group)
self.assertEquals(
new_schema,
schema_repeat_group,
)
def test_reconcile_delete_question_within_repeat_group(self):
"""
This test ensures that when you delete a question within a repeat group, that question stays in
the schema
"""
previous_schema = {
'question1': 'string',
'question2': [{
'inner1': 'string',
'inner2': 'string',
}]
}
# Once a repeat group has been discovered, is should convert previous schema to be a list
schema_repeat_deleted = {
'question1': 'string',
'question2': [{
'inner1': 'string'
}]
}
new_schema = extend_schema(previous_schema, schema_repeat_deleted)
self.assertEquals(
new_schema,
previous_schema,
)
| from django.test import SimpleTestCase
from couchexport.schema import extend_schema
class ExtendSchemaTest(SimpleTestCase):
def setUp(self):
pass
def test_simple_schema(self):
"""
Testing the initial generation of the schema
"""
schema = {
'question1': 'string',
'question2': {
'inner1': 'string'
}
}
new_schema = extend_schema(None, schema)
self.assertEquals(new_schema, schema, 'Initial schema should just be what is sent in')
def test_reconcile_repeat_group(self):
schema = {
'question1': 'string',
'question2': {
'inner1': 'string'
}
}
# Once a repeat group has been discovered, is should convert previous schema to be a list
schema_repeat_group = {
'question1': 'string',
'question2': [{
'inner1': 'string'
}]
}
new_schema = extend_schema(schema, schema_repeat_group)
self.assertEquals(
new_schema,
schema_repeat_group,
)
def test_reconcile_delete_question_within_repeat_group(self):
"""
This test ensures that when you delete a question within a repeat group, that question gets deleted in
the schema
"""
schema = {
'question1': 'string',
'question2': [{
'inner1': 'string',
'inner2': 'string',
}]
}
# Once a repeat group has been discovered, is should convert previous schema to be a list
schema_repeat_deleted = {
'question1': 'string',
'question2': [{
'inner1': 'string'
}]
}
new_schema = extend_schema(schema, schema_repeat_deleted)
self.assertEquals(
new_schema,
schema_repeat_deleted,
)
| bsd-3-clause | Python |
ca51f06f13f6ae3f5dff1a0e71f1d6670db1f607 | add support for pluralizing source in email body text | greglinch/sourcelist,greglinch/sourcelist | sources/management/commands/email_admin.py | sources/management/commands/email_admin.py | from django.core.management.base import BaseCommand, CommandError
from django.core.mail import send_mail
# from django.utils import timezone
from sources.models import Person
from sourcelist.settings import PROJECT_NAME, EMAIL_SENDER, SITE_URL
def email_admin():
path = '/admin/sources/sourceforadmin/'
path += '?approved_by_admin__exact=0'
# path += '&created={}'.format(timezone.now().date())
admin_url = SITE_URL + path
unapproved_sources = Person.objects.filter(
role='source',
approved_by_admin=False
)
unapproved_count = unapproved_sources.count()
if unapproved_count:
plural = ''
if unapproved_count > 1:
plural = 's'
source_list_items = ''
for source in unapproved_sources:
source_link = '{}/admin/sources/sourceforadmin/{}/change/'.format(SITE_URL, source.id)
item = '<li><a href="{}">{}</a></li>'.format(source_link, source)
source_list_items += item
subject = '[{}] {} sources pending approval'.format(PROJECT_NAME, unapproved_count)
message = ''
sender = 'greglinch@gmail.com' # EMAIL_SENDER
recipients = ['diversesources@gmail.com']
html_message = '<p>The following source{} need to be reviewed:</p>{}'.format(plural, source_list_items)
send_mail(
subject,
message,
sender,
recipients,
# reply_to=[reply_email],
html_message=html_message,
fail_silently=False,
)
class Command(BaseCommand):
help = 'Email admin to approve new user.'
def handle(self, *args, **options):
email_admin()
| from django.core.management.base import BaseCommand, CommandError
from django.core.mail import send_mail
# from django.utils import timezone
from sources.models import Person
from sourcelist.settings import PROJECT_NAME, EMAIL_SENDER, SITE_URL
def email_admin():
path = '/admin/sources/sourceforadmin/'
path += '?approved_by_admin__exact=0'
# path += '&created={}'.format(timezone.now().date())
admin_url = SITE_URL + path
unapproved_sources = Person.objects.filter(
role='source',
approved_by_admin=False
)
unapproved_count = unapproved_sources.count()
if unapproved_count:
source_list_items = ''
for source in unapproved_sources:
source_link = '{}/admin/sources/sourceforadmin/{}/change/'.format(SITE_URL, source.id)
item = '<li><a href="{}">{}</a></li>'.format(source_link, source)
source_list_items += item
subject = '[{}] {} sources pending approval'.format(PROJECT_NAME, unapproved_count)
message = ''
sender = 'greglinch@gmail.com' # EMAIL_SENDER
recipients = ['diversesources@gmail.com']
html_message = '<p>The following source need to be reviewed:</p>{}'.format(source_list_items)
send_mail(
subject,
message,
sender,
recipients,
# reply_to=[reply_email],
html_message=html_message,
fail_silently=False,
)
class Command(BaseCommand):
help = 'Email admin to approve new user.'
def handle(self, *args, **options):
email_admin()
| mit | Python |
9fada96919a1e6d2acc2d03549eb2f5b8db6c977 | Use LocMemCache while testing | potatolondon/djangae,potatolondon/djangae | test_settings.py | test_settings.py | import os
from django.urls import (
include,
path,
)
BASE_DIR = os.path.dirname(__file__)
STATIC_URL = "/static/"
# Set the cache during tests to local memory, which is threadsafe
# then our TestCase clears the cache in setUp()
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'unique-snowflake',
}
}
# Default Django middleware
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'djangae.tasks.middleware.task_environment_middleware',
]
INSTALLED_APPS = (
'django.contrib.sessions',
'gcloudc',
'djangae',
'djangae.commands', # Takes care of emulator setup
'djangae.tasks',
)
DATABASES = {
'default': {
'ENGINE': 'gcloudc.db.backends.datastore',
'INDEXES_FILE': os.path.join(os.path.abspath(os.path.dirname(__file__)), "djangaeidx.yaml"),
"PROJECT": "test",
"NAMESPACE": "ns1", # Use a non-default namespace to catch edge cases where we forget
}
}
SECRET_KEY = "secret_key_for_testing"
USE_TZ = True
CSRF_USE_SESSIONS = True
CLOUD_TASKS_LOCATION = "[LOCATION]"
# Define two required task queues
CLOUD_TASKS_QUEUES = [
{
"name": "default"
},
{
"name": "another"
}
]
# Point the URL conf at this file
ROOT_URLCONF = __name__
urlpatterns = [
path('tasks/', include('djangae.tasks.urls')),
]
| import os
from django.urls import (
include,
path,
)
BASE_DIR = os.path.dirname(__file__)
STATIC_URL = "/static/"
# Default Django middleware
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'djangae.tasks.middleware.task_environment_middleware',
]
INSTALLED_APPS = (
'django.contrib.sessions',
'gcloudc',
'djangae',
'djangae.commands', # Takes care of emulator setup
'djangae.tasks',
)
DATABASES = {
'default': {
'ENGINE': 'gcloudc.db.backends.datastore',
'INDEXES_FILE': os.path.join(os.path.abspath(os.path.dirname(__file__)), "djangaeidx.yaml"),
"PROJECT": "test",
"NAMESPACE": "ns1", # Use a non-default namespace to catch edge cases where we forget
}
}
SECRET_KEY = "secret_key_for_testing"
USE_TZ = True
CSRF_USE_SESSIONS = True
CLOUD_TASKS_LOCATION = "[LOCATION]"
# Define two required task queues
CLOUD_TASKS_QUEUES = [
{
"name": "default"
},
{
"name": "another"
}
]
# Point the URL conf at this file
ROOT_URLCONF = __name__
urlpatterns = [
path('tasks/', include('djangae.tasks.urls')),
]
| bsd-3-clause | Python |
8fa0c2dbc1d535fcaec2cbf54e0315446aaf7a60 | fix cos dense test | stygstra/keras-contrib,keras-team/keras-contrib,farizrahman4u/keras-contrib,keras-team/keras-contrib,keras-team/keras-contrib | tests/keras_contrib/layers/test_core.py | tests/keras_contrib/layers/test_core.py | import pytest
import numpy as np
from keras import backend as K
from keras_contrib import backend as KC
from keras_contrib.layers import core
from keras.utils.test_utils import layer_test, keras_test
from numpy.testing import assert_allclose
@keras_test
def test_cosinedense():
from keras import regularizers
from keras import constraints
from keras.models import Sequential
layer_test(core.CosineDense,
kwargs={'units': 3},
input_shape=(3, 2))
layer_test(core.CosineDense,
kwargs={'units': 3},
input_shape=(3, 4, 2))
layer_test(core.CosineDense,
kwargs={'units': 3},
input_shape=(None, None, 2))
layer_test(core.CosineDense,
kwargs={'units': 3},
input_shape=(3, 4, 5, 2))
layer_test(core.CosineDense,
kwargs={'units': 3,
'kernel_regularizer': regularizers.l2(0.01),
'bias_regularizer': regularizers.l1(0.01),
'regularizer': regularizers.l2(0.01),
'kernel_constraint': constraints.MaxNorm(1),
'bias_constraint': constraints.MaxNorm(1)},
input_shape=(3, 2))
X = np.random.randn(1, 20)
model = Sequential()
model.add(core.CosineDense(1, bias=True, input_shape=(20,)))
model.compile(loss='mse', optimizer='rmsprop')
W = model.get_weights()
W[0] = X.T
W[1] = np.asarray([1.])
model.set_weights(W)
out = model.predict(X)
assert_allclose(out, np.ones((1, 1), dtype=K.floatx()), atol=1e-5)
X = np.random.randn(1, 20)
model = Sequential()
model.add(core.CosineDense(1, bias=False, input_shape=(20,)))
model.compile(loss='mse', optimizer='rmsprop')
W = model.get_weights()
W[0] = -2 * X.T
model.set_weights(W)
out = model.predict(X)
assert_allclose(out, -np.ones((1, 1), dtype=K.floatx()), atol=1e-5)
if __name__ == '__main__':
pytest.main([__file__])
| import pytest
import numpy as np
from keras import backend as K
from keras_contrib import backend as KC
from keras_contrib.layers import core
from keras.utils.test_utils import layer_test, keras_test
from numpy.testing import assert_allclose
@keras_test
def test_cosinedense():
from keras import regularizers
from keras import constraints
from keras.models import Sequential
layer_test(core.CosineDense,
kwargs={'units': 3},
input_shape=(3, 2))
layer_test(core.CosineDense,
kwargs={'units': 3},
input_shape=(3, 4, 2))
layer_test(core.CosineDense,
kwargs={'units': 3},
input_shape=(None, None, 2))
layer_test(core.CosineDense,
kwargs={'units': 3},
input_shape=(3, 4, 5, 2))
layer_test(core.CosineDense,
kwargs={'units': 3,
'W_regularizer': regularizers.l2(0.01),
'b_regularizer': regularizers.l1(0.01),
'regularizer': regularizers.l2(0.01),
'W_constraint': constraints.MaxNorm(1),
'b_constraint': constraints.MaxNorm(1)},
input_shape=(3, 2))
X = np.random.randn(1, 20)
model = Sequential()
model.add(core.CosineDense(1, bias=True, input_shape=(20,)))
model.compile(loss='mse', optimizer='rmsprop')
W = model.get_weights()
W[0] = X.T
W[1] = np.asarray([1.])
model.set_weights(W)
out = model.predict(X)
assert_allclose(out, np.ones((1, 1), dtype=K.floatx()), atol=1e-5)
X = np.random.randn(1, 20)
model = Sequential()
model.add(core.CosineDense(1, bias=False, input_shape=(20,)))
model.compile(loss='mse', optimizer='rmsprop')
W = model.get_weights()
W[0] = -2 * X.T
model.set_weights(W)
out = model.predict(X)
assert_allclose(out, -np.ones((1, 1), dtype=K.floatx()), atol=1e-5)
if __name__ == '__main__':
pytest.main([__file__])
| mit | Python |
cb0862f7da69dec4e865543b0565091980f2463e | Fix dig check test | pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine | tests/scoring_engine/checks/test_dns.py | tests/scoring_engine/checks/test_dns.py | from tests.scoring_engine.checks.check_test import CheckTest
class TestDNSCheck(CheckTest):
check_name = 'DNSCheck'
properties = {
'qtype': 'A',
'domain': 'www.google.com'
}
cmd = "dig +noedns @'127.0.0.1' -p 1234 -t 'A' -q 'www.google.com'"
| from tests.scoring_engine.checks.check_test import CheckTest
class TestDNSCheck(CheckTest):
check_name = 'DNSCheck'
properties = {
'qtype': 'A',
'domain': 'www.google.com'
}
cmd = "dig @'127.0.0.1' -p 1234 -t 'A' -q 'www.google.com'"
| mit | Python |
13c1745e73b7d3be162ce228d6c6ccf53f2438eb | Mark build 90 as 1.6.6 final | alby128/syncplay,Syncplay/syncplay,Syncplay/syncplay,alby128/syncplay | syncplay/__init__.py | syncplay/__init__.py | version = '1.6.6'
revision = ''
milestone = 'Yoitsu'
release_number = '90'
projectURL = 'https://syncplay.pl/'
| version = '1.6.6'
revision = ' beta 2'
milestone = 'Yoitsu'
release_number = '89'
projectURL = 'https://syncplay.pl/'
| apache-2.0 | Python |
0943d0629488f4263c9403ccfcc52cb3ea2df8b0 | Fix tests | slackapi/python-slackclient,slackhq/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient | tests/test_proxy_env_variable_loader.py | tests/test_proxy_env_variable_loader.py | import os
import unittest
from slack_sdk.proxy_env_variable_loader import load_http_proxy_from_env
from tests.helpers import remove_os_env_temporarily, restore_os_env
class TestProxyEnvVariableLoader(unittest.TestCase):
def setUp(self):
self.old_env = remove_os_env_temporarily()
def tearDown(self):
os.environ.clear()
restore_os_env(self.old_env)
def test_load_lower_case(self):
os.environ["https_proxy"] = "http://localhost:9999"
url = load_http_proxy_from_env()
self.assertEqual(url, "http://localhost:9999")
def test_load_upper_case(self):
os.environ["HTTPS_PROXY"] = "http://localhost:9999"
url = load_http_proxy_from_env()
self.assertEqual(url, "http://localhost:9999")
| import os
import unittest
from slack_sdk.proxy_env_variable_loader import load_http_proxy_from_env
from tests.helpers import remove_os_env_temporarily, restore_os_env
class TestProxyEnvVariableLoader(unittest.TestCase):
def setUp(self):
self.old_env = remove_os_env_temporarily()
def tearDown(self):
restore_os_env(self.old_env)
def test_load_lower_case(self):
os.environ["https_proxy"] = "http://localhost:9999"
url = load_http_proxy_from_env()
self.assertEqual(url, "http://localhost:9999")
def test_load_upper_case(self):
os.environ["HTTPS_PROXY"] = "http://localhost:9999"
url = load_http_proxy_from_env()
self.assertEqual(url, "http://localhost:9999")
| mit | Python |
8d4d22d17ff10cb3d95b6524221c2a6c01c0923c | Fix Python3 builds | avanov/Rhetoric,avanov/Rhetoric | tests/testapp/testapp/articles/views.py | tests/testapp/testapp/articles/views.py | from rhetoric.view import view_config, view_defaults
from ..types import Language
@view_defaults(route_name='articles.regional.index', renderer='json')
class ArticlesHandler(object):
def __init__(self, request, language):
self.request = request
self.language = language
self.region_strategy = Language.match(language)
@view_config(request_method='GET')
def show_local_entries(self):
return {
'language': self.language,
}
| from rhetoric.view import view_config, view_defaults
from ..types import Language
@view_defaults(route_name='articles.regional.index', renderer='json')
class ArticlesHandler(object):
def __init__(self, request, language):
self.request = request
self.language = language
self.region_strategy = Language.match(language)
@view_config(request_method='GET')
def show_local_entries(self):
print self.region_strategy
return {
'language': self.language,
}
| mit | Python |
3177cdf564f8d16b090506d3adf479959698bebd | Add component count. | barbarahui/nuxeo-calisphere,barbarahui/nuxeo-calisphere | utils/get_collection_object_count.py | utils/get_collection_object_count.py | #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, 'barbarahui_test_bucket', argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
| #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, 'barbarahui_test_bucket', argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
print "finished"
print "len(objects): {}".format(len(objects))
if __name__ == "__main__":
sys.exit(main())
| bsd-3-clause | Python |
bc0d314aad61fd252d4b85666fad5ea34706404f | remove unused operator classes | kishkaru/python-driver,stef1927/python-driver,thelastpickle/python-driver,coldeasy/python-driver,kishkaru/python-driver,datastax/python-driver,coldeasy/python-driver,markflorisson/python-driver,markflorisson/python-driver,thobbs/python-driver,vipjml/python-driver,vipjml/python-driver,datastax/python-driver,mambocab/python-driver,mambocab/python-driver,thelastpickle/python-driver,thobbs/python-driver,stef1927/python-driver | cassandra/cqlengine/operators.py | cassandra/cqlengine/operators.py | # Copyright 2013-2016 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cassandra.cqlengine import UnicodeMixin
class QueryOperatorException(Exception):
pass
class BaseQueryOperator(UnicodeMixin):
# The symbol that identifies this operator in kwargs
# ie: colname__<symbol>
symbol = None
# The comparator symbol this operator uses in cql
cql_symbol = None
def __unicode__(self):
if self.cql_symbol is None:
raise QueryOperatorException("cql symbol is None")
return self.cql_symbol
@classmethod
def get_operator(cls, symbol):
if cls == BaseQueryOperator:
raise QueryOperatorException("get_operator can only be called from a BaseQueryOperator subclass")
if not hasattr(cls, 'opmap'):
cls.opmap = {}
def _recurse(klass):
if klass.symbol:
cls.opmap[klass.symbol.upper()] = klass
for subklass in klass.__subclasses__():
_recurse(subklass)
pass
_recurse(cls)
try:
return cls.opmap[symbol.upper()]
except KeyError:
raise QueryOperatorException("{0} doesn't map to a QueryOperator".format(symbol))
class BaseWhereOperator(BaseQueryOperator):
""" base operator used for where clauses """
class EqualsOperator(BaseWhereOperator):
symbol = 'EQ'
cql_symbol = '='
class InOperator(EqualsOperator):
symbol = 'IN'
cql_symbol = 'IN'
class GreaterThanOperator(BaseWhereOperator):
symbol = "GT"
cql_symbol = '>'
class GreaterThanOrEqualOperator(BaseWhereOperator):
symbol = "GTE"
cql_symbol = '>='
class LessThanOperator(BaseWhereOperator):
symbol = "LT"
cql_symbol = '<'
class LessThanOrEqualOperator(BaseWhereOperator):
symbol = "LTE"
cql_symbol = '<='
| # Copyright 2013-2016 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cassandra.cqlengine import UnicodeMixin
class QueryOperatorException(Exception):
pass
class BaseQueryOperator(UnicodeMixin):
# The symbol that identifies this operator in kwargs
# ie: colname__<symbol>
symbol = None
# The comparator symbol this operator uses in cql
cql_symbol = None
def __unicode__(self):
if self.cql_symbol is None:
raise QueryOperatorException("cql symbol is None")
return self.cql_symbol
@classmethod
def get_operator(cls, symbol):
if cls == BaseQueryOperator:
raise QueryOperatorException("get_operator can only be called from a BaseQueryOperator subclass")
if not hasattr(cls, 'opmap'):
cls.opmap = {}
def _recurse(klass):
if klass.symbol:
cls.opmap[klass.symbol.upper()] = klass
for subklass in klass.__subclasses__():
_recurse(subklass)
pass
_recurse(cls)
try:
return cls.opmap[symbol.upper()]
except KeyError:
raise QueryOperatorException("{0} doesn't map to a QueryOperator".format(symbol))
class BaseWhereOperator(BaseQueryOperator):
""" base operator used for where clauses """
class EqualsOperator(BaseWhereOperator):
symbol = 'EQ'
cql_symbol = '='
class InOperator(EqualsOperator):
symbol = 'IN'
cql_symbol = 'IN'
class GreaterThanOperator(BaseWhereOperator):
symbol = "GT"
cql_symbol = '>'
class GreaterThanOrEqualOperator(BaseWhereOperator):
symbol = "GTE"
cql_symbol = '>='
class LessThanOperator(BaseWhereOperator):
symbol = "LT"
cql_symbol = '<'
class LessThanOrEqualOperator(BaseWhereOperator):
symbol = "LTE"
cql_symbol = '<='
class BaseAssignmentOperator(BaseQueryOperator):
""" base operator used for insert and delete statements """
class AssignmentOperator(BaseAssignmentOperator):
cql_symbol = "="
class AddSymbol(BaseAssignmentOperator):
cql_symbol = "+"
| apache-2.0 | Python |
52c7729e5423fc53281f2fa367d93c5bf3821f03 | Adjust translate.py for python3 support | sukeesh/Jarvis,appi147/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis,appi147/Jarvis | jarviscli/packages/translate.py | jarviscli/packages/translate.py | from googletrans import Translator
from utilities.GeneralUtilities import print_say
from googletrans.constants import LANGCODES, LANGUAGES, SPECIAL_CASES
import six
def main(self):
'''
source language
'''
print_say('\nEnter source language ', self)
if six.PY2:
srcs = raw_input()
else:
srcs = input()
while (srcs not in LANGUAGES) and (srcs not in SPECIAL_CASES) and (srcs not in LANGCODES):
if srcs in SPECIAL_CASES:
srcs = SPECIAL_CASES[srcs]
elif srcs in LANGCODES:
srcs = LANGCODES[srcs]
else:
print_say("\nInvalid source language\nEnter again", self)
if six.PY2:
srcs = raw_input()
else:
srcs = input()
print_say('\nEnter destination language ', self)
if six.PY2:
des = raw_input()
else:
des = input()
while (des not in LANGUAGES) and (des not in SPECIAL_CASES) and (des not in LANGCODES):
if des in SPECIAL_CASES:
des = SPECIAL_CASES[des]
elif des in LANGCODES:
des = LANGCODES[des]
else:
print_say("\nInvalid destination language\nEnter again", self)
if six.PY2:
des = raw_input()
else:
des = input()
print_say('\nEnter text ', self)
if six.PY2:
tex = raw_input()
else:
tex = input()
translator = Translator()
result = translator.translate(tex, dest=des, src=srcs)
result = u"""
[{src}] {original}
->
[{dest}] {text}
[pron.] {pronunciation}
""".strip().format(src=result.src, dest=result.dest, original=result.origin,
text=result.text, pronunciation=result.pronunciation)
print(result)
| from googletrans import Translator
from utilities.GeneralUtilities import print_say
from googletrans.constants import LANGCODES, LANGUAGES, SPECIAL_CASES
def main(self):
'''
source language
'''
print_say('\nEnter source language ', self)
srcs = raw_input()
while (srcs not in LANGUAGES) and (srcs not in SPECIAL_CASES) and (srcs not in LANGCODES):
if srcs in SPECIAL_CASES:
srcs = SPECIAL_CASES[srcs]
elif srcs in LANGCODES:
srcs = LANGCODES[srcs]
else:
print_say("\nInvalid source language\nEnter again", self)
srcs = raw_input()
print_say('\nEnter destination language ', self)
des = raw_input()
while (des not in LANGUAGES) and (des not in SPECIAL_CASES) and (des not in LANGCODES):
if des in SPECIAL_CASES:
des = SPECIAL_CASES[des]
elif des in LANGCODES:
des = LANGCODES[des]
else:
print_say("\nInvalid destination language\nEnter again", self)
des = raw_input()
print_say('\nEnter text ', self)
tex = raw_input()
translator = Translator()
result = translator.translate(tex, dest=des, src=srcs)
result = u"""
[{src}] {original}
->
[{dest}] {text}
[pron.] {pronunciation}
""".strip().format(src=result.src, dest=result.dest, original=result.origin,
text=result.text, pronunciation=result.pronunciation)
print(result)
| mit | Python |
f7b24b160ecd8e634310ad4a98aefb894c330c93 | Revert "Revert "Added `-fno-tree-sink` to cflags_cc list."" | royalpinto/node-cares,royalpinto/node-cares,royalpinto/node-cares | binding.gyp | binding.gyp | {
"targets": [{
"target_name": "cares_wrap",
"include_dirs": [
"<!(node -e \"require('nan')\")",
"deps/cares/include",
"deps/cares/src",
"deps/utils"
],
"sources": [
"src/cares_wrap.cc"
],
"dependencies": [ "deps/cares/cares.gyp:cares" ],
'cflags_cc!': [ '-fno-tree-sink' ],
"conditions": [
["OS!='win'", {
"libraries": [ "-Wl,-rpath,<!(pwd)/build/Release/" ]
}
]
]
}]
}
| {
"targets": [{
"target_name": "cares_wrap",
"include_dirs": [
"<!(node -e \"require('nan')\")",
"deps/cares/include",
"deps/cares/src",
"deps/utils"
],
"sources": [
"src/cares_wrap.cc"
],
"dependencies": [ "deps/cares/cares.gyp:cares" ],
"conditions": [
["OS!='win'", {
"libraries": [ "-Wl,-rpath,<!(pwd)/build/Release/" ]
}
]
]
}]
}
| mit | Python |
b0451cd77ed97373cb6748e6cca17b11220f74d0 | Add missing source file | thomasjo/node-cmark,thomasjo/node-cmark,thomasjo/node-cmark | binding.gyp | binding.gyp | {
'targets': [
{
'target_name': 'cmark',
'include_dirs': [
'<!(node -e "require(\'nan\')")',
'vendor/cmark/src',
'vendor/cmark/extensions',
'vendor/cmark/build/src',
'vendor/cmark/build/extensions',
],
'sources': [
'src/cmark.cpp',
'src/common.cpp',
'src/sync.cpp',
# TODO: Automate or improve this external source listing stuff?
'vendor/cmark/src/arena.c',
'vendor/cmark/src/blocks.c',
'vendor/cmark/src/buffer.c',
'vendor/cmark/src/cmark.c',
'vendor/cmark/src/cmark_ctype.c',
'vendor/cmark/src/commonmark.c',
'vendor/cmark/src/houdini_href_e.c',
'vendor/cmark/src/houdini_html_e.c',
'vendor/cmark/src/houdini_html_u.c',
'vendor/cmark/src/html.c',
'vendor/cmark/src/inlines.c',
'vendor/cmark/src/iterator.c',
'vendor/cmark/src/latex.c',
'vendor/cmark/src/linked_list.c',
'vendor/cmark/src/main.c',
'vendor/cmark/src/man.c',
'vendor/cmark/src/node.c',
'vendor/cmark/src/plaintext.c',
'vendor/cmark/src/plugin.c',
'vendor/cmark/src/references.c',
'vendor/cmark/src/registry.c',
'vendor/cmark/src/render.c',
'vendor/cmark/src/scanners.c',
'vendor/cmark/src/syntax_extension.c',
'vendor/cmark/src/utf8.c',
'vendor/cmark/src/xml.c',
'vendor/cmark/extensions/autolink.c',
'vendor/cmark/extensions/core-extensions.c',
'vendor/cmark/extensions/ext_scanners.c',
'vendor/cmark/extensions/strikethrough.c',
'vendor/cmark/extensions/table.c',
'vendor/cmark/extensions/tagfilter.c',
],
},
],
}
| {
'targets': [
{
'target_name': 'cmark',
'include_dirs': [
'<!(node -e "require(\'nan\')")',
'vendor/cmark/src',
'vendor/cmark/extensions',
'vendor/cmark/build/src',
'vendor/cmark/build/extensions',
],
'sources': [
'src/cmark.cpp',
'src/common.cpp',
'src/sync.cpp',
# TODO: Automate or improve this external source listing stuff?
'vendor/cmark/src/arena.c',
'vendor/cmark/src/blocks.c',
'vendor/cmark/src/buffer.c',
'vendor/cmark/src/cmark.c',
'vendor/cmark/src/cmark_ctype.c',
'vendor/cmark/src/commonmark.c',
'vendor/cmark/src/houdini_href_e.c',
'vendor/cmark/src/houdini_html_e.c',
'vendor/cmark/src/houdini_html_u.c',
'vendor/cmark/src/html.c',
'vendor/cmark/src/inlines.c',
'vendor/cmark/src/iterator.c',
'vendor/cmark/src/latex.c',
'vendor/cmark/src/linked_list.c',
'vendor/cmark/src/main.c',
'vendor/cmark/src/man.c',
'vendor/cmark/src/node.c',
'vendor/cmark/src/plugin.c',
'vendor/cmark/src/references.c',
'vendor/cmark/src/registry.c',
'vendor/cmark/src/render.c',
'vendor/cmark/src/scanners.c',
'vendor/cmark/src/syntax_extension.c',
'vendor/cmark/src/utf8.c',
'vendor/cmark/src/xml.c',
'vendor/cmark/extensions/autolink.c',
'vendor/cmark/extensions/core-extensions.c',
'vendor/cmark/extensions/ext_scanners.c',
'vendor/cmark/extensions/strikethrough.c',
'vendor/cmark/extensions/table.c',
'vendor/cmark/extensions/tagfilter.c',
],
},
],
}
| mit | Python |
cb028376c813b001d59de56b6973cd745b224279 | Fix test addressing object | storax/qmenuview | test/test_view.py | test/test_view.py | from PySide import QtGui
import qmenuview
def test_title(qtbot):
title = 'Test title'
mv = qmenuview.MenuView(title)
assert mv.title() == title
def test_parent(qtbot):
p = QtGui.QWidget()
mv = qmenuview.MenuView(parent=p)
assert mv.parent() is p
| from PySide import QtGui
import qmenuview
def test_title(qtbot):
title = 'Test title'
qmenuview.MenuView(title)
assert qmenuview.title() == title
def test_parent(qtbot):
p = QtGui.QWidget()
qmenuview.MenuView(parent=p)
assert qmenuview.parent() is p
| bsd-3-clause | Python |
78a681055722b181312cedf0c735366c9b90bdb5 | Revert "Revert "Задание 2"" | VLovets/python_01 | test_add_group.py | test_add_group.py | # -*- coding: utf-8 -*-
from selenium.webdriver.firefox.webdriver import WebDriver
import unittest
from group import Group
def is_alert_present(wd):
try:
wd.switch_to_alert().text
return True
except:
return False
class test_add_group(unittest.TestCase):
def setUp(self):
self.wd = WebDriver()
self.wd.implicitly_wait(60)
def open_home_page(self, wd):
wd.get("http://localhost/addressbook/")
def log_in(self, wd, username, password):
wd.find_element_by_name("user").click()
wd.find_element_by_name("user").send_keys(username)
wd.find_element_by_name("pass").click()
wd.find_element_by_name("pass").send_keys(password)
wd.find_element_by_css_selector("input[type=\"submit\"]").click()
def opens_group_page(self, wd):
wd.find_element_by_link_text("groups").click()
def creation_group(self, wd, group):
# Init group creation
wd.find_element_by_name("new").click()
# Fill group form
wd.find_element_by_name("group_name").click()
wd.find_element_by_name("group_name").clear()
wd.find_element_by_name("group_name").send_keys(group.name)
wd.find_element_by_name("group_header").click()
wd.find_element_by_name("group_header").clear()
wd.find_element_by_name("group_header").send_keys(group.header)
wd.find_element_by_name("group_footer").click()
wd.find_element_by_name("group_footer").clear()
wd.find_element_by_name("group_footer").send_keys(group.footer)
# Submit creation
wd.find_element_by_name("submit").click()
def return_to_groups_page(self, wd):
wd.find_element_by_link_text("groups").click()
def log_out(self, wd):
wd.find_element_by_link_text("Logout").click()
def test_(self):
wd = self.wd
self.open_home_page(wd)
self.log_in(wd, username="admin", password="secret")
self.opens_group_page(wd)
self.creation_group(wd, Group(name="new3", header="new3", footer="new3"))
self.return_to_groups_page(wd)
self.log_out(wd)
def test_add_empty_group(self):
wd = self.wd
self.open_home_page(wd)
self.log_in(wd, username="admin", password="secret")
self.opens_group_page(wd)
self.creation_group(wd, Group(name="", header="", footer=""))
self.return_to_groups_page(wd)
self.log_out(wd)
def tearDown(self):
self.wd.quit()
if __name__ == '__main__':
unittest.main()
| # -*- coding: utf-8 -*-
from selenium.webdriver.firefox.webdriver import WebDriver
from selenium.webdriver.common.action_chains import ActionChains
import time, unittest
def is_alert_present(wd):
try:
wd.switch_to_alert().text
return True
except:
return False
class test_add_group(unittest.TestCase):
def setUp(self):
self.wd = WebDriver()
self.wd.implicitly_wait(60)
def test_(self):
success = True
wd = self.wd
wd.get("http://localhost/addressbook/")
wd.find_element_by_name("user").click()
wd.find_element_by_name("user").send_keys("admin")
wd.find_element_by_name("pass").click()
wd.find_element_by_name("pass").send_keys("secret")
wd.find_element_by_css_selector("input[type=\"submit\"]").click()
wd.find_element_by_link_text("groups").click()
wd.find_element_by_name("new").click()
wd.find_element_by_name("group_name").click()
wd.find_element_by_name("group_name").clear()
wd.find_element_by_name("group_name").send_keys("new3")
wd.find_element_by_name("group_header").click()
wd.find_element_by_name("group_header").clear()
wd.find_element_by_name("group_header").send_keys("new3")
wd.find_element_by_name("group_footer").click()
wd.find_element_by_name("group_footer").clear()
wd.find_element_by_name("group_footer").send_keys("new3")
wd.find_element_by_name("submit").click()
wd.find_element_by_link_text("groups").click()
wd.find_element_by_link_text("Logout").click()
self.assertTrue(success)
def tearDown(self):
self.wd.quit()
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
9a7b8c72ced3e051154646be16ebd6b48f8c1333 | implement list_reachable_roles() | darren-wang/ks3,darren-wang/ks3 | keystone/role_hierarchy/core.py | keystone/role_hierarchy/core.py | import abc
from keystone.common import dependency
from keystone.common import manager
from keystone import exception
import six
@dependency.provider('rh_api')
@dependency.requires('role_api')
class Manager(manager.Manager):
def add_inheritance(self, asc_role_id, desc_role_id):
return self.driver.add_inheritance(asc_role_id, desc_role_id)
def del_inheritance(self, asc_role_id, desc_role_id):
pass
def check_inheritance(self, asc, desc):
try:
edge = self.driver.get_inheritance(asc, desc)
if edge:
return True
except exception.InheritanceNotFound:
return False
def list_immediate_ids(self, src_role_id):
idesc = self.driver.list_immediate_desc(src_role_id)
iid = [role['id'] for role in idesc]
return iid
def list_reachable_roles(self, src_role_id):
# ir, immediate reachable
# rr, reachable roles
rr = set()
ir = set(self.list_immediate_ids(src_role_id))
while ir:
for role_id in ir:
delta_ir = set(self.list_immediate_ids(role_id))
new_ir = ir.union(delta_ir)
new_ir.difference_update(rr)
new_ir.remove(role_id)
rr.add(role_id)
ir = new_ir
return rr
def _reachable_role_ids(self, src_role_id):
reachable_ids = []
reachable = self._list_reachable_roles(src_role_id)
for role in reachable:
reachable_ids.append(role['id'])
return reachable_ids
def check_reachable(self, src_role_id, dest_role_id):
reachable_ids = self._reachable_role_ids(src_role_id)
if dest_role_id in reachable_ids:
return True
return False
@six.add_metaclass(abc.ABCMeta)
class Driver(object):
def add_inheritance(self, asc, desc):
raise exception.NotImplemented()
def del_inheritance(self, asc, desc):
raise exception.NotImplemented()
def get_inheritance(self, asc, desc):
raise exception.NotImplemented()
| import abc
from keystone.common import dependency
from keystone.common import manager
from keystone import exception
import six
@dependency.provider('rh_api')
@dependency.requires('role_api')
class Manager(manager.Manager):
def add_inheritance(self, asc_role_id, desc_role_id):
return self.driver.add_inheritance(asc_role_id, desc_role_id)
def del_inheritance(self, asc_role_id, desc_role_id):
pass
def check_inheritance(self, asc, desc):
try:
edge = self.driver.get_inheritance(asc, desc)
if edge:
return True
except exception.InheritanceNotFound:
return False
def list_reachable_roles(self, src_role_id):
pass
def _reachable_role_ids(self, src_role_id):
reachable_ids = []
reachable = self._list_reachable_roles(src_role_id)
for role in reachable:
reachable_ids.append(role['id'])
return reachable_ids
def check_reachable(self, src_role_id, dest_role_id):
reachable_ids = self._reachable_role_ids(src_role_id)
if dest_role_id in reachable_ids:
return True
return False
@six.add_metaclass(abc.ABCMeta)
class Driver(object):
def add_inheritance(self, asc, desc):
raise exception.NotImplemented()
def del_inheritance(self, asc, desc):
raise exception.NotImplemented()
def get_inheritance(self, asc, desc):
raise exception.NotImplemented()
| apache-2.0 | Python |
1c038d6c79b7da9149af42b76377a5c25d4f1551 | Update wallarm.py | sandrogauci/wafw00f,EnableSecurity/wafw00f | wafw00f/plugins/wallarm.py | wafw00f/plugins/wallarm.py | #!/usr/bin/env python
NAME = 'Wallarm'
def is_waf(self):
return self.matchheader(('server', "nginx-wallarm"))
| #!/usr/bin/env python
NAME = 'Wallarm'
def is_waf(self):
if self.matchheader(('server', "nginx-wallarm")):
return True
| bsd-3-clause | Python |
de13352c488cf7c18490ba2e0e61307456402a4c | migrate res.company object to new API | akretion/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil | l10n_br_purchase/res_company.py | l10n_br_purchase/res_company.py | # -*- encoding: utf-8 -*-
###############################################################################
# #
# Copyright (C) 2009 Renato Lima - Akretion #
# #
#This program is free software: you can redistribute it and/or modify #
#it under the terms of the GNU Affero General Public License as published by #
#the Free Software Foundation, either version 3 of the License, or #
#(at your option) any later version. #
# #
#This program is distributed in the hope that it will be useful, #
#but WITHOUT ANY WARRANTY; without even the implied warranty of #
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
#GNU Affero General Public License for more details. #
# #
#You should have received a copy of the GNU Affero General Public License #
#along with this program. If not, see <http://www.gnu.org/licenses/>. #
###############################################################################
from openerp import models, fields
class ResCompany(models.Model):
_inherit = 'res.company'
purchase_fiscal_category_id = fields.Many2one(
'l10n_br_account.fiscal.category',
u'Categoria Fiscal Padrão Compras',
domain="[('journal_type', '=', 'purchase')]")
| # -*- encoding: utf-8 -*-
###############################################################################
# #
# Copyright (C) 2009 Renato Lima - Akretion #
# #
#This program is free software: you can redistribute it and/or modify #
#it under the terms of the GNU Affero General Public License as published by #
#the Free Software Foundation, either version 3 of the License, or #
#(at your option) any later version. #
# #
#This program is distributed in the hope that it will be useful, #
#but WITHOUT ANY WARRANTY; without even the implied warranty of #
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
#GNU Affero General Public License for more details. #
# #
#You should have received a copy of the GNU Affero General Public License #
#along with this program. If not, see <http://www.gnu.org/licenses/>. #
###############################################################################
from openerp.osv import orm, fields
class ResCompany(orm.Model):
_inherit = 'res.company'
_columns = {
'purchase_fiscal_category_id': fields.many2one(
'l10n_br_account.fiscal.category',
u'Categoria Fiscal Padrão Compras',
domain="[('journal_type', '=', 'purchase')]"),
}
| agpl-3.0 | Python |
2650d2555a6adb9eee8beff4fb6751dfdd784c48 | Fix a style error. | datawagovau/datacats,datacats/datacats,JJediny/datacats,JJediny/datacats,poguez/datacats,JackMc/datacats,reneenoble/datacats,florianm/datacats,florianm/datacats,datacats/datacats,dborzov/datacats,wardi/datacats,deniszgonjanin/datacats,reneenoble/datacats,dborzov/datacats,JackMc/datacats,datawagovau/datacats,deniszgonjanin/datacats,wardi/datacats,poguez/datacats | datacats/cli/lesscd.py | datacats/cli/lesscd.py | """
Watches a CKAN environment for changes in its .less files, and recompiles them when they do.
Usage:
datacats-lesscd [--help] TARGET
--help -h Show this help and quit.
"""
from os.path import expanduser, join as path_join, exists
import signal
from docopt import docopt
from datacats.version import __version__
from datacats.environment import Environment
from datacats.cli.less import less
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler, FileModifiedEvent
class LessCompileEventHandler(FileSystemEventHandler):
def __init__(self, environment):
self.environment = environment
def on_modified(self, event):
if isinstance(event, FileModifiedEvent):
less(self.environment, {})
def main():
opts = docopt(__doc__, version=__version__)
env_path = expanduser(opts['TARGET'])
environment = Environment.load(env_path)
env_path = environment.target
less_path = path_join(env_path, 'ckan', 'ckan', 'public', 'base', 'less')
if not env_path or not exists(less_path):
print 'No source code to watch found'
return
observer = Observer()
event_handler = LessCompileEventHandler(environment)
observer.schedule(event_handler, less_path, recursive=True)
observer.start()
# HACK: We make it so that the OS doesn't consult us and just kills us.
signal.signal(signal.SIGINT, signal.SIG_DFL)
observer.join()
| """
Watches a CKAN environment for changes in its .less files, and recompiles them when they do.
Usage:
datacats-lesscd [--help] TARGET
--help -h Show this help and quit.
"""
from os.path import expanduser, join as path_join, split as path_split, exists
import signal
from docopt import docopt
from datacats.version import __version__
from datacats.environment import Environment
from datacats.cli.less import less
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler, FileModifiedEvent
class LessCompileEventHandler(FileSystemEventHandler):
def __init__(self, environment):
self.environment = environment
def on_modified(self, event):
if isinstance(event, FileModifiedEvent):
less(self.environment, {})
def main():
opts = docopt(__doc__, version=__version__)
env_path = expanduser(opts['TARGET'])
environment = Environment.load(env_path)
env_path = environment.target
less_path = path_join(env_path, 'ckan', 'ckan', 'public', 'base', 'less')
if not env_path or not exists(less_path):
print 'No source code to watch found'
return
observer = Observer()
event_handler = LessCompileEventHandler(environment)
observer.schedule(event_handler, less_path, recursive=True)
observer.start()
# HACK: We make it so that the OS doesn't consult us and just kills us.
signal.signal(signal.SIGINT, signal.SIG_DFL)
observer.join()
| agpl-3.0 | Python |
37ef9412a39972229f253bba4963b1eb840f2572 | add a db cleanup for items over a year old | Flexget/Flexget,Flexget/Flexget,crawln45/Flexget,Flexget/Flexget,crawln45/Flexget,Flexget/Flexget,crawln45/Flexget,crawln45/Flexget | flexget/components/history/db.py | flexget/components/history/db.py | from datetime import datetime, timedelta
from loguru import logger
from sqlalchemy import Column, DateTime, Integer, String, Unicode
from flexget.event import event
from flexget.manager import Base
logger = logger.bind(name='history.db')
class History(Base):
__tablename__ = 'history'
id = Column(Integer, primary_key=True)
task = Column('feed', String)
filename = Column(String)
url = Column(String)
title = Column(Unicode)
time = Column(DateTime)
details = Column(String)
def __init__(self):
self.time = datetime.now()
def __str__(self):
return '<History(filename=%s,task=%s)>' % (self.filename, self.task)
def to_dict(self):
return {
'id': self.id,
'task': self.task,
'filename': self.filename,
'url': self.url,
'title': self.title,
'time': self.time.isoformat(),
'details': self.details,
}
@event('manager.db_cleanup')
def db_cleanup(manager, session):
# Purge task executions older than 1 year
result = (
session.query(History).filter(History.time < datetime.now() - timedelta(days=365)).delete()
)
if result:
logger.verbose('Removed {} accepted entries from history older than 1 year', result)
| from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, String, Unicode
from flexget.manager import Base
class History(Base):
__tablename__ = 'history'
id = Column(Integer, primary_key=True)
task = Column('feed', String)
filename = Column(String)
url = Column(String)
title = Column(Unicode)
time = Column(DateTime)
details = Column(String)
def __init__(self):
self.time = datetime.now()
def __str__(self):
return '<History(filename=%s,task=%s)>' % (self.filename, self.task)
def to_dict(self):
return {
'id': self.id,
'task': self.task,
'filename': self.filename,
'url': self.url,
'title': self.title,
'time': self.time.isoformat(),
'details': self.details,
}
| mit | Python |
dbd0a79ffc7e3672e42de6fd635ac4951e6470dd | move timeline and link to dynamic links | frappe/frappe,saurabh6790/frappe,almeidapaulopt/frappe,yashodhank/frappe,mhbu50/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,vjFaLk/frappe,almeidapaulopt/frappe,frappe/frappe,vjFaLk/frappe,saurabh6790/frappe,StrellaGroup/frappe,yashodhank/frappe,adityahase/frappe,saurabh6790/frappe,vjFaLk/frappe,vjFaLk/frappe,saurabh6790/frappe,yashodhank/frappe,adityahase/frappe,mhbu50/frappe,adityahase/frappe,StrellaGroup/frappe,StrellaGroup/frappe,mhbu50/frappe,frappe/frappe,adityahase/frappe,yashodhank/frappe,mhbu50/frappe | frappe/patches/v12_0/move_timeline_links_to_dynamic_links.py | frappe/patches/v12_0/move_timeline_links_to_dynamic_links.py | from __future__ import unicode_literals
import frappe
def execute():
comm_lists = []
for communication in frappe.get_list("Communication", filters={"communication_medium": "Email"},
fields=[
"name", "creation", "modified", "modified_by",
"timeline_doctype", "timeline_name",
"link_doctype", "link_name"
]):
counter = 1
if communication.timeline_doctype and communication.timeline_name:
comm_lists.append((
counter, frappe.generate_hash(length=10), "timeline_links", "Communication",
communication.name, communication.timeline_doctype, communication.timeline_name,
communication.creation, communication.modified, communication.modified_by
))
counter += 1
if communication.link_doctype and communication.link_name:
comm_lists.append((
counter, frappe.generate_hash(length=10), "timeline_links", "Communication",
communication.name, communication.link_doctype, communication.link_name,
communication.creation, communication.modified, communication.modified_by
))
for comm_list in comm_lists:
frappe.db.sql("""
insert into table `tabDynamic Link` (idx, name, parentfield, parenttype, parent, link_doctype, link_name, creation, modified, modified_by)
values %(values)s""",
{
"values": comm_list
}) | from __future__ import unicode_literals
import frappe
def execute():
comm_lists = []
for communication in frappe.get_list("Communication", filters={"communication_medium": "Email"},
fields=[
"name", "creation", "modified", "modified_by",
"timeline_doctype", "timeline_name",
]):
if communication.timeline_doctype and communication.timeline_name:
comm_lists.append((
"1", frappe.generate_hash(length=10), "dynamic_links", "Communication",
communication.name, communication.timeline_doctype, communication.timeline_name,
communication.creation, communication.modified, communication.modified_by
))
for comm_list in comm_lists:
frappe.db.sql("""
insert into table `tabDynamic Link` (idx, name, parentfield, parenttype, parent, link_doctype, link_name, creation, modified, modified_by)
values %(values)s""",
{
"values": comm_list
}) | mit | Python |
38891fee1afb02b4b92391f85839af93c820663b | order persons by | danielbaak/imperii-viz,danielbaak/imperii-viz,danielbaak/imperii-viz,danielbaak/imperii-viz | person/views.py | person/views.py | from person.serializers import PersonSerializer
from person.models import Person
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.decorators import api_view
from regeste.models import Regeste
from regeste.serializers import RegesteSerializer
class PersonView(APIView):
def get(self, request, format=None):
events = Person.objects.all().order_by('image_url')
serializer = PersonSerializer(events, many = True)
return Response(serializer.data)
class PersonDetail(APIView):
def get(self, request, pk, format=None):
person = get_object_or_404(Person.objects.filter(pk=pk))
serializer = PersonSerializer(person)
return Response(serializer.data)
@api_view(['GET'])
def regentenList(request, person_id, format=None):
person = get_object_or_404(Person.objects.filter(pk=person_id))
regesten = Regeste.objects.filter(issuer=person)
serializer = RegesteSerializer(regesten, many=True)
return Response(serializer.data)
| from person.serializers import PersonSerializer
from person.models import Person
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.decorators import api_view
from regeste.models import Regeste
from regeste.serializers import RegesteSerializer
class PersonView(APIView):
def get(self, request, format=None):
events = Person.objects.all()
serializer = PersonSerializer(events, many = True)
return Response(serializer.data)
class PersonDetail(APIView):
def get(self, request, pk, format=None):
person = get_object_or_404(Person.objects.filter(pk=pk))
serializer = PersonSerializer(person)
return Response(serializer.data)
@api_view(['GET'])
def regentenList(request, person_id, format=None):
person = get_object_or_404(Person.objects.filter(pk=person_id))
regesten = Regeste.objects.filter(issuer=person)
serializer = RegesteSerializer(regesten, many=True)
return Response(serializer.data)
| mit | Python |
588f3046dbd13e737f5e9c665b981dfaa401e6ab | move logging verbosity to top | eywalker/datajoint-python,datajoint/datajoint-python,dimitri-yatsenko/datajoint-python,fabiansinz/datajoint-python | tests/__init__.py | tests/__init__.py | """
Package for testing datajoint. Setup fixture will be run
to ensure that proper database connection and access privilege
exists. The content of the test database will be destroyed
after the test.
"""
__author__ = 'Edgar Walker, Fabian Sinz, Dimitri Yatsenko'
import logging
from os import environ
# turn on verbose logging
logging.basicConfig(level=logging.DEBUG)
import datajoint as dj
__all__ = ['__author__', 'PREFIX', 'CONN_INFO']
# Connection for testing
CONN_INFO = dict(
host=environ.get('DJ_TEST_HOST', 'localhost'),
user=environ.get('DJ_TEST_USER', 'datajoint'),
passwd=environ.get('DJ_TEST_PASSWORD', 'datajoint'))
# Prefix for all databases used during testing
PREFIX = environ.get('DJ_TEST_DB_PREFIX', 'djtest')
def setup_package():
"""
Package-level unit test setup
Turns off safemode
"""
dj.config['safemode'] = False
def teardown_package():
"""
Package-level unit test teardown.
Removes all databases with name starting with PREFIX.
To deal with possible foreign key constraints, it will unset
and then later reset FOREIGN_KEY_CHECKS flag
"""
conn = dj.conn(**CONN_INFO)
conn.query('SET FOREIGN_KEY_CHECKS=0')
cur = conn.query('SHOW DATABASES LIKE "{}\_%%"'.format(PREFIX))
for db in cur.fetchall():
conn.query('DROP DATABASE `{}`'.format(db[0]))
conn.query('SET FOREIGN_KEY_CHECKS=1')
| """
Package for testing datajoint. Setup fixture will be run
to ensure that proper database connection and access privilege
exists. The content of the test database will be destroyed
after the test.
"""
__author__ = 'Edgar Walker, Fabian Sinz, Dimitri Yatsenko'
import logging
from os import environ
import datajoint as dj
__all__ = ['__author__', 'PREFIX', 'CONN_INFO']
logging.basicConfig(level=logging.DEBUG)
# Connection for testing
CONN_INFO = dict(
host=environ.get('DJ_TEST_HOST', 'localhost'),
user=environ.get('DJ_TEST_USER', 'datajoint'),
passwd=environ.get('DJ_TEST_PASSWORD', 'datajoint'))
# Prefix for all databases used during testing
PREFIX = environ.get('DJ_TEST_DB_PREFIX', 'djtest')
def setup_package():
"""
Package-level unit test setup
:return:
"""
dj.config['safemode'] = False
def teardown_package():
"""
Package-level unit test teardown.
Removes all databases with name starting with PREFIX.
To deal with possible foreign key constraints, it will unset
and then later reset FOREIGN_KEY_CHECKS flag
"""
conn = dj.conn(**CONN_INFO)
conn.query('SET FOREIGN_KEY_CHECKS=0')
cur = conn.query('SHOW DATABASES LIKE "{}\_%%"'.format(PREFIX))
for db in cur.fetchall():
conn.query('DROP DATABASE `{}`'.format(db[0]))
conn.query('SET FOREIGN_KEY_CHECKS=1')
| lgpl-2.1 | Python |
9ac0a34c7853bfe03d81f0b4ddf233b307dccf3c | Remove debug print (oops) | shohei/Octoprint,masterhou/OctoPrint,beeverycreative/BEEweb,abinashk-inf/AstroBox,foosel/OctoPrint,shaggythesheep/OctoPrint,dragondgold/OctoPrint,mrbeam/OctoPrint,CapnBry/OctoPrint,MolarAmbiguity/OctoPrint,nickverschoor/OctoPrint,ryanneufeld/OctoPrint,SeveQ/OctoPrint,MaxOLydian/OctoPrint,abinashk-inf/AstroBox,ryanneufeld/OctoPrint,mcanes/OctoPrint,DanLipsitt/OctoPrint,MolarAmbiguity/OctoPrint,uuv/OctoPrint,hudbrog/OctoPrint,spapadim/OctoPrint,CapnBry/OctoPrint,C-o-r-E/OctoPrint,Salandora/OctoPrint,Jaesin/OctoPrint,MoonshineSG/OctoPrint,nicanor-romero/OctoPrint,rurkowce/octoprint-fork,spapadim/OctoPrint,jneves/OctoPrint,AstroPrint/AstroBox,javivi001/OctoPrint,senttech/OctoPrint,AstroPrint/AstroBox,Catrodigious/OctoPrint-TAM,bicephale/OctoPrint,markwal/OctoPrint,rurkowce/octoprint-fork,beeverycreative/BEEweb,EZ3-India/EZ-Remote,alephobjects/Cura,ryanneufeld/OctoPrint,ymilord/OctoPrint-MrBeam,mrbeam/OctoPrint,shohei/Octoprint,leductan-nguyen/RaionPi,sstocker46/OctoPrint,spapadim/OctoPrint,aerickson/OctoPrint,shohei/Octoprint,MaxOLydian/OctoPrint,nickverschoor/OctoPrint,beeverycreative/BEEweb,bicephale/OctoPrint,d42/octoprint-fork,MoonshineSG/OctoPrint,JackGavin13/octoprint-test-not-finished,eliasbakken/OctoPrint,chriskoz/OctoPrint,eliasbakken/OctoPrint,ErikDeBruijn/OctoPrint,JackGavin13/octoprint-test-not-finished,alex1818/OctoPrint,dansantee/OctoPrint,javivi001/OctoPrint,skieast/OctoPrint,punkkeks/OctoPrint,sstocker46/OctoPrint,markwal/OctoPrint,ymilord/OctoPrint-MrBeam,CapnBry/OctoPrint,alex1818/OctoPrint,C-o-r-E/OctoPrint,dansantee/OctoPrint,MaxOLydian/OctoPrint,d42/octoprint-fork,abinashk-inf/AstroBox,Mikk36/OctoPrint,bicephale/OctoPrint,AstroPrint/AstroBox,alephobjects/Cura,beeverycreative/BEEweb,mayoff/OctoPrint,uuv/OctoPrint,madhuni/AstroBox,Skeen/OctoPrint,alex1818/OctoPrint,Skeen/OctoPrint,SeveQ/OctoPrint,nicanor-romero/OctoPrint,chriskoz/OctoPrint,ErikDeBruijn/OctoPrint,nicanor-romero/OctoPrint,EZ3-India/EZ-Remote,Javierma/OctoPrint-TFG,javivi001/OctoPrint,chriskoz/OctoPrint,Javierma/OctoPrint-TFG,Jaesin/OctoPrint,dansantee/OctoPrint,3dprintcanalhouse/octoprint2,MoonshineSG/OctoPrint,leductan-nguyen/RaionPi,Voxel8/OctoPrint,madhuni/AstroBox,SeveQ/OctoPrint,ryanneufeld/OctoPrint,punkkeks/OctoPrint,JackGavin13/octoprint-test-not-finished,mrbeam/OctoPrint,Catrodigious/OctoPrint-TAM,CapnBry/OctoPrint,Voxel8/OctoPrint,sstocker46/OctoPrint,shaggythesheep/OctoPrint,leductan-nguyen/RaionPi,dragondgold/OctoPrint,3dprintcanalhouse/octoprint2,MoonshineSG/OctoPrint,chriskoz/OctoPrint,MolarAmbiguity/OctoPrint,shohei/Octoprint,C-o-r-E/OctoPrint,aerickson/OctoPrint,Salandora/OctoPrint,mayoff/OctoPrint,ymilord/OctoPrint-MrBeam,DanLipsitt/OctoPrint,ymilord/OctoPrint-MrBeam,eliasbakken/OctoPrint,senttech/OctoPrint,markwal/OctoPrint,Jaesin/OctoPrint,madhuni/AstroBox,eddieparker/OctoPrint,skieast/OctoPrint,aerickson/OctoPrint,foosel/OctoPrint,mcanes/OctoPrint,alephobjects/Cura,uuv/OctoPrint,EZ3-India/EZ-Remote,senttech/OctoPrint,mayoff/OctoPrint,punkkeks/OctoPrint,nickverschoor/OctoPrint,foosel/OctoPrint,ymilord/OctoPrint-MrBeam,Mikk36/OctoPrint,Salandora/OctoPrint,foosel/OctoPrint,dragondgold/OctoPrint,hudbrog/OctoPrint,hudbrog/OctoPrint,3dprintcanalhouse/octoprint1,3dprintcanalhouse/octoprint1,shohei/Octoprint,eddieparker/OctoPrint,nickverschoor/OctoPrint,eddieparker/OctoPrint,leductan-nguyen/RaionPi,madhuni/AstroBox,masterhou/OctoPrint,Javierma/OctoPrint-TFG,bicephale/OctoPrint,Skeen/OctoPrint,jneves/OctoPrint,ErikDeBruijn/OctoPrint,senttech/OctoPrint,Catrodigious/OctoPrint-TAM,masterhou/OctoPrint,jneves/OctoPrint,Javierma/OctoPrint-TFG,Salandora/OctoPrint,JackGavin13/octoprint-test-not-finished,Voxel8/OctoPrint,abinashk-inf/AstroBox,Jaesin/OctoPrint,Mikk36/OctoPrint,EZ3-India/EZ-Remote,skieast/OctoPrint,mcanes/OctoPrint,shaggythesheep/OctoPrint | SkeinPyPy_NewUI/newui/skeinRun.py | SkeinPyPy_NewUI/newui/skeinRun.py | from __future__ import absolute_import
import platform, os, subprocess, sys
from skeinforge_application.skeinforge_utilities import skeinforge_craft
def getPyPyExe():
"Return the path to the pypy executable if we can find it. Else return False"
if platform.system() == "Windows":
checkSSE2exe = os.path.dirname(os.path.abspath(__file__)) + "/checkSSE2.exe"
if os.path.exists(checkSSE2exe):
if subprocess.call(checkSSE2exe) != 0:
print "*****************************************************"
print "* Your CPU is lacking SSE2 support, cannot use PyPy *"
print "*****************************************************"
return False
if platform.system() == "Windows":
pypyExe = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../pypy/pypy.exe"));
else:
pypyExe = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../pypy/bin/pypy"));
if os.path.exists(pypyExe):
return pypyExe
pypyExe = "/bin/pypy";
if os.path.exists(pypyExe):
return pypyExe
pypyExe = "/usr/bin/pypy";
if os.path.exists(pypyExe):
return pypyExe
pypyExe = "/usr/local/bin/pypy";
if os.path.exists(pypyExe):
return pypyExe
return False
def runSkein(fileNames):
"Run the slicer on the files. If we are running with PyPy then just do the slicing action. If we are running as Python, try to find pypy."
pypyExe = getPyPyExe()
for fileName in fileNames:
if platform.python_implementation() == "PyPy":
skeinforge_craft.writeOutput(fileName)
elif pypyExe == False:
print "************************************************"
print "* Failed to find pypy, so slicing with python! *"
print "************************************************"
skeinforge_craft.writeOutput(fileName)
print "************************************************"
print "* Failed to find pypy, so sliced with python! *"
print "************************************************"
else:
subprocess.call([pypyExe, os.path.join(sys.path[0], sys.argv[0]), fileName])
def getSkeinCommand(filename):
pypyExe = getPyPyExe()
if pypyExe == False:
pypyExe = sys.executable
return [pypyExe, os.path.join(sys.path[0], os.path.split(sys.argv[0])[1]), filename]
| from __future__ import absolute_import
import platform, os, subprocess, sys
from skeinforge_application.skeinforge_utilities import skeinforge_craft
def getPyPyExe():
"Return the path to the pypy executable if we can find it. Else return False"
if platform.system() == "Windows":
checkSSE2exe = os.path.dirname(os.path.abspath(__file__)) + "/checkSSE2.exe"
if os.path.exists(checkSSE2exe):
if subprocess.call(checkSSE2exe) != 0:
print "*****************************************************"
print "* Your CPU is lacking SSE2 support, cannot use PyPy *"
print "*****************************************************"
return False
if platform.system() == "Windows":
pypyExe = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../pypy/pypy.exe"));
else:
pypyExe = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../pypy/bin/pypy"));
print pypyExe
if os.path.exists(pypyExe):
return pypyExe
pypyExe = "/bin/pypy";
if os.path.exists(pypyExe):
return pypyExe
pypyExe = "/usr/bin/pypy";
if os.path.exists(pypyExe):
return pypyExe
pypyExe = "/usr/local/bin/pypy";
if os.path.exists(pypyExe):
return pypyExe
return False
def runSkein(fileNames):
"Run the slicer on the files. If we are running with PyPy then just do the slicing action. If we are running as Python, try to find pypy."
pypyExe = getPyPyExe()
for fileName in fileNames:
if platform.python_implementation() == "PyPy":
skeinforge_craft.writeOutput(fileName)
elif pypyExe == False:
print "************************************************"
print "* Failed to find pypy, so slicing with python! *"
print "************************************************"
skeinforge_craft.writeOutput(fileName)
print "************************************************"
print "* Failed to find pypy, so sliced with python! *"
print "************************************************"
else:
subprocess.call([pypyExe, os.path.join(sys.path[0], sys.argv[0]), fileName])
def getSkeinCommand(filename):
pypyExe = getPyPyExe()
if pypyExe == False:
pypyExe = sys.executable
return [pypyExe, os.path.join(sys.path[0], os.path.split(sys.argv[0])[1]), filename]
| agpl-3.0 | Python |
9ef425adc683b173b34880e692177e6a35c89b70 | Update longest-substring-with-at-most-k-distinct-characters.py | yiwen-luo/LeetCode,jaredkoontz/leetcode,yiwen-luo/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,githubutilities/LeetCode,kamyu104/LeetCode,githubutilities/LeetCode,jaredkoontz/leetcode,jaredkoontz/leetcode,kamyu104/LeetCode,jaredkoontz/leetcode,githubutilities/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,githubutilities/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,yiwen-luo/LeetCode,kamyu104/LeetCode,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,githubutilities/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015 | Python/longest-substring-with-at-most-k-distinct-characters.py | Python/longest-substring-with-at-most-k-distinct-characters.py | # Time: O(n)
# Space: O(1)
class Solution(object):
def lengthOfLongestSubstringKDistinct(self, s, k):
"""
:type s: str
:type k: int
:rtype: int
"""
longest, start, distinct_count, visited = 0, 0, 0, [0 for _ in xrange(256)]
for i, char in enumerate(s):
if visited[ord(char)] == 0:
distinct_count += 1
visited[ord(char)] += 1
while distinct_count > k:
visited[ord(s[start])] -= 1
if visited[ord(s[start])] == 0:
distinct_count -= 1
start += 1
longest = max(longest, i - start + 1)
return longest
| # Time: O(n^2)
# Space: O(1)
class Solution(object):
def lengthOfLongestSubstringKDistinct(self, s, k):
"""
:type s: str
:type k: int
:rtype: int
"""
longest, start, distinct_count, visited = 0, 0, 0, [0 for _ in xrange(256)]
for i, char in enumerate(s):
if visited[ord(char)] == 0:
distinct_count += 1
visited[ord(char)] += 1
while distinct_count > k:
visited[ord(s[start])] -= 1
if visited[ord(s[start])] == 0:
distinct_count -= 1
start += 1
longest = max(longest, i - start + 1)
return longest
| mit | Python |
413d879a973f845ca5920167c4ab54676f145578 | bump version | facebookresearch/detectron2,facebookresearch/detectron2,facebookresearch/detectron2 | detectron2/__init__.py | detectron2/__init__.py | # Copyright (c) Facebook, Inc. and its affiliates.
from .utils.env import setup_environment
setup_environment()
# This line will be programatically read/write by setup.py.
# Leave them at the bottom of this file and don't touch them.
__version__ = "0.4.1"
| # Copyright (c) Facebook, Inc. and its affiliates.
from .utils.env import setup_environment
setup_environment()
# This line will be programatically read/write by setup.py.
# Leave them at the bottom of this file and don't touch them.
__version__ = "0.4"
| apache-2.0 | Python |
a611d0141d9273260da68ade308f613e5f1986e3 | add heterogenous results plot | kondra/latent_ssvm | plot_results.py | plot_results.py | import numpy as np
import pylab as pl
import os
def plot_full_weak():
weak_results = np.genfromtxt('weak_labeled.csv', delimiter=',')
full_results = np.genfromtxt('full_labeled.csv', delimiter=',')
weak_results[0, 2] = 0.5
full_results[0, 1] = 0.5
y = np.mean(weak_results[:17, :6], axis=0)
y_min = np.min(weak_results[:17, :6], axis=0)
y_max = np.max(weak_results[:17, :6], axis=0)
x = np.array([0, 2, 4, 10, 25, 100])
full_y = np.mean(full_results[:17, :5], axis=0)
full_y_min = np.min(full_results[:17, :5], axis=0)
full_y_max = np.max(full_results[:17, :5], axis=0)
pl.errorbar([0, 1, 2, 3, 4, 5], 1 - y, yerr=[y_max - y, y - y_min],
label='+weak')
pl.errorbar([1, 2, 3, 4, 5], 1 - full_y,
yerr=[full_y_max - full_y, full_y - full_y_min], label='full')
pl.xticks(np.arange(0, 6), x)
pl.title('C=0.01')
pl.xlabel('number of fully-labeled examples')
pl.ylabel('hamming loss')
pl.ylim([0, 1])
pl.xlim([-0.1, 5.1])
pl.legend(loc='lower right')
def plot_heterogenous_weak():
results = np.genfromtxt('heterogenous_plain')
results = np.reshape(results, (-1, 6))
y = np.mean(results, axis=0)
y_min = np.min(results, axis=0)
y_max = np.max(results, axis=0)
x = np.array([0, 2, 4, 10, 25, 100])
pl.errorbar([0, 1, 2, 3, 4, 5], 1 - y,
yerr=[y_max - y, y - y_min], label='+weak')
pl.xticks(np.arange(0, 6), x)
pl.title('C=0.1')
pl.xlabel('number of fully-labeled examples')
pl.ylabel('hamming loss')
pl.ylim([0, 1])
pl.xlim([-0.1, 5.1])
if __name__ == '__main__':
os.chdir("results/syntetic")
plot_full_weak()
plot_heterogenous_weak()
pl.show()
| import numpy as np
import pylab as pl
import os
os.chdir("results")
weak_results = np.genfromtxt('weak_labeled.csv', delimiter=',')
full_results = np.genfromtxt('full_labeled.csv', delimiter=',')
weak_results[0, 2] = 0.5
full_results[0, 1] = 0.5
y = np.mean(weak_results[:17, :6], axis=0)
y_min = np.min(weak_results[:17, :6], axis=0)
y_max = np.max(weak_results[:17, :6], axis=0)
x = np.array([0, 2, 4, 10, 25, 100])
full_y = np.mean(full_results[:17, :5], axis=0)
full_y_min = np.min(full_results[:17, :5], axis=0)
full_y_max = np.max(full_results[:17, :5], axis=0)
full_x = np.array([2, 4, 10, 25, 100])
pl.errorbar([0, 1, 2, 3, 4, 5], 1 - y, yerr=[y_max - y, y - y_min],
label='+weak')
pl.errorbar([1, 2, 3, 4, 5], 1 - full_y,
yerr=[full_y_max - full_y, full_y - full_y_min], label='full')
pl.xticks(np.arange(0, 6), x)
pl.title('C=0.01')
pl.xlabel('number of fully-labeled examples')
pl.ylabel('hamming loss')
pl.ylim([0, 1])
pl.xlim([-0.1, 5.1])
pl.legend(loc='lower right')
pl.show()
| bsd-2-clause | Python |
6c3a7ff4ba2c680294ee67ce5efc42380d426236 | order mentors/mentees by crsid | rjw57/edpcmentoring,cuedpc/edpcmentoring,rjw57/edpcmentoring,rjw57/edpcmentoring,rjw57/edpcmentoring,cuedpc/edpcmentoring,cuedpc/edpcmentoring,cuedpc/edpcmentoring | frontend/views.py | frontend/views.py | from django.shortcuts import render
from cuedmembers.decorators import member_required
from matching.models import Preferences
from mentoring.models import Relationship
from .forms import MentoringPreferencesForm
from .queries import select_member_details
@member_required
def index(request):
mentees = select_member_details(
Relationship.objects.mentees_for_user(request.user)).order_by('username')
mentors = select_member_details(
Relationship.objects.mentors_for_user(request.user)).order_by('username')
preferences, _ = Preferences.objects.get_or_create(user=request.user)
preferences_form = MentoringPreferencesForm({
'is_seeking_mentor': preferences.is_seeking_mentor,
'mentor_requirements': preferences.mentor_requirements,
'is_seeking_mentee': preferences.is_seeking_mentee,
'mentee_requirements': preferences.mentee_requirements,
})
return render(request, 'frontend/index.html', {
'mentees': mentees, 'mentors': mentors,
'preferences_form': preferences_form,
})
| from django.shortcuts import render
from cuedmembers.decorators import member_required
from matching.models import Preferences
from mentoring.models import Relationship
from .forms import MentoringPreferencesForm
from .queries import select_member_details
@member_required
def index(request):
mentees = select_member_details(
Relationship.objects.mentees_for_user(request.user))
mentors = select_member_details(
Relationship.objects.mentors_for_user(request.user))
preferences, _ = Preferences.objects.get_or_create(user=request.user)
preferences_form = MentoringPreferencesForm({
'is_seeking_mentor': preferences.is_seeking_mentor,
'mentor_requirements': preferences.mentor_requirements,
'is_seeking_mentee': preferences.is_seeking_mentee,
'mentee_requirements': preferences.mentee_requirements,
})
return render(request, 'frontend/index.html', {
'mentees': mentees, 'mentors': mentors,
'preferences_form': preferences_form,
})
| mit | Python |
f3ccd8df07c6834dcbdba6a7be0b66fb711ae826 | update shuffling | dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy | disaggregator/utils.py | disaggregator/utils.py | import appliance
import pandas as pd
import numpy as np
import os
import pickle
def aggregate_instances(instances, metadata, how="strict"):
'''
Given a list of temporally aligned instances, aggregate them into a single
signal.
'''
if how == "strict":
traces = [instance.traces for instance in instances]
traces = [list(t) for t in zip(*traces)] # transpose
traces = [ aggregate_traces(t,{}) for t in traces]
return appliance.ApplianceInstance(traces, metadata)
else:
return NotImplementedError
def aggregate_traces(traces, metadata, how="strict"):
'''
Given a list of temporally aligned traces, aggregate them into a single
signal.
'''
if how == "strict":
# require that traces are exactly aligned
summed_series = traces[0].series
for trace in traces[1:]:
summed_series += trace.series
return appliance.ApplianceTrace(summed_series, metadata)
else:
return NotImplementedError
def concatenate_traces(traces, metadata=None, how="strict"):
'''
Given a list of appliance traces, returns a single concatenated
trace. With how="strict" option, must be sampled at the same rate and
consecutive, without overlapping datapoints.
'''
if not metadata:
metadata = traces[0].metadata
if how == "strict":
# require ordered list of consecutive, similarly sampled traces with no
# missing data.
return appliance.ApplianceTrace(pd.concat([t.series for t in traces]),metadata)
else:
raise NotImplementedError
def concatenate_traces_lists(traces, metadata=None, how="strict"):
'''
Takes a list of lists of n traces and concatenates them into a single
list of n traces.
'''
if not metadata:
metadata = [trace.metadata for trace in traces[0]]
if how == "strict":
traces = [list(t) for t in zip(*traces)]
traces = [concatenate_traces(t,m) for t,m in zip(traces,metadata)]
return traces
else:
raise NotImplementedError
def order_traces(traces):
'''
Given a set of traces, orders them chronologically and catches
overlapping traces.
'''
order = np.argsort([t.series[0] for t in traces])
new_traces = [traces[i] for i in order]
return new_traces
def pickle_object(obj,title):
'''
Given an object and a filename saves the object in pickled format to the data directory.
'''
with open(os.path.join(os.pardir,'data/{}.p'.format(title)),'wb') as f:
pickle.dump(obj, f)
def shuffle_appliance_sets(sets,other_params):
pass
| import appliance
import pandas as pd
import numpy as np
import os
import pickle
def concatenate_traces(traces, metadata=None, how="strict"):
'''
Given a list of appliance traces, returns a single concatenated
trace. With how="strict" option, must be sampled at the same rate and
consecutive, without overlapping datapoints.
'''
if not metadata:
metadata = traces[0].metadata
if how == "strict":
# require ordered list of consecutive, similarly sampled traces with no
# missing data.
return appliance.ApplianceTrace(pd.concat([t.series for t in traces]),metadata)
else:
raise NotImplementedError
def concatenate_traces_lists(traces, metadata=None, how="strict"):
'''
Takes a list of lists of n traces and concatenates them into a single
list of n traces.
'''
if not metadata:
metadata = [trace.metadata for trace in traces[0]]
if how == "strict":
traces = [list(t) for t in zip(*traces)]
traces = [concatenate_traces(t,m) for t,m in zip(traces,metadata)]
return traces
else:
raise NotImplementedError
def aggregate_traces(traces, metadata, how="strict"):
'''
Given a list of temporally aligned traces, aggregate them into a single
signal.
'''
if how == "strict":
# require that traces are exactly aligned
summed_series = traces[0].series
for trace in traces[1:]:
summed_series += trace.series
return appliance.ApplianceTrace(summed_series, metadata)
else:
return NotImplementedError
def aggregate_instances(instances, metadata, how="strict"):
'''
Given a list of temporally aligned instances, aggregate them into a single
signal.
'''
if how == "strict":
traces = [instance.traces for instance in instances]
traces = [list(t) for t in zip(*traces)] # transpose
traces = [ aggregate_traces(t,{}) for t in traces]
return appliance.ApplianceInstance(traces, metadata)
else:
return NotImplementedError
def order_traces(traces):
'''
Given a set of traces, orders them chronologically and catches
overlapping traces.
'''
order = np.argsort([t.series[0] for t in traces])
new_traces = [traces[i] for i in order]
return new_traces
def pickle_object(obj,title):
'''
Given an object and a filename saves the object in pickled format to the data directory.
'''
with open(os.path.join(os.pardir,'data/{}.p'.format(title)),'wb') as f:
pickle.dump(obj, f)
| mit | Python |
1017facb639d974de3c8d90a6e0642bbd86a1b4d | update .ycm_extra_conf.py | xu-cheng/pbc-bindings,xu-cheng/pbc-bindings,xu-cheng/pbc-bindings | .ycm_extra_conf.py | .ycm_extra_conf.py | import subprocess
def SystemIncludePathasFlags():
cmd = "c++ -E -x c++ - -v </dev/null 2>&1"
out = subprocess.check_output(cmd, shell=True).decode("utf-8")
out = out.split("\n")
out = out[out.index("#include <...> search starts here:") + 1:-1]
out = out[0:out.index("End of search list.")]
out = [p.strip() for p in out if not p.endswith("(framework directory)")]
flags = [["-isystem", p] for p in out]
return [f for ff in flags for f in ff]
def FlagsForFile(filename, **kwargs):
flags = [
"-Wall",
"-Wextra",
"-Werror",
"-Wno-long-long",
"-Wno-variadic-macros",
"-fexceptions",
"-DNDEBUG",
"-std=c++14",
"-x",
"c++",
"-I./include",
]
flags += SystemIncludePathasFlags()
return { "flags": flags, "do_cache": True}
| import os
import subprocess
def SystemIncludePathasFlags():
cmd = "c++ -E -x c++ - -v </dev/null 2>&1"
out = subprocess.check_output(cmd, shell=True).decode("utf-8")
out = out.split("\n")
out = out[out.index("#include <...> search starts here:") + 1:-1]
out = out[0:out.index("End of search list.")]
out = [p.strip() for p in out if not p.endswith("(framework directory)")]
flags = [["-isystem", p] for p in out]
return [f for ff in flags for f in ff]
def FlagsForFile(filename, **kwargs):
flags = [
"-Wall",
"-Wextra",
"-Werror",
"-Wno-long-long",
"-Wno-variadic-macros",
"-fexceptions",
"-DNDEBUG",
"-std=c++14",
"-x",
"c++",
"-I./include",
]
flags += SystemIncludePathasFlags()
return { "flags": flags, "do_cache": True}
| mit | Python |
dc58eb174f65c508e63aedc617a51d7478db0358 | Update vouch flags when migrating autovouches. | akatsoulas/mozillians,mozilla/mozillians,mozilla/mozillians,mozilla/mozillians,akatsoulas/mozillians,mozilla/mozillians,akatsoulas/mozillians,akatsoulas/mozillians | mozillians/users/migrations/0038_auto_20180815_0108.py | mozillians/users/migrations/0038_auto_20180815_0108.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-08-15 08:08
from __future__ import unicode_literals
from django.db import migrations
from django.conf import settings
from django.utils.timezone import now
def add_missing_employee_vouches(apps, schema_editor):
UserProfile = apps.get_model('users', 'UserProfile')
IdpProfile = apps.get_model('users', 'IdpProfile')
for profile in UserProfile.objects.all():
emails = [idp.email for idp in IdpProfile.objects.filter(profile=profile)]
email_exists = any([email for email in set(emails)
if email.split('@')[1] in settings.AUTO_VOUCH_DOMAINS])
if email_exists and not profile.vouches_received.filter(
description=settings.AUTO_VOUCH_REASON, autovouch=True).exists():
profile.vouches_received.create(
voucher=None,
date=now(),
description=settings.AUTO_VOUCH_REASON,
autovouch=True
)
vouches = profile.vouches_received.all().count()
UserProfile.objects.filter(pk=profile.pk).update(
is_vouched=vouches > 0,
can_vouch=vouches >= settings.CAN_VOUCH_THRESHOLD
)
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0037_auto_20180720_0305'),
]
operations = [
migrations.RunPython(add_missing_employee_vouches, backwards),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-08-15 08:08
from __future__ import unicode_literals
from django.db import migrations
from django.conf import settings
from django.utils.timezone import now
def add_missing_employee_vouches(apps, schema_editor):
UserProfile = apps.get_model('users', 'UserProfile')
IdpProfile = apps.get_model('users', 'IdpProfile')
for profile in UserProfile.objects.all():
emails = [idp.email for idp in IdpProfile.objects.filter(profile=profile)]
email_exists = any([email for email in set(emails)
if email.split('@')[1] in settings.AUTO_VOUCH_DOMAINS])
if email_exists and not profile.vouches_received.filter(
description=settings.AUTO_VOUCH_REASON, autovouch=True).exists():
profile.vouches_received.create(
voucher=None,
date=now(),
description=settings.AUTO_VOUCH_REASON,
autovouch=True
)
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0037_auto_20180720_0305'),
]
operations = [
migrations.RunPython(add_missing_employee_vouches, backwards),
]
| bsd-3-clause | Python |
7716a82ffba6881e34f6e497c4bbf3704bc0ac23 | change padding char | lvxunDev/yunhetong-python-sdk | lib/aes.py | lib/aes.py | import base64
import json
import time
from Crypto.Cipher import AES
from Crypto import Random
class aes:
def __init__(self, key=''):
self.bs = 16
if '' != key:
self.key = json.loads(key)
self.secretKey = base64.b64decode(self.key['key'])
self.iv = base64.b64decode(self.key['iv'])
self.bt = self.key['bt']
else:
self.refresh()
def encrypt(self, data):
data = self._pad(data)
iv = Random.new().read(AES.block_size)
cipher = AES.new(self.secretKey, AES.MODE_CBC, iv)
return base64.b64encode(cipher.encrypt(data))
def decrypt(self, dataB):
data = base64.b64decode(dataB)
iv = data[:AES.block_size]
cipher = AES.new(self.secretKey, AES.MODE_CBC, iv)
res = cipher.decrypt(data[AES.block_size:])
return self._unpad(res).decode('utf-8')
def toString(self):
self.iv += "=" * ((4 - len(self.iv) % 4) % 4)
ret_map = {
'key': base64.b64encode(self.secretKey),
'iv': base64.b64encode(self.iv),
'bt': self.bt
}
return json.dumps(ret_map)
def refresh(self):
self.iv = Random.new().read(AES.block_size)
# todo create key generator
self.secretKey = '1234567812345678' # SeanWu told me to write this
self.bt = 1234567890123
def _pad(self, s):
return s + (self.bs - len(s) % self.bs) * '\t'
@staticmethod
def _unpad(s):
return s[:-ord(s[len(s) - 1:])]
| import base64
import json
import time
from Crypto.Cipher import AES
from Crypto import Random
class aes:
def __init__(self, key=''):
self.bs = 16
if '' != key:
self.key = json.loads(key)
self.secretKey = base64.b64decode(self.key['key'])
self.iv = base64.b64decode(self.key['iv'])
self.bt = self.key['bt']
else:
self.refresh()
def encrypt(self, data):
data = self._pad(data)
iv = Random.new().read(AES.block_size)
cipher = AES.new(self.secretKey, AES.MODE_CBC, iv)
return base64.b64encode(cipher.encrypt(data))
def decrypt(self, dataB):
data = base64.b64decode(dataB)
iv = data[:AES.block_size]
cipher = AES.new(self.secretKey, AES.MODE_CBC, iv)
res = cipher.decrypt(data[AES.block_size:])
return self._unpad(res).decode('utf-8')
def toString(self):
self.iv += "=" * ((4 - len(self.iv) % 4) % 4)
ret_map = {
'key': base64.b64encode(self.secretKey),
'iv': base64.b64encode(self.iv),
'bt': self.bt
}
return json.dumps(ret_map)
def refresh(self):
self.iv = Random.new().read(AES.block_size)
# todo create key generator
self.secretKey = '1234567812345678' # SeanWu told me to write this
self.bt = 1234567890123
def _pad(self, s):
return s + (self.bs - len(s) % self.bs) * chr(self.bs - len(s) % self.bs)
@staticmethod
def _unpad(s):
return s[:-ord(s[len(s) - 1:])]
| mit | Python |
eb84ba559d7d83f0917e257a6631b8eedcf33f92 | Fix Moshe's typos | SocialNPHS/SocialNPHS | tests/language.py | tests/language.py | """
Unit tests for the portion of this project which processes raw text using
natural language processing techniques.
"""
import sys
import unittest
from os import path
# PATH MAGIC
# Project root
base = path.abspath(path.join(path.dirname(path.abspath(__file__)), ".."))
sys.path.insert(0, base)
from SocialNPHS.language import tweet
from SocialNPHS.sources.twitter import user
class testLanguage(unittest.TestCase):
words = "@1Defenestrator is really awesome! Although, @G4_Y5_3X is cooler."
def test_tweet_tagging(self):
# Test that tagging works in its most basic form
moshe = user.NPUser("G4_Y5_3X")
tags = tweet.get_tweet_tags(moshe.tweepy.status.text)
self.assertIsInstance(tags, list)
# Test that names are replaced and recognized as proper nouns
tags = tweet.get_tweet_tags(self.words)
self.assertEqual(tags[0][0], "Luke Taylor")
self.assertEqual(tags[0][1], "NNP")
def test_tweet_analysis(self):
# Test that basic sentiment analysis works
self.assertEqual(
tweet.tweet_connotation(self.words)['compound'],
0.345
)
self.assertEqual(
tweet.person_connotation(self.words, 'Luke Taylor')['neu'],
0.461
)
| """
Unit tests for the portion of this project which processes raw text using
natural language processing techniques.
"""
import sys
import unittest
from os import path
# PATH MAGIC
# Project root
base = path.abspath(path.join(path.dirname(path.abspath(__file__)), ".."))
sys.path.insert(0, base)
from SocialNPHS.language import tweet
from SocialNPHS.sources.twitter import user
class testLanguage(unittest.TestCase):
test = "@1Defenestrator is really awesome! Although, @G4_Y5_3X is cooler."
def test_tweet_tagging(self):
# Test that tagging works in its most basic form
moshe = user.NPUser("G4_Y5_3X")
tags = tweet.get_tweet_tags(moshe.tweepy.status.text)
self.assertIsInstance(tags, list)
# Test that names are replaced and recognized as proper nouns
tags = tweet.get_tweet_tags(test)
self.assertEqual(tags[0][0], "Luke Taylor")
self.assertEqual(tags[0][1], "NNP")
def test_tweet_analysis(self):
# Test that basic sentiment analysis works (current values hardcoded)
self.assertEqual(tweet.tweet_connotation(test)['compound'], 0.345)
self.assertEqual(tweet.person_connotation(test, 'Luke Taylor')['neu'],
0.461) # pep8 line limit bite my shiny python ass
| mit | Python |
f40e72362c0992700eda182e4dbf3da6d46f5fec | Refactor and documented cdms_pi.tests.utils.populate_data | UKTradeInvestment/crm-poc,UKTradeInvestment/crm-poc | crm-poc/apps/cdms_api/tests/utils.py | crm-poc/apps/cdms_api/tests/utils.py | import datetime
from unittest import mock
from django.utils import timezone
from cdms_api.base import CDMSApi
from cdms_api.utils import datetime_to_cdms_datetime
def populate_data(service, data={}, guid=None):
"""
Returns a dict of cdms data to be used as return value of mocked cdms api calls.
This data will always include:
- ModifiedOn
- CreatedOn
- {service}Id: id of the record
which are autogenerated if not specified.
datatime values are converted to cdms ones automatically.
Examples of how to use it:
1. with defaults:
populate_data('MyService'):
will return:
{
'ModifiedOn': ...,
'CreatedOn': ...,
'MyServiceId': 'cdms-pk'
}
2. with overriding values:
poplate_data(
'MyService',
data={
'CreatedOn': datetime.now(),
'ExtraField': 'extra'
},
guid='my-id'
)
will return:
{
'ModifiedOn': ...,
'CreatedOn': ..., # cdms equivalent of the one provided
'MyServiceId': 'my-id',
'ExtraField'': 'extra'
}
"""
_data = dict(data)
_data['ModifiedOn'] = _data.get('ModifiedOn', timezone.now())
_data['CreatedOn'] = _data.get('CreatedOn', timezone.now())
id_key = '{0}Id'.format(service)
_data[id_key] = guid or _data.get(id_key, 'cdms-pk')
for k, v in _data.items():
if isinstance(v, datetime.datetime):
_data[k] = datetime_to_cdms_datetime(v)
return _data
def mocked_cdms_get(get_data={}):
def internal(service, guid):
return populate_data(service, get_data, guid)
return internal
def mocked_cdms_create(create_data={}):
def internal(service, data):
return populate_data(service, create_data)
return internal
def mocked_cdms_update(update_data={}):
def internal(service, guid, data):
return populate_data(service, update_data, guid)
return internal
def mocked_cdms_list(list_data=[]):
def internal(service, *args, **kwargs):
return [populate_data(service, item) for item in list_data]
return internal
def get_mocked_api():
api = mock.MagicMock(spec=CDMSApi)
api.create.side_effect = mocked_cdms_create()
api.get.side_effect = mocked_cdms_get()
api.update.side_effect = mocked_cdms_update()
api.list.side_effect = mocked_cdms_list()
return api
| import datetime
from unittest import mock
from django.utils import timezone
from cdms_api.base import CDMSApi
from cdms_api.utils import datetime_to_cdms_datetime
def populate_data(data, service, guid=None):
_data = dict(data)
_data['ModifiedOn'] = _data.get('ModifiedOn', timezone.now())
_data['CreatedOn'] = _data.get('CreatedOn', timezone.now())
id_key = '{0}Id'.format(service)
_data[id_key] = guid or _data.get(id_key, 'cdms-pk')
for k, v in _data.items():
if isinstance(v, datetime.datetime):
_data[k] = datetime_to_cdms_datetime(v)
return _data
def mocked_cdms_get(get_data={}):
def internal(service, guid):
return populate_data(get_data, service, guid)
return internal
def mocked_cdms_create(create_data={}):
def internal(service, data):
return populate_data(create_data, service)
return internal
def mocked_cdms_update(update_data={}):
def internal(service, guid, data):
return populate_data(update_data, service, guid)
return internal
def mocked_cdms_list(list_data=[]):
def internal(service, *args, **kwargs):
return [populate_data(item, service) for item in list_data]
return internal
def get_mocked_api():
api = mock.MagicMock(spec=CDMSApi)
api.create.side_effect = mocked_cdms_create()
api.get.side_effect = mocked_cdms_get()
api.update.side_effect = mocked_cdms_update()
api.list.side_effect = mocked_cdms_list()
return api
| bsd-3-clause | Python |
ea989e0df29d9f000a0e841d56a321e9edea0427 | bump to 0.3.1 | tito/telenium,tito/telenium,tito/telenium | telenium/__init__.py | telenium/__init__.py | # coding=utf-8
__version__ = "0.3.1"
| # coding=utf-8
__version__ = "0.3.0"
| mit | Python |
076502a20d2a69b4d6f82dd411de6e2b0a50139f | add state to the instance and adjust status | tsuru/varnishapi,tsuru/varnishapi | tests/managers.py | tests/managers.py | # Copyright 2014 varnishapi authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from varnishapi import storage
class FakeInstance(object):
def __init__(self, name, state):
self.name = name
self.state = state
self.bound = []
def bind(self, app_host):
self.bound.append(app_host)
def unbind(self, app_host):
self.bound.remove(app_host)
class FakeManager(object):
def __init__(self, storage=None):
self.instances = []
def add_instance(self, name, state="running"):
self.instances.append(FakeInstance(name, state))
def bind(self, name, app_host):
index, instance = self.find_instance(name)
if index < 0:
raise storage.InstanceNotFoundError()
instance.bind(app_host)
def unbind(self, name, app_host):
index, instance = self.find_instance(name)
if index < 0:
raise storage.InstanceNotFoundError()
instance.unbind(app_host)
def remove_instance(self, name):
index, _ = self.find_instance(name)
if index > -1:
del self.instances[index]
else:
raise storage.InstanceNotFoundError()
def info(self, name):
index, instance = self.find_instance(name)
if index < 0:
raise storage.InstanceNotFoundError()
return {"name": instance.name}
def status(self, name):
index, instance = self.find_instance(name)
if index < 0:
raise storage.InstanceNotFoundError()
return instance.state
def find_instance(self, name):
for i, instance in enumerate(self.instances):
if instance.name == name:
return i, instance
return -1, None
def reset(self):
self.instances = []
| # Copyright 2014 varnishapi authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from varnishapi import storage
class FakeInstance(object):
def __init__(self, name):
self.name = name
self.bound = []
def bind(self, app_host):
self.bound.append(app_host)
def unbind(self, app_host):
self.bound.remove(app_host)
class FakeManager(object):
def __init__(self, storage=None):
self.instances = []
def add_instance(self, name):
self.instances.append(FakeInstance(name))
def bind(self, name, app_host):
index, instance = self.find_instance(name)
if index < 0:
raise storage.InstanceNotFoundError()
instance.bind(app_host)
def unbind(self, name, app_host):
index, instance = self.find_instance(name)
if index < 0:
raise storage.InstanceNotFoundError()
instance.unbind(app_host)
def remove_instance(self, name):
index, _ = self.find_instance(name)
if index > -1:
del self.instances[index]
else:
raise storage.InstanceNotFoundError()
def info(self, name):
index, instance = self.find_instance(name)
if index < 0:
raise storage.InstanceNotFoundError()
return {"name": instance.name}
def status(self, name):
index, _ = self.find_instance(name)
if index < 0:
raise storage.InstanceNotFoundError()
return "running"
def find_instance(self, name):
for i, instance in enumerate(self.instances):
if instance.name == name:
return i, instance
return -1, None
def reset(self):
self.instances = []
| bsd-3-clause | Python |
fa2ecdc0bcb30415699baf4f014b390d4473d43c | Fix Sphinx warnings about duplicate objects | astropy/photutils,larrybradley/photutils | photutils/psf/__init__.py | photutils/psf/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This subpackage contains tools to perform point-spread-function (PSF)
photometry.
"""
from . import epsf
from .epsf import * # noqa
from . import epsf_stars
from .epsf_stars import * # noqa
from . import groupstars
from .groupstars import * # noqa
from .matching import * # noqa
from . import models
from .models import * # noqa
from . import photometry
from .photometry import * # noqa
from . import utils
from .utils import * # noqa
__all__ = []
__all__.extend(epsf.__all__)
__all__.extend(epsf_stars.__all__)
__all__.extend(groupstars.__all__)
__all__.extend(models.__all__)
__all__.extend(photometry.__all__)
__all__.extend(utils.__all__)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This subpackage contains tools to perform point-spread-function (PSF)
photometry.
"""
from .epsf import * # noqa
from .epsf_stars import * # noqa
from .groupstars import * # noqa
from .matching import * # noqa
from .models import * # noqa
from .photometry import * # noqa
from .utils import * # noqa
| bsd-3-clause | Python |
a76865c3009e6f890b0cecc8d3e744ea22559d02 | test project: add default values to BooleanFields | askinteractive/mezzanine-advanced-admin,IMAmuseum/django-admin-bootstrapped,andrewyager/django-admin-bootstrapped,avara1986/django-admin-bootstrapped,askinteractive/mezzanine-advanced-admin,avara1986/django-admin-bootstrapped,benthomasson/django-admin-bootstrapped,mynksngh/django-admin-bootstrapped,sn0wolf/django-admin-bootstrapped,askinteractive/mezzanine-advanced-admin-new,squallcs12/django-admin-bootstrapped,merlian/django-admin-bootstrapped,jmagnusson/django-admin-bootstrapped,bformet/django-admin-bootstrapped,andrewyager/django-admin-bootstrapped,kevingu1003/django-admin-bootstrapped,Corner1024/django-admin-bootstrapped,Corner1024/django-admin-bootstrapped,andrewyager/django-admin-bootstrapped,pombredanne/django-admin-bootstrapped,askinteractive/mezzanine-advanced-admin-new,merlian/django-admin-bootstrapped,xrmx/django-admin-bootstrapped,sn0wolf/django-admin-bootstrapped,xrmx/django-admin-bootstrapped,pombredanne/django-admin-bootstrapped,mynksngh/django-admin-bootstrapped,django-admin-bootstrapped/django-admin-bootstrapped,kevingu1003/django-admin-bootstrapped,xrmx/django-admin-bootstrapped,pombredanne/django-admin-bootstrapped,merlian/django-admin-bootstrapped,jmagnusson/django-admin-bootstrapped,askinteractive/mezzanine-advanced-admin,mynksngh/django-admin-bootstrapped,squallcs12/django-admin-bootstrapped,jmagnusson/django-admin-bootstrapped,bformet/django-admin-bootstrapped,django-admin-bootstrapped/django-admin-bootstrapped,avara1986/django-admin-bootstrapped,kevingu1003/django-admin-bootstrapped,django-admin-bootstrapped/django-admin-bootstrapped,benthomasson/django-admin-bootstrapped,askinteractive/mezzanine-advanced-admin-new,squallcs12/django-admin-bootstrapped,Corner1024/django-admin-bootstrapped,IMAmuseum/django-admin-bootstrapped,bformet/django-admin-bootstrapped,IMAmuseum/django-admin-bootstrapped,sn0wolf/django-admin-bootstrapped,benthomasson/django-admin-bootstrapped | test_django_admin_bootstrapped/test_django_admin_bootstrapped/models.py | test_django_admin_bootstrapped/test_django_admin_bootstrapped/models.py | from django.db import models
class TestMe(models.Model):
test_m2m = models.ManyToManyField('self', blank=True, help_text="Lorem dolor")
test_ip = models.IPAddressField(help_text="Lorem dolor")
test_url = models.URLField(help_text="Lorem dolor")
test_int = models.IntegerField(help_text="Lorem dolor")
test_img = models.ImageField(upload_to='dummy', blank=True)
test_file = models.FileField(upload_to='dummy', blank=True)
test_date = models.DateField(help_text="Lorem dolor")
test_char = models.CharField(max_length=50, help_text="Lorem dolor")
test_bool = models.BooleanField(help_text="Lorem dolor", default=False)
test_time = models.TimeField(help_text="Lorem dolor")
test_slug = models.SlugField(help_text="Lorem dolor")
test_text = models.TextField(help_text="Lorem dolor")
test_email = models.EmailField(help_text="Lorem dolor")
test_float = models.FloatField(help_text="Lorem dolor")
test_bigint = models.BigIntegerField(help_text="Lorem dolor")
test_positive_integer = models.PositiveIntegerField(help_text="Lorem dolor")
test_decimal = models.DecimalField(max_digits=5, decimal_places=2, help_text="Lorem dolor")
test_comma_separated_int = models.CommaSeparatedIntegerField(max_length=100, help_text="Lorem dolor")
test_small_int = models.SmallIntegerField(help_text="Lorem dolor")
test_nullbool = models.NullBooleanField(help_text="Lorem dolor")
test_filepath = models.FilePathField(blank=True, help_text="Lorem dolor")
test_positive_small_int = models.PositiveSmallIntegerField(help_text="Lorem dolor")
def get_absolute_url(self):
return ''
class Meta:
verbose_name = u'Test me'
verbose_name_plural = u'Lot of Test me'
class TestMeProxyForFieldsets(TestMe):
class Meta:
proxy = True
verbose_name = u'Test me fieldsets'
verbose_name_plural = u'Lot of Test me fieldsets'
class TestThat(models.Model):
that = models.ForeignKey(TestMe, help_text="Lorem dolor")
test_ip = models.IPAddressField(help_text="Lorem dolor")
test_url = models.URLField(help_text="Lorem dolor")
test_int = models.IntegerField(help_text="Lorem dolor")
test_date = models.DateField(help_text="Lorem dolor")
test_bool = models.BooleanField(help_text="Lorem dolor", default=True)
class Meta:
verbose_name = u'Test that'
verbose_name_plural = u'Lot of Test that'
class TestSortable(models.Model):
that = models.ForeignKey(TestMe)
position = models.PositiveSmallIntegerField("Position")
test_char = models.CharField(max_length=5)
class Meta:
ordering = ('position', )
| from django.db import models
class TestMe(models.Model):
test_m2m = models.ManyToManyField('self', blank=True, help_text="Lorem dolor")
test_ip = models.IPAddressField(help_text="Lorem dolor")
test_url = models.URLField(help_text="Lorem dolor")
test_int = models.IntegerField(help_text="Lorem dolor")
test_img = models.ImageField(upload_to='dummy', blank=True)
test_file = models.FileField(upload_to='dummy', blank=True)
test_date = models.DateField(help_text="Lorem dolor")
test_char = models.CharField(max_length=50, help_text="Lorem dolor")
test_bool = models.BooleanField(help_text="Lorem dolor")
test_time = models.TimeField(help_text="Lorem dolor")
test_slug = models.SlugField(help_text="Lorem dolor")
test_text = models.TextField(help_text="Lorem dolor")
test_email = models.EmailField(help_text="Lorem dolor")
test_float = models.FloatField(help_text="Lorem dolor")
test_bigint = models.BigIntegerField(help_text="Lorem dolor")
test_positive_integer = models.PositiveIntegerField(help_text="Lorem dolor")
test_decimal = models.DecimalField(max_digits=5, decimal_places=2, help_text="Lorem dolor")
test_comma_separated_int = models.CommaSeparatedIntegerField(max_length=100, help_text="Lorem dolor")
test_small_int = models.SmallIntegerField(help_text="Lorem dolor")
test_nullbool = models.NullBooleanField(help_text="Lorem dolor")
test_filepath = models.FilePathField(blank=True, help_text="Lorem dolor")
test_positive_small_int = models.PositiveSmallIntegerField(help_text="Lorem dolor")
def get_absolute_url(self):
return ''
class Meta:
verbose_name = u'Test me'
verbose_name_plural = u'Lot of Test me'
class TestMeProxyForFieldsets(TestMe):
class Meta:
proxy = True
verbose_name = u'Test me fieldsets'
verbose_name_plural = u'Lot of Test me fieldsets'
class TestThat(models.Model):
that = models.ForeignKey(TestMe, help_text="Lorem dolor")
test_ip = models.IPAddressField(help_text="Lorem dolor")
test_url = models.URLField(help_text="Lorem dolor")
test_int = models.IntegerField(help_text="Lorem dolor")
test_date = models.DateField(help_text="Lorem dolor")
test_bool = models.BooleanField(help_text="Lorem dolor")
class Meta:
verbose_name = u'Test that'
verbose_name_plural = u'Lot of Test that'
class TestSortable(models.Model):
that = models.ForeignKey(TestMe)
position = models.PositiveSmallIntegerField("Position")
test_char = models.CharField(max_length=5)
class Meta:
ordering = ('position', )
| apache-2.0 | Python |
0648b377b4e764514491623e20b38d9ec19f71af | Fix formatting | breznak/nupic,breznak/nupic,breznak/nupic | tests/unit/py2/nupic/support/consoleprinter_test/consoleprinter_test.py | tests/unit/py2/nupic/support/consoleprinter_test/consoleprinter_test.py | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from __future__ import with_statement
import os
import unittest2 as unittest
from nupic.support.consoleprinter import ConsolePrinterMixin, Tee
# Class used for testing
class MyClass(ConsolePrinterMixin):
def __init__(self):
ConsolePrinterMixin.__init__(self)
def run(self):
for i in xrange(0, 4):
self.cPrint(i, "message at level %d", i)
class ConsolePrinterTest(unittest.TestCase):
def testPrint(self):
mydir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.abspath("console_output.txt")
if os.path.exists(filename):
os.remove(filename)
# Capture output to a file so that we can compare it
with Tee(filename):
c1 = MyClass()
print "Running with default verbosity"
c1.run()
print
print "Running with verbosity 2"
c1.consolePrinterVerbosity = 2
c1.run()
print
print "Running with verbosity 0"
c1.consolePrinterVerbosity = 0
c1.run()
print
c1.cPrint(0, "Message %s two %s", "with", "args")
c1.cPrint(0, "Message with no newline", newline=False)
c1.cPrint(0, " Message with newline")
c1.cPrint(0, "Message with %s and %s",
"no newline", "args", newline=False)
c1.cPrint(0, " Message with %s and %s", "newline", "args")
print "Done"
with self.assertRaises(KeyError):
c1.cPrint(0, "Message", badkw="badvalue")
referenceFilename = os.path.join(mydir, "testconsoleprinter_output.txt")
expected = open(referenceFilename).readlines()
actual = open(filename).readlines()
print ("Comparing files '%s'" % referenceFilename)
print ("and '%s'" % filename)
self.assertEqual(len(expected), len(actual))
for i in xrange(len(expected)):
self.assertEqual(expected[i].strip(), actual[i].strip())
# Clean up
os.remove(filename)
if __name__ == "__main__":
unittest.main()
| #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from __future__ import with_statement
import os
import unittest2 as unittest
from nupic.support.consoleprinter import ConsolePrinterMixin, Tee
# Class used for testing
class MyClass(ConsolePrinterMixin):
def __init__(self):
ConsolePrinterMixin.__init__(self)
def run(self):
for i in xrange(0, 4):
self.cPrint(i, "message at level %d", i)
class ConsolePrinterTest(unittest.TestCase):
def testPrint(self):
mydir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.abspath("console_output.txt")
if os.path.exists(filename):
os.remove(filename)
# Capture output to a file so that we can compare it
with Tee(filename):
c1 = MyClass()
print "Running with default verbosity"
c1.run()
print
print "Running with verbosity 2"
c1.consolePrinterVerbosity = 2
c1.run()
print
print "Running with verbosity 0"
c1.consolePrinterVerbosity = 0
c1.run()
print
c1.cPrint(0, "Message %s two %s", "with", "args")
c1.cPrint(0, "Message with no newline", newline=False)
c1.cPrint(0, " Message with newline")
c1.cPrint(0, "Message with %s and %s", "no newline", "args", newline=False)
c1.cPrint(0, " Message with %s and %s", "newline", "args")
print "Done"
with self.assertRaises(KeyError):
c1.cPrint(0, "Message", badkw="badvalue")
referenceFilename = os.path.join(mydir, "testconsoleprinter_output.txt")
expected = open(referenceFilename).readlines()
actual = open(filename).readlines()
print ("Comparing files '%s'" % referenceFilename)
print ("and '%s'" % filename)
self.assertEqual(len(expected), len(actual))
for i in xrange(len(expected)):
self.assertEqual(expected[i].strip(), actual[i].strip())
# Clean up
os.remove(filename)
if __name__ == "__main__":
unittest.main()
| agpl-3.0 | Python |
c7b4be1ee3597c8b9521e27dd0adfbe36efe17f1 | Update test_ADC.py | IEEERobotics/bot,deepakiam/bot,deepakiam/bot,IEEERobotics/bot,deepakiam/bot,IEEERobotics/bot | tests/test_ADC.py | tests/test_ADC.py | """Test cases for the ADS7830 ADC. """
import time
from unittest import TestCase, expectedFailure
import struct
import bot.lib.lib as lib
from i2c_device.i2c_device import I2CDevice
from bot.hardware.ADC import ADC
config_file = path.dirname(path.realpath(__file__))+"/test_config.yaml"
config = lib.get_config(config_file)
class TestADC(TestCase):
| """Test cases for the ADS7830 ADC. ""
import time
from unittest import TestCase, expectedFailure
import struct
import bot.lib.lib as lib
from i2c_device.i2c_device import I2CDevice
from bot.hardware.ADC import ADC
config_file = path.dirname(path.realpath(__file__))+"/test_config.yaml"
config = lib.get_config(config_file)
class TestADC(TestCase):
| bsd-2-clause | Python |
612d2aa966329c7a953fb6a0f36830a5f0e567bd | change description field to optional in poll model | kamarkiewicz/django-polls,byteweaver/django-polls,miraculixx/django-polls,miraculixx/django-polls,kamarkiewicz/django-polls,byteweaver/django-polls | polls/models.py | polls/models.py | from django.db import models
from django.contrib.auth.models import User
class Poll(models.Model):
question = models.CharField(max_length=255)
description = models.TextField(blank=True)
def __unicode__(self):
return self.question
class Choice(models.Model):
poll = models.ForeignKey(Poll)
choice = models.CharField(max_length=255)
def __unicode__(self):
return self.choice
class Vote(models.Model):
user = models.ForeignKey(User)
poll = models.ForeignKey(Poll)
choice = models.ForeignKey(Choice)
class Meta:
unique_together = (('user', 'poll'))
| from django.db import models
from django.contrib.auth.models import User
class Poll(models.Model):
question = models.CharField(max_length=255)
description = models.TextField()
def __unicode__(self):
return self.question
class Choice(models.Model):
poll = models.ForeignKey(Poll)
choice = models.CharField(max_length=255)
def __unicode__(self):
return self.choice
class Vote(models.Model):
user = models.ForeignKey(User)
poll = models.ForeignKey(Poll)
choice = models.ForeignKey(Choice)
class Meta:
unique_together = (('user', 'poll'))
| bsd-3-clause | Python |
2032a37dd8c8b528971c58c3309424e8b8d0f7b9 | Fix missing import in volunteer views | mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign | src/volunteers/views.py | src/volunteers/views.py | # coding: utf-8
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse
from django.core import signing
from utils import string_template
from campaigns.models import Campaign, CampaignLocationShift
from .models import Volunteer
TEMPLATE = u'''{% extends "campaigns/base.html" %}
{% block title %}Tere {{ volunteer.name }}!{% endblock title %}
{% block header %}
<h1>Tere {{ volunteer.name }}!</h1>
{% endblock header %}
{% block content %}
<div class="col-md-12">
<p>Valitud vahetused:</p>
{% for shift in volunteer.shifts %}
<ul>
<li><b>{{ shift.detailed_info }}</b></li>
</ul>
{% endfor %}
<p>Käesolev info on saadetud ka sisestatud meiliaadressile.</p>
${content}
</div>
{% endblock content %}
'''
def volunteer_detail(request, key):
try:
campaign = Campaign.objects.get(is_active=True)
except Campaign.DoesNotExist:
return render(request, 'campaigns/no-active-campaign.html')
data = signing.loads(key)
volunteer = get_object_or_404(Volunteer, pk=data['pk'])
context = {'volunteer': volunteer}
content = string_template.render_campaign_registration_template(TEMPLATE,
campaign, request, context)
return HttpResponse(content)
| # coding: utf-8
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from django.core import signing
from utils import string_template
from campaigns.models import Campaign, CampaignLocationShift
from .models import Volunteer
TEMPLATE = u'''{% extends "campaigns/base.html" %}
{% block title %}Tere {{ volunteer.name }}!{% endblock title %}
{% block header %}
<h1>Tere {{ volunteer.name }}!</h1>
{% endblock header %}
{% block content %}
<div class="col-md-12">
<p>Valitud vahetused:</p>
{% for shift in volunteer.shifts %}
<ul>
<li><b>{{ shift.detailed_info }}</b></li>
</ul>
{% endfor %}
<p>Käesolev info on saadetud ka sisestatud meiliaadressile.</p>
${content}
</div>
{% endblock content %}
'''
def volunteer_detail(request, key):
try:
campaign = Campaign.objects.get(is_active=True)
except Campaign.DoesNotExist:
return render(request, 'campaigns/no-active-campaign.html')
data = signing.loads(key)
volunteer = get_object_or_404(Volunteer, pk=data['pk'])
context = {'volunteer': volunteer}
content = string_template.render_campaign_registration_template(TEMPLATE,
campaign, request, context)
return HttpResponse(content)
| mit | Python |
9e7c88dc031b24eafcfde7bda7585ce5828281f0 | remove debugging statement | datascopeanalytics/scrubadub,datascopeanalytics/scrubadub,deanmalmgren/scrubadub,deanmalmgren/scrubadub | tests/test_api.py | tests/test_api.py | import unittest
import scrubadub
class APITestCase(unittest.TestCase):
def test_clean(self):
"""Test the top level clean api"""
self.assertEqual(
scrubadub.clean("This is a test message for example@exampe.com"),
"This is a test message for {{EMAIL}}",
)
def test_clean_docuemnts(self):
"""Test the top level clean_docuemnts api"""
self.assertEqual(
scrubadub.clean_documents(
{
"first.txt": "This is a test message for example@exampe.com",
"second.txt": "Hello Jane I am Tom.",
}
),
{
"first.txt": "This is a test message for {{EMAIL}}",
"second.txt": "Hello {{NAME}} I am {{NAME}}.",
}
)
def test_list_filth(self):
"""Test the top level list_filth api"""
filths = scrubadub.list_filth("This is a test message for example@example.com")
self.assertEqual(
filths,
[scrubadub.filth.EmailFilth(text='example@example.com', detector_name='email', beg=27, end=46)],
)
def test_list_filth_docuemnts(self):
"""Test the top level list_filth_docuemnts api"""
filths = scrubadub.list_filth_documents(
{
"first.txt": "This is a test message for example@example.com",
"second.txt": "Hello Jane, I am Tom.",
}
)
self.assertEqual(
filths,
[
scrubadub.filth.EmailFilth(
text='example@example.com', document_name='first.txt', detector_name='email', beg=27, end=46
),
scrubadub.filth.NameFilth(text='Jane', document_name='second.txt', detector_name='name', beg=6, end=10),
scrubadub.filth.NameFilth(text='Tom', document_name='second.txt', detector_name='name', beg=17, end=20),
]
)
| import unittest
import scrubadub
class APITestCase(unittest.TestCase):
def test_clean(self):
"""Test the top level clean api"""
self.assertEqual(
scrubadub.clean("This is a test message for example@exampe.com"),
"This is a test message for {{EMAIL}}",
)
def test_clean_docuemnts(self):
"""Test the top level clean_docuemnts api"""
self.assertEqual(
scrubadub.clean_documents(
{
"first.txt": "This is a test message for example@exampe.com",
"second.txt": "Hello Jane I am Tom.",
}
),
{
"first.txt": "This is a test message for {{EMAIL}}",
"second.txt": "Hello {{NAME}} I am {{NAME}}.",
}
)
def test_list_filth(self):
"""Test the top level list_filth api"""
filths = scrubadub.list_filth("This is a test message for example@example.com")
self.assertEqual(
filths,
[scrubadub.filth.EmailFilth(text='example@example.com', detector_name='email', beg=27, end=46)],
)
def test_list_filth_docuemnts(self):
"""Test the top level list_filth_docuemnts api"""
filths = scrubadub.list_filth_documents(
{
"first.txt": "This is a test message for example@example.com",
"second.txt": "Hello Jane, I am Tom.",
}
)
print(filths)
self.assertEqual(
filths,
[
scrubadub.filth.EmailFilth(
text='example@example.com', document_name='first.txt', detector_name='email', beg=27, end=46
),
scrubadub.filth.NameFilth(text='Jane', document_name='second.txt', detector_name='name', beg=6, end=10),
scrubadub.filth.NameFilth(text='Tom', document_name='second.txt', detector_name='name', beg=17, end=20),
]
)
| mit | Python |
ffda8a65517678f7bbd6e8c8f3bbe44ca31b3727 | Change order in 2D template | mchels/FolderBrowser | templates/plot_2D.py | templates/plot_2D.py | import sys
sys.path.append({path_to_folderbrowser_dir})
import numpy as np
from plothandler import plot_handler_factory
from datahandler import data_handler_factory
from custom_colormap import get_colormap
from sweep import Sweep
import matplotlib.pyplot as plt
import importlib.util
pcols_path = {pcols_path}
spec = importlib.util.spec_from_file_location('', pcols_path)
pcols = importlib.util.module_from_spec(spec)
spec.loader.exec_module(pcols)
sweep = Sweep({sweep_path})
sweep.set_pdata(pcols.name_func_dict)
x = sweep.get_data({x_name})
y = sweep.get_data({y_name})
z = {z_data_code}
data_h = data_handler_factory(x, y, z)
fig, ax = plt.subplots()
plot_h = plot_handler_factory(ax, data_h, plot_dim=2)
ax.ticklabel_format(style='sci', axis='both',
scilimits={scilimits}, useOffset=False)
ax.set_xlabel({xlabel})
ax.set_ylabel({ylabel})
ax.set_xlim({xlim})
ax.set_ylim({ylim})
im = plot_h.plot(plot_type={plot_2D_type})
# Colorbar
# --------
cbar = fig.colorbar(mappable=im)
cbar.formatter.set_powerlimits({scilimits})
cbar.set_label({zlabel})
zlim = {zlim}
im.set_clim(zlim)
cmap = get_colormap({cmap_name}, zlim)
im.set_cmap(cmap)
plt.tight_layout()
plt.show()
# fig.savefig('foo.pdf', bbox_inches='tight', pad_inches=0.05)
| import sys
sys.path.append({path_to_folderbrowser_dir})
import numpy as np
from plothandler import plot_handler_factory
from datahandler import data_handler_factory
from custom_colormap import get_colormap
from sweep import Sweep
import matplotlib.pyplot as plt
import importlib.util
pcols_path = {pcols_path}
spec = importlib.util.spec_from_file_location('', pcols_path)
pcols = importlib.util.module_from_spec(spec)
spec.loader.exec_module(pcols)
sweep = Sweep({sweep_path})
sweep.set_pdata(pcols.name_func_dict)
x = sweep.get_data({x_name})
y = sweep.get_data({y_name})
z = {z_data_code}
data_h = data_handler_factory(x, y, z)
fig, ax = plt.subplots()
plot_h = plot_handler_factory(ax, data_h, plot_dim=2)
ax.ticklabel_format(style='sci', axis='both',
scilimits={scilimits}, useOffset=False)
ax.set_xlabel({xlabel})
ax.set_ylabel({ylabel})
ax.set_xlim({xlim})
ax.set_ylim({ylim})
im = plot_h.plot(plot_type={plot_2D_type})
# Colorbar
# --------
zlim = {zlim}
cmap = get_colormap({cmap_name}, zlim)
cbar = fig.colorbar(mappable=im)
cbar.formatter.set_powerlimits({scilimits})
im.set_cmap(cmap)
im.set_clim(zlim)
cbar.set_label({zlabel})
plt.tight_layout()
plt.show()
# fig.savefig('foo.pdf', bbox_inches='tight', pad_inches=0.05)
| mit | Python |
c8cd5502f685a83a026d4584c95860e6feb7c74a | Update setup.py | analyst-collective/dbt,analyst-collective/dbt | plugins/bigquery/setup.py | plugins/bigquery/setup.py | #!/usr/bin/env python
import os
import sys
if sys.version_info < (3, 6):
print('Error: dbt does not support this version of Python.')
print('Please upgrade to Python 3.6 or higher.')
sys.exit(1)
from setuptools import setup
try:
from setuptools import find_namespace_packages
except ImportError:
# the user has a downlevel version of setuptools.
print('Error: dbt requires setuptools v40.1.0 or higher.')
print('Please upgrade setuptools with "pip install --upgrade setuptools" '
'and try again')
sys.exit(1)
package_name = "dbt-bigquery"
package_version = "0.19.0"
description = """The bigquery adapter plugin for dbt (data build tool)"""
this_directory = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_directory, 'README.md')) as f:
long_description = f.read()
setup(
name=package_name,
version=package_version,
description=description,
long_description=long_description,
long_description_content_type='text/markdown',
author="Fishtown Analytics",
author_email="info@fishtownanalytics.com",
url="https://github.com/fishtown-analytics/dbt",
packages=find_namespace_packages(include=['dbt', 'dbt.*']),
package_data={
'dbt': [
'include/bigquery/dbt_project.yml',
'include/bigquery/sample_profiles.yml',
'include/bigquery/macros/*.sql',
'include/bigquery/macros/**/*.sql',
]
},
install_requires=[
'dbt-core=={}'.format(package_version),
'protobuf>=3.13.0,<4',
'google-cloud-core>=1.3.0,<2',
'google-cloud-bigquery>=1.25.0,<3',
'google-api-core>=1.16.0,<2',
'googleapis-common-protos>=1.6.0,<2',
'six>=1.14.0',
],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: Apache Software License',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
python_requires=">=3.6.2",
)
| #!/usr/bin/env python
import os
import sys
if sys.version_info < (3, 6):
print('Error: dbt does not support this version of Python.')
print('Please upgrade to Python 3.6 or higher.')
sys.exit(1)
from setuptools import setup
try:
from setuptools import find_namespace_packages
except ImportError:
# the user has a downlevel version of setuptools.
print('Error: dbt requires setuptools v40.1.0 or higher.')
print('Please upgrade setuptools with "pip install --upgrade setuptools" '
'and try again')
sys.exit(1)
package_name = "dbt-bigquery"
package_version = "0.19.0"
description = """The bigquery adapter plugin for dbt (data build tool)"""
this_directory = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_directory, 'README.md')) as f:
long_description = f.read()
setup(
name=package_name,
version=package_version,
description=description,
long_description=long_description,
long_description_content_type='text/markdown',
author="Fishtown Analytics",
author_email="info@fishtownanalytics.com",
url="https://github.com/fishtown-analytics/dbt",
packages=find_namespace_packages(include=['dbt', 'dbt.*']),
package_data={
'dbt': [
'include/bigquery/dbt_project.yml',
'include/bigquery/sample_profiles.yml',
'include/bigquery/macros/*.sql',
'include/bigquery/macros/**/*.sql',
]
},
install_requires=[
'dbt-core=={}'.format(package_version),
'protobuf>=3.13.0,<4',
# These are more tightly pinned, as they have a track record of
# breaking changes in minor releases.
'google-cloud-core>=1.3.0,<2',
'google-cloud-bigquery>=1.25.0,<3',
'google-api-core>=1.16.0,<2',
'googleapis-common-protos>=1.6.0,<2',
'six>=1.14.0',
],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: Apache Software License',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
python_requires=">=3.6.2",
)
| apache-2.0 | Python |
98b7920f267b42d4da7f108e801323ea99b81644 | Fix title string issues, for real this time | rrttyy/google-tasks-porter,romans74/google-tasks-porter,cztchoice/cztchoice-gtask-porter,Jagsir/google-tasks-porter,Froleyks/google-tasks-porter,Jagsir/google-tasks-porter,theloniuser/google-tasks-porter,rrttyy/google-tasks-porter,karalan/google-tasks-porter,romans74/google-tasks-porter,cztchoice/cztchoice-gtask-porter,Froleyks/google-tasks-porter,theloniuser/google-tasks-porter,karalan/google-tasks-porter | model.py | model.py | #!/usr/bin/python2.5
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Represents the App Engine model of Google Tasks data in the datastore."""
from apiclient.oauth2client import appengine
from google.appengine.ext import db
class Credentials(db.Model):
"""Represents the credentials of a particular user."""
credentials = appengine.CredentialsProperty()
class Snapshot(db.Model):
"""The datastore entity for a Snapshot of a user's data."""
user = db.UserProperty()
type = db.StringProperty(choices=("import", "export"))
timestamp = db.DateTimeProperty(auto_now_add=True)
status = db.StringProperty(choices=("building", "completed", "error"))
errorMessage = db.StringProperty()
class TaskList(db.Model):
"""The datastore entity for a list of tasks."""
id = db.StringProperty()
title = db.TextProperty() #CATEGORIES/Categories
selfLink = db.LinkProperty()
class Task(db.Model):
"""The datastore entity for a single task."""
parent_entity = db.ReferenceProperty(TaskList, collection_name="tasks")
id = db.StringProperty() #UID
selfLink = db.LinkProperty()
title = db.TextProperty() #SUMMARY/Subject
notes = db.TextProperty() #DESCRIPTION/Notes
parent_ = db.SelfReferenceProperty(collection_name="children")
position = db.StringProperty()
updated = db.DateTimeProperty() #LAST-MODIFIED
due = db.DateProperty() #DUE/Due Date
hidden = db.BooleanProperty()
status = db.StringProperty(choices=("completed",
"needsAction")) #STATUS/Status
deleted = db.BooleanProperty()
completed = db.DateTimeProperty() #COMPLETED/Date Completed
child_mapping = {}
many_many_mapping = {}
| #!/usr/bin/python2.5
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Represents the App Engine model of Google Tasks data in the datastore."""
from apiclient.oauth2client import appengine
from google.appengine.ext import db
class Credentials(db.Model):
"""Represents the credentials of a particular user."""
credentials = appengine.CredentialsProperty()
class Snapshot(db.Model):
"""The datastore entity for a Snapshot of a user's data."""
user = db.UserProperty()
type = db.StringProperty(choices=("import", "export"))
timestamp = db.DateTimeProperty(auto_now_add=True)
status = db.StringProperty(choices=("building", "completed", "error"))
errorMessage = db.StringProperty()
class TaskList(db.Model):
"""The datastore entity for a list of tasks."""
id = db.StringProperty()
title = db.StringProperty() #CATEGORIES/Categories
selfLink = db.LinkProperty()
class Task(db.Model):
"""The datastore entity for a single task."""
parent_entity = db.ReferenceProperty(TaskList, collection_name="tasks")
id = db.StringProperty() #UID
selfLink = db.LinkProperty()
title = db.StringProperty() #SUMMARY/Subject
notes = db.TextProperty() #DESCRIPTION/Notes
parent_ = db.SelfReferenceProperty(collection_name="children")
position = db.StringProperty()
updated = db.DateTimeProperty() #LAST-MODIFIED
due = db.DateProperty() #DUE/Due Date
hidden = db.BooleanProperty()
status = db.StringProperty(choices=("completed",
"needsAction")) #STATUS/Status
deleted = db.BooleanProperty()
completed = db.DateTimeProperty() #COMPLETED/Date Completed
child_mapping = {}
many_many_mapping = {}
| apache-2.0 | Python |
ccfa54c3845985b6ee85f1ad635cb93a2dc9e80f | remove leftover debug print | SoftwareHeritage/swh-web-ui,SoftwareHeritage/swh-web-ui,SoftwareHeritage/swh-web-ui | swh/web/ui/service.py | swh/web/ui/service.py | # Copyright (C) 2015 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.web.ui import main
from swh.web.ui import query
def lookup_hash(q):
"""Given a string query q of one hash, lookup its hash to the backend.
Args:
query, hash as a string (sha1, sha256, etc...)
Returns:
a string message (found, not found or a potential error explanation)
Raises:
OSError (no route to host), etc... Network issues in general
"""
hash = query.categorize_hash(q)
if hash != {}:
present = main.storage().content_exist(hash)
return 'Found!' if present else 'Not Found'
return """This is not a hash.
Hint: hexadecimal string with length either 20 (sha1) or 32 (sha256)."""
def _origin_seen(hash, data):
"""Given an origin, compute a message string with the right information.
Args:
origin: a dictionary with keys:
- origin: a dictionary with type and url keys
- occurrence: a dictionary with a validity range
Returns:
message as a string
"""
if data is None:
return 'Content with hash %s is unknown as of now.' % hash
origin_type = data['origin_type']
origin_url = data['origin_url']
revision = data['revision']
branch = data['branch']
path = data['path']
return """The content with hash %s has been seen on origin with type '%s'
at url '%s'. The revision was identified at '%s' on branch '%s'.
The file's path referenced was '%s'.""" % (hash,
origin_type,
origin_url,
revision,
branch,
path)
def lookup_hash_origin(hash):
"""Given a hash, return the origin of such content if any is found.
Args:
hash: key/value dictionary
Returns:
The origin for such hash if it's found.
Raises:
OSError (no route to host), etc... Network issues in general
"""
data = main.storage().content_find_occurrence(hash)
return _origin_seen(hash, data)
def stat_counters():
"""Return the stat counters for Software Heritage
Returns:
A dict mapping textual labels to integer values.
"""
return main.storage().stat_counters()
| # Copyright (C) 2015 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.web.ui import main
from swh.web.ui import query
def lookup_hash(q):
"""Given a string query q of one hash, lookup its hash to the backend.
Args:
query, hash as a string (sha1, sha256, etc...)
Returns:
a string message (found, not found or a potential error explanation)
Raises:
OSError (no route to host), etc... Network issues in general
"""
hash = query.categorize_hash(q)
if hash != {}:
present = main.storage().content_exist(hash)
return 'Found!' if present else 'Not Found'
return """This is not a hash.
Hint: hexadecimal string with length either 20 (sha1) or 32 (sha256)."""
def _origin_seen(hash, data):
"""Given an origin, compute a message string with the right information.
Args:
origin: a dictionary with keys:
- origin: a dictionary with type and url keys
- occurrence: a dictionary with a validity range
Returns:
message as a string
"""
if data is None:
return 'Content with hash %s is unknown as of now.' % hash
origin_type = data['origin_type']
origin_url = data['origin_url']
revision = data['revision']
branch = data['branch']
path = data['path']
print("data:", data)
return """The content with hash %s has been seen on origin with type '%s'
at url '%s'. The revision was identified at '%s' on branch '%s'.
The file's path referenced was '%s'.""" % (hash,
origin_type,
origin_url,
revision,
branch,
path)
def lookup_hash_origin(hash):
"""Given a hash, return the origin of such content if any is found.
Args:
hash: key/value dictionary
Returns:
The origin for such hash if it's found.
Raises:
OSError (no route to host), etc... Network issues in general
"""
data = main.storage().content_find_occurrence(hash)
return _origin_seen(hash, data)
def stat_counters():
"""Return the stat counters for Software Heritage
Returns:
A dict mapping textual labels to integer values.
"""
return main.storage().stat_counters()
| agpl-3.0 | Python |
396e8077a1a0798bd6586bc37d10028ba69ca256 | bump version | dxiao2003/katapal_util | katapal_util/__init__.py | katapal_util/__init__.py | # -*- coding: utf-8 -*-
__title__ = 'katapal_util'
__version__ = '0.5'
__author__ = 'David Xiao'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Katapal, Inc.'
# Version synonym
VERSION = __version__
| # -*- coding: utf-8 -*-
__title__ = 'katapal_util'
__version__ = '0.4'
__author__ = 'David Xiao'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Katapal, Inc.'
# Version synonym
VERSION = __version__
| mit | Python |
c4a31578b40bab83abcd3dadf1d3b04ead33f376 | Add test for jungle ec2 ls -l | achiku/jungle | tests/test_ec2.py | tests/test_ec2.py | # -*- coding: utf-8 -*-
import boto3
import pytest
from moto import mock_ec2
from jungle import cli
@pytest.yield_fixture(scope='function')
def ec2():
"""EC2 mock service"""
mock = mock_ec2()
mock.start()
ec2 = boto3.resource('ec2')
ec2.create_instances(ImageId='ami-xxxxx', MinCount=1, MaxCount=1)
servers = ec2.create_instances(ImageId='ami-xxxxx', MinCount=2, MaxCount=2)
for i, s in enumerate(servers):
ec2.create_tags(
Resources=[s.id],
Tags=[{'Key': 'Name', 'Value': 'server{:0>2d}'.format(i)}])
yield ec2
mock.stop()
@pytest.mark.parametrize('arg, expected_server_names', [
('*', ['server00', 'server01']),
('server01', ['server01']),
('fake-server', []),
])
def test_ec2_ls(runner, ec2, arg, expected_server_names):
"""jungle ec2 ls test"""
result = runner.invoke(cli.cli, ['ec2', 'ls', arg])
assert result.exit_code == 0
assert expected_server_names == [x for x in expected_server_names if x in result.output]
@pytest.mark.parametrize('opt, arg, expected_server_names', [
('-l', '*', ['server00', 'server01']),
('-l', 'server01', ['server01']),
('-l', 'fake-server', []),
])
def test_ec2_ls_formatted(runner, ec2, opt, arg, expected_server_names):
"""jungle ec2 ls test"""
result = runner.invoke(cli.cli, ['ec2', 'ls', opt, arg])
assert result.exit_code == 0
assert expected_server_names == [x for x in expected_server_names if x in result.output]
@pytest.mark.parametrize('tags, key, expected', [
([{'Key': 'Name', 'Value': 'server01'}, {'Key': 'env', 'Value': 'prod'}], 'Name', 'server01'),
([{'Key': 'Name', 'Value': 'server01'}, {'Key': 'env', 'Value': 'prod'}], 'env', 'prod'),
([{'Key': 'Name', 'Value': 'server01'}, {'Key': 'env', 'Value': 'prod'}], 'dummy', ''),
([], 'dummy', ''),
(None, 'dummy', ''),
])
def test_get_tag_value(tags, key, expected):
"""get_tag_value utility test"""
from jungle.ec2 import get_tag_value
assert get_tag_value(tags, key) == expected
| # -*- coding: utf-8 -*-
import boto3
import pytest
from moto import mock_ec2
from jungle import cli
@pytest.yield_fixture(scope='function')
def ec2():
"""EC2 mock service"""
mock = mock_ec2()
mock.start()
ec2 = boto3.resource('ec2')
ec2.create_instances(ImageId='ami-xxxxx', MinCount=1, MaxCount=1)
servers = ec2.create_instances(ImageId='ami-xxxxx', MinCount=2, MaxCount=2)
for i, s in enumerate(servers):
ec2.create_tags(
Resources=[s.id],
Tags=[{'Key': 'Name', 'Value': 'server{:0>2d}'.format(i)}])
yield ec2
mock.stop()
@pytest.mark.parametrize('arg, expected_server_names', [
('*', ['server00', 'server01']),
('server01', ['server01']),
('fake-server', []),
])
def test_ec2_ls(runner, ec2, arg, expected_server_names):
"""jungle ec2 ls test"""
result = runner.invoke(cli.cli, ['ec2', 'ls', arg])
assert result.exit_code == 0
assert expected_server_names == [x for x in expected_server_names if x in result.output]
@pytest.mark.parametrize('tags, key, expected', [
([{'Key': 'Name', 'Value': 'server01'}, {'Key': 'env', 'Value': 'prod'}], 'Name', 'server01'),
([{'Key': 'Name', 'Value': 'server01'}, {'Key': 'env', 'Value': 'prod'}], 'env', 'prod'),
([{'Key': 'Name', 'Value': 'server01'}, {'Key': 'env', 'Value': 'prod'}], 'dummy', ''),
([], 'dummy', ''),
(None, 'dummy', ''),
])
def test_get_tag_value(tags, key, expected):
"""get_tag_value utility test"""
from jungle.ec2 import get_tag_value
assert get_tag_value(tags, key) == expected
| mit | Python |
08244ad58a4641857e3514331a98fdf01790d2fa | add repr method to changes | dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq | pillowtop/feed/interface.py | pillowtop/feed/interface.py | from abc import ABCMeta, abstractmethod
class Change(object):
"""
A record of a change. Provides a dict-like interface for backwards compatibility with couch changes.
"""
PROPERTY_DICT_MAP = {
'id': 'id',
'sequence_id': 'seq',
'document': 'doc',
'deleted': 'deleted'
}
def __init__(self, id, sequence_id, document=None, deleted=False):
self._dict = {}
self.id = id
self.sequence_id = sequence_id
self.document = document
self.deleted = deleted
self._dict = {
'id': self.id,
'seq': self.sequence_id,
'doc': self.document,
'deleted': self.deleted,
}
def __repr__(self):
return u'Change id: {}, seq: {}, deleted: {}, doc: {}'.format(
self.id, self.sequence_id, self.deleted, self.document
)
def __len__(self):
return len(self._dict)
def __setattr__(self, name, value):
super(Change, self).__setattr__(name, value)
if name in self.PROPERTY_DICT_MAP:
self._dict[self.PROPERTY_DICT_MAP[name]] = value
def __getitem__(self, key):
return self._dict[key]
def __setitem__(self, key, value):
raise NotImplemented('This is a read-only dictionary!')
def __delitem__(self, key, value):
raise NotImplemented('This is a read-only dictionary!')
def __iter__(self):
return iter(self._dict)
def __contains__(self, item):
return item in self._dict
def get(self, key, default=None):
return self._dict.get(key, default)
def pop(self, key, default):
raise NotImplemented('This is a read-only dictionary!')
def to_dict(self):
return self._dict
class ChangeFeed(object):
"""
Basic change feed API.
"""
__metaclass__ = ABCMeta
@abstractmethod
def iter_changes(self, since, forever):
"""
Iterates through all changes since a certain sequence ID.
"""
pass
| from abc import ABCMeta, abstractmethod
class Change(object):
"""
A record of a change. Provides a dict-like interface for backwards compatibility with couch changes.
"""
PROPERTY_DICT_MAP = {
'id': 'id',
'sequence_id': 'seq',
'document': 'doc',
'deleted': 'deleted'
}
def __init__(self, id, sequence_id, document=None, deleted=False):
self._dict = {}
self.id = id
self.sequence_id = sequence_id
self.document = document
self.deleted = deleted
self._dict = {
'id': self.id,
'seq': self.sequence_id,
'doc': self.document,
'deleted': self.deleted,
}
def __len__(self):
return len(self._dict)
def __setattr__(self, name, value):
super(Change, self).__setattr__(name, value)
if name in self.PROPERTY_DICT_MAP:
self._dict[self.PROPERTY_DICT_MAP[name]] = value
def __getitem__(self, key):
return self._dict[key]
def __setitem__(self, key, value):
raise NotImplemented('This is a read-only dictionary!')
def __delitem__(self, key, value):
raise NotImplemented('This is a read-only dictionary!')
def __iter__(self):
return iter(self._dict)
def __contains__(self, item):
return item in self._dict
def get(self, key, default=None):
return self._dict.get(key, default)
def pop(self, key, default):
raise NotImplemented('This is a read-only dictionary!')
def to_dict(self):
return self._dict
class ChangeFeed(object):
"""
Basic change feed API.
"""
__metaclass__ = ABCMeta
@abstractmethod
def iter_changes(self, since, forever):
"""
Iterates through all changes since a certain sequence ID.
"""
pass
| bsd-3-clause | Python |
ad945381a1477509204a4ec952e5e822034823c1 | Implement some neutral epic minions | liujimj/fireplace,Ragowit/fireplace,butozerca/fireplace,oftc-ftw/fireplace,butozerca/fireplace,Meerkov/fireplace,amw2104/fireplace,NightKev/fireplace,smallnamespace/fireplace,Ragowit/fireplace,oftc-ftw/fireplace,jleclanche/fireplace,smallnamespace/fireplace,liujimj/fireplace,beheh/fireplace,amw2104/fireplace,Meerkov/fireplace | fireplace/carddata/minions/neutral/epic.py | fireplace/carddata/minions/neutral/epic.py | from ...card import *
from fireplace.enums import Race
# Big Game Hunter
class EX1_005(Card):
action = destroyTarget
# Murloc Warleader
class EX1_507(Card):
aura = "EX1_507e"
class EX1_507e(Card):
atk = 2
health = 1
def isValidTarget(self, target):
return target.race == Race.MURLOC and target is not self.source
# Hungry Crab
class NEW1_017(Card):
def action(self, target):
target.destroy()
self.buff("NEW1_017e")
class NEW1_017e(Card):
atk = 2
health = 2
# Southsea Captain
class NEW1_027(Card):
aura = "NEW1_027e"
class NEW1_027e(Card):
atk = 1
health = 1
def isValidTarget(self, target):
return target.race == Race.PIRATE and target is not self.source
| from ...card import *
| agpl-3.0 | Python |
e297b5d9c91743f68e333bac88e154d06406c908 | add unit test | Wolfium/pyjwt,dotsbb/pyjwt,mark-adams/pyjwt,mindw/pyjwt,progrium/pyjwt,jpadilla/pyjwt,rainierwolfcastle/pyjwt,jmgamboa/pyjwt | tests/test_jwt.py | tests/test_jwt.py | import unittest
import time
import jwt
class TestJWT(unittest.TestCase):
def setUp(self):
self.payload = {"iss": "jeff", "exp": int(time.time()), "claim": "insanity"}
def test_encode_decode(self):
secret = 'secret'
jwt_message = jwt.encode(self.payload, secret)
decoded_payload = jwt.decode(jwt_message, secret)
self.assertEqual(decoded_payload, self.payload)
def test_bad_secret(self):
right_secret = 'foo'
bad_secret = 'bar'
jwt_message = jwt.encode(self.payload, right_secret)
self.assertRaises(jwt.DecodeError, jwt.decode, jwt_message, bad_secret)
def test_decodes_valid_jwt(self):
example_payload = {"hello": "world"}
example_secret = "secret"
example_jwt = "eyJhbGciOiAiSFMyNTYiLCAidHlwIjogIkpXVCJ9.eyJoZWxsbyI6ICJ3b3JsZCJ9.tvagLDLoaiJKxOKqpBXSEGy7SYSifZhjntgm9ctpyj8"
decoded_payload = jwt.decode(example_jwt, example_secret)
self.assertEqual(decoded_payload, example_payload)
def test_allow_skip_verification(self):
right_secret = 'foo'
bad_secret = 'bar'
jwt_message = jwt.encode(self.payload, right_secret)
decoded_payload = jwt.decode(jwt_message, verify=False)
self.assertEqual(decoded_payload, self.payload)
def test_no_secret(self):
right_secret = 'foo'
bad_secret = 'bar'
jwt_message = jwt.encode(self.payload, right_secret)
self.assertRaises(jwt.DecodeError, jwt.decode, jwt_message)
def test_invalid_crypto_alg(self):
self.assertRaises(NotImplementedError, jwt.encode, self.payload, "secret", "HS1024")
def test_unicode_secret(self):
secret = u'\xc2'
jwt_message = jwt.encode(self.payload, secret)
decoded_payload = jwt.decode(jwt_message, secret)
self.assertEqual(decoded_payload, self.payload)
def test_nonascii_secret(self):
secret = '\xc2' # char value that ascii codec cannot decode
jwt_message = jwt.encode(self.payload, secret)
decoded_payload = jwt.decode(jwt_message, secret)
self.assertEqual(decoded_payload, self.payload)
if __name__ == '__main__':
unittest.main()
| import unittest
import time
import jwt
class TestJWT(unittest.TestCase):
def setUp(self):
self.payload = {"iss": "jeff", "exp": int(time.time()), "claim": "insanity"}
def test_encode_decode(self):
secret = 'secret'
jwt_message = jwt.encode(self.payload, secret)
decoded_payload = jwt.decode(jwt_message, secret)
self.assertEqual(decoded_payload, self.payload)
def test_bad_secret(self):
right_secret = 'foo'
bad_secret = 'bar'
jwt_message = jwt.encode(self.payload, right_secret)
self.assertRaises(jwt.DecodeError, jwt.decode, jwt_message, bad_secret)
def test_decodes_valid_jwt(self):
example_payload = {"hello": "world"}
example_secret = "secret"
example_jwt = "eyJhbGciOiAiSFMyNTYiLCAidHlwIjogIkpXVCJ9.eyJoZWxsbyI6ICJ3b3JsZCJ9.tvagLDLoaiJKxOKqpBXSEGy7SYSifZhjntgm9ctpyj8"
decoded_payload = jwt.decode(example_jwt, example_secret)
self.assertEqual(decoded_payload, example_payload)
def test_allow_skip_verification(self):
right_secret = 'foo'
bad_secret = 'bar'
jwt_message = jwt.encode(self.payload, right_secret)
decoded_payload = jwt.decode(jwt_message, verify=False)
self.assertEqual(decoded_payload, self.payload)
def test_no_secret(self):
right_secret = 'foo'
bad_secret = 'bar'
jwt_message = jwt.encode(self.payload, right_secret)
self.assertRaises(jwt.DecodeError, jwt.decode, jwt_message)
def test_invalid_crypto_alg(self):
self.assertRaises(NotImplementedError, jwt.encode, self.payload, "secret", "HS1024")
def test_unicode_secret(self):
secret = u'\xc2'
jwt_message = jwt.encode(self.payload, secret)
decoded_payload = jwt.decode(jwt_message, secret)
self.assertEqual(decoded_payload, self.payload)
if __name__ == '__main__':
unittest.main() | mit | Python |
ca8bdf9385b0dea52293b1ac55dc3e8bb2183c92 | use `Functor` rather than `Monad` in `Zip.apzip` | tek/amino | amino/tc/zip.py | amino/tc/zip.py | import abc
from typing import TypeVar, Generic, Callable, Tuple
from amino.tc.base import TypeClass, tc_prop
from amino.tc.monoid import Monoid
from amino.tc.foldable import Foldable
from amino.tc.applicative import Applicative
from amino.tc.functor import Functor
F = TypeVar('F')
A = TypeVar('A')
B = TypeVar('B')
class Zip(Generic[F], TypeClass):
@abc.abstractmethod
def zip(self, fa: F, fb: F, *fs) -> F:
...
def __and__(self, fa: F, fb: F):
return self.zip(fa, fb)
def apzip(self, fa: F, f: Callable[[A], B]) -> F:
return self.zip(fa, Functor[type(fa)].map(fa, f))
@tc_prop
def unzip(self, fa: F) -> Tuple[F, F]:
tpe = type(fa)
f = Foldable[tpe]
m = Monoid[tpe]
a = Applicative[tpe]
def folder(z, b):
l, r = z
x, y = b
return m.combine(l, a.pure(x)), m.combine(r, a.pure(y))
return f.fold_left(fa, (m.empty, m.empty), folder)
__all__ = ('Zip',)
| import abc
from typing import TypeVar, Generic, Callable, Tuple
from amino.tc.base import TypeClass, tc_prop
from amino.tc.monad import Monad
from amino.tc.monoid import Monoid
from amino.tc.foldable import Foldable
from amino.tc.applicative import Applicative
F = TypeVar('F')
A = TypeVar('A')
B = TypeVar('B')
class Zip(Generic[F], TypeClass):
@abc.abstractmethod
def zip(self, fa: F, fb: F, *fs) -> F:
...
def __and__(self, fa: F, fb: F):
return self.zip(fa, fb)
def apzip(self, fa: F, f: Callable[[A], B]) -> F:
return self.zip(fa, Monad[type(fa)].map(fa, f))
@tc_prop
def unzip(self, fa: F) -> Tuple[F, F]:
tpe = type(fa)
f = Foldable[tpe]
m = Monoid[tpe]
a = Applicative[tpe]
def folder(z, b):
l, r = z
x, y = b
return m.combine(l, a.pure(x)), m.combine(r, a.pure(y))
return f.fold_left(fa, (m.empty, m.empty), folder)
__all__ = ('Zip',)
| mit | Python |
85c6252b54108750291021a74dc00895f79a7ccf | update test_git.py to use the new single-character flag style | pengshp/GitPython,roidelapluie/GitPython,Conjuro/GitPython,nvie/GitPython,zsjohny/GitPython,cool-RR/GitPython,zsjohny/GitPython,bwrsandman/GitPython,hvnsweeting/GitPython,bwrsandman/GitPython,jeblair/GitPython,cool-RR/GitPython,dpursehouse/GitPython,h4ck3rm1k3/GitPython,Conjuro/GitPython,StyXman/GitPython,breavo/GitPython,Conjuro/GitPython,StyXman/GitPython,beni55/GitPython,nvie/GitPython,cdht/GitPython,cool-RR/GitPython,StyXman/GitPython,roidelapluie/GitPython,cdht/GitPython,manasdk/GitPython,pengshp/GitPython,ghtdak/GitPython,gitpython-developers/GitPython,manasdk/GitPython,ghtdak/GitPython,gitpython-developers/GitPython,gitpython-developers/gitpython,avinassh/GitPython,expobrain/GitPython,bwrsandman/GitPython,jeblair/GitPython,gitpython-developers/gitpython,h4ck3rm1k3/GitPython,dpursehouse/GitPython,avinassh/GitPython,OpenInkpot-archive/iplinux-python-git,beni55/GitPython,expobrain/GitPython,cdht/GitPython,pengshp/GitPython,hvnsweeting/GitPython,breavo/GitPython,avinassh/GitPython,zsjohny/GitPython | test/git/test_git.py | test/git/test_git.py | import os
from test.testlib import *
from git_python import *
class TestGit(object):
def setup(self):
base = os.path.join(os.path.dirname(__file__), "../..")
self.git = Git(base)
@patch(Git, 'execute')
def test_method_missing_calls_execute(self, git):
git.return_value = ''
self.git.version()
assert_true(git.called)
# assert_equal(git.call_args, ((("%s version " % self.git_bin_base),), {}))
def test_it_transforms_kwargs_into_git_command_arguments(self):
assert_equal(["-s"], self.git.transform_kwargs(**{'s': True}))
assert_equal(["-s5"], self.git.transform_kwargs(**{'s': 5}))
assert_equal(["--max-count"], self.git.transform_kwargs(**{'max_count': True}))
assert_equal(["--max-count=5"], self.git.transform_kwargs(**{'max_count': 5}))
assert_equal(["-s", "-t"], self.git.transform_kwargs(**{'s': True, 't': True}))
def test_it_executes_git_to_shell_and_returns_result(self):
assert_match('^git version [\d\.]*$', self.git.execute(["git","version"]))
| import os
from test.testlib import *
from git_python import *
class TestGit(object):
def setup(self):
base = os.path.join(os.path.dirname(__file__), "../..")
self.git = Git(base)
@patch(Git, 'execute')
def test_method_missing_calls_execute(self, git):
git.return_value = ''
self.git.version()
assert_true(git.called)
# assert_equal(git.call_args, ((("%s version " % self.git_bin_base),), {}))
def test_it_transforms_kwargs_into_git_command_arguments(self):
assert_equal(["-s"], self.git.transform_kwargs(**{'s': True}))
assert_equal(["-s", 5], self.git.transform_kwargs(**{'s': 5}))
assert_equal(["--max-count"], self.git.transform_kwargs(**{'max_count': True}))
assert_equal(["--max-count=5"], self.git.transform_kwargs(**{'max_count': 5}))
assert_equal(["-s", "-t"], self.git.transform_kwargs(**{'s': True, 't': True}))
def test_it_executes_git_to_shell_and_returns_result(self):
assert_match('^git version [\d\.]*$', self.git.execute(["git","version"]))
| bsd-3-clause | Python |
a22d05c15be5c84dacfe35817cc843ba236b112b | remove unused space. | jonhadfield/acli,jonhadfield/acli | tests/test_vpc.py | tests/test_vpc.py | from __future__ import (absolute_import, print_function, unicode_literals)
from acli.services.vpc import (vpc_list, vpc_info)
from acli.config import Config
from moto import mock_ec2
import pytest
from boto3.session import Session
session = Session(region_name="eu-west-1")
config = Config(cli_args={'--region': 'eu-west-1',
'--access_key_id': 'AKIAIOSFODNN7EXAMPLE',
'--secret_access_key': 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'})
@pytest.yield_fixture(scope='function')
def fake_vpcs():
"""VPC mock service"""
mock = mock_ec2()
mock.start()
client = session.client('ec2')
client.create_vpc(CidrBlock='10.0.0.0/16')
yield client.describe_vpcs()
mock.stop()
@pytest.yield_fixture(scope='function')
def fake_empty_vpcs():
"""VPC mock service"""
mock = mock_ec2()
mock.start()
yield None
mock.stop()
def test_vpc_list_service(fake_vpcs):
with pytest.raises(SystemExit):
assert vpc_list(aws_config=config)
def test_vpc_list_service_empty(fake_empty_vpcs, capsys):
with pytest.raises(SystemExit):
out, err = capsys.readouterr(vpc_list(aws_config=config))
assert err == "No VPCs found."
def test_vpc_info_service(fake_vpcs):
with pytest.raises(SystemExit):
assert vpc_info(aws_config=config, vpc_id=fake_vpcs.get('Vpcs')[0].get('VpcId'))
def test_vpc_info_service_empty(fake_empty_vpcs, capsys):
with pytest.raises(SystemExit):
invalid_vpc_id = 'invalid'
out, err = capsys.readouterr(vpc_info(aws_config=config, vpc_id=invalid_vpc_id))
assert err == "Cannot find VPC: {0}".format(invalid_vpc_id)
| from __future__ import (absolute_import, print_function, unicode_literals)
from acli.services.vpc import (vpc_list, vpc_info)
from acli.config import Config
from moto import mock_ec2
import pytest
from boto3.session import Session
session = Session(region_name="eu-west-1")
config = Config(cli_args={'--region': 'eu-west-1',
'--access_key_id': 'AKIAIOSFODNN7EXAMPLE',
'--secret_access_key': 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'})
@pytest.yield_fixture(scope='function')
def fake_vpcs():
"""VPC mock service"""
mock = mock_ec2()
mock.start()
client = session.client('ec2')
client.create_vpc(CidrBlock='10.0.0.0/16')
yield client.describe_vpcs()
mock.stop()
@pytest.yield_fixture(scope='function')
def fake_empty_vpcs():
"""VPC mock service"""
mock = mock_ec2()
mock.start()
yield None
mock.stop()
def test_vpc_list_service(fake_vpcs):
with pytest.raises(SystemExit):
assert vpc_list(aws_config=config)
def test_vpc_list_service_empty(fake_empty_vpcs, capsys):
with pytest.raises(SystemExit):
out, err = capsys.readouterr(vpc_list(aws_config=config))
assert err == "No VPCs found."
def test_vpc_info_service(fake_vpcs):
with pytest.raises(SystemExit):
assert vpc_info(aws_config=config, vpc_id=fake_vpcs.get('Vpcs')[0].get('VpcId'))
def test_vpc_info_service_empty(fake_empty_vpcs, capsys):
with pytest.raises(SystemExit):
invalid_vpc_id = 'invalid'
out, err = capsys.readouterr(vpc_info(aws_config=config, vpc_id=invalid_vpc_id))
assert err == "Cannot find VPC: {0}".format(invalid_vpc_id)
| mit | Python |
89dd0572acbf97bd6df0a9772c25dc66b674a10f | change default for read_files to look for stan suffix | jburos/survivalstan,jburos/survivalstan | survivalstan/utils.py | survivalstan/utils.py | import os
from fnmatch import fnmatch
import ntpath
import pkg_resources
## indexes a directory of stan files
## returns as dictionary containing contents of files
def _list_files_in_path(path, pattern = "*.stan"):
results = []
for dirname, subdirs, files in os.walk(path):
for name in files:
if fnmatch(name, pattern):
results.append(os.path.join(dirname, name))
return(results)
def _read_file(filepath, resource = None):
print(filepath)
if not(resource):
with open(filepath, 'r') as myfile:
data=myfile.read()
else:
data = pkg_resources.resource_string(
resource, filepath)
return data
def read_files(path, pattern = '*.stan', encoding="utf-8", resource = None):
files = _list_files_in_path(path = path, pattern=pattern)
results = {}
for file in files:
file_data = {}
file_data['path'] = file
file_data['basename'] = ntpath.basename(file)
file_data['code'] = _read_file(file, resource = resource).decode(encoding)
results[file_data['basename']] = file_data['code']
return(results)
| import os
from fnmatch import fnmatch
import ntpath
import pkg_resources
## indexes a directory of stan files
## returns as dictionary containing contents of files
def _list_files_in_path(path, pattern = "*.stan"):
results = []
for dirname, subdirs, files in os.walk(path):
for name in files:
if fnmatch(name, pattern):
results.append(os.path.join(dirname, name))
return(results)
def _read_file(filepath, resource = None):
print(filepath)
if not(resource):
with open(filepath, 'r') as myfile:
data=myfile.read()
else:
data = pkg_resources.resource_string(
resource, filepath)
return data
def read_files(path, pattern, encoding="utf-8", resource = None):
files = _list_files_in_path(path = path, pattern=pattern)
results = {}
for file in files:
file_data = {}
file_data['path'] = file
file_data['basename'] = ntpath.basename(file)
file_data['code'] = _read_file(file, resource = resource).decode(encoding)
results[file_data['basename']] = file_data['code']
return(results)
| apache-2.0 | Python |
55e29c2a39b40bf136f41819bb08623e858fffef | update who test | openhealthalgorithms/openhealthalgorithms | tests/test_who.py | tests/test_who.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from nose_parameterized import parameterized
from OHA.WHO import WHO
from OHA.param_builders.who_param_builder import WhoParamsBuilder
from tests.helpers.DataHelper import DataHelper
class WhoTest(unittest.TestCase):
def test_should_produce_exception(self):
params = WhoParamsBuilder().gender('M').age(18).sbp1(130).sbp2(145).chol(5).smoker().diabetic().build()
result = WHO().calculate(params)
self.assertEqual(result['exception'], 'color chart not found.')
params = WhoParamsBuilder().gender('M').age(100).sbp1(130).sbp2(145).chol(5).smoker().diabetic().build()
result = WHO().calculate(params)
self.assertEqual(result['exception'], 'color chart not found.')
@parameterized.expand(DataHelper.who_test_data())
def test_who_algorithm(self, region, age, gender, bp, total_chol, smoker, diabetes, cvd_risk):
params = WhoParamsBuilder() \
.region(region) \
.gender(gender). \
age(age). \
sbp1(bp). \
sbp2(bp). \
chol(total_chol). \
smoker(smoker == 1). \
diabetic(diabetes == 1) \
.build()
result = WHO().calculate(params)
self.assertEqual(result['risk_range'], cvd_risk)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from nose_parameterized import parameterized
from OHA.WHO import WHO
from OHA.param_builders.who_param_builder import WhoParamsBuilder
from tests.helpers.DataHelper import DataHelper
class WhoTest(unittest.TestCase):
def test_should_produce_exception(self):
params = WhoParamsBuilder().gender('M').age(18).sbp1(130).sbp2(145).chol(5).smoker().diabetic().build()
result = WHO().calculate(params)
self.assertEqual(result['exception'], 'color chart not found.')
params = WhoParamsBuilder().gender('M').age(100).sbp1(130).sbp2(145).chol(5).smoker().diabetic().build()
result = WHO().calculate(params)
self.assertEqual(result['exception'], 'color chart not found.')
@parameterized.expand(DataHelper.who_test_data())
def test_sequence(self, region, age, gender, bp, total_chol, smoker, diabetes, cvd_risk):
params = WhoParamsBuilder() \
.region(region) \
.gender(gender). \
age(age). \
sbp1(bp). \
sbp2(bp). \
chol(total_chol). \
smoker(smoker == 1). \
diabetic(diabetes == 1) \
.build()
result = WHO().calculate(params)
self.assertEqual(result['risk_range'], cvd_risk)
| apache-2.0 | Python |
01358a34c59281dd19ced11eb6eb9b2a59856e70 | fix parser bugs | lonelyandrew/IRC | proj_1/bool_re/bool_re.py | proj_1/bool_re/bool_re.py | #! /usr/local/bin Python3
def parser(command):
main_q = []
branch_q = []
command = command.strip()
command = command.replace('AND NOT', '^')
command = command.replace('AND', '&')
command = command.replace('OR', '|')
return command
| #! /usr/local/bin Python3
def parser(command):
main_q = []
branch_q = []
command.replace('AND NOT', '^')
command.replace('AND', '&')
command.replace('OR', '|')
return command
| mit | Python |
2ced12bdb90754e12521e7bbe46b57bb11f4eff6 | Update tests/test_zmq.py | pyghassen/zerorpc-python,danielrowles-wf/zerorpc-python,kencochrane/zerorpc-python,madscheme/zerorpc-python,yishilin/zerorpc-python,wfxiang08/zerorpc-python,alemic/zerorpc-python,pramukta/zerorpc-python,psantann/zerorpc-python,vidyacraghav/zerorpc-python,pchomik/zerorpc-python,gmarceau/zerorpc-python,abarnert/zerorpc-python,jiajie999/zerorpc-python,fahhem/zerorpc-python,lucius-feng/zerorpc-python,ormsbee/zerorpc-python,nkhuyu/zerorpc-python,winggynOnly/zerorpc-python,dariobottazzi/zerorpc-python,alekibango/zerorpc-python,zapier/zerorpc-python,faith0811/zerorpc-python,kanghtta/zerorpc-python,qz267/zerorpc-python,mahendra/zerorpc-python,JamshedVesuna/zerorpc-python,stdrickforce/zerorpc-python,spang/zerorpc-python,jgeewax/zerorpc-python,dotcloud/zerorpc-python,bombela/zerorpc-python,rainslytherin/zerorpc-python,tempbottle/zerorpc-python,strawerry/zerorpc-python,Laeeth/zerorpc-python,cpennington/zerorpc-python,joequant/zerorpc-python,pombredanne/zerorpc-python,yonglehou/zerorpc-python,thedrow/zerorpc-python,pramukta/zerorpc-python,b8045901/zerorpc-python,fast01/zerorpc-python,maxekman/zerorpc-python,jpetazzo/zerorpc-python,val314159/zerorpc-python,semond/zerorpc-python,ethifus/zerorpc-python,virqin/zerorpc-python,adieu/zerorpc-python,arnoutaertgeerts/zerorpc-python,mengzhuo/zerorpc-python,topbrightwen/zerorpc-python,jimbog/zerorpc-python,afghanistanyn/zerorpc-python,tutengfei/zerorpc-python,yiliaofan/zerorpc-python,lopter/zerorpc-python,summer1988/zerorpc-python,jhu-xx/zerorpc-python | tests/test_zmq.py | tests/test_zmq.py | # -*- coding: utf-8 -*-
# Open Source Initiative OSI - The MIT License (MIT):Licensing
#
# The MIT License (MIT)
# Copyright (c) 2012 DotCloud Inc (opensource@dotcloud.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import gevent
from zerorpc import zmq
def test1():
def server():
c = zmq.Context()
s = c.socket(zmq.REP)
s.bind('tcp://0.0.0.0:9999')
while True:
print 'srv recving...'
r = s.recv()
print 'srv', r
print 'srv sending...'
s.send('world')
s.close()
c.term()
def client():
c = zmq.Context()
s = c.socket(zmq.REQ)
s.connect('tcp://localhost:9999')
print 'cli sending...'
s.send('hello')
print 'cli recving...'
r = s.recv()
print 'cli', r
s.close()
c.term()
s = gevent.spawn(server)
c = gevent.spawn(client)
c.join()
| # -*- coding: utf-8 -*-
# Open Source Initiative OSI - The MIT License (MIT):Licensing
#
# The MIT License (MIT)
# Copyright (c) 2012 DotCloud Inc (opensource@dotcloud.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import gevent
import gevent.local
import gevent.queue
import gevent.event
from zerorpc import zmq
def test1():
def server():
c = zmq.Context()
s = c.socket(zmq.REP)
s.bind('tcp://0.0.0.0:9999')
while True:
print 'srv recving...'
r = s.recv()
print 'srv', r
print 'srv sending...'
s.send('world')
s.close()
c.term()
def client():
c = zmq.Context()
s = c.socket(zmq.REQ)
s.connect('tcp://localhost:9999')
print 'cli sending...'
s.send('hello')
print 'cli recving...'
r = s.recv()
print 'cli', r
s.close()
c.term()
s = gevent.spawn(server)
c = gevent.spawn(client)
c.join()
| mit | Python |
a80b1292e9c9a29921a3e42c7c60d861b4bf7965 | Enable with statement tests for Python 2.5 | frankier/mock,dannybtran/mock,testing-cabal/mock,sorenh/mock,scorphus/mock,nett55/mock,rbtcollins/mock,fladi/mock,OddBloke/mock,derwiki-adroll/mock,kostyll/mock,pypingou/mock,lord63-forks/mock,johndeng/mock | tests/testwith.py | tests/testwith.py | import sys
if sys.version_info[:2] >= (2, 5):
from tests._testwith import *
else:
from tests.support import unittest2
class TestWith(unittest2.TestCase):
@unittest2.skip('tests using with statement skipped on Python 2.4')
def testWith(self):
pass
if __name__ == '__main__':
unittest2.main() | import sys
if sys.version_info[:2] > (2, 5):
from tests._testwith import *
else:
from tests.support import unittest2
class TestWith(unittest2.TestCase):
@unittest2.skip('tests using with statement skipped on Python 2.4')
def testWith(self):
pass
if __name__ == '__main__':
unittest2.main() | bsd-2-clause | Python |
1bfa91ebc84f1bf1866eb3fc5751ce3187d90790 | Update urls.py | funkybob/antfarm | antfarm/urls.py | antfarm/urls.py | '''
Django-style URL dispatcher view.
App(root_url=url_dispatcher([
(r'^/$', views.index),
(re.compile(r'^/(?P<foo>\d+)/'), views.detail, {'bar': True}),
])
The view will be called with the request, and any matched _named_ groups.
Extra kwargs can be passed as a 3rd positional argument.
There is a namedtuple class defined called URL provided. All patterns will be
assembled as a URL instance, and their regex compiled. If kwargs are not
specified, they will default to {}.
'''
from collections import namedtuple
import re
from . import response
class KeepLooking(Exception):
'''Used to tell a url_dispatcher to skip this pattern and keep looking.'''
pass
URL = namedtuple('url', ('regex', 'view'))
class url_dispatcher(object):
def __init__(self, patterns):
self.patterns = map(self._make_url, patterns)
def _make_url(self, pattern):
'''Helper to ensure all patterns are url instances.'''
if not isinstance(pattern, URL):
# Ensure the regex is compiled
pattern = URL(re.compile(pattern[0], pattern[1])
return pattern
def __call__(self, request, *args, **kwargs):
path = getattr(request, 'remaining_path', request.path)
for pattern in self.patterns:
m = pattern.regex.match(path)
if m:
path.remaining_path = path[:m.end()]
try:
return pattern.view(request, *args, **kwargs)
except KeepLooking:
pass
return self.handle_not_found(request)
def handle_not_found(self, request):
return response.NotFound()
| '''
Django-style URL dispatcher view.
App(root_url=url_dispatcher([
(r'^/$', views.index),
(re.compile(r'^/(?P<foo>\d+)/'), views.detail, {'bar': True}),
])
The view will be called with the request, and any matched _named_ groups.
Extra kwargs can be passed as a 3rd positional argument.
There is a namedtuple class defined called URL provided. All patterns will be
assembled as a URL instance, and their regex compiled. If kwargs are not
specified, they will default to {}.
'''
from collections import namedtuple
import re
from . import response
class KeepLooking(Exception):
'''Used to tell a url_dispatcher to skip this pattern and keep looking.'''
pass
URL = namedtuple('url', ('regex', 'view'))
class url_dispatcher(object):
def __init__(self, patterns):
self.patterns = map(self._make_url, patterns)
def _make_url(self, pattern):
'''Helper to ensure all patterns are url instances.'''
if not isinstance(pattern, URL):
# Ensure the regex is compiled
pattern[0] = re.compile(pattern[0])
pattern = URL(*pattern)
return pattern
def __call__(self, request, *args, **kwargs):
path = getattr(request, 'remaining_path', request.path)
for pattern in self.patterns:
m = pattern.regex.match(path)
if m:
path.remaining_path = path[:m.end()]
try:
return pattern.view(request, *args, **kwargs)
except KeepLooking:
pass
return self.handle_not_found(request)
def handle_not_found(self, request):
return response.NotFound()
| mit | Python |
9e27327d942b576551671c9800cea2d772ce1b08 | Remove Account | brunoliveira8/managyment,brunoliveira8/managyment,brunoliveira8/managyment | project/gym_app/models.py | project/gym_app/models.py | from django.db import models
# Create your models here.
class WeightProgress(models.Model):
startDate = models.DateField(auto_now_add=True)
startWeight = models.IntegerField(max_length=4)
previousDate = models.DateField()
previousWeight = models.IntegerField(max_length=4)
lastDate = models.DateField(auto_now=True)
lastWeight = models.IntegerField(max_length=4)
class Task(models.Model):
name = models.CharField(max_length=32)
LEG = 'LG'
CHEST = 'CH'
SHOULDER = 'SH'
NOTYPE = 'NT'
TYPE_OF_TASKS_CHOICES = (
(NOTYPE, 'No type'),
(LEG, 'Leg'),
(SHOULDER, 'Shoulder'),
(CHEST, 'Chest'),
)
typeTask = models.CharField(max_length=2,
choices=TYPE_OF_TASKS_CHOICES,
default=NOTYPE)
class Exercise(models.Model):
task = models.ManyToManyField(Task)
weight = models.IntegerField(max_length=4)
repetition = models.IntegerField(max_length=4)
sets = models.IntegerField(max_length=4)
class Workout(models.Model):
day = models.ManyToManyField(Exercise)
| from django.db import models
# Create your models here.
#Abstract class that all user accounts will inherit from
class Account(models.Model):
firstName = models.CharField(max_length=32)
lastName = models.CharField(max_length=32)
username = models.CharField(max_length=32, unique=True)
password = models.CharField(max_length=32)
email = models.EmailField(max_length=64)
class Meta:
abstract = True
class RegularAthlete(Account):
goalWeight = models.IntegerField(max_length=4)
class WeightProgress(models.Model):
startDate = models.DateField(auto_now_add=True)
startWeight = models.IntegerField(max_length=4)
previousDate = models.DateField()
previousWeight = models.IntegerField(max_length=4)
lastDate = models.DateField(auto_now=True)
lastWeight = models.IntegerField(max_length=4)
class Task(models.Model):
name = models.CharField(max_length=32)
LEG = 'LG'
CHEST = 'CH'
SHOULDER = 'SH'
NOTYPE = 'NT'
TYPE_OF_TASKS_CHOICES = (
(NOTYPE, 'No type'),
(LEG, 'Leg'),
(SHOULDER, 'Shoulder'),
(CHEST, 'Chest'),
)
typeTask = models.CharField(max_length=2,
choices=TYPE_OF_TASKS_CHOICES,
default=NOTYPE)
class Exercise(models.Model):
task = models.ManyToManyField(Task)
weight = models.IntegerField(max_length=4)
repetition = models.IntegerField(max_length=4)
sets = models.IntegerField(max_length=4)
class Workout(models.Model):
day = models.ManyToManyField(Exercise)
| mit | Python |
6055b7eb6b34ed22eb3c7cd17a975d4728be1360 | Add test for new edge sampling | Eigenstate/msmbuilder,Eigenstate/msmbuilder,msmbuilder/msmbuilder,msmbuilder/msmbuilder,Eigenstate/msmbuilder,brookehus/msmbuilder,brookehus/msmbuilder,brookehus/msmbuilder,brookehus/msmbuilder,Eigenstate/msmbuilder,msmbuilder/msmbuilder,msmbuilder/msmbuilder,brookehus/msmbuilder,msmbuilder/msmbuilder,Eigenstate/msmbuilder | msmbuilder/tests/test_sampling.py | msmbuilder/tests/test_sampling.py | import numpy as np
from msmbuilder.decomposition import tICA
from msmbuilder.io.sampling import sample_dimension
def test_sample_dimension():
np.random.seed(42)
X = np.random.randn(500, 5)
data = [X, X, X]
tica = tICA(n_components=2, lag_time=1).fit(data)
tica_trajs = {k: tica.partial_transform(v) for k, v in enumerate(data)}
res = sample_dimension(tica_trajs, 0, 10, scheme="linear")
res2 = sample_dimension(tica_trajs, 1, 10, scheme="linear")
assert len(res) == len(res2) == 10
def test_sample_dimension_2():
np.random.seed(42)
X = np.random.randn(500, 5)
data = [X, X, X]
tica = tICA(n_components=2, lag_time=1).fit(data)
tica_trajs = {k: tica.partial_transform(v) for k, v in enumerate(data)}
res = sample_dimension(tica_trajs, 0, 10, scheme="random")
res2 = sample_dimension(tica_trajs, 1, 10, scheme="edge")
assert len(res) == len(res2) == 10
| import numpy as np
from msmbuilder.decomposition import tICA
from msmbuilder.io.sampling import sample_dimension
def test_sample_dimension():
np.random.seed(42)
X = np.random.randn(500, 5)
data = [X, X, X]
tica = tICA(n_components=2, lag_time=1).fit(data)
tica_trajs = {k: tica.partial_transform(v) for k, v in enumerate(data)}
res = sample_dimension(tica_trajs, 0, 10, scheme="linear")
res2 = sample_dimension(tica_trajs, 1, 10, scheme="linear")
assert len(res) == len(res2) == 10
| lgpl-2.1 | Python |
9d4afec3fd71b017da0a9911872fd56ba81a6f1c | Rename eul2quat() as angle2quat() as agreed upon. Rename input_check_1d() to input_check_Nx1() for clarity. Begin quat2angle(), still emtpy. | NavPy/NavPy,hamid-m/NavPy,adhika/NavPy | navpy.py | navpy.py | import numpy as np
def angle2quat(rotAngle1,rotAngle2,rotAngle3,
input_unit='rad',rotation_sequence='ZYX'):
"""
"""
# INPUT CHECK
rotAngle1,N1 = input_check_Nx1(rotAngle1)
rotAngle2,N2 = input_check_Nx1(rotAngle2)
rotAngle3,N3 = input_check_Nx1(rotAngle3)
if( (N1!=N2) | (N1!=N3) | (N2!=N3) ):
raise ValueError('Inputs are not of same dimensions')
q0 = np.zeros(N1)
qvec = np.zeros((N1,3))
if(input_unit=='deg'):
rotAngle1 = np.deg2rad(rotAngle1)
rotAngle2 = np.deg2rad(rotAngle2)
rotAngle3 = np.deg2rad(rotAngle3)
rotAngle1 /= 2.0
rotAngle2 /= 2.0
rotAngle3 /= 2.0
if(rotation_sequence=='ZYX'):
q0[:] = np.cos(rotAngle1)*np.cos(rotAngle2)*np.cos(rotAngle3) + \
np.sin(rotAngle1)*np.sin(rotAngle2)*np.sin(rotAngle3)
qvec[:,0] = np.cos(rotAngle1)*np.cos(rotAngle2)*np.sin(rotAngle3) - \
np.sin(rotAngle1)*np.sin(rotAngle2)*np.cos(rotAngle3)
qvec[:,1] = np.cos(rotAngle1)*np.sin(rotAngle2)*np.cos(rotAngle3) + \
np.sin(rotAngle1)*np.cos(rotAngle2)*np.sin(rotAngle3)
qvec[:,2] = np.sin(rotAngle1)*np.cos(rotAngle2)*np.cos(rotAngle3) - \
np.cos(rotAngle1)*np.sin(rotAngle2)*np.sin(rotAngle3)
return q0, qvec
def quat2angle(q0,qvec,output_unit='rad',rotation_sequence='ZYX'):
"""
"""
def input_check_Nx1(x):
x = np.atleast_1d(x)
theSize = np.shape(x)
if(len(theSize)>1):
#1. Input must be of size N x 1
if ((theSize[0]!=1) & (theSize[1]!=1)):
raise ValueError('Not an N x 1 array')
#2. Make it into a 1-D array
x = x.reshape(np.size(x))
return x,np.size(x)
| import numpy as np
def eul2quat(rotAngle1,rotAngle2,rotAngle3,
input_unit='rad',rotation_sequence='ZYX'):
"""
"""
# INPUT CHECK
rotAngle1,N1 = input_check_1d(rotAngle1)
rotAngle2,N2 = input_check_1d(rotAngle2)
rotAngle3,N3 = input_check_1d(rotAngle3)
if( (N1!=N2) | (N1!=N3) | (N2!=N3) ):
raise ValueError('Inputs are not of same dimensions')
q0 = np.zeros(N1)
qvec = np.zeros((N1,3))
if(input_unit=='deg'):
rotAngle1 = np.deg2rad(rotAngle1)
rotAngle2 = np.deg2rad(rotAngle2)
rotAngle3 = np.deg2rad(rotAngle3)
rotAngle1 /= 2.0
rotAngle2 /= 2.0
rotAngle3 /= 2.0
if(rotation_sequence=='ZYX'):
q0[:] = np.cos(rotAngle1)*np.cos(rotAngle2)*np.cos(rotAngle3) + \
np.sin(rotAngle1)*np.sin(rotAngle2)*np.sin(rotAngle3)
qvec[:,0] = np.cos(rotAngle1)*np.cos(rotAngle2)*np.sin(rotAngle3) - \
np.sin(rotAngle1)*np.sin(rotAngle2)*np.cos(rotAngle3)
qvec[:,1] = np.cos(rotAngle1)*np.sin(rotAngle2)*np.cos(rotAngle3) + \
np.sin(rotAngle1)*np.cos(rotAngle2)*np.sin(rotAngle3)
qvec[:,2] = np.sin(rotAngle1)*np.cos(rotAngle2)*np.cos(rotAngle3) - \
np.cos(rotAngle1)*np.sin(rotAngle2)*np.sin(rotAngle3)
return q0, qvec
def input_check_1d(x):
x = np.atleast_1d(x)
theSize = np.shape(x)
if(len(theSize)>1):
#1. Input must be of size N x 1
if ((theSize[0]!=1) & (theSize[1]!=1)):
raise ValueError('Not an N x 1 array')
#2. Make it into a 1-D array
x = x.reshape(np.size(x))
return x,np.size(x)
| bsd-3-clause | Python |
cebd4f1ee9a87cc2652ebf8981df20121ec257b2 | Raise ValueError instead of struct.error | gulopine/steel-experiment | steel/fields/numbers.py | steel/fields/numbers.py | import struct
from steel.fields import Field
__all__ = ['Integer']
class Integer(Field):
"An integer represented as a sequence and bytes"
# These map a number of bytes to a struct format code
size_formats = {
1: 'B', # char
2: 'H', # short
4: 'L', # long
8: 'Q', # long long
}
def __init__(self, *args, endianness='<', **kwargs):
super(Integer, self).__init__(*args, **kwargs)
self.format_code = endianness + self.size_formats[self.size]
def encode(self, value):
try:
return struct.pack(self.format_code, value)
except struct.error as e:
raise ValueError(*e.args)
def decode(self, value):
# The index on the end is because unpack always returns a tuple
try:
return struct.unpack(self.format_code, value)[0]
except struct.error as e:
raise ValueError(*e.args)
| import struct
from steel.fields import Field
__all__ = ['Integer']
class Integer(Field):
"An integer represented as a sequence and bytes"
# These map a number of bytes to a struct format code
size_formats = {
1: 'B', # char
2: 'H', # short
4: 'L', # long
8: 'Q', # long long
}
def __init__(self, *args, endianness='<', **kwargs):
super(Integer, self).__init__(*args, **kwargs)
self.format_code = endianness + self.size_formats[self.size]
def encode(self, value):
return struct.pack(self.format_code, value)
def decode(self, value):
# The index on the end is because unpack always returns a tuple
return struct.unpack(self.format_code, value)[0]
| bsd-3-clause | Python |
fc37e171edb11ecc2aa0096ceea14bc539d61d70 | bump dev version | pyconau2017/symposion,TheOpenBastion/symposion,faulteh/symposion,euroscipy/symposion,euroscipy/symposion,pydata/symposion,faulteh/symposion,pyohio/symposion,mbrochh/symposion,TheOpenBastion/symposion,pinax/symposion,python-spain/symposion,pydata/symposion,mbrochh/symposion,toulibre/symposion,NelleV/pyconfr-test,miurahr/symposion,python-spain/symposion,pyohio/symposion,pyconau2017/symposion,pinax/symposion,toulibre/symposion,pyconca/2013-web,miurahr/symposion,NelleV/pyconfr-test,pyconca/2013-web | symposion/__init__.py | symposion/__init__.py | __version__ = "1.0b1.dev50"
| __version__ = "1.0b1.dev49"
| bsd-3-clause | Python |
148ac18c2660b9ec6482a3026dda7e42542a27d5 | bump dev version | mbrochh/symposion,pyconau2017/symposion,pyconca/2013-web,TheOpenBastion/symposion,pydata/symposion,pyohio/symposion,pydata/symposion,mbrochh/symposion,miurahr/symposion,python-spain/symposion,NelleV/pyconfr-test,pinax/symposion,pinax/symposion,NelleV/pyconfr-test,euroscipy/symposion,python-spain/symposion,toulibre/symposion,faulteh/symposion,euroscipy/symposion,pyconca/2013-web,pyohio/symposion,faulteh/symposion,miurahr/symposion,pyconau2017/symposion,TheOpenBastion/symposion,toulibre/symposion | symposion/__init__.py | symposion/__init__.py | __version__ = "1.0b1.dev49"
| __version__ = "1.0b1.dev48"
| bsd-3-clause | Python |
0641e55bd6a87557c5cb89d073ae067e8a57fcc1 | Fix a path issue | amperser/proselint,amperser/proselint,jstewmon/proselint,jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint | proselint/command_line.py | proselint/command_line.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
base_url = "prose.lifelinter.com/"
proselint_path = os.path.dirname(os.path.realpath(__file__))
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
base_url + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
# Return the version number.
if version:
print "v0.0.1"
return
if not file:
raise ValueError("Specify a file to lint using the --file flag.")
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(proselint_path, "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("", os.path.join(proselint_path, "checks", f))
checks.append(getattr(m, 'check'))
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
if errors:
for error in errors:
log_error(*error)
| #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
base_url = "prose.lifelinter.com/"
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
base_url + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
# Return the version number.
if version:
print "v0.0.1"
return
if not file:
raise ValueError("Specify a file to lint using the --file flag.")
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
if errors:
for error in errors:
log_error(*error)
| bsd-3-clause | Python |
92f6cd6804e71245ba9b91e5bedf4c0b4b07e30b | Make retrieval of the detectron ops lib more robust | facebookresearch/Detectron,facebookresearch/Detectron,facebookresearch/Detectron | lib/utils/env.py | lib/utils/env.py | # Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""Environment helper functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import sys
# Default value of the CMake install prefix
_CMAKE_INSTALL_PREFIX = '/usr/local'
def get_runtime_dir():
"""Retrieve the path to the runtime directory."""
return sys.path[0]
def get_py_bin_ext():
"""Retrieve python binary extension."""
return '.py'
def set_up_matplotlib():
"""Set matplotlib up."""
import matplotlib
# Use a non-interactive backend
matplotlib.use('Agg')
def exit_on_error():
"""Exit from a detectron tool when there's an error."""
sys.exit(1)
def import_nccl_ops():
"""Import NCCL ops."""
# There is no need to load NCCL ops since the
# NCCL dependency is built into the Caffe2 gpu lib
pass
def get_detectron_ops_lib():
"""Retrieve Detectron ops library."""
# Candidate prefixes for the detectron ops lib path
prefixes = [_CMAKE_INSTALL_PREFIX, sys.prefix, sys.exec_prefix] + sys.path
# Search for detectron ops lib
for prefix in prefixes:
ops_path = os.path.join(prefix, 'lib/libcaffe2_detectron_ops_gpu.so')
if os.path.exists(ops_path):
# TODO(ilijar): Switch to using a logger
print('Found Detectron ops lib: {}'.format(ops_path))
break
assert os.path.exists(ops_path), \
('Detectron ops lib not found; make sure that your Caffe2 '
'version includes Detectron module')
return ops_path
def get_custom_ops_lib():
"""Retrieve custom ops library."""
lib_dir, _utils = os.path.split(os.path.dirname(__file__))
custom_ops_lib = os.path.join(
lib_dir, 'build/libcaffe2_detectron_custom_ops_gpu.so')
assert os.path.exists(custom_ops_lib), \
'Custom ops lib not found at \'{}\''.format(custom_ops_lib)
return custom_ops_lib
| # Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""Environment helper functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import imp
import os
import sys
def get_runtime_dir():
"""Retrieve the path to the runtime directory."""
return sys.path[0]
def get_py_bin_ext():
"""Retrieve python binary extension."""
return '.py'
def set_up_matplotlib():
"""Set matplotlib up."""
import matplotlib
# Use a non-interactive backend
matplotlib.use('Agg')
def exit_on_error():
"""Exit from a detectron tool when there's an error."""
sys.exit(1)
def import_nccl_ops():
"""Import NCCL ops."""
# There is no need to load NCCL ops since the
# NCCL dependency is built into the Caffe2 gpu lib
pass
def get_caffe2_dir():
"""Retrieve Caffe2 dir path."""
_fp, c2_path, _desc = imp.find_module('caffe2')
assert os.path.exists(c2_path), \
'Caffe2 not found at \'{}\''.format(c2_path)
c2_dir = os.path.dirname(os.path.abspath(c2_path))
return c2_dir
def get_detectron_ops_lib():
"""Retrieve Detectron ops library."""
c2_dir = get_caffe2_dir()
detectron_ops_lib = os.path.join(
c2_dir, 'lib/libcaffe2_detectron_ops_gpu.so')
assert os.path.exists(detectron_ops_lib), \
('Detectron ops lib not found at \'{}\'; make sure that your Caffe2 '
'version includes Detectron module').format(detectron_ops_lib)
return detectron_ops_lib
def get_custom_ops_lib():
"""Retrieve custom ops library."""
lib_dir, _utils = os.path.split(os.path.dirname(__file__))
custom_ops_lib = os.path.join(
lib_dir, 'build/libcaffe2_detectron_custom_ops_gpu.so')
assert os.path.exists(custom_ops_lib), \
'Custom ops lib not found at \'{}\''.format(custom_ops_lib)
return custom_ops_lib
| apache-2.0 | Python |
856b3420a5c580aa93cc79d4f977e268a6364268 | remove migration stuff from the alembic script, apparently I don't know how that works yet | Lancey6/redwind,Lancey6/redwind,Lancey6/redwind | migrations/versions/93bd528a83_add_attachment_element.py | migrations/versions/93bd528a83_add_attachment_element.py | """add Attachment element
Revision ID: 93bd528a83
Revises: 51d6e03ecba
Create Date: 2015-06-04 22:02:36.082013
"""
# revision identifiers, used by Alembic.
revision = '93bd528a83'
down_revision = '51d6e03ecba'
from alembic import op
import sqlalchemy as sa
from redwind import create_app
from redwind import util
from redwind import admin
from redwind.models import Post, Attachment
from redwind.extensions import db
import os
import datetime
import random
import string
import mimetypes
import shutil
from flask import current_app
def upgrade():
# commands auto generated by Alembic - please adjust! ###
op.create_table(
'attachment',
sa.Column('id', sa.Integer(), nullable=False, index=True),
sa.Column('filename', sa.String(length=256), nullable=True),
sa.Column('mimetype', sa.String(length=256), nullable=True),
sa.Column('storage_path', sa.String(length=256), nullable=True),
sa.Column('post_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['post_id'], ['post.id'], ),
sa.PrimaryKeyConstraint('id'))
# end Alembic commands ###
def downgrade():
# commands auto generated by Alembic - please adjust! ###
op.drop_table('attachment')
# end Alembic commands ###
| """add Attachment element
Revision ID: 93bd528a83
Revises: 51d6e03ecba
Create Date: 2015-06-04 22:02:36.082013
"""
# revision identifiers, used by Alembic.
revision = '93bd528a83'
down_revision = '51d6e03ecba'
from alembic import op
import sqlalchemy as sa
from redwind import create_app
from redwind import util
from redwind import admin
from redwind.models import Post, Attachment
from redwind.extensions import db
import os
import datetime
import random
import string
import mimetypes
import shutil
from flask import current_app
def upgrade():
# commands auto generated by Alembic - please adjust! ###
op.create_table(
'attachment',
sa.Column('id', sa.Integer(), nullable=False, index=True),
sa.Column('filename', sa.String(length=256), nullable=True),
sa.Column('mimetype', sa.String(length=256), nullable=True),
sa.Column('storage_path', sa.String(length=256), nullable=True),
sa.Column('post_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['post_id'], ['post.id'], ),
sa.PrimaryKeyConstraint('id'))
# end Alembic commands ###
convert_files_to_attachments()
def downgrade():
# commands auto generated by Alembic - please adjust! ###
op.drop_table('attachment')
# end Alembic commands ###
def convert_file_to_attachment(post, filename):
_, ext = os.path.splitext(filename)
fullpath = os.path.join(current_app.root_path, '_data',
post.path, 'files', filename)
if not os.path.exists(fullpath):
print('could not find', fullpath)
return
now = post.published
storage_path = '{}/{:02d}/{:02d}/{}'.format(
now.year, now.month, now.day,
''.join(random.choice(string.ascii_letters + string.digits)
for _ in range(8)) + '-' + filename)
mimetype, _ = mimetypes.guess_type(filename)
attachment = Attachment(filename=filename,
mimetype=mimetype,
storage_path=storage_path)
print(attachment.disk_path)
os.makedirs(os.path.dirname(attachment.disk_path), exist_ok=True)
shutil.copy2(fullpath, attachment.disk_path)
post.attachments.append(attachment)
def convert_files_to_attachments():
app = create_app()
with app.app_context():
for post in Post.query.all():
for a in post.attachments:
if os.path.exists(a.disk_path):
os.remove(a.disk_path)
db.session.delete(a)
if not post.photos:
# check for files
filedir = os.path.join(
current_app.root_path, '_data', post.path, 'files')
if os.path.exists(filedir):
for filename in os.listdir(filedir):
convert_file_to_attachment(post, filename)
else:
for photo in (post.photos or []):
filename = photo.get('filename')
convert_file_to_attachment(post, filename)
db.session.commit()
| bsd-2-clause | Python |
32f99cd7a9f20e2c8d7ebd140c23ac0e43b1284c | Add logging to track down a bug | xchewtoyx/pulldb | pulldb/users.py | pulldb/users.py | # Copyright 2013 Russell Heilling
import logging
from google.appengine.api import users
from pulldb import base
from pulldb import session
from pulldb.models.users import User
class Profile(session.SessionHandler):
def get(self):
app_user = users.get_current_user()
template_values = self.base_template_values()
template_values.update({
'user': user_key(app_user).get(),
})
template = self.templates.get_template('users_profile.html')
self.response.write(template.render(template_values))
def user_key(app_user=users.get_current_user(), create=True):
logging.debug("Looking up user key for: %r", app_user)
key = None
user = User.query(User.userid == app_user.user_id()).get()
if user:
key = user.key
elif create:
logging.info('Adding user to datastore: %s', app_user.nickname())
user = User(userid=app_user.user_id(),
nickname=app_user.nickname())
user.put()
key = user.key
return user.key
app = base.create_app([
('/users/me', Profile),
])
| # Copyright 2013 Russell Heilling
import logging
from google.appengine.api import users
from pulldb import base
from pulldb import session
from pulldb.models.users import User
class Profile(session.SessionHandler):
def get(self):
app_user = users.get_current_user()
template_values = self.base_template_values()
template_values.update({
'user': user_key(app_user).get(),
})
template = self.templates.get_template('users_profile.html')
self.response.write(template.render(template_values))
def user_key(app_user=users.get_current_user(), create=True):
key = None
user = User.query(User.userid == app_user.user_id()).get()
if user:
key = user.key
elif create:
logging.info('Adding user to datastore: %s', app_user.nickname())
user = User(userid=app_user.user_id(),
nickname=app_user.nickname())
user.put()
key = user.key
return user.key
app = base.create_app([
('/users/me', Profile),
])
| mit | Python |
2d6f8b7b64a2d3e5d99254efd4cf79556692822b | Initialize flask-babel | finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is | app/__init__.py | app/__init__.py | import logging
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.uploads import UploadSet, configure_uploads, IMAGES
from flask.ext.mail import Mail
from flask.ext.whooshalchemy import whoosh_index
from flask.ext.babel import Babel
from helpers.text import slugify, truncate
from config import config
bootstrap = Bootstrap()
db = SQLAlchemy()
login_manager = LoginManager()
login_manager.login_view = 'auth.login'
ads = UploadSet('ads', IMAGES)
imgs = UploadSet('imgs', IMAGES)
mail = Mail()
babel = Babel()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
configure_logging(app)
app.jinja_env.globals.update(slugify=slugify)
app.jinja_env.globals.update(truncate=truncate)
app.jinja_env.globals.update(url=ads.url)
bootstrap.init_app(app)
db.init_app(app)
login_manager.init_app(app)
configure_uploads(app, (ads, imgs))
mail.init_app(app)
babel.init_app(app)
from .models import Post
whoosh_index(app, Post)
from .aflafrettir import aflafrettir as afla_blueprint
from .auth import auth as auth_blueprint
from .admin import admin as admin_blueprint
from .rss import feed as feed_blueprint
app.register_blueprint(afla_blueprint)
app.register_blueprint(auth_blueprint, url_prefix='/auth')
app.register_blueprint(admin_blueprint, url_prefix='/admin')
app.register_blueprint(feed_blueprint, url_prefix='/feed')
from helpers.image import start_image_deletion_thread
@app.before_first_request
def before_first_request():
start_image_deletion_thread()
return app
def configure_logging(app, logger='logger.yml'):
import os, yaml
import logging.config
try:
os.makedirs('log', exist_ok=True)
except OSError:
logging.exception('OSError: ')
if os.path.exists(logger):
with open(logger) as f:
config = yaml.load(f.read())
logging.config.dictConfig(config)
class LevelFilter(logging.Filter):
def __init__(self, level):
self.__level = level
def filter(self, record):
return record.levelno == self.__level
| import logging
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.uploads import UploadSet, configure_uploads, IMAGES
from flask.ext.mail import Mail
from flask.ext.whooshalchemy import whoosh_index
from helpers.text import slugify, truncate
from config import config
bootstrap = Bootstrap()
db = SQLAlchemy()
login_manager = LoginManager()
login_manager.login_view = 'auth.login'
ads = UploadSet('ads', IMAGES)
imgs = UploadSet('imgs', IMAGES)
mail = Mail()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
configure_logging(app)
app.jinja_env.globals.update(slugify=slugify)
app.jinja_env.globals.update(truncate=truncate)
app.jinja_env.globals.update(url=ads.url)
bootstrap.init_app(app)
db.init_app(app)
login_manager.init_app(app)
configure_uploads(app, (ads, imgs))
mail.init_app(app)
from .models import Post
whoosh_index(app, Post)
from .aflafrettir import aflafrettir as afla_blueprint
from .auth import auth as auth_blueprint
from .admin import admin as admin_blueprint
from .rss import feed as feed_blueprint
app.register_blueprint(afla_blueprint)
app.register_blueprint(auth_blueprint, url_prefix='/auth')
app.register_blueprint(admin_blueprint, url_prefix='/admin')
app.register_blueprint(feed_blueprint, url_prefix='/feed')
from helpers.image import start_image_deletion_thread
@app.before_first_request
def before_first_request():
start_image_deletion_thread()
return app
def configure_logging(app, logger='logger.yml'):
import os, yaml
import logging.config
try:
os.makedirs('log', exist_ok=True)
except OSError:
logging.exception('OSError: ')
if os.path.exists(logger):
with open(logger) as f:
config = yaml.load(f.read())
logging.config.dictConfig(config)
class LevelFilter(logging.Filter):
def __init__(self, level):
self.__level = level
def filter(self, record):
return record.levelno == self.__level
| mit | Python |
38dc0f85d4a147591d1815834113541e517a5ffb | bump to 1.0.0b3 | dmpetrov/dataversioncontrol,dmpetrov/dataversioncontrol,efiop/dvc,efiop/dvc | dvc/version.py | dvc/version.py | # Used in setup.py, so don't pull any additional dependencies
#
# Based on:
# - https://github.com/python/mypy/blob/master/mypy/version.py
# - https://github.com/python/mypy/blob/master/mypy/git.py
import os
import subprocess
_BASE_VERSION = "1.0.0b3"
def _generate_version(base_version):
"""Generate a version with information about the Git repository."""
pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if not _is_git_repo(pkg_dir) or not _have_git():
return base_version
if _is_release(pkg_dir, base_version) and not _is_dirty(pkg_dir):
return base_version
return "{base_version}+{short_sha}{dirty}".format(
base_version=base_version,
short_sha=_git_revision(pkg_dir).decode("utf-8")[0:6],
dirty=".mod" if _is_dirty(pkg_dir) else "",
)
def _is_git_repo(dir_path):
"""Is the given directory version-controlled with Git?"""
return os.path.exists(os.path.join(dir_path, ".git"))
def _have_git():
"""Can we run the git executable?"""
try:
subprocess.check_output(["git", "--help"])
return True
except subprocess.CalledProcessError:
return False
except OSError:
return False
def _is_release(dir_path, base_version):
try:
output = subprocess.check_output(
["git", "describe", "--tags", "--exact-match"],
cwd=dir_path,
stderr=subprocess.STDOUT,
).decode("utf-8")
tag = output.strip()
return tag == base_version
except subprocess.CalledProcessError:
return False
def _git_revision(dir_path):
"""Get SHA of the HEAD of a Git repository."""
return subprocess.check_output(
["git", "rev-parse", "HEAD"], cwd=dir_path
).strip()
def _is_dirty(dir_path):
"""Check whether a git repository has uncommitted changes."""
try:
subprocess.check_call(["git", "diff", "--quiet"], cwd=dir_path)
return False
except subprocess.CalledProcessError:
return True
__version__ = _generate_version(_BASE_VERSION)
| # Used in setup.py, so don't pull any additional dependencies
#
# Based on:
# - https://github.com/python/mypy/blob/master/mypy/version.py
# - https://github.com/python/mypy/blob/master/mypy/git.py
import os
import subprocess
_BASE_VERSION = "1.0.0b2"
def _generate_version(base_version):
"""Generate a version with information about the Git repository."""
pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if not _is_git_repo(pkg_dir) or not _have_git():
return base_version
if _is_release(pkg_dir, base_version) and not _is_dirty(pkg_dir):
return base_version
return "{base_version}+{short_sha}{dirty}".format(
base_version=base_version,
short_sha=_git_revision(pkg_dir).decode("utf-8")[0:6],
dirty=".mod" if _is_dirty(pkg_dir) else "",
)
def _is_git_repo(dir_path):
"""Is the given directory version-controlled with Git?"""
return os.path.exists(os.path.join(dir_path, ".git"))
def _have_git():
"""Can we run the git executable?"""
try:
subprocess.check_output(["git", "--help"])
return True
except subprocess.CalledProcessError:
return False
except OSError:
return False
def _is_release(dir_path, base_version):
try:
output = subprocess.check_output(
["git", "describe", "--tags", "--exact-match"],
cwd=dir_path,
stderr=subprocess.STDOUT,
).decode("utf-8")
tag = output.strip()
return tag == base_version
except subprocess.CalledProcessError:
return False
def _git_revision(dir_path):
"""Get SHA of the HEAD of a Git repository."""
return subprocess.check_output(
["git", "rev-parse", "HEAD"], cwd=dir_path
).strip()
def _is_dirty(dir_path):
"""Check whether a git repository has uncommitted changes."""
try:
subprocess.check_call(["git", "diff", "--quiet"], cwd=dir_path)
return False
except subprocess.CalledProcessError:
return True
__version__ = _generate_version(_BASE_VERSION)
| apache-2.0 | Python |
c3e5d3b13f9f074efe9797897ffb6429a2e8c6fe | improve hpd.py | tomba/kmsxx,tomba/kmsxx,tomba/kmsxx,tomba/kmsxx | py/tests/hpd.py | py/tests/hpd.py | #!/usr/bin/python3
import pyudev
import pykms
card = pykms.Card()
conns = card.connectors
context = pyudev.Context()
dev = pyudev.Devices.from_name(context, 'drm', 'card0')
monitor = pyudev.Monitor.from_netlink(context)
monitor.filter_by('drm')
for device in iter(monitor.poll, None):
if 'HOTPLUG' in device:
print("HPD")
for conn in conns:
conn.refresh()
modes = conn.get_modes()
print(" ", conn.fullname, ["{}x{}".format(m.hdisplay, m.vdisplay) for m in modes])
| #!/usr/bin/python3
import pyudev
import pykms
card = pykms.Card()
res = pykms.ResourceManager(card)
conn = res.reserve_connector("hdmi")
context = pyudev.Context()
dev = pyudev.Devices.from_name(context, 'drm', 'card0')
monitor = pyudev.Monitor.from_netlink(context)
monitor.filter_by('drm')
for device in iter(monitor.poll, None):
if 'HOTPLUG' in device:
conn.refresh()
mode = conn.get_modes()
print("HPD")
print(mode)
| mpl-2.0 | Python |
448d3213860d377e146aa4afa5087f5f3ccfcdb9 | test file updated | DiCarloLab-Delft/PycQED_py3,DiCarloLab-Delft/PycQED_py3,DiCarloLab-Delft/PycQED_py3 | pycqed/tests/analysis_v2/test_Two_state_T1_analysis.py | pycqed/tests/analysis_v2/test_Two_state_T1_analysis.py | import unittest
import pycqed as pq
import os
from pycqed.analysis_v2 import measurement_analysis as ma
from pycqed.analysis_v2 import Two_state_T1_analysis as Ta
class Test_efT1_analysis(unittest.TestCase):
@classmethod
def setUpClass(self):
self.datadir = os.path.join(pq.__path__[0], 'tests', 'test_data')
ma.a_tools.datadir = self.datadir
def test_efT1_analysis(self):
b = Ta.efT1_analysis(
t_start='20180606_144110', auto=True, close_figs=False)
t1 = b.fit_res['fit_res_P0'].params['tau1'].value
self.assertAlmostEqual(t1*1e6, 33.393, places=1)
| import unittest
import pycqed as pq
import os
from pycqed.analysis_v2 import measurement_analysis as ma
from pycqed.analysis_v2 import Two_state_T1_analysis as Ta
class Test_efT1_analysis(unittest.TestCase):
@classmethod
def setUpClass(self):
self.datadir = os.path.join(pq.__path__[0], 'tests', 'test_data')
ma.a_tools.datadir = self.datadir
def test_efT1_analysis(self):
Ta.efT1_analysis(
t_start='20180606_144110', auto=True, close_figs=False)
self.fit_res['fit_res_P0'].params['tau1'].value
| mit | Python |
9c528a9ce2d768fa44e821dce0104636d2de19db | Fix tests | NickVolynkin/SmokeDetector,Charcoal-SE/SmokeDetector,Charcoal-SE/SmokeDetector,ArtOfCode-/SmokeDetector,NickVolynkin/SmokeDetector,ArtOfCode-/SmokeDetector | test/test_regexes.py | test/test_regexes.py | from findspam import FindSpam
import pytest
@pytest.mark.parametrize("title, body, username, site, match", [
('18669786819 gmail customer service number 1866978-6819 gmail support number', '', '', '', True),
('Is there any http://www.hindawi.com/ template for Cloud-Oriented Data Center Networking?', '', '', '', True),
('', '', 'bagprada', '', True),
('HOW DO YOU SOLVE THIS PROBLEM?', '', '', '', True),
('12 Month Loans quick @ http://www.quick12monthpaydayloans.co.uk/Elimination of collateral pledging', '', '', '', True),
('support for yahoo mail 18669786819 @call for helpline number', '', '', '', True),
('yahoo email tech support 1 866 978 6819 Yahoo Customer Phone Number ,Shortest Wait', '', '', '', True),
('What is the value of MD5 checksums if the MD5 hash itself could potentially also have been manipulated?', '', '', '', False),
('Probability: 6 Dice are rolled. Which is more likely, that you get exactly one 6, or that you get 6 different numbers?', '', '', '', False),
('The Challenge of Controlling a Powerful AI', '', 'Serban Tanasa', '', False),
('Reproducing image of a spiral using TikZ', '', 'Kristoffer Ryhl', '', False),
('What is the proper way to say "queryer"', '', 'jedwards', '', False),
('What\'s a real-world example of "overfitting"?', '', 'user3851283', '', False),
('How to avoid objects when traveling at greater than .75 light speed. or How Not to Go SPLAT?', '', 'bowlturner', '', False),
('Is it unfair to regrade prior work after detecting cheating?', '', 'Village', '', False),
('Inner workings of muscles', '', '', 'fitness.stackexchange.com', False),
('Cannot access http://stackoverflow.com/ with proxy enabled', '', '', 'superuser.com', False)
])
def test_regexes(title, body, username, site, match):
result = FindSpam.testpost(title, body, username, site)
print title
print result
isspam = False
if (len(result) > 0):
isspam = True
assert match == isspam
| from findspam import FindSpam
import pytest
@pytest.mark.parametrize("title, body, username, site, match", [
('18669786819 gmail customer service number 1866978-6819 gmail support number', '', '', '', True),
('Is there any http://www.hindawi.com/ template for Cloud-Oriented Data Center Networking?', '', '', '', True),
('', 'bagprada', '', '', True),
('HOW DO YOU SOLVE THIS PROBLEM?', '', '', '', True),
('12 Month Loans quick @ http://www.quick12monthpaydayloans.co.uk/Elimination of collateral pledging', '', '', '', True),
('support for yahoo mail 18669786819 @call for helpline number', '', '', '', True),
('yahoo email tech support 1 866 978 6819 Yahoo Customer Phone Number ,Shortest Wait', '', '', '', True),
('What is the value of MD5 checksums if the MD5 hash itself could potentially also have been manipulated?', '', '', '', False),
('Probability: 6 Dice are rolled. Which is more likely, that you get exactly one 6, or that you get 6 different numbers?', '', '', '', False),
('The Challenge of Controlling a Powerful AI', '', 'Serban Tanasa', '', False),
('Reproducing image of a spiral using TikZ', '', 'Kristoffer Ryhl', '', False),
('What is the proper way to say "queryer"', '', 'jedwards', '', False),
('What\'s a real-world example of "overfitting"?', '', 'user3851283', '', False),
('How to avoid objects when traveling at greater than .75 light speed. or How Not to Go SPLAT?', '', 'bowlturner', '', False),
('Is it unfair to regrade prior work after detecting cheating?', 'Village', '', '', False),
('Inner workings of muscles', '', '', 'fitness.stackexchange.com', False),
('Cannot access http://stackoverflow.com/ with proxy enabled', '', '', 'superuser.com', False)
])
def test_regexes(title, body, username, site, match):
result = FindSpam.testpost(title, body, username, site)
print title
print result
isspam = False
if (len(result) > 0):
isspam = True
assert match == isspam
| apache-2.0 | Python |
57e58772ef95f56b8cb0207469f6b1721811d5cc | update dev version after 1.0.0 tag [ci skip] | desihub/desitarget,desihub/desitarget | py/desitarget/_version.py | py/desitarget/_version.py | __version__ = '1.0.0.dev5020'
| __version__ = '1.0.0'
| bsd-3-clause | Python |
27bd3012ca38e08d5c8b2f0e0144c649f1726a26 | make stat example work | osmcode/pyosmium,osmcode/pyosmium,osmcode/pyosmium | examples/osm_file_stats.py | examples/osm_file_stats.py | import osmium as o
import sys
class FileStatsHandler(o.SimpleHandler):
def __init__(self):
o.SimpleHandler.__init__(self)
self.nodes = 0
self.ways = 0
self.rels = 0
def node(self, n):
self.nodes += 1
def way(self, w):
self.ways += 1
def relation(self, r):
self.rels += 1
fd = o.Reader(sys.argv[1])
h = FileStatsHandler()
o.apply(fd, h)
print "Nodes:", h.nodes
print "Ways:", h.ways
print "Relations:", h.rels
| import osmium as o
import sys
class FileStatsHandler(o.SimpleHandler):
def __init__(self):
o.SimpleHandler.__init__(self)
self.nodes = 0
self.ways = 0
self.rels = 0
def node(self, n):
pass self.nodes += 1
def way(self, w):
pass self.ways += 1
def relation(self, r):
pass self.rels += 1
fd = o.Reader(sys.argv[1])
h = FileStatsHandler()
o.apply(fd, h)
print "Nodes:", h.nodes
print "Ways:", h.ways
print "Relations:", h.rels
| bsd-2-clause | Python |
84ced24ccfea8402e0fce1847b6f44bac470c58c | fix the old function using yaml input so that it is consistent with the new format | necozay/tulip-control,necozay/tulip-control,necozay/tulip-control,tulip-control/tulip-control,tulip-control/tulip-control,tulip-control/tulip-control,necozay/tulip-control,necozay/tulip-control,tulip-control/tulip-control | tools/autopart.py | tools/autopart.py | #!/usr/bin/env python
"""
Read data for a continuous transition system from a YAML file,
optionally visualize the partition, and save the result into a
tulipcon XML file.
Flags: -v verbose;
-p generate figure using functions in polytope.plot module.
SCL; 1 Apr 2012.
"""
import numpy as np
import sys
from StringIO import StringIO
from tulip import conxml, discretize, prop2part, polytope as pc
import tulip.polytope.plot as pplot
if __name__ == "__main__":
if len(sys.argv) < 2 or len(sys.argv) > 5:
print "Usage: %s input.yaml [-p] [-v] [output.xml]" % sys.argv[0]
exit(1)
if "-p" in sys.argv:
show_pplot = True
sys.argv.remove("-p")
else:
show_pplot = False
if "-v" in sys.argv:
verbose = 1
sys.argv.remove("-v")
else:
verbose = 0
if len(sys.argv) == 2:
out_fname = sys.argv[1]+".xml"
else:
out_fname = sys.argv[2]
(sys_dyn, initial_partition, N) = conxml.readYAMLfile(sys.argv[1], verbose=verbose)[0:3]
disc_dynamics = discretize.discretize(initial_partition, sys_dyn, N=N,
use_mpt=False, verbose=verbose)
with open(out_fname, "w") as f:
f.write(conxml.dumpXMLtrans(sys_dyn, disc_dynamics, N,
extra="This data file only contains a continuous transition system definition.",
pretty=True))
if show_pplot:
pplot.plot_partition(disc_dynamics, plot_transitions=True)
| #!/usr/bin/env python
"""
Read data for a continuous transition system from a YAML file,
optionally visualize the partition, and save the result into a
tulipcon XML file.
Flags: -v verbose;
-p generate figure using functions in polytope.plot module.
SCL; 1 Apr 2012.
"""
import numpy as np
import sys
from StringIO import StringIO
from tulip import conxml, discretize, prop2part, polytope as pc
import tulip.polytope.plot as pplot
if __name__ == "__main__":
if len(sys.argv) < 2 or len(sys.argv) > 5:
print "Usage: %s input.yaml [-p] [-v] [output.xml]" % sys.argv[0]
exit(1)
if "-p" in sys.argv:
show_pplot = True
sys.argv.remove("-p")
else:
show_pplot = False
if "-v" in sys.argv:
verbose = 1
sys.argv.remove("-v")
else:
verbose = 0
if len(sys.argv) == 2:
out_fname = sys.argv[1]+".xml"
else:
out_fname = sys.argv[2]
(sys_dyn, initial_partition, N) = conxml.readYAMLfile(sys.argv[1], verbose=verbose)
disc_dynamics = discretize.discretize(initial_partition, sys_dyn, N=N,
use_mpt=False, verbose=verbose)
with open(out_fname, "w") as f:
f.write(conxml.dumpXMLtrans(sys_dyn, disc_dynamics, N,
extra="This data file only contains a continuous transition system definition.",
pretty=True))
if show_pplot:
pplot.plot_partition(disc_dynamics, plot_transitions=True)
| bsd-3-clause | Python |
fc73dfb33f4e19d649672f19a1dc4cf09b229d29 | Add response_ok and response_error methods which return byte strings. | bm5w/network_tools | echo_server.py | echo_server.py | #! /usr/bin/env python
"""Echo server in socket connection: receives and sends back a message."""
import socket
def response_ok():
"""Return byte string 200 ok response."""
return u"HTTP/1.1 200 OK\nContent-Type: text/plain\nContent-length: 18\n\r\neverything is okay".encode('utf-8')
def reponse_error(error_code, reason):
"""Return byte string error code."""
return u"HTTP/1.1 {} {}".format(error_code, reason).encode('utf-8')
if __name__ == '__main__':
"""Run from terminal, this will recieve a messages and send them back."""
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
buffsize = 32
try:
while True:
msg = ''
done = False
conn, addr = server_socket.accept()
while not done:
msg_part = conn.recv(buffsize)
msg += msg_part
if len(msg_part) < buffsize:
done = True
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
except KeyboardInterrupt:
print 'I successfully stopped.'
server_socket.close()
| #! /usr/bin/env python
"""Echo server in socket connection: receives and sends back a message."""
import socket
if __name__ == '__main__':
"""Run from terminal, this will recieve a messages and send them back."""
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
buffsize = 32
try:
while True:
msg = ''
done = False
conn, addr = server_socket.accept()
while not done:
msg_part = conn.recv(buffsize)
msg += msg_part
if len(msg_part) < buffsize:
done = True
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
except KeyboardInterrupt:
print 'I successfully stopped.'
server_socket.close()
| mit | Python |
f20902b941e2f0b10ba6fab18f902f38b9cb7241 | Fix up time stamps | bbayles/py3flowtools | py3flowtools/flow_line.py | py3flowtools/flow_line.py | # flow_line.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import datetime
import socket
import struct
inet_aton = lambda x: ipv4_struct.unpack(socket.inet_aton(x))[0]
ipv4_struct = struct.Struct('!I')
def get_utc_time(unix_secs, unix_nsecs, sysuptime, x):
# unix_secs is the whole number of seconds since the epoch
# unix_nsecs is the number of residual nanoseconds
unix_time = unix_secs + (unix_nsecs / 1E9)
# sysuptime is the number of milliseconds the collecting device has been on
unix_base = unix_time - (sysuptime / 1000)
# x (either first or start) is the number of milliseconds of uptime at the
# start or end of the flow
ret = datetime.datetime.utcfromtimestamp(unix_base + (x / 1000))
return ret
class FlowLine(object):
def __init__(self, line):
line = line.decode('ascii').split(',')
# Base times
unix_secs = int(line[0])
unix_nsecs = int(line[1])
sysuptime = int(line[2])
first = int(line[6])
last = int(line[7])
# Fields to expose
self.dOctets = int(line[5])
self.dPkts = int(line[4])
self.first = get_utc_time(unix_secs, unix_nsecs, sysuptime, first)
self.last = get_utc_time(unix_secs, unix_nsecs, sysuptime, last)
self.srcaddr = line[10]
self.srcaddr_raw = inet_aton(line[10])
self.dstaddr = line[11]
self.dstaddr_raw = inet_aton(line[11])
self.srcport = int(line[15])
self.dstport = int(line[16])
self.prot = int(line[17])
self.tcp_flags = int(line[19])
| # flow_line.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import datetime
import socket
import struct
inet_aton = lambda x: ipv4_struct.unpack(socket.inet_aton(x))[0]
ipv4_struct = struct.Struct('!I')
utc_time = lambda x: datetime.datetime.utcfromtimestamp(int(x) / 1000)
class FlowLine(object):
def __init__(self, line):
line = line.decode('ascii').split(',')
self.first = utc_time(int(line[6]))
self.last = utc_time(int(line[7]))
self.srcaddr = line[10]
self.srcaddr_raw = inet_aton(line[10])
self.dstaddr = line[11]
self.dstaddr_raw = inet_aton(line[11])
self.srcport = int(line[15])
self.dstport = int(line[16])
self.prot = int(line[17])
self.dOctets = int(line[5])
self.dPkts = int(line[4])
self.tcp_flags = int(line[19])
| mit | Python |
3d4d9a69190c3d5b8d40a6eeee0b49a59e831606 | Use max to get the number of RNA sequences | RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode | rnacentral/portal/management/commands/xml_export_parallel.py | rnacentral/portal/management/commands/xml_export_parallel.py | """
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Max
from optparse import make_option
from portal.models import Rna
class Command(BaseCommand):
"""
Usage:
python manage.py xml_export_parallel --destination /full/path/to/output/location
Help:
python manage.py xml_export_parallel -h
"""
########################
# Command line options #
########################
option_list = BaseCommand.option_list + (
make_option('-d', '--destination',
default='',
dest='destination',
help='[Required] Full path to the output directory'),
)
# shown with -h, --help
help = ('Create LSF commands for parallelizing xml export.')
######################
# Django entry point #
######################
def handle(self, *args, **options):
"""
Main function, called by django.
"""
if not options['destination']:
raise CommandError('Please specify --destination')
total = Rna.objects.all().aggregate(Max('id'))['id__max']
step = pow(10, 5) * 2
start = 0
stop = 0
for i in xrange(step, total, step):
start = stop
stop = min(total, i)
print get_lsf_command(start, stop, options['destination'])
if stop < total:
start = stop
stop = total
print get_lsf_command(start, stop, options['destination'])
def get_lsf_command(start, stop, destination):
"""
Get LSF command.
"""
return ('bsub '
'-o output__{0}__{1}.txt '
'-e errors__{0}__{1}.txt '
'python manage.py xml_export '
'--min {0} '
'--max {1} '
'-d {2}').format(start, stop, destination)
| """
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from portal.models import Rna
class Command(BaseCommand):
"""
Usage:
python manage.py xml_export_parallel --destination /full/path/to/output/location
Help:
python manage.py xml_export_parallel -h
"""
########################
# Command line options #
########################
option_list = BaseCommand.option_list + (
make_option('-d', '--destination',
default='',
dest='destination',
help='[Required] Full path to the output directory'),
)
# shown with -h, --help
help = ('Create LSF commands for parallelizing xml export.')
######################
# Django entry point #
######################
def handle(self, *args, **options):
"""
Main function, called by django.
"""
if not options['destination']:
raise CommandError('Please specify --destination')
total = Rna.objects.count()
step = pow(10,5) * 2
start = 0
stop = 0
for i in xrange(step,total,step):
start = stop
stop = min(total, i)
print get_lsf_command(start, stop, options['destination'])
if stop < total:
start = stop
stop = total
print get_lsf_command(start, stop, options['destination'])
def get_lsf_command(start, stop, destination):
"""
Get LSF command.
"""
return ('bsub '
'-o output__{0}__{1}.txt '
'-e errors__{0}__{1}.txt '
'python manage.py xml_export '
'--min {0} '
'--max {1} '
'-d {2}').format(start, stop, destination)
| apache-2.0 | Python |
2dde9c34499e73d2fa317111745ce9ea7cdfe903 | add compat for celery 5.x | ulule/django-courriers,ulule/django-courriers | courriers/tasks.py | courriers/tasks.py | try:
from celery.task import task
except ImportError:
from celery import shared_task as task
@task(bind=True)
def subscribe(self, email, newsletter_list_id, user_id=None, **kwargs):
from courriers.backends import get_backend
from courriers.models import NewsletterList
from courriers import signals
from django.contrib.auth import get_user_model
User = get_user_model()
backend = get_backend()()
newsletter_list = None
if newsletter_list_id:
newsletter_list = NewsletterList.objects.get(pk=newsletter_list_id)
user = None
if user_id is not None:
user = User.objects.get(pk=user_id)
else:
user = User.objects.filter(email=email).last()
if user:
signals.subscribed.send(sender=User, user=user, newsletter_list=newsletter_list)
else:
try:
backend.subscribe(newsletter_list.list_id, email)
except Exception as e:
raise self.retry(exc=e, countdown=60)
@task(bind=True)
def unsubscribe(self, email, newsletter_list_id=None, user_id=None, **kwargs):
from courriers.backends import get_backend
from courriers.models import NewsletterList
from courriers import signals
from django.contrib.auth import get_user_model
User = get_user_model()
newsletter_lists = NewsletterList.objects.all()
if newsletter_list_id:
newsletter_lists = NewsletterList.objects.filter(pk=newsletter_list_id)
user = None
if user_id is not None:
user = User.objects.get(pk=user_id)
else:
user = User.objects.filter(email=email).last()
if user:
for newsletter_list in newsletter_lists:
signals.unsubscribed.send(
sender=User, user=user, newsletter_list=newsletter_list
)
else:
backend = get_backend()()
for newsletter in newsletter_lists:
backend.unsubscribe(newsletter.list_id, email)
| from __future__ import absolute_import
from celery.task import task
@task(bind=True)
def subscribe(self, email, newsletter_list_id, user_id=None, **kwargs):
from courriers.backends import get_backend
from courriers.models import NewsletterList
from courriers import signals
from django.contrib.auth import get_user_model
User = get_user_model()
backend = get_backend()()
newsletter_list = None
if newsletter_list_id:
newsletter_list = NewsletterList.objects.get(pk=newsletter_list_id)
user = None
if user_id is not None:
user = User.objects.get(pk=user_id)
else:
user = User.objects.filter(email=email).last()
if user:
signals.subscribed.send(sender=User, user=user, newsletter_list=newsletter_list)
else:
try:
backend.subscribe(newsletter_list.list_id, email)
except Exception as e:
raise self.retry(exc=e, countdown=60)
@task(bind=True)
def unsubscribe(self, email, newsletter_list_id=None, user_id=None, **kwargs):
from courriers.backends import get_backend
from courriers.models import NewsletterList
from courriers import signals
from django.contrib.auth import get_user_model
User = get_user_model()
newsletter_lists = NewsletterList.objects.all()
if newsletter_list_id:
newsletter_lists = NewsletterList.objects.filter(pk=newsletter_list_id)
user = None
if user_id is not None:
user = User.objects.get(pk=user_id)
else:
user = User.objects.filter(email=email).last()
if user:
for newsletter_list in newsletter_lists:
signals.unsubscribed.send(
sender=User, user=user, newsletter_list=newsletter_list
)
else:
backend = get_backend()()
for newsletter in newsletter_lists:
backend.unsubscribe(newsletter.list_id, email)
| mit | Python |
4eabdfeb2a228e650fc5822df405ac51b448a167 | Clarify monitor command help text and log message | ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend | cla_backend/apps/cla_butler/management/commands/monitor_missing_outcome_codes.py | cla_backend/apps/cla_butler/management/commands/monitor_missing_outcome_codes.py | # coding=utf-8
import logging
from django.core.management.base import BaseCommand
from cla_butler.stack import is_first_instance, InstanceNotInAsgException, StackException
from cla_eventlog.constants import LOG_LEVELS, LOG_TYPES
from cla_eventlog.models import Log
from legalaid.models import Case
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'LGA-275 specific monitoring command. Count and alert when outcome codes expected to be ' \
'denormalized to Case instances are missing'
def handle(self, *args, **options):
if self.should_run_housekeeping(**options):
self.check_for_missing_outcome_codes()
else:
self.stdout.write('Not doing housekeeping because running on secondary instance')
@staticmethod
def check_for_missing_outcome_codes():
outcomes_that_should_be_denormed = Log.objects.filter(level=LOG_LEVELS.HIGH, type=LOG_TYPES.OUTCOME)
outcomes_missing_denormed_code = outcomes_that_should_be_denormed.filter(case__outcome_code='')
cases_missing_denormed_code_pks = outcomes_missing_denormed_code.values_list('case__id', flat=True)
cases_to_re_denorm = Case.objects.filter(id__in=cases_missing_denormed_code_pks, outcome_code_id__isnull=False,
outcome_code='')
if cases_to_re_denorm.exists():
case_references = cases_to_re_denorm.values_list('reference', flat=True)
logger.warning('LGA-275 investigation. Cases found with outcome code missing; '
'value expected to be denormalized from log. Number of cases: {}\nReferences: {}'
.format(len(case_references), case_references))
else:
logger.info('LGA-275 No cases found missing denormalized outcome codes')
@staticmethod
def should_run_housekeeping(**options):
if options.get('force', False):
return True
try:
return is_first_instance()
except InstanceNotInAsgException:
logger.info('EC2 instance not in an ASG')
return True
except StackException:
logger.info('Not running on EC2 instance')
return True
| # coding=utf-8
import logging
from django.core.management.base import BaseCommand
from cla_butler.stack import is_first_instance, InstanceNotInAsgException, StackException
from cla_eventlog.constants import LOG_LEVELS, LOG_TYPES
from cla_eventlog.models import Log
from legalaid.models import Case
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Count and alert when outcome codes denormalized to Case instances are missing'
def handle(self, *args, **options):
if self.should_run_housekeeping(**options):
self.check_for_missing_outcome_codes()
else:
self.stdout.write('Not doing housekeeping because running on secondary instance')
@staticmethod
def check_for_missing_outcome_codes():
outcomes_that_should_be_denormed = Log.objects.filter(level=LOG_LEVELS.HIGH, type=LOG_TYPES.OUTCOME)
outcomes_missing_denormed_code = outcomes_that_should_be_denormed.filter(case__outcome_code='')
cases_missing_denormed_code_pks = outcomes_missing_denormed_code.values_list('case__id', flat=True)
cases_to_re_denorm = Case.objects.filter(id__in=cases_missing_denormed_code_pks, outcome_code_id__isnull=False,
outcome_code='')
if cases_to_re_denorm.exists():
case_references = cases_to_re_denorm.values_list('reference', flat=True)
logger.warning('Cases found missing denormalized outcome codes: {}\n'
'References: {}'.format(len(case_references), case_references))
else:
logger.info('No cases found missing denormalized outcome codes')
@staticmethod
def should_run_housekeeping(**options):
if options.get('force', False):
return True
try:
return is_first_instance()
except InstanceNotInAsgException:
logger.info('EC2 instance not in an ASG')
return True
except StackException:
logger.info('Not running on EC2 instance')
return True
| mit | Python |
d97144e2b45750c416d3adbf9f49f78bfa8e7e6e | Set def arguments to immutable to avoid nasty side effect. | scipy/scipy-svn,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,scipy/scipy-svn,lesserwhirls/scipy-cwt,scipy/scipy-svn,scipy/scipy-svn,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor | Lib/sandbox/pyem/misc.py | Lib/sandbox/pyem/misc.py | # Last Change: Sat Jun 09 08:00 PM 2007 J
#========================================================
# Constants used throughout the module (def args, etc...)
#========================================================
# This is the default dimension for representing confidence ellipses
DEF_VIS_DIM = (0, 1)
DEF_ELL_NP = 100
DEF_LEVEL = 0.39
#=====================================================================
# "magic number", that is number used to control regularization and co
# Change them at your risk !
#=====================================================================
# max deviation allowed when comparing double (this is actually stupid,
# I should actually use a number of decimals)
_MAX_DBL_DEV = 1e-10
# max conditional number allowed
_MAX_COND = 1e8
_MIN_INV_COND = 1/_MAX_COND
# Default alpha for regularization
_DEF_ALPHA = 1e-1
# Default min delta for regularization
_MIN_DBL_DELTA = 1e-5
| # Last Change: Sat Jun 09 07:00 PM 2007 J
#========================================================
# Constants used throughout the module (def args, etc...)
#========================================================
# This is the default dimension for representing confidence ellipses
DEF_VIS_DIM = [0, 1]
DEF_ELL_NP = 100
DEF_LEVEL = 0.39
#=====================================================================
# "magic number", that is number used to control regularization and co
# Change them at your risk !
#=====================================================================
# max deviation allowed when comparing double (this is actually stupid,
# I should actually use a number of decimals)
_MAX_DBL_DEV = 1e-10
# max conditional number allowed
_MAX_COND = 1e8
_MIN_INV_COND = 1/_MAX_COND
# Default alpha for regularization
_DEF_ALPHA = 1e-1
# Default min delta for regularization
_MIN_DBL_DELTA = 1e-5
| bsd-3-clause | Python |
de2e3dd947660b4b1222820141c5c7cd66098349 | Add an explicit related name | prophile/django_split | django_split/models.py | django_split/models.py | from django.db import models
class ExperimentGroup(models.Model):
experiment = models.CharField(max_length=48)
user = models.ForeignKey(
'auth.User',
related_name='django_split_experiment_groups',
)
group = models.IntegerField()
class Meta:
unique_together = (
('experiment', 'user'),
)
class ExperimentState(models.Model):
experiment = models.CharField(max_length=48, primary_key=True)
started = models.DateTimeField(null=True)
completed = models.DateTimeField(null=True)
class ExperimentResult(models.Model):
experiment = models.CharField(max_length=48)
group = models.IntegerField()
metric = models.IntegerField()
percentile = models.IntegerField()
value = models.FloatField()
class Meta:
unique_together = (
('experiment', 'group', 'metric', 'percentile'),
)
| from django.db import models
class ExperimentGroup(models.Model):
experiment = models.CharField(max_length=48)
user = models.ForeignKey('auth.User', related_name=None)
group = models.IntegerField()
class Meta:
unique_together = (
('experiment', 'user'),
)
class ExperimentState(models.Model):
experiment = models.CharField(max_length=48, primary_key=True)
started = models.DateTimeField(null=True)
completed = models.DateTimeField(null=True)
class ExperimentResult(models.Model):
experiment = models.CharField(max_length=48)
group = models.IntegerField()
metric = models.IntegerField()
percentile = models.IntegerField()
value = models.FloatField()
class Meta:
unique_together = (
('experiment', 'group', 'metric', 'percentile'),
)
| mit | Python |
f8be19809f5614482e7e3d3ea673c08a97f09163 | Clean up | Koheron/koheron-server,Koheron/koheron-server,Koheron/koheron-server,Koheron/tcp-server,Koheron/koheron-server,Koheron/tcp-server,Koheron/tcp-server,Koheron/tcp-server,Koheron/tcp-server | APIs/python/connect_test.py | APIs/python/connect_test.py | from koheron_tcp_client import KClient, command
class Tests:
def __init__(self, client):
self.client = client
@command('TESTS')
def set_mean(self, mean): pass
@command('TESTS')
def send_std_array(self):
return client.recv_buffer(10, data_type='float32')
client = KClient('127.0.0.1', 36000, verbose=True)
client.get_stats()
tests = Tests(client)
tests.set_mean(12.5)
print tests.send_std_array()
| from koheron_tcp_client import KClient, command
class Tests:
def __init__(self, client):
self.client = client
@command('TESTS')
def set_mean(self, mean): pass
@command('TESTS')
def send_std_array(self):
return client.recv_buffer(10, data_type='float32')
client = KClient('127.0.0.1', 36000, verbose=True)
client.get_stats()
tests = Tests(client)
tests.set_mean(12.5)
print tests.send_std_array()
# x = 10
# y = 20
# buff = bytearray()
# buff.append((x >> 8) & 0xff)
# buff.append(x & 0xff)
# buff.append((y >> 8) & 0xff)
# buff.append(y & 0xff)
# for char in buff:
# print char | agpl-3.0 | Python |
0447cc261f54a46d712a78009bb1c1a5a5cddb74 | update version to fb8 | cvubrugier/targetcli-fb,cloud4life/targetcli-fb,agrover/targetcli-fb | targetcli/__init__.py | targetcli/__init__.py | '''
This file is part of targetcli.
Copyright (c) 2011 by RisingTide Systems LLC
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, version 3 (AGPLv3).
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from ui_root import UIRoot
__version__ = "2.0rc1.fb8"
__author__ = "Jerome Martin <jxm@risingtidesystems.com>"
__url__ = "http://www.risingtidesystems.com"
__description__ = "An administration shell for RTS storage targets."
__license__ = __doc__
| '''
This file is part of targetcli.
Copyright (c) 2011 by RisingTide Systems LLC
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, version 3 (AGPLv3).
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from ui_root import UIRoot
__version__ = "2.0rc1.fb6"
__author__ = "Jerome Martin <jxm@risingtidesystems.com>"
__url__ = "http://www.risingtidesystems.com"
__description__ = "An administration shell for RTS storage targets."
__license__ = __doc__
| apache-2.0 | Python |
386edcc628ad39ce63d39f6d597711552bf5b67a | Add dmoj crate for Rust submissions (#215) | DMOJ/judge,DMOJ/judge,DMOJ/judge | dmoj/executors/RUST.py | dmoj/executors/RUST.py | import os
from .base_executor import CompiledExecutor
CARGO_TOML = '''\
[package]
name = "{name}"
version = "1.0.0"
'''
HELLO_WORLD_PROGRAM = '''\
fn main() {
println!("echo: Hello, World!");
}
'''
class Executor(CompiledExecutor):
name = 'RUST'
command = 'cargo'
test_program = HELLO_WORLD_PROGRAM
def create_files(self, problem_id, source_code, *args, **kwargs):
os.mkdir(self._file('src'))
with open(self._file('src', 'main.rs'), 'wb') as f:
f.write(source_code)
with open(self._file('Cargo.toml'), 'wb') as f:
f.write(CARGO_TOML.format(name=problem_id))
def get_compile_args(self):
return [self.get_command(), 'build', '--release']
def get_compiled_file(self):
return self._file('target', 'release', self.problem)
| import os
from .base_executor import CompiledExecutor
CARGO_TOML = '''\
[package]
name = "{name}"
version = "0.0.0"
'''
class Executor(CompiledExecutor):
name = 'RUST'
command = 'cargo'
test_program = 'fn main() { println!("echo: Hello, World!"); }'
def create_files(self, problem_id, source_code, *args, **kwargs):
os.mkdir(self._file('src'))
with open(self._file('src', 'main.rs'), 'wb') as f:
f.write(source_code)
with open(self._file('Cargo.toml'), 'wb') as f:
f.write(CARGO_TOML.format(name=problem_id))
def get_compile_args(self):
return [self.get_command(), 'build', '--release']
def get_compiled_file(self):
return self._file('target', 'release', self.problem)
| agpl-3.0 | Python |
972a3b7e719316db3befc673a5a22b76120098d3 | add pixel size and orientation to telescope properties | MaxNoe/cta_event_viewer | telescope/__init__.py | telescope/__init__.py | import pandas as pd
lst_mapping = pd.read_csv('telescope/lst.csv')
class Telescope(object):
"""The base Telescope class"""
def __init__(self, position_x, position_y, telescope_id):
"""
:position_x: x position of the telescope in meter
:position_y: y position of the telescope in meter
:id: id of the telescope
"""
self.position_x = position_x
self.position_y = position_y
self.telescope_id = telescope_id
class LST(Telescope):
"""the CTA large size telescope"""
pixel_x = lst_mapping.pixel_x.values
pixel_y = lst_mapping.pixel_y.values
n_pixel = len(pixel_x)
pixel_shape = 'hexagon'
pixel_size = 0.025
pixel_orientation = 0
def __init__(self, position_x, position_y, telescope_id):
super().__init__(position_x, position_y, telescope_id)
| import pandas as pd
lst_mapping = pd.read_csv('telescope/lst.csv')
class Telescope(object):
"""The base Telescope class"""
def __init__(self, position_x, position_y, telescope_id):
"""
:position_x: x position of the telescope in meter
:position_y: y position of the telescope in meter
:id: id of the telescope
"""
self.position_x = position_x
self.position_y = position_y
self.telescope_id = telescope_id
class LST(Telescope):
"""the CTA large size telescope"""
pixel_x = lst_mapping.pixel_x.values
pixel_y = lst_mapping.pixel_y.values
n_pixel = len(pixel_x)
pixel_shape = 'hexagon'
def __init__(self, position_x, position_y, telescope_id):
super().__init__(position_x, position_y, telescope_id)
| mit | Python |
50163035d306e4cbd57937ec4f74d939b420f6b3 | move pd.DataFrame logic into separate fn | eltonlaw/impyute | impyute/util/preprocess.py | impyute/util/preprocess.py | """ impyute.util.preprocess """
from functools import wraps
# Hacky way to handle python2 not having `ModuleNotFoundError`
# pylint: disable=redefined-builtin, missing-docstring
try:
raise ModuleNotFoundError
except NameError:
class ModuleNotFoundError(Exception):
pass
except ModuleNotFoundError:
pass
# pylint: enable=redefined-builtin, missing-docstring
def execute_fn_with_args_and_or_kwargs(fn, args, kwargs):
""" If args + kwargs aren't accepted only args are passed in"""
try:
return fn(*args, **kwargs)
except TypeError:
return fn(*args)
def get_pandas_df():
""" Gets pandas DataFrame if we can import it """
try:
import pandas as pd
df = pd.DataFrame
except (ModuleNotFoundError, ImportError):
df = None
return df
def preprocess(fn):
""" Base preprocess function for commonly used preprocessing
PARAMETERS
----------
data: numpy.ndarray
Data to impute.
RETURNS
-------
bool
True if `data` is correctly formatted
"""
@wraps(fn)
def wrapper(*args, **kwargs):
""" Run input checks"""
# convert tuple to list so args can be modified
args = list(args)
# Either make a copy or use a pointer to the original
if "inplace" in kwargs and kwargs['inplace']:
args[0] = args[0]
else:
args[0] = args[0].copy()
results = execute_fn_with_args_and_or_kwargs(fn, args, kwargs)
## If Pandas exists, and the input data is a dataframe then cast the input
## to an np.array and cast the output back to a DataFrame.
pd_DataFrame = get_pandas_df()
if pd_DataFrame and isinstance(args[0], pd_DataFrame):
args[0] = args[0].values
results = pd_DataFrame(results)
return results
return wrapper
| """ impyute.util.preprocess """
from functools import wraps
# Hacky way to handle python2 not having `ModuleNotFoundError`
# pylint: disable=redefined-builtin, missing-docstring
try:
raise ModuleNotFoundError
except NameError:
class ModuleNotFoundError(Exception):
pass
except ModuleNotFoundError:
pass
# pylint: enable=redefined-builtin, missing-docstring
def execute_fn_with_args_and_or_kwargs(fn, args, kwargs):
""" If args + kwargs aren't accepted only args are passed in"""
try:
return fn(*args, **kwargs)
except TypeError:
return fn(*args)
def preprocess(fn):
""" Base preprocess function for commonly used preprocessing
PARAMETERS
----------
data: numpy.ndarray
Data to impute.
RETURNS
-------
bool
True if `data` is correctly formatted
"""
@wraps(fn)
def wrapper(*args, **kwargs):
""" Run input checks"""
# convert tuple to list so args can be modified
args = list(args)
# Either make a copy or use a pointer to the original
if "inplace" in kwargs and kwargs['inplace']:
args[0] = args[0]
else:
args[0] = args[0].copy()
# Check if Pandas exists
try:
import pandas as pd
pd_DataFrame = pd.DataFrame
except (ModuleNotFoundError, ImportError):
pd_DataFrame = None
results = execute_fn_with_args_and_or_kwargs(fn, args, kwargs)
# If Pandas exists, and the input data is a dataframe
# then cast the input to an np.array and cast the output
# back to a DataFrame.
if pd_DataFrame and isinstance(args[0], pd_DataFrame):
args[0] = args[0].values
results = pd_DataFrame(results)
return results
return wrapper
| mit | Python |
4299f4f410f768066aaacf885ff0a38e8af175c9 | Add custom form validation enforcing that each new book is unique | nirajkvinit/python3-study,nirajkvinit/python3-study,nirajkvinit/python3-study,nirajkvinit/python3-study | intro-django/readit/books/forms.py | intro-django/readit/books/forms.py | from django import forms
from .models import Book
class ReviewForm(forms.Form):
"""
Form for reviewing a book
"""
is_favourite = forms.BooleanField(
label = 'Favourite?',
help_text = 'In your top 100 books of all time?',
required = False,
)
review = forms.CharField(
widget = forms.Textarea,
min_length = 100,
error_messages = {
'required': 'Please enter your review',
'min_length': 'Please write atleast 100 characters (You have written %(show_value)s',
}
)
class BookForm(forms.ModelForm) :
class Meta:
model = Book
fields = ['title', 'authors']
def clean(self):
# Super the clean method to maintain main validation and error messages
super(BookForm, self).clean()
try:
title = self.cleaned_data.get('title')
authors = self.cleaned_data.get('authors')
book = Book.objects.get(title=title, authors=authors)
raise forms.ValidationError(
'The book {} by {} already exists.'.format(title, book.list_authors()),
code = 'bookexists'
)
except Book.DoesNotExist:
return self.cleaned_data
| from django import forms
from .models import Book
class ReviewForm(forms.Form):
"""
Form for reviewing a book
"""
is_favourite = forms.BooleanField(
label = 'Favourite?',
help_text = 'In your top 100 books of all time?',
required = False,
)
review = forms.CharField(
widget = forms.Textarea,
min_length = 100,
error_messages = {
'required': 'Please enter your review',
'min_length': 'Please write atleast 100 characters (You have written %(show_value)s',
}
)
class BookForm(forms.ModelForm) :
class Meta:
model = Book
fields = ['title', 'authors'] | mit | Python |
7910c191d687148055878ec3cf4851efb6e064a4 | reduce bsize | minggli/fisheries-convnet,minggli/fisheries-convnet | app/settings.py | app/settings.py | # -*- coding: utf-8 -*-
MODEL_PATH = './trained_models/'
IMAGE_PATH = './data/'
IMAGE_SHAPE = (45, 80, 3)
# IMAGE_SHAPE = (720, 1280, 3)
BATCH_SIZE = 32
MAX_STEPS = 1500
ALPHA = 1e-3
| # -*- coding: utf-8 -*-
MODEL_PATH = './trained_models/'
IMAGE_PATH = './data/'
IMAGE_SHAPE = (45, 80, 3)
# IMAGE_SHAPE = (720, 1280, 3)
BATCH_SIZE = 128
MAX_STEPS = 1000
ALPHA = 1e-3
| mit | Python |
286c7d43707439373c5e5e2e021ee3c6d5f3bed3 | fix password change test by logging out before trying | anthraxx/arch-security-tracker,jelly/arch-security-tracker,jelly/arch-security-tracker,anthraxx/arch-security-tracker,archlinux/arch-security-tracker,archlinux/arch-security-tracker,anthraxx/arch-security-tracker | test/test_profile.py | test/test_profile.py | from flask import url_for
from flask_login import current_user
from .conftest import logged_in, assert_logged_in, assert_not_logged_in, DEFAULT_USERNAME
@logged_in
def test_change_password(db, client):
new_password = DEFAULT_USERNAME + '1'
resp = client.post(url_for('edit_own_user_profile'), follow_redirects=True,
data=dict(password=new_password, new_password=new_password, password_current=DEFAULT_USERNAME))
assert resp.status_code == 200
# logout and test if new password was applied
resp = client.post(url_for('logout'), follow_redirects=True)
assert_not_logged_in(resp)
resp = client.post(url_for('login'), follow_redirects=True,
data=dict(username=DEFAULT_USERNAME, password=new_password))
assert_logged_in(resp)
assert DEFAULT_USERNAME == current_user.name
@logged_in
def test_invalid_password_length(db, client):
resp = client.post(url_for('edit_own_user_profile'), follow_redirects=True,
data=dict(password='1234', new_password='1234', password_current=DEFAULT_USERNAME))
assert b'Field must be between 16 and 64 characters long.' in resp.data
assert resp.status_code == 200
| from flask import url_for
from flask_login import current_user
from .conftest import logged_in, assert_logged_in, DEFAULT_USERNAME
@logged_in
def test_change_password(db, client):
new_password = DEFAULT_USERNAME + '1'
resp = client.post(url_for('edit_own_user_profile'), follow_redirects=True,
data=dict(password=new_password, new_password=new_password, password_current=DEFAULT_USERNAME))
assert resp.status_code == 200
resp = client.post(url_for('login'), follow_redirects=True,
data=dict(username=DEFAULT_USERNAME, password=new_password))
assert_logged_in(resp)
assert DEFAULT_USERNAME == current_user.name
@logged_in
def test_invalid_password_length(db, client):
resp = client.post(url_for('edit_own_user_profile'), follow_redirects=True,
data=dict(password='1234', new_password='1234', password_current=DEFAULT_USERNAME))
assert b'Field must be between 16 and 64 characters long.' in resp.data
assert resp.status_code == 200
| mit | Python |
c2f3e4b22783aaa34671d5779436cea4b21f3226 | fix redirect to episode page after posted shownote. | gmkou/FikaNote,sassy/FikaNote,sassy/FikaNote,gmkou/FikaNote,gmkou/FikaNote,sassy/FikaNote | app/shownote.py | app/shownote.py | #!/usr/bin/env python
#coding:utf-8
from django.http import Http404,HttpResponseRedirect
from app.models import FikanoteDB, AgendaDB, Shownote
from shownoteform import ShownoteForm
from agendaform import AgendaForm
from django.shortcuts import render
from app.views import episode
import datetime
def shownote(request):
if request.method == 'GET':
agendas = AgendaDB.objects().order_by('-date')
return render(request, 'edit_shownote.html',
{'agendas': agendas
, 'agendaform': AgendaForm()
, 'shownoteform': ShownoteForm()
} )
elif request.method == 'POST':
form = ShownoteForm(request.POST)
if form.is_valid():
number = FikanoteDB.objects().count()+1
# add to shownote
shownotes = []
list_title = request.POST.getlist('agenda_title')
list_url = request.POST.getlist('agenda_url')
list_id = request.POST.getlist('agenda_id')
for i in range(len(list_title)):
shownotes.append(Shownote(title=list_title[i], url=list_url[i]))
FikanoteDB(number = number
, title=form.cleaned_data['title']
, person=form.cleaned_data['person'].split(",")
, agenda=form.cleaned_data['agenda']
, date=datetime.datetime.utcnow()
, shownotes=shownotes
).save()
# delete id's item from agendadb
for i in range(len(list_id)):
AgendaDB.objects.filter(id__exact=list_id[i]).delete()
return episode(request, number)
return HttpResponseRedirect('/')
else:
raise Http404
| #!/usr/bin/env python
#coding:utf-8
from django.http import Http404,HttpResponseRedirect
from app.models import FikanoteDB, AgendaDB, Shownote
from shownoteform import ShownoteForm
from agendaform import AgendaForm
from django.shortcuts import render
import datetime
def shownote(request):
if request.method == 'GET':
agendas = AgendaDB.objects().order_by('-date')
return render(request, 'edit_shownote.html',
{'agendas': agendas
, 'agendaform': AgendaForm()
, 'shownoteform': ShownoteForm()
} )
elif request.method == 'POST':
form = ShownoteForm(request.POST)
if form.is_valid():
number = FikanoteDB.objects().count()+1
# add to shownote
shownotes = []
list_title = request.POST.getlist('agenda_title')
list_url = request.POST.getlist('agenda_url')
list_id = request.POST.getlist('agenda_id')
for i in range(len(list_title)):
shownotes.append(Shownote(title=list_title[i], url=list_url[i]))
FikanoteDB(number = number
, title=form.cleaned_data['title']
, person=form.cleaned_data['person'].split(",")
, agenda=form.cleaned_data['agenda']
, date=datetime.datetime.utcnow()
, shownotes=shownotes
).save()
# delete id's item from agendadb
for i in range(len(list_id)):
AgendaDB.objects.filter(id__exact=list_id[i]).delete()
return HttpResponseRedirect('/')
else:
raise Http404
| mit | Python |
5afec1ecdd550eb28c0ec9a789fb215d9799563c | Fix init container delete command | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | polyaxon/scheduler/spawners/templates/init_containers.py | polyaxon/scheduler/spawners/templates/init_containers.py | class InitCommands(object):
COPY = 'copy'
CREATE = 'create'
DELETE = 'delete'
@classmethod
def is_copy(cls, command):
return command == cls.COPY
@classmethod
def is_create(cls, command):
return command == cls.CREATE
@classmethod
def is_delete(cls, command):
return command == cls.DELETE
def get_output_args(command, outputs_path, original_outputs_path=None):
get_or_create = 'if [ ! -d "{dir}" ]; then mkdir -p {dir}; fi;'.format(dir=outputs_path)
delete_dir = ('if [ -d {path} ] && [ "$(ls -A {path})" ]; '
'then rm -r {path}/*; fi;'.format(path=outputs_path))
copy_file_if_exist = 'if [ -f {original_path} ]; then cp {original_path} {path}; fi;'.format(
original_path=original_outputs_path, path=outputs_path)
copy_dir_if_exist = 'if [ -d {original_path} ]; then cp -r {original_path} {path}; fi;'.format(
original_path=original_outputs_path, path=outputs_path)
if InitCommands.is_create(command=command):
return '{} {}'.format(get_or_create, delete_dir)
if InitCommands.is_copy(command=command):
return '{} {} {} {}'.format(
get_or_create, delete_dir, copy_dir_if_exist, copy_file_if_exist)
if InitCommands.is_delete(command=command):
return '{}'.format(delete_dir)
| class InitCommands(object):
COPY = 'copy'
CREATE = 'create'
DELETE = 'delete'
@classmethod
def is_copy(cls, command):
return command == cls.COPY
@classmethod
def is_create(cls, command):
return command == cls.CREATE
@classmethod
def is_delete(cls, command):
return command == cls.DELETE
def get_output_args(command, outputs_path, original_outputs_path=None):
get_or_create = 'if [ ! -d "{dir}" ]; then mkdir -p {dir}; fi;'.format(dir=outputs_path)
delete_dir = 'if [ -d {path} ]; then rm -r {path}/*; fi;'.format(path=outputs_path)
copy_file_if_exist = 'if [ -f {original_path} ]; then cp {original_path} {path}; fi;'.format(
original_path=original_outputs_path, path=outputs_path)
copy_dir_if_exist = 'if [ -d {original_path} ]; then cp -r {original_path} {path}; fi;'.format(
original_path=original_outputs_path, path=outputs_path)
if InitCommands.is_create(command=command):
return '{} {}'.format(get_or_create, delete_dir)
if InitCommands.is_copy(command=command):
return '{} {} {} {}'.format(
get_or_create, delete_dir, copy_dir_if_exist, copy_file_if_exist)
if InitCommands.is_delete(command=command):
return '{}'.format(delete_dir)
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.