commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
87dd116aea6e3c8d9d436ce6b5bf1fbbe0ff0788
|
Fix incorrect role assignment in migration.
|
keystone/common/sql/migrate_repo/versions/020_migrate_metadata_table_roles.py
|
keystone/common/sql/migrate_repo/versions/020_migrate_metadata_table_roles.py
|
import json
import sqlalchemy as sql
from keystone import config
CONF = config.CONF
def upgrade(migrate_engine):
    """Move role grants from the legacy 'metadata' table into
    'user_project_metadata'.

    Every migrated grant is guaranteed to contain the default member role;
    when a row already exists in the new table the two role lists are
    merged (set union).  The legacy table is dropped at the end.
    """
    meta = sql.MetaData()
    meta.bind = migrate_engine

    # Autoload related tables so foreign keys resolve during reflection.
    sql.Table('user', meta, autoload=True)
    sql.Table('role', meta, autoload=True)
    sql.Table('project', meta, autoload=True)
    new_metadata_table = sql.Table('user_project_metadata',
                                   meta,
                                   autoload=True)
    conn = migrate_engine.connect()
    old_metadata_table = sql.Table('metadata', meta, autoload=True)
    session = sql.orm.sessionmaker(bind=migrate_engine)()
    for metadata in session.query(old_metadata_table):
        # BUG FIX: the 'data' column holds a JSON string, so always parse it.
        # The original kept the raw string whenever the member role id
        # appeared as a substring of the text, which later crashed on
        # data['roles'] and double-encoded the value on insert.  Membership
        # is now checked against the actual role list, not the raw text.
        data = json.loads(metadata.data)
        if config.CONF.member_role_id not in data['roles']:
            data['roles'].append(config.CONF.member_role_id)
        r = session.query(new_metadata_table).filter_by(
            user_id=metadata.user_id,
            project_id=metadata.tenant_id).first()
        if r is not None:
            # Roles should be the union of the two role lists.
            old_roles = data['roles']
            new_roles = json.loads(r.data)['roles']
            data['roles'] = list(set(old_roles) | set(new_roles))
            # BUG FIX: the original combined the two predicates with the
            # Python 'and' operator inside a single .where(); 'a and b'
            # evaluates to 'b', so the user_id filter was silently dropped
            # and the row of every user of the project was overwritten.
            # Chaining .where() calls ANDs both predicates correctly.
            q = new_metadata_table.update().where(
                new_metadata_table.c.user_id == metadata.user_id).where(
                new_metadata_table.c.project_id ==
                metadata.tenant_id).values(data=json.dumps(data))
        else:
            q = new_metadata_table.insert().values(
                user_id=metadata.user_id,
                project_id=metadata.tenant_id,
                data=json.dumps(data))
        conn.execute(q)
    session.close()
    old_metadata_table.drop()
def downgrade(migrate_engine):
    """Recreate the legacy 'metadata' table and copy user/project pairs
    back from 'user_project_metadata'."""
    meta = sql.MetaData()
    meta.bind = migrate_engine
    # Autoload referenced tables so reflection can resolve foreign keys.
    sql.Table('user', meta, autoload=True)
    sql.Table('project', meta, autoload=True)
    metadata_table = sql.Table(
        'metadata',
        meta,
        sql.Column(
            u'user_id',
            sql.String(64),
            primary_key=True),
        sql.Column(
            u'tenant_id',
            sql.String(64),
            primary_key=True),
        sql.Column('data',
                   sql.Text()))
    metadata_table.create(migrate_engine, checkfirst=True)
    user_project_metadata_table = sql.Table(
        'user_project_metadata',
        meta,
        autoload=True)
    # Re-reflect the freshly created table so inserts use the live schema.
    metadata_table = sql.Table(
        'metadata',
        meta,
        autoload=True)
    conn = migrate_engine.connect()
    session = sql.orm.sessionmaker(bind=migrate_engine)()
    for metadata in session.query(user_project_metadata_table):
        # NOTE(review): 'roles' is tested for membership against the row
        # object itself, not the parsed JSON payload — presumably meant as
        # "this grant carries role data"; verify against the row type.
        if 'roles' in metadata:
            # BUG FIX: the insert statement was built but never executed in
            # the original, so downgrade silently migrated no rows at all.
            q = metadata_table.insert().values(
                user_id=metadata.user_id,
                tenant_id=metadata.project_id)
            conn.execute(q)
    session.close()
|
Python
| 0.000729
|
@@ -458,45 +458,8 @@
e)%0A%0A
- conn = migrate_engine.connect()%0A%0A
@@ -1360,13 +1360,21 @@
r_id
- and%0A
+).where(%0A
@@ -1415,24 +1415,44 @@
roject_id ==
+%0A
metadata.te
@@ -1467,37 +1467,16 @@
.values(
-%0A
data=jso
@@ -1693,19 +1693,22 @@
-con
+sessio
n.execut
@@ -1726,20 +1726,21 @@
ession.c
-lose
+ommit
()%0A o
|
d4655d2b84154da76c6b8e4c2517577b70f16ce7
|
Complete iter sol
|
lc0198_house_robber.py
|
lc0198_house_robber.py
|
"""Leetcode 198. House Robber
Easy
URL: https://leetcode.com/problems/house-robber/
You are a professional robber planning to rob houses along a street.
Each house has a certain amount of money stashed,
the only constraint stopping you from robbing each of them is that
adjacent houses have security system connected and it will
automatically contact the police if two adjacent houses were broken into
on the same night.
Given a list of non-negative integers representing the amount of money of
each house, determine the maximum amount of money you can rob tonight
without alerting the police.
Example 1:
Input: [1,2,3,1]
Output: 4
Explanation: Rob house 1 (money = 1) and then rob house 3 (money = 3).
Total amount you can rob = 1 + 3 = 4.
Example 2:
Input: [2,7,9,3,1]
Output: 12
Explanation: Rob house 1 (money = 2), rob house 3 (money = 9) and
rob house 5 (money = 1).
Total amount you can rob = 2 + 9 + 1 = 12.
"""
class SolutionRecur(object):
    """Plain top-down recursion: T[n] = max(nums[n] + T[n-2], T[n-1])."""

    def _recur(self, nums, n):
        # Base case: no houses remain to the left.
        if n < 0:
            return 0
        # Either rob house n (must skip n-1) or leave it alone.
        rob_n = nums[n] + self._recur(nums, n - 2)
        skip_n = self._recur(nums, n - 1)
        return rob_n if rob_n > skip_n else skip_n

    def rob(self, nums):
        """
        :type nums: List[int]
        :rtype: int

        Time complexity: O(2^n).
        Space complexity: O(n).
        """
        # Empty street: nothing to rob.
        return self._recur(nums, len(nums) - 1) if nums else 0
class SolutionMemo(object):
    """Top-down recursion with memoization."""

    def _recurMemo(self, nums, n, T):
        # Base case: no houses remain.
        if n < 0:
            return 0
        # BUG FIX: use an explicit None check.  A cached best amount of 0
        # (possible with all-zero prefixes, e.g. [0, 0, 5]) is falsy, so
        # the original `if T[n]:` treated it as a cache miss and the
        # recursion degraded back to O(2^n).
        if T[n] is not None:
            return T[n]
        # To rob or not to rob house n:
        # T[n] = max(nums[n] + T[n-2], T[n-1]).
        amount_in_n = nums[n] + self._recurMemo(nums, n - 2, T)
        amount_ex_n = self._recurMemo(nums, n - 1, T)
        T[n] = max(amount_in_n, amount_ex_n)
        return T[n]

    def rob(self, nums):
        """
        :type nums: List[int]
        :rtype: int

        Time complexity: O(n).
        Space complexity: O(n).
        """
        # Apply top-down recursion with memoization.
        if not nums:
            return 0
        T = [None] * len(nums)
        return self._recurMemo(nums, len(nums) - 1, T)
class SolutionDp(object):
    """Bottom-up dynamic programming over a table of best amounts."""

    def rob(self, nums):
        """
        :type nums: List[int]
        :rtype: int

        Time complexity: O(n).
        Space complexity: O(n).
        """
        # Guard clauses: empty street, then single house.
        if not nums:
            return 0
        if len(nums) == 1:
            return nums[0]
        # best[i] = maximum haul considering houses 0..i.
        best = [0] * len(nums)
        best[0], best[1] = nums[0], max(nums[0], nums[1])
        for idx in range(2, len(nums)):
            take = nums[idx] + best[idx - 2]
            skip = best[idx - 1]
            best[idx] = take if take > skip else skip
        return best[-1]
def main():
    """Smoke-test every solution against the two LeetCode examples."""
    # BUG FIX: use print with parentheses — the Python 2 print statement
    # is a syntax error on Python 3, while the single-argument call form
    # behaves identically on both interpreters.
    # Output: 4.
    nums = [1, 2, 3, 1]
    print(SolutionRecur().rob(nums))
    print(SolutionMemo().rob(nums))
    print(SolutionDp().rob(nums))
    # Output: 12.  (typo "Outpyt" fixed)
    nums = [2, 7, 9, 3, 1]
    print(SolutionRecur().rob(nums))
    print(SolutionMemo().rob(nums))
    print(SolutionDp().rob(nums))


if __name__ == '__main__':
    main()
|
Python
| 0.999986
|
@@ -2946,16 +2946,573 @@
T%5B-1%5D%0A%0A%0A
+class SolutionIter(object):%0A def rob(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: int%0A%0A Time complexity: O(n).%0A Space complexity: O(1).%0A %22%22%22%0A # Apply bottom-up dynamic programming w/ iteration.%0A if not nums:%0A return 0%0A%0A if len(nums) == 1:%0A return nums%5B0%5D%0A%0A # If only 1 or 2 houses, get the max amount.%0A a = nums%5B0%5D%0A b = max(nums%5B0%5D, nums%5B1%5D)%0A%0A for i in range(2, len(nums)):%0A a, b = b, max(nums%5Bi%5D + a, b)%0A%0A return b%0A%0A%0A
def main
@@ -3652,24 +3652,59 @@
().rob(nums)
+%0A print SolutionIter().rob(nums)
%0A%0A # Outp
@@ -3798,32 +3798,32 @@
emo().rob(nums)%0A
-
print Soluti
@@ -3839,16 +3839,51 @@
b(nums)%0A
+ print SolutionIter().rob(nums)%0A
%0A%0Aif __n
|
00ca89242b64d29a034aa03b1e76abef617f1b26
|
put validation back
|
application/frontend/forms.py
|
application/frontend/forms.py
|
from datetime import date
from flask import request
from flask_wtf import Form
from wtforms import (
StringField,
HiddenField,
BooleanField,
DateField,
PasswordField,
SubmitField,
SelectField,
RadioField,
TextAreaField
)
from wtforms.validators import DataRequired, ValidationError, NumberRange
from datatypes import country_code_validator
from application.frontend.field_helpers import countries_list_for_selector
class ValidateDateNotInFuture(object):
    """WTForms-style validator that rejects dates later than today."""

    def __init__(self):
        self.message = "The date must not be in the future"

    def __call__(self, form, field):
        # WTForms invokes validators as callables with (form, field).
        self._validate_date_not_in_future(form, field.data)

    def _validate_date_not_in_future(self, form, date_field):
        today = date.today()
        if not date_field > today:
            return
        raise ValidationError('Date cannot be in the future')
class LoginForm(Form):
    """Email/password login form with a 'remember me' option."""
    email = StringField(validators=[DataRequired()])
    password = PasswordField(validators=[DataRequired()])
    submit = SubmitField('Login')
    remember = BooleanField('Remember me')
    # Hidden redirect target so the user returns to the page they came from.
    next = HiddenField()
    def __init__(self, *args, **kwargs):
        super(LoginForm, self).__init__(*args, **kwargs)
        # Fall back to the ?next= query parameter when not already bound.
        if not self.next.data:
            self.next.data = request.args.get('next', '')
        self.remember.default = True
class ChangeForm(Form):
    """Captures the details of a proprietor's change of name by marriage."""
    title_number = HiddenField('Title Number')
    confirm = BooleanField('Confirm')
    proprietor_previous_full_name = HiddenField('Previous full name')
    proprietor_new_full_name = StringField('New full name', validators=[DataRequired()])
    partner_name = StringField('Partner\'s full name', validators=[DataRequired()])
    # Dates arrive as dd-mm-yyyy and must not lie in the future.
    marriage_date = DateField('Date of marriage', format='%d-%m-%Y',
                              validators=[DataRequired(), ValidateDateNotInFuture()],
                              description="For example, 20-08-2011")
    marriage_place = StringField('Location of marriage ceremony', validators=[DataRequired()])
    # Country choice is validated against the shared country-code list.
    marriage_country = SelectField('Country',
                                   validators=[DataRequired(), country_code_validator.wtform_validator()],
                                   choices=countries_list_for_selector)
    marriage_certificate_number = StringField('Marriage certificate number', validators=[DataRequired()])
class ConfirmForm(ChangeForm):
    """
    Inherits from ChangeForm and makes all the data caught on the first page hidden.

    Values entered on the change page are carried through these hidden
    fields so the user can confirm them before submission.
    """
    title_number = HiddenField('Title Number')
    confirm = BooleanField('Confirm')
    proprietor_previous_full_name = HiddenField('Previous full name')
    proprietor_new_full_name = HiddenField('New full name')
    partner_name = HiddenField('Partner\'s full name')
    marriage_date = HiddenField('Date of marriage')
    marriage_place = HiddenField('Location of marriage ceremony')
    marriage_country = HiddenField('Country of marriage ceremony')
    marriage_certificate_number = HiddenField('Marriage certificate number')
class ConveyancerAddClientForm(Form):
    """Collects the personal details of a single conveyancing client."""
    full_name = StringField('Full name', validators=[DataRequired()])
    # Dates arrive as dd-mm-yyyy and must not lie in the future.
    date_of_birth = DateField('Date of birth', format='%d-%m-%Y',
                              validators=[DataRequired(), ValidateDateNotInFuture()],
                              description="For example, 20-08-2011")
    address = TextAreaField('Address', validators=[DataRequired()])
    telephone = StringField('Telephone', validators=[DataRequired()])
    email = StringField('Email address', validators=[DataRequired()])
class SelectTaskForm(Form):
    """Asks whether the client is buying, selling, or doing another task."""
    # Free-text detail used when 'other' is chosen below.
    another_task = StringField('Please specify:')
    buying_or_selling_property = RadioField(
        'Is your client buying or selling this property?',
        choices=[
            ('buying', 'Buying this property'),
            ('selling', 'Selling this property'),
            ('other', 'Another task')
        ])
class ConveyancerAddClientsForm(Form):
    """Asks how many clients the conveyancer will act for (at most two)."""
    # BUG FIX: the NumberRange validation had been commented out, leaving a
    # second, unvalidated definition of the same field that shadowed it.
    # Restore the single validated definition so counts outside 1..2 are
    # rejected with an explicit message.
    num_of_clients = StringField('How many clients will you act for?',
                                 validators=[DataRequired(),
                                             NumberRange(1, 2, "Number of clients cannot be more than two.")])
|
Python
| 0
|
@@ -3910,12 +3910,8 @@
%0A
- # #
num
@@ -3974,25 +3974,24 @@
for?',%0A
-#
@@ -3999,33 +3999,32 @@
-
validators=%5BData
@@ -4042,10 +4042,8 @@
%0A
- #
@@ -4150,132 +4150,4 @@
)%5D)%0A
- num_of_clients = StringField('How many clients will you act for?',%0A validators=%5BDataRequired()%5D)
|
4197a102be1d832d418888178264e00f819febef
|
Add test_return_codes check for valid subunit output
|
stestr/tests/test_return_codes.py
|
stestr/tests/test_return_codes.py
|
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
import shutil
import subprocess
import tempfile
import six
from six import StringIO
from stestr.tests import base
class TestReturnCodes(base.TestCase):
    """Functional tests that run the real ``stestr`` CLI in a scratch
    project directory and assert on the process exit codes."""

    def setUp(self):
        super(TestReturnCodes, self).setUp()
        # Setup test dirs: an isolated temp project with a tests/ package.
        self.directory = tempfile.mkdtemp(prefix='stestr-unit')
        self.addCleanup(shutil.rmtree, self.directory)
        self.test_dir = os.path.join(self.directory, 'tests')
        os.mkdir(self.test_dir)
        # Setup Test files: copy the canned config plus passing/failing
        # test modules and packaging files into the scratch directory.
        self.testr_conf_file = os.path.join(self.directory, '.stestr.conf')
        self.setup_cfg_file = os.path.join(self.directory, 'setup.cfg')
        self.passing_file = os.path.join(self.test_dir, 'test_passing.py')
        self.failing_file = os.path.join(self.test_dir, 'test_failing.py')
        self.init_file = os.path.join(self.test_dir, '__init__.py')
        self.setup_py = os.path.join(self.directory, 'setup.py')
        shutil.copy('stestr/tests/files/testr-conf', self.testr_conf_file)
        shutil.copy('stestr/tests/files/passing-tests', self.passing_file)
        shutil.copy('stestr/tests/files/failing-tests', self.failing_file)
        shutil.copy('setup.py', self.setup_py)
        shutil.copy('stestr/tests/files/setup.cfg', self.setup_cfg_file)
        shutil.copy('stestr/tests/files/__init__.py', self.init_file)
        self.stdout = StringIO()
        self.stderr = StringIO()
        # Change directory, run wrapper and check result.  The cleanup
        # restores the original working directory for later tests.
        self.addCleanup(os.chdir, os.path.abspath(os.curdir))
        os.chdir(self.directory)
        subprocess.call('stestr init', shell=True)

    def assertRunExit(self, cmd, expected, subunit=False):
        """Run *cmd* in a shell and assert its exit status equals *expected*.

        When *subunit* is True the captured output is left out of the
        failure message (subunit streams are binary and would garble it).
        """
        p = subprocess.Popen(
            "%s" % cmd, shell=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        if not subunit:
            self.assertEqual(
                p.returncode, expected,
                "Stdout: %s; Stderr: %s" % (out, err))
        else:
            self.assertEqual(p.returncode, expected,
                             "Expected return code: %s doesn't match actual "
                             "return code of: %s" % (expected, p.returncode))

    def test_parallel_passing(self):
        # Only the passing tests selected -> success.
        self.assertRunExit('stestr run passing', 0)

    def test_parallel_passing_bad_regex(self):
        # A regex matching nothing is reported as a failure.
        self.assertRunExit('stestr run bad.regex.foobar', 1)

    def test_parallel_fails(self):
        # Running everything includes the failing tests -> non-zero exit.
        self.assertRunExit('stestr run', 1)

    def test_serial_passing(self):
        self.assertRunExit('stestr run --serial passing', 0)

    def test_serial_fails(self):
        self.assertRunExit('stestr run --serial', 1)

    def test_serial_subunit_passing(self):
        self.assertRunExit('stestr run --subunit passing', 0,
                           subunit=True)

    def test_parallel_subunit_passing(self):
        self.assertRunExit('stestr run --subunit passing', 0,
                           subunit=True)

    def test_until_failure_fails(self):
        self.assertRunExit('stestr run --until-failure', 1)

    def test_until_failure_with_subunit_fails(self):
        self.assertRunExit('stestr run --until-failure --subunit', 1,
                           subunit=True)

    def test_list(self):
        self.assertRunExit('stestr list', 0)

    def test_no_command(self):
        # No subcommand: argparse-style usage error (exit code 2).
        self.assertRunExit('stestr', 2)

    def _get_cmd_stdout(self, cmd):
        """Run *cmd*, assert success, and return the (stdout, stderr) tuple
        from communicate()."""
        p = subprocess.Popen(cmd, shell=True,
                             stdout=subprocess.PIPE)
        out = p.communicate()
        self.assertEqual(0, p.returncode)
        return out

    def test_combine_results(self):
        # Run once and note the run id and test count reported by 'last'.
        self.assertRunExit('stestr run passing', 0)
        stdout = self._get_cmd_stdout(
            'stestr last --no-subunit-trace')
        stdout = six.text_type(stdout[0])
        # NOTE(review): assumes the summary line's second token is the test
        # count — confirm against the 'stestr last' output format.
        test_count_split = stdout.split(' ')
        test_count = test_count_split[1]
        test_count = int(test_count)
        id_regex = re.compile('\(id=(.*?)\)')
        test_id = id_regex.search(stdout).group(0)
        # Re-run with --combine: results fold into the same run id.
        self.assertRunExit('stestr run --combine passing', 0)
        combine_stdout = self._get_cmd_stdout(
            'stestr last --no-subunit-trace')[0]
        combine_stdout = six.text_type(combine_stdout)
        combine_test_count_split = combine_stdout.split(' ')
        combine_test_count = combine_test_count_split[1]
        combine_test_count = int(combine_test_count)
        combine_test_id = id_regex.search(combine_stdout).group(0)
        self.assertEqual(test_id, combine_test_id)
        # The test results from running the same tests twice with combine
        # should return a test count 2x as big at the end of the run
        self.assertEqual(test_count * 2, combine_test_count)
|
Python
| 0.000001
|
@@ -627,16 +627,43 @@
cense.%0A%0A
+import functools%0Aimport io%0A
import o
@@ -758,16 +758,63 @@
StringIO
+%0Aimport subunit as subunit_lib%0Aimport testtools
%0A%0Afrom s
@@ -2918,16 +2918,692 @@
rncode))
+%0A output_stream = io.BytesIO(out)%0A stream = subunit_lib.ByteStreamToStreamResult(output_stream)%0A starts = testtools.StreamResult()%0A summary = testtools.StreamSummary()%0A tests = %5B%5D%0A%0A def _add_dict(test):%0A tests.append(test)%0A%0A outcomes = testtools.StreamToDict(functools.partial(_add_dict))%0A result = testtools.CopyStreamResult(%5Bstarts, outcomes, summary%5D)%0A result.startTestRun()%0A try:%0A stream.run(result)%0A finally:%0A result.stopTestRun()%0A self.assertThat(len(tests), testtools.matchers.GreaterThan(0))
%0A%0A de
|
48a4303f5940989ee92fbcab00a7889ef8a4f237
|
Update model_sim.py
|
TrecQA_CNN+Sim/model_sim.py
|
TrecQA_CNN+Sim/model_sim.py
|
# -*- coding: utf-8 -*-
from keras import backend as K
from keras.models import Model
from keras import regularizers
from keras.engine.topology import Layer
from keras.layers.core import Dense, Reshape, Permute
from keras.layers import Input, Embedding, GlobalAveragePooling2D, GlobalMaxPooling2D,GlobalMaxPooling1D, Bidirectional, Dense, Dropout, Merge, Multiply, Conv1D, Lambda, Flatten, LSTM, TimeDistributed, Conv2D, MaxPooling2D, UpSampling2D
from dl_text.dl import word2vec_embedding_layer
def cnn_sim(embedding_matrix, dimx=50, dimy=50, nb_filter = 120,
            embedding_dim = 50,filter_length = (50,4), vocab_size = 8000, depth = 1):
    """Build a question/answer matching model: twin Conv2D towers over
    word embeddings joined by a learned bilinear Similarity term, ending
    in a 2-way softmax.  Returns the compiled Keras Model.
    """
    print 'Model Uses CNN with Sim......'

    # Integer token-id inputs for question (x) and answer (y).
    inpx = Input(shape=(dimx,),dtype='int32',name='inpx')
    inpy = Input(shape=(dimy,),dtype='int32',name='inpy')

    # Trainable embedding lookups initialised from embedding_matrix.
    x = word2vec_embedding_layer(embedding_matrix,train=True)(inpx)
    y = word2vec_embedding_layer(embedding_matrix,train=True)(inpy)

    # Put the embedding dimension first, then add a trailing channel axis
    # so Conv2D can treat each sentence as a 1-channel image.
    x = Permute((2,1))(x)
    y = Permute((2,1))(y)

    conv1 = Reshape((embedding_dim,dimx,1))(x)
    conv2 = Reshape((embedding_dim,dimy,1))(y)

    channel_1, channel_2 = [], []
    for dep in range(depth):
        #conv1 = ZeroPadding2D((filter_width - 1, 0))(conv1)
        #conv2 = ZeroPadding2D((filter_width - 1, 0))(conv2)

        ques = Conv2D(nb_filter=nb_filter, kernel_size = filter_length, activation='relu',
                      data_format = 'channels_last')(conv1)
        ans = Conv2D(nb_filter, kernel_size = filter_length, activation='relu',
                     data_format="channels_last")(conv2)

        ques = Dropout(0.5)(ques)
        ans = Dropout(0.5)(ans)
        # Collapse each feature map to its global maximum.
        ques = GlobalMaxPooling2D()(ques)
        ans = GlobalMaxPooling2D()(ans)
        ques = Dropout(0.5)(ques)
        ans = Dropout(0.5)(ans)

        channel_1.append(ques)
        channel_2.append(ans)
        #channel_1.append(GlobalAveragePooling2D()(ques))
        #channel_2.append(GlobalAveragePooling2D()(ans))

    h1 = channel_1.pop(-1)
    # NOTE(review): lowercase 'merge' is not among this file's imports, so
    # the concat branches below would raise NameError when depth > 1 —
    # presumably only depth == 1 is exercised; confirm.
    if channel_1:
        h1 = merge([h1] + channel_1, mode="concat")

    h2 = channel_2.pop(-1)
    if channel_2:
        h2 = merge([h2] + channel_2, mode="concat")

    # Learned bilinear similarity between the two pooled representations.
    sim = Similarity(nb_filter)([h1,h2])
    h = Merge(mode="concat",name='h')([h1, sim, h2])
    #h = Dropout(0.2)(h)
    #h = Dense(50, kernel_regularizer=regularizers.l2(reg2),activation='relu')(h)
    #wrap = Dropout(0.5)(h)
    #wrap = Dense(64, activation='tanh')(h)

    score = Dense(2,activation='softmax',name='score')(h)
    model = Model([inpx, inpy],[score])
    model.compile( loss='categorical_crossentropy',optimizer='adam')
    return model
def cnn_sim_ft(embedding_matrix, dimx=50, dimy=50, dimft=44, nb_filter = 120,
               embedding_dim = 50,filter_length = (50,4), vocab_size = 8000, depth = 1):
    """Variant of cnn_sim that additionally concatenates a vector of
    dimft hand-crafted features (inpft) before the softmax scorer.
    Returns the compiled Keras Model.
    """
    print 'Model Uses CNN with Sim and Features......'

    # Integer token-id inputs plus the extra feature vector input.
    inpx = Input(shape=(dimx,),dtype='int32',name='inpx')
    inpy = Input(shape=(dimy,),dtype='int32',name='inpy')
    inpft = Input(shape=(dimft,),dtype='int32',name='inpft')

    x = word2vec_embedding_layer(embedding_matrix,train=True)(inpx)
    y = word2vec_embedding_layer(embedding_matrix,train=True)(inpy)

    # Embedding dim first, then a trailing channel axis for Conv2D.
    x = Permute((2,1))(x)
    y = Permute((2,1))(y)

    conv1 = Reshape((embedding_dim,dimx,1))(x)
    conv2 = Reshape((embedding_dim,dimy,1))(y)

    channel_1, channel_2 = [], []
    for dep in range(depth):
        #filter_width = filter_length[1]
        #conv1 = ZeroPadding2D((filter_width - 1, 0))(conv1)
        #conv2 = ZeroPadding2D((filter_width - 1, 0))(conv2)

        ques = Conv2D(nb_filter=nb_filter, kernel_size = filter_length, activation='relu',
                      data_format = 'channels_last',border_mode="valid")(conv1)
        ans = Conv2D(nb_filter, kernel_size = filter_length, activation='relu',
                     data_format="channels_last",border_mode="valid")(conv2)

        ques = Dropout(0.5)(ques)
        ans = Dropout(0.5)(ans)
        # Collapse each feature map to its global maximum.
        ques = GlobalMaxPooling2D()(ques)
        ans = GlobalMaxPooling2D()(ans)
        ques = Dropout(0.5)(ques)
        ans = Dropout(0.5)(ans)

        channel_1.append(ques)
        channel_2.append(ans)
        #channel_1.append(GlobalAveragePooling2D()(ques))
        #channel_2.append(GlobalAveragePooling2D()(ans))

    h1 = channel_1.pop(-1)
    # NOTE(review): lowercase 'merge' is not among this file's imports, so
    # these branches would raise NameError when depth > 1 — confirm only
    # depth == 1 is used.
    if channel_1:
        h1 = merge([h1] + channel_1, mode="concat")

    h2 = channel_2.pop(-1)
    if channel_2:
        h2 = merge([h2] + channel_2, mode="concat")

    # Bilinear similarity, then concat both towers with the raw features.
    sim = Similarity(nb_filter)([h1,h2])
    h = Merge(mode="concat",name='h')([h1, sim, h2, inpft])
    #h = Dropout(0.2)(h)
    #h = Dense(50, kernel_regularizer=regularizers.l2(reg2),activation='relu')(h)
    #wrap = Dropout(0.5)(h)
    #wrap = Dense(64, activation='tanh')(h)

    score = Dense(2,activation='softmax',name='score')(h)
    model = Model([inpx, inpy, inpft],[score])
    model.compile( loss='categorical_crossentropy',optimizer='adam')
    return model
class Similarity(Layer):
    """Bilinear similarity layer: sim(v1, v2) = v1 . W . v2, yielding one
    scalar per batch element.  W is a learned v_dim x v_dim matrix."""

    def __init__(self, v_dim, kernel_regularizer=None, **kwargs):
        # v_dim: dimensionality of both input vectors (shape of W).
        self.v_dim = v_dim
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        super(Similarity, self).__init__(**kwargs)

    def build(self,input_shape):
        # Learned bilinear weight matrix, optionally regularized.
        self.W = self.add_weight(name='w',shape=(self.v_dim, self.v_dim),
                                 initializer='glorot_uniform',
                                 regularizer=self.kernel_regularizer,
                                 trainable=True)
        super(Similarity, self).build(input_shape)

    def call(self, data, mask=None):
        # data is the two-element list [v1, v2] passed by the caller.
        v1 = data[0]
        v2 = data[1]
        sim = K.dot(v1,self.W)
        # batch_dot with axes=1 contracts the feature axis per sample.
        sim = K.batch_dot(sim,v2,axes=1)
        return sim

    def compute_output_shape(self, input_shape):
        # One scalar similarity per batch element.
        return (input_shape[0][0],1)
|
Python
| 0.000001
|
@@ -490,16 +490,56 @@
ng_layer
+%0Afrom dl_layers.layers import Similarity
%0A%0Adef cn
@@ -5208,898 +5208,4 @@
del%0A
-%0Aclass Similarity(Layer):%0A %0A def __init__(self, v_dim, kernel_regularizer=None, **kwargs):%0A self.v_dim = v_dim%0A self.kernel_regularizer = regularizers.get(kernel_regularizer)%0A super(Similarity, self).__init__(**kwargs)%0A%0A def build(self,input_shape):%0A self.W = self.add_weight(name='w',shape=(self.v_dim, self.v_dim),%0A initializer='glorot_uniform',%0A regularizer=self.kernel_regularizer,%0A trainable=True) %0A %0A super(Similarity, self).build(input_shape)%0A%0A def call(self, data, mask=None):%0A v1 = data%5B0%5D%0A v2 = data%5B1%5D%0A sim = K.dot(v1,self.W)%0A sim = K.batch_dot(sim,v2,axes=1)%0A return sim%0A%0A def compute_output_shape(self, input_shape):%0A return (input_shape%5B0%5D%5B0%5D,1)%0A
|
21ec9335fc10d6b7d936ce11b59f44ea47c33d7c
|
Add support for Galician language in Subtitulos
|
subliminal/services/subtitulos.py
|
subliminal/services/subtitulos.py
|
# -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from . import ServiceBase
from ..language import language_set, Language
from ..subtitles import get_subtitle_path, ResultSubtitle
from ..videos import Episode
from bs4 import BeautifulSoup
from subliminal.utils import get_keywords, split_keyword
import logging
import re
import unicodedata
import urllib
logger = logging.getLogger(__name__)
class Subtitulos(ServiceBase):
    """Subtitle provider scraping www.subtitulos.es episode pages."""

    server_url = 'http://www.subtitulos.es'
    api_based = False
    # Languages offered by the site; mapped below to Language objects.
    languages = language_set(['eng-US', 'eng-GB', 'eng', 'fre', 'por-BR', 'por', 'spa-ES', u'spa', u'ita', u'cat'])
    language_map = {u'Español': Language('spa'), u'Español (España)': Language('spa'), u'Español (Latinoamérica)': Language('spa'),
                    u'Català': Language('cat'), u'Brazilian': Language('por-BR'), u'English (US)': Language('eng-US'),
                    u'English (UK)': Language('eng-GB')}
    language_code = 'name'
    videos = [Episode]
    require_video = False
    required_features = ['permissive']
    # the '.+' in the pattern for Version allows us to match both '&oacute;'
    # and the 'ó' char directly. This is because now BS4 converts the html
    # code chars into their equivalent unicode char
    release_pattern = re.compile('Versi.+n (.+) ([0-9]+).([0-9])+ megabytes')

    def list_checked(self, video, languages):
        """Query the site using the video's guessed keywords and episode info."""
        return self.query(video.path or video.release, languages, get_keywords(video.guess), video.series, video.season, video.episode)

    def query(self, filepath, languages, keywords, series, season, episode):
        """Scrape the episode page and return matching ResultSubtitle objects.

        Only subtitles whose release keywords overlap *keywords*, whose
        language is in *languages*, and whose status is 'Completado' are kept.
        """
        # The site addresses series as lowercase, underscore-separated,
        # ASCII-only slugs.
        request_series = series.lower().replace(' ', '_')
        if isinstance(request_series, unicode):
            request_series = unicodedata.normalize('NFKD', request_series).encode('ascii', 'ignore')
        logger.debug(u'Getting subtitles for %s season %d episode %d with languages %r' % (series, season, episode, languages))
        r = self.session.get('%s/%s/%sx%.2d' % (self.server_url, urllib.quote(request_series), season, episode))
        if r.status_code == 404:
            # Page missing means no subtitles for this episode.
            logger.debug(u'Could not find subtitles for %s season %d episode %d with languages %r' % (series, season, episode, languages))
            return []
        if r.status_code != 200:
            logger.error(u'Request %s returned status code %d' % (r.url, r.status_code))
            return []
        soup = BeautifulSoup(r.content, self.required_features)
        subtitles = []
        for sub in soup('div', {'id': 'version'}):
            # Release keywords are parsed out of the version banner text.
            sub_keywords = split_keyword(self.release_pattern.search(sub.find('p', {'class': 'title-sub'}).contents[1]).group(1).lower())
            if not keywords & sub_keywords:
                logger.debug(u'None of subtitle keywords %r in %r' % (sub_keywords, keywords))
                continue
            for html_language in sub.findAllNext('ul', {'class': 'sslist'}):
                language = self.get_language(html_language.findNext('li', {'class': 'li-idioma'}).find('strong').contents[0].string.strip())
                if language not in languages:
                    logger.debug(u'Language %r not in wanted languages %r' % (language, languages))
                    continue
                # Only finished ('Completado') subtitles are usable.
                html_status = html_language.findNext('li', {'class': 'li-estado green'})
                status = html_status.contents[0].string.strip()
                if status != 'Completado':
                    logger.debug(u'Wrong subtitle status %s' % status)
                    continue
                path = get_subtitle_path(filepath, language, self.config.multi)
                subtitle = ResultSubtitle(path, language, service=self.__class__.__name__.lower(), link=html_status.findNext('span', {'class': 'descargar green'}).find('a')['href'], keywords=sub_keywords)
                subtitles.append(subtitle)
        return subtitles
Service = Subtitulos
|
Python
| 0
|
@@ -1628,16 +1628,43 @@
eng-GB')
+, 'Galego': Language('glg')
%7D%0A la
|
c9187cecbdb196343586378ca637d76079ff058f
|
Improve sub-package imports
|
src/minerva/storage/notification/__init__.py
|
src/minerva/storage/notification/__init__.py
|
# -*- coding: utf-8 -*-
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2011-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
from engine import NotificationEngine
|
Python
| 0.000017
|
@@ -377,37 +377,310 @@
rom
-engine import NotificationEngine%0A
+minerva.storage.notification.notificationstore import NotificationStore, %5C%0A NotificationStoreDescriptor%0Afrom minerva.storage.notification.attribute import Attribute, %5C%0A AttributeDescriptor%0Afrom minerva.storage.notification.package import Package%0Afrom minerva.storage.notification.record import Record
|
2c17f8997bbfb5f6a943b29bb9ef024fff304302
|
hard-negative-mine
|
4_hard_negative_mine.py
|
4_hard_negative_mine.py
|
import object_detector.file_io as file_io
import object_detector.detector as detector
import object_detector.factory as factory
import argparse as ap
DEFAULT_CONFIG_FILE = "conf/car_side.json"

if __name__ == "__main__":
    # Entry point: hard-negative mining — run the current detector over
    # negative images and fold its false positives back into the training set.
    parser = ap.ArgumentParser()
    parser.add_argument('-c', "--config", help="Configuration File", default=DEFAULT_CONFIG_FILE)
    args = vars(parser.parse_args())
    conf = file_io.FileJson().read(args["config"])

    #1. Create detector from the descriptor/classifier named in the config.
    detector = factory.Factory.create_detector(conf["descriptor"]["algorithm"],
                                               conf["descriptor"]["parameters"],
                                               conf["classifier"]["algorithm"],
                                               conf["classifier"]["parameters"],
                                               conf["classifier"]["output_file"])

    #2. Load negative images (a random sample of n_images files).
    negative_image_files = file_io.list_files(conf["dataset"]["neg_data_dir"],
                                              conf["dataset"]["neg_format"],
                                              n_files_to_sample=conf["hard_negative_mine"]["n_images"])

    #3. Get hard negative mined features: windows the detector wrongly
    # scores above min_probability on known-negative images.
    features, probs = detector.hard_negative_mine(negative_image_files,
                                                  conf["detector"]["window_dim"],
                                                  conf["hard_negative_mine"]["window_step"],
                                                  conf["hard_negative_mine"]["pyramid_scale"],
                                                  threshold_prob=conf["hard_negative_mine"]["min_probability"])
    print "[HNM INFO] : number of mined negative patches {}".format(len(features))
    print "[HNM INFO] : probabilities of mined negative patches {}".format(probs)

    #4. Add hard negative mined features to the extractor as negatives
    # (label -1) and persist the augmented training data.
    extractor = factory.Factory.create_extractor(conf["descriptor"]["algorithm"],
                                                 conf["descriptor"]["parameters"],
                                                 conf["detector"]["window_dim"],
                                                 conf["extractor"]["output_file"])
    print "Before adding hard-negative-mined samples"
    extractor.summary()
    extractor.add_data(features, -1)
    print "After adding hard-negative-mined samples"
    extractor.summary()
    extractor.save(data_file=conf["extractor"]["output_file"])
|
Python
| 0.999416
|
@@ -183,16 +183,12 @@
onf/
-car_side
+svhn
.jso
|
334334c95a543de3e92c96ef807b2cad684f4362
|
Update URL construction from FPLX db_refs
|
indra/databases/__init__.py
|
indra/databases/__init__.py
|
import logging
logger = logging.getLogger('databases')
def get_identifiers_url(db_name, db_id):
    """Return an identifiers.org URL for a given database name and ID.

    Parameters
    ----------
    db_name : str
        An internal database name: HGNC, UP, CHEBI, etc.
    db_id : str
        An identifier in the given database.

    Returns
    -------
    url : str
        An identifiers.org URL corresponding to the given database name and ID.
    """
    identifiers_prefix = 'http://identifiers.org/'
    # Plain identifiers.org namespaces: db_name -> path template.
    path_templates = {
        'UP': 'uniprot/%s',
        'HGNC': 'hgnc/HGNC:%s',
        'IP': 'interpro/%s',
        'CHEBI': 'chebi/%s',
        'NCIT': 'ncit/%s',
        'GO': 'go/%s',
        'PUBCHEM': 'pubchem.compound/%s',
        'PF': 'pfam/%s',
        'MIRBASEM': 'mirbase.mature/%s',
        'MIRBASE': 'mirbase/%s',
        'MESH': 'mesh/%s',
        'HMDB': 'hmdb/%s',
    }
    if db_name in path_templates:
        if db_name == 'PUBCHEM' and db_id.startswith('PUBCHEM:'):
            # Strip the redundant 'PUBCHEM:' prefix before building the URL.
            db_id = db_id[8:]
        return identifiers_prefix + path_templates[db_name] % db_id
    # Special cases with no identifiers.org entry.
    if db_name == 'FPLX':
        return 'http://sorger.med.harvard.edu/indra/entities/%s' % db_id
    if db_name == 'NXPFA':
        return 'https://www.nextprot.org/term/FA-%s' % db_id
    if db_name == 'TEXT':
        return None
    logger.warning('Unhandled name space %s' % db_name)
    return None
|
Python
| 0
|
@@ -1648,45 +1648,28 @@
p://
-sorger.med.harvard.edu/indra/
+id
enti
-ties
+fiers.org/fplx
/%25s'
|
e8293bd1365c759d940297e48609ee69251b0d62
|
split grant code for better suggestion
|
invenio_openaire/indexer.py
|
invenio_openaire/indexer.py
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2019 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Record modification prior to indexing."""
from __future__ import absolute_import, print_function
from elasticsearch import VERSION as ES_VERSION
def indexer_receiver(sender, json=None, record=None, index=None,
                     **dummy_kwargs):
    """Connect to before_record_index signal to transform record for ES.

    Mutates ``json`` in place, adding the ES completion-suggester field
    (schema differs between ES 2.x and newer) for grant and funder indices.
    """
    if index and index.startswith('grants-'):
        # Non-empty suggestion sources for the completion field.
        inputs = [term for term in (json.get('code'),
                                    json.get('acronym'),
                                    json.get('title')) if term]
        if ES_VERSION[0] == 2:
            json['suggest'] = {
                'input': inputs,
                'output': json['title'],
                'context': {
                    'funder': [json['funder']['doi']]
                },
                'payload': {
                    'id': json['internal_id'],
                    # FP7 grants were historically addressed by code.
                    'legacy_id': (json['code'] if json.get('program') == 'FP7'
                                  else json['internal_id']),
                    'code': json['code'],
                    'title': json['title'],
                    'acronym': json.get('acronym'),
                    'program': json.get('program'),
                },
            }
        elif ES_VERSION[0] > 2:
            json['suggest'] = {
                'input': inputs,
                'contexts': {
                    'funder': [json['funder']['doi']]
                }
            }
            json['legacy_id'] = (json['code'] if json.get('program') == 'FP7'
                                 else json['internal_id'])
    elif index and index.startswith('funders-'):
        inputs = [term for term in
                  json.get('acronyms', []) + [json.get('name')] if term]
        if ES_VERSION[0] == 2:
            json['suggest'] = {
                'input': inputs,
                'output': json['name'],
                'payload': {
                    'id': json['doi']
                },
            }
        elif ES_VERSION[0] > 2:
            json['suggest'] = {
                'input': inputs,
            }
|
Python
| 0
|
@@ -617,72 +617,32 @@
-if ES_VERSION%5B0%5D == 2:%0A # Generate suggest field%0A
+code = json.get('code')%0A
@@ -677,32 +677,20 @@
-json.get('
code
-')
,%0A
@@ -760,32 +760,181 @@
)%0A %5D%0A
+ if code and %22_%22 in code:%0A suggestions.extend(code.split(%22_%22))%0A if ES_VERSION%5B0%5D == 2:%0A # Generate suggest field%0A
json
@@ -1661,155 +1661,8 @@
eld%0A
- suggestions = %5B%0A json.get('code'),%0A json.get('acronym'),%0A json.get('title')%0A %5D%0A
@@ -2043,139 +2043,135 @@
-if ES_VERSION%5B0%5D == 2:%0A # Generate suggest field%0A suggestions = json.get('acronyms', %5B%5D) + %5Bjson.get('name')%5D
+suggestions = json.get('acronyms', %5B%5D) + %5Bjson.get('name')%5D%0A if ES_VERSION%5B0%5D == 2:%0A # Generate suggest field
%0A
@@ -2432,80 +2432,8 @@
2:%0A
- suggestions = json.get('acronyms', %5B%5D) + %5Bjson.get('name')%5D%0A
|
d60dea7b7b1fb073eef2c350177b3920f32de748
|
Add comments indicating source of formulae..
|
6/e6.py
|
6/e6.py
|
#!/usr/bin/env python
# http://www.proofwiki.org/wiki/Sum_of_Sequence_of_Squares
def sum_seq_squares(n):
    """Return 1^2 + 2^2 + ... + n^2 via the closed-form formula.

    Uses floor division: the product n*(n+1)*(2n+1) is always divisible
    by 6, and `//` keeps the result an exact int instead of a float.
    """
    return (n * (n + 1) * ((2 * n) + 1)) // 6
# http://www.regentsprep.org/regents/math/algtrig/ATP2/ArithSeq.htm
def sum_seq(n):
    """Return 1 + 2 + ... + n via the closed-form formula.

    Uses floor division: n*(n+1) is always even, and `//` keeps the
    result an exact int instead of a float.
    """
    return (n * (n + 1)) // 2
def main():
    """Project Euler #6: difference between the square of the sum and the
    sum of the squares of the first 100 natural numbers."""
    squares_total = sum_seq_squares(100)
    seq_total = sum_seq(100)
    difference = seq_total ** 2 - squares_total
    print('diff is {0}'.format(difference))


if __name__ == '__main__':
    main()
|
Python
| 0
|
@@ -16,16 +16,75 @@
python%0A%0A
+# http://www.proofwiki.org/wiki/Sum_of_Sequence_of_Squares%0A
def sum_
@@ -139,16 +139,84 @@
)) / 6%0A%0A
+# http://www.regentsprep.org/regents/math/algtrig/ATP2/ArithSeq.htm%0A
def sum_
|
2deae29d9724c82b70eef60037bd477ad083691d
|
fix eq -> exists
|
app.py
|
app.py
|
from chalice import Chalice
import boto3
import json
from botocore.exceptions import ClientError
from chalice import NotFoundError
from boto3.dynamodb.conditions import Key, Attr
# AWS region every client below is created in.
REGION = 'us-east-1'
# Key under which DynamoDB returns query result rows.
ITEMS = 'Items'
# DynamoDB table holding sensor readings, keyed by (source, timestamp).
SENSORS_TABLE = 'sensors'
SENSORS_PARTITION_KEY = 'source'
SENSORS_SORT_KEY = 'timestamp'
# Key of the shadow document body in an iot-data response.
SENSORS_PAYLOAD = 'payload'
# Dotted attribute path template for one reported metric inside an item.
PAYLOAD_PREFIX = 'payload.state.reported.{}'

app = Chalice(app_name='riot')
app.debug = True  # NOTE(review): disable before production deploys

DDB = boto3.resource('dynamodb', region_name=REGION)
def dict_or(d, k1, k2):
    """Return d[k1] if k1 is a key, else d[k2] if k2 is a key, else None.

    Presence is what matters, not truthiness: a stored None/0 value is
    still returned for the first key that exists.
    """
    for key in (k1, k2):
        if key in d:
            return d[key]
    return None
def ddb_query(table, response_key, partition_key, partition_value, sort_key=None, params=None, attribute=None):
    """
    DynamoDb query using partition key, sort key, and attributes.
    Also gt, gte, lt, lte, between, begins_with on sort_key.

    table: boto3 DynamoDB Table resource to query.
    response_key: key of the result list in the query response ('Items').
    params: optional dict of range filters on the sort key
        (gt/gte/lt/lte/eq/bw; a gt*+lt* pair becomes a `between`).
    attribute: optional dotted attribute path; when given, only items
        where it exists are returned and the projection is limited to it.
    Returns the matching items, or [] when the response carries none.
    """
    p_key = Key(partition_key).eq(partition_value)  # query must be partitioned
    if params is not None and sort_key is not None:
        if ('gt' in params or 'gte' in params) and ('lt' in params or 'lte' in params):
            p_key = p_key & Key(sort_key).between(dict_or(params, 'gt', 'gte'), dict_or(params, 'lt', 'lte'))
        else:
            if 'gt' in params:
                p_key = p_key & Key(sort_key).gt(params['gt'])
            elif 'gte' in params:
                p_key = p_key & Key(sort_key).gte(params['gte'])
            elif 'lt' in params:
                p_key = p_key & Key(sort_key).lt(params['lt'])
            elif 'lte' in params:
                p_key = p_key & Key(sort_key).lte(params['lte'])
            elif 'eq' in params:
                p_key = p_key & Key(sort_key).eq(params['eq'])
            elif 'bw' in params:
                p_key = p_key & Key(sort_key).begins_with(params['bw'])
    if attribute is not None:
        # Alias every name (keys + each path segment) to dodge DynamoDB
        # reserved words in the projection expression.
        if sort_key is None:
            ex = {"#pkey": partition_key}
            pe = "#pkey, {}"
        else:
            ex = {"#pkey": partition_key, "#skey": sort_key}
            pe = "#pkey, #skey, {}"
        x = []
        for idx, a in enumerate(attribute.split('.')):
            ex["#n{}".format(idx)] = a
            x.append("#n{}".format(idx))
        response = table.query(
            KeyConditionExpression=p_key,
            # Bug fix: Attr(attribute).eq() was called with no argument,
            # a TypeError at runtime; the intent is to keep only items
            # where the attribute exists.
            FilterExpression=Attr(attribute).exists(),
            ExpressionAttributeNames=ex,
            ProjectionExpression=pe.format('.'.join(x))
        )
    else:
        response = table.query(
            KeyConditionExpression=p_key
        )
    if response_key in response:
        return response[response_key]
    else:
        return []
@app.route('/things', methods=['GET'])
def get_things():
    """List every IoT thing registered in the region."""
    iot = boto3.client('iot', region_name=REGION)
    if app.current_request.method != 'GET':
        return None
    try:
        return iot.list_things()["things"]
    except ClientError:
        raise NotFoundError()
@app.route('/things/{thing}', methods=['GET'])
def get_thing(thing):
    """Return the parsed shadow document for one thing, or 404."""
    iot_data = boto3.client('iot-data', region_name=REGION)
    if app.current_request.method != 'GET':
        return None
    try:
        shadow = iot_data.get_thing_shadow(thingName=thing)
        return json.loads(shadow[SENSORS_PAYLOAD].read())
    except ClientError:
        raise NotFoundError(thing)
@app.route('/introspect')
def introspect():
    """Echo the current request as a dict (debugging aid)."""
    current = app.current_request
    return current.to_dict()
@app.route('/metrics/{thing}/{metric}', methods=['GET'])
def get_metrics(thing, metric):
    """Query DynamoDB for one metric of one thing, honoring range filters
    passed as query-string parameters (gt/gte/lt/lte/eq/bw)."""
    query_params = app.current_request.query_params
    try:
        return ddb_query(DDB.Table(SENSORS_TABLE), ITEMS,
                         SENSORS_PARTITION_KEY, thing, SENSORS_SORT_KEY,
                         query_params, PAYLOAD_PREFIX.format(metric))
    except ClientError:
        raise NotFoundError()
|
Python
| 0.000004
|
@@ -2361,17 +2361,21 @@
ibute).e
-q
+xists
(),%0A
|
36e8549053d28f51cc1e846e86bbdc8b32527cbe
|
Make app.py localhost only
|
app.py
|
app.py
|
#!/usr/bin/python3
from json import dumps
from datetime import datetime
import os
from bottle import app as bottleapp
from bottle import route, run, static_file, template
from pymongo import MongoClient
import sprout
# Run relative to this file so templates/assets resolve regardless of CWD.
os.chdir(os.path.dirname(os.path.abspath(__file__)))

# MongoDB collection holding the per-user scoreboard documents.
mongo = MongoClient('localhost', 27017)
col = mongo['plant-rank']['users']
def readable(obj):
    """Augment a user document in place with display-ready fields.

    Adds class label/name, point counts, a comma-joined problem list and
    a formatted timestamp; returns the same (mutated) dict.
    """
    category = obj['category']
    obj['class_name'] = {0: '陌生人',
                         1: '算法班',
                         2: 'C語法',
                         3: 'Py語法'}[category]
    obj['class'] = {0: '',
                    1: 'label-primary',
                    2: 'label-warning',
                    3: 'label-success'}[category]
    obj['algopoints'] = len(obj['algoprobs'])
    obj['points'] = len(obj['problems'])
    obj['problems'] = ', '.join(str(p) for p in sorted(obj['problems']))
    obj['updated_at'] = (datetime.fromtimestamp(obj['updated_at'])
                         .strftime('%Y/%m/%d %H:%M'))
    return obj
@route('/assets/<filename:path>')
def assets(filename):
    """Serve static files (CSS/JS/images) from ./assets/."""
    return static_file(filename, root='./assets/')
@route('/')
def index():
    """Render the leaderboard with several orderings of all users.

    The local names below are passed to the template via locals(), so
    they are part of the template contract.
    """
    board = [readable(doc) for doc in col.find({})]
    countboard = sorted(board, reverse=True,
                        key=lambda u: (u['points'], u['rate']))
    algocountboard = sorted(board, reverse=True,
                            key=lambda u: (u['algopoints'], u['points']))
    algoboard = sorted(board, reverse=True,
                       key=lambda u: (u['rate'] if u['category'] == 1 else 0,
                                      u['points']))
    return template('index.html', locals())
@route('/users/<uid>')
def user(uid):
    # `board` and `uid` reach the template through locals(), so these local
    # names are part of the template contract — do not rename them.
    # NOTE(review): map() is lazy in Python 3; user.html presumably iterates
    # it exactly once — confirm the template does not need len()/reuse.
    board = map(readable, col.find({'uid': int(uid)}).limit(1))
    return template('user.html', locals())
@route('/users/<uid>', method="POST")
def refresh(uid):
    """Re-scrape data for the given user id; return a JSON status flag."""
    try:
        sprout.refresh(int(uid))
    # Was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; Exception covers every real failure here
    # (bad uid, scrape errors) without hiding interpreter shutdown.
    except Exception:
        return dumps({'status': False})
    else:
        return dumps({'status': True})
# NOTE(review): binds to every interface (0.0.0.0) with no auth in front —
# confirm this is intentional; prefer 127.0.0.1 behind a reverse proxy.
run(app=bottleapp(), port=8787, host="0.0.0.0", debug=False, server='meinheld')
|
Python
| 0.000004
|
@@ -1974,15 +1974,17 @@
st=%22
-0
+127
.0.0.
-0
+1
%22, d
|
c4cdbc18b860cf899aa452e4d1f39afd9e4d6f4c
|
comment out APP_SETTINGS in app.py
|
app.py
|
app.py
|
#!/usr/bin/python3
import os
import sys
from urllib.parse import urlparse, urljoin, urlencode

from flask import (Flask, abort, redirect, render_template, request,
                   send_from_directory, session, url_for)
from flask_sqlalchemy import SQLAlchemy
# Ion OAuth2 client registration.
# NOTE(review): client credentials are hard-coded in source — move
# CLIENT_SECRET into configuration/environment before publishing this repo.
CLIENT_ID = "omNihUKDY7L8XXLh41WTTY9Pda21T2SRqAmJO86C"
CLIENT_SECRET = "fmdfCpUwDIu0E5FExHudOdySDSa7HPhNrRKTirNsXJIWc2NEMFJtiY7UaczcTJL2kzRnsBV4OWPQ8P8KTv8YDqS5rdOOAE0opdYBLbZtMzNTfnCWHTJTgmpmDDtSbjDY"
REDIRECT_URI = "/login"
AUTH_BASE_URL = "https://ion.tjhsst.edu/oauth/authorize/"
TOKEN_URL = "https://ion.tjhsst.edu/oauth/token/"
from requests_oauthlib import OAuth2Session
from oauthlib.oauth2.rfc6749.errors import InvalidGrantError
app = Flask(__name__)
# Configuration object path is read from the APP_SETTINGS environment
# variable; raises KeyError if it is unset.
app.config.from_object(os.environ['APP_SETTINGS'])
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
import models
import json
# print(os.environ['APP_SETTINGS'])
def is_safe_url(target):
    """True when `target` resolves to an http(s) URL on our own host."""
    here = urlparse(request.host_url)
    there = urlparse(urljoin(request.host_url, target))
    return there.scheme in ('http', 'https') and here.netloc == there.netloc
def get_redirect_target():
    """Return the first safe redirect candidate: ?next= param, then the
    referrer; None when neither is present and safe."""
    candidates = (request.values.get('next'), request.referrer)
    for candidate in candidates:
        if candidate and is_safe_url(candidate):
            return candidate
def redirect_back(endpoint, **values):
    """Redirect to the POSTed 'next' target when it is safe, otherwise to
    the given endpoint."""
    target = request.form['next']
    if target and is_safe_url(target):
        return redirect(target)
    return redirect(url_for(endpoint, **values))
@app.route("/")
def index():
return render_template("index.html")
@app.route("/hours")
def hours():
if "oauth_token" in session:
profile_json = session.get('profile', {})
return render_template("hours.html", profile=profile_json)
return redirect(url_for('login', next='hours'))
@app.route("/admin")
def admin():
if "oauth_token" in session:
profile_json = session.get('profile', {})
return render_template("admin.html", profile=profile_json)
return redirect(url_for('login', next='admin'))
@app.route("/login", methods=["GET"])
def login():
nexturl = request.args.get('next')
if not is_safe_url(nexturl):
return flask.abort(400)
oauth = OAuth2Session(
CLIENT_ID, redirect_uri=REDIRECT_URI, scope=["read"])
if 'code' not in request.args:
authorization_url, state = oauth.authorization_url(AUTH_BASE_URL)
session["next"] = nexturl
return redirect(authorization_url)
try:
token = oauth.fetch_token(
TOKEN_URL, code=request.args.get(
"code", ""), client_secret=CLIENT_SECRET)
profile = oauth.get("https://ion.tjhsst.edu/api/profile")
profile_data = json.loads(profile.content.decode())
session["profile"] = profile_data
session["username"] = profile_data["ion_username"]
session["oauth_token"] = token
return redirect(url_for(session["next"]))
except InvalidGrantError:
return redirect(url_for('login'))
@app.route("/css/<path:path>")
def send_css(path):
return send_from_directory('static/css', path)
@app.route("/scripts/<path:path>")
def send_js(path):
return send_from_directory('static/scripts', path)
@app.route("/icons/<path:path>")
def send_icons(path):
return send_from_directory('static/icons', path)
@app.route("/images/<path:path>")
def send_images(path):
return send_from_directory('static/images', path)
@app.route("/fonts/<path:path>")
def send_fonts(path):
return send_from_directory('static/fonts', path)
@app.route("/logout")
def logout():
session.clear()
return redirect(url_for("index"))
if __name__ == "__main__":
if(len(sys.argv) < 2):
print("Please specify a port")
exit()
app.run(host="0.0.0.0", port=int(sys.argv[1]))
|
Python
| 0
|
@@ -684,16 +684,17 @@
name__)%0A
+#
app.conf
|
25fab7f2c289d9b1bc6098ef33524bbc627c5e75
|
Fix broken test in client.cli.job
|
src/test/python/apache/aurora/client/cli/test_status.py
|
src/test/python/apache/aurora/client/cli/test_status.py
|
import contextlib
from gen.apache.aurora.ttypes import (
AssignedTask,
Identity,
JobKey,
ResponseCode,
ScheduleStatus,
ScheduleStatusResult,
TaskConfig,
TaskEvent,
TaskQuery,
)
from apache.aurora.client.cli import (
AuroraCommandLine,
EXIT_INVALID_PARAMETER
)
from apache.aurora.common.aurora_job_key import AuroraJobKey
from apache.aurora.client.cli.util import AuroraClientCommandTest, FakeAuroraCommandContext
from mock import call, Mock, patch
class TestJobStatus(AuroraClientCommandTest):
  """Tests for the `job status` verb of the aurora command line."""

  @classmethod
  def create_mock_scheduled_tasks(cls):
    """Build three mock RUNNING tasks (foo/bar/baz) with minimal configs."""
    jobs = []
    for name in ['foo', 'bar', 'baz']:
      job = Mock()
      job.key = JobKey(role=cls.TEST_ROLE, environment=cls.TEST_ENV, name=name)
      job.failure_count = 0
      job.assignedTask = Mock(spec=AssignedTask)
      job.assignedTask.slaveHost = 'slavehost'
      job.assignedTask.task = Mock(spec=TaskConfig)
      job.assignedTask.task.maxTaskFailures = 1
      job.assignedTask.task.packages = []
      job.assignedTask.task.owner = Identity(role='bozo')
      job.assignedTask.task.environment = 'test'
      job.assignedTask.task.jobName = 'woops'
      job.assignedTask.task.numCpus = 2
      job.assignedTask.task.ramMb = 2
      job.assignedTask.task.diskMb = 2
      job.assignedTask.instanceId = 4237894
      job.assignedTask.assignedPorts = None
      job.status = ScheduleStatus.RUNNING
      mockEvent = Mock(spec=TaskEvent)
      mockEvent.timestamp = 28234726395
      mockEvent.status = ScheduleStatus.RUNNING
      mockEvent.message = "Hi there"
      job.taskEvents = [mockEvent]
      jobs.append(job)
    return jobs

  @classmethod
  def create_getjobs_response(cls):
    """Mock a getJobs scheduler response carrying two job configs."""
    result = Mock()
    result.responseCode = ResponseCode.OK
    result.result = Mock()
    result.result.getJobsResult = Mock()
    mock_job_one = Mock()
    mock_job_one.key = Mock()
    mock_job_one.key.role = 'RoleA'
    mock_job_one.key.environment = 'test'
    mock_job_one.key.name = 'hithere'
    mock_job_two = Mock()
    mock_job_two.key = Mock()
    mock_job_two.key.role = 'bozo'
    mock_job_two.key.environment = 'test'
    mock_job_two.key.name = 'hello'
    result.result.getJobsResult.configs = [mock_job_one, mock_job_two]
    return result

  @classmethod
  def create_status_response(cls):
    """Mock a successful getTasksStatus response with the mock tasks."""
    resp = cls.create_simple_success_response()
    resp.result.scheduleStatusResult = Mock(spec=ScheduleStatusResult)
    resp.result.scheduleStatusResult.tasks = set(cls.create_mock_scheduled_tasks())
    return resp

  @classmethod
  def create_failed_status_response(cls):
    """Mock a getTasksStatus failure (no tasks matched the query)."""
    return cls.create_blank_response(ResponseCode.INVALID_REQUEST, 'No tasks found for query')

  def test_successful_status_shallow(self):
    """Test the status command at the shallowest level: calling status should end up invoking
    the local APIs get_status method."""
    mock_context = FakeAuroraCommandContext()
    mock_api = mock_context.get_api('west')
    mock_api.check_status.return_value = self.create_status_response()
    with contextlib.nested(
        patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=mock_context)):
      cmd = AuroraCommandLine()
      cmd.execute(['job', 'status', 'west/bozo/test/hello'])
      mock_api.check_status.assert_called_with(AuroraJobKey('west', 'bozo', 'test', 'hello'))

  def test_successful_status_deep(self):
    """Test the status command more deeply: in a request with a fully specified
    job, it should end up doing a query using getTasksStatus."""
    (mock_api, mock_scheduler) = self.setup_mock_api()
    mock_scheduler.query.return_value = self.create_status_response()
    with contextlib.nested(
        patch('apache.aurora.client.api.SchedulerProxy', return_value=mock_scheduler),
        patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
      cmd = AuroraCommandLine()
      cmd.execute(['job', 'status', 'west/bozo/test/hello'])
      mock_scheduler.getTasksStatus.assert_called_with(TaskQuery(jobName='hello',
          environment='test', owner=Identity(role='bozo')))

  def test_status_wildcard(self):
    """Test status using a wildcard. It should first call api.get_jobs, and then do a
    getTasksStatus on each job."""
    mock_context = FakeAuroraCommandContext()
    mock_api = mock_context.get_api('west')
    mock_api.check_status.return_value = self.create_status_response()
    mock_api.get_jobs.return_value = self.create_getjobs_response()
    with contextlib.nested(
        patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=mock_context),
        patch('apache.aurora.client.cli.context.CLUSTERS', new=self.TEST_CLUSTERS)):
      cmd = AuroraCommandLine()
      cmd.execute(['job', 'status', '*'])

      # Wildcard should have expanded to two jobs, so there should be two calls
      # to check_status.
      assert mock_api.check_status.call_count == 2
      # Bug fix: the context api above is for cluster 'west', so the
      # expanded job keys carry that cluster (they were wrongly asserted
      # as 'example' before, which made the membership checks fail).
      assert (call(AuroraJobKey('west', 'RoleA', 'test', 'hithere')) in
          mock_api.check_status.call_args_list)
      assert (call(AuroraJobKey('west', 'bozo', 'test', 'hello')) in
          mock_api.check_status.call_args_list)

  def test_status_wildcard_two(self):
    """Test status using a wildcard. It should first call api.get_jobs, and then do a
    getTasksStatus on each job. This time, use a pattern that doesn't match all of the jobs."""
    mock_context = FakeAuroraCommandContext()
    mock_api = mock_context.get_api('west')
    mock_api.check_status.return_value = self.create_status_response()
    mock_api.get_jobs.return_value = self.create_getjobs_response()
    with contextlib.nested(
        patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=mock_context)):
      cmd = AuroraCommandLine()
      cmd.execute(['job', 'status', 'example/*/*/hello'])

      # Wildcard should have expanded to two jobs, but only matched one,
      # so there should be one call to check_status.
      assert mock_api.check_status.call_count == 1
      mock_api.check_status.assert_called_with(
          AuroraJobKey('example', 'bozo', 'test', 'hello'))

  def test_unsuccessful_status_shallow(self):
    """Test the status command at the shallowest level: calling status should end up invoking
    the local APIs get_status method."""
    # Calls api.check_status, which calls scheduler.getJobs
    mock_context = FakeAuroraCommandContext()
    mock_api = mock_context.get_api('west')
    mock_api.check_status.return_value = self.create_failed_status_response()
    # mock_api.scheduler.getTasksStatus.return_value =
    with contextlib.nested(
        patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=mock_context)):
      cmd = AuroraCommandLine()
      result = cmd.execute(['job', 'status', 'west/bozo/test/hello'])
      assert result == EXIT_INVALID_PARAMETER
|
Python
| 0
|
@@ -4920,31 +4920,28 @@
roraJobKey('
-example
+west
', 'RoleA',
@@ -5032,39 +5032,36 @@
l(AuroraJobKey('
-example
+west
', 'bozo', 'test
|
91ff11cde50ce2485c0a6725651931f88a085ca7
|
Update get_time to handle timeout errors.
|
app.py
|
app.py
|
""" app.py """
from flask import Flask, render_template
import requests
app = Flask(__name__)
def get_time():
    """Fetch the current datetime string from the time service.

    Returns 'Unavailable' when the service is down or too slow instead of
    letting the request error bubble up into a 500. Without a timeout,
    requests would wait indefinitely on a hung service, and the resulting
    Timeout exception was not caught.
    """
    try:
        response = requests.get('http://localhost:3001/time', timeout=3.0)
    except (requests.exceptions.ConnectionError,
            requests.exceptions.Timeout):
        return 'Unavailable'
    return response.json().get('datetime')
def get_user():
    """Fetch the current user's name from the user service.

    Matches get_time's error handling so a dead or slow user service
    degrades to 'Unavailable' rather than crashing the page with a 500
    (previously any connection error propagated uncaught).
    """
    try:
        response = requests.get('http://localhost:3002/user', timeout=3.0)
    except (requests.exceptions.ConnectionError,
            requests.exceptions.Timeout):
        return 'Unavailable'
    return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
    """Plain-text handler for internal server errors."""
    return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
|
Python
| 0
|
@@ -177,16 +177,29 @@
01/time'
+, timeout=3.0
)%0A ex
@@ -203,16 +203,17 @@
except
+(
requests
@@ -239,16 +239,58 @@
ionError
+,%0A requests.exceptions.Timeout)
:%0A
|
b2a1dcd25ecc9d50a975a41330a1620b52312857
|
add docstring
|
letmecreate/click/motion.py
|
letmecreate/click/motion.py
|
#!/usr/bin/env python3
import ctypes
# Handle to the LetMeCreate click shared library (must be on the loader path).
_lib = ctypes.CDLL('libletmecreate_click.so')
# Signature of motion callbacks: void f(uint8_t edge_flag).
callback_type = ctypes.CFUNCTYPE(None, ctypes.c_uint8)
# One slot per mikrobus; holds the ctypes pointer so it stays referenced.
callbacks = [None, None]
def enable(mikrobus_index):
    """Enable the Motion Click on the given mikrobus index.

    Raises Exception when the underlying C call reports failure.
    """
    if _lib.motion_click_enable(mikrobus_index) < 0:
        raise Exception("motion click enable failed")
def attach_callback(mikrobus_index, callback):
    """Register `callback` for motion events on the given mikrobus index.

    The ctypes function pointer is stored in `callbacks` so it stays
    referenced while the C library holds it.
    Raises Exception when the underlying C call reports failure.
    """
    ptr = callback_type(callback)
    if _lib.motion_click_attach_callback(mikrobus_index, ptr) < 0:
        raise Exception("motion click attach callback failed")
    callbacks[mikrobus_index] = ptr
def disable(mikrobus_index):
    """Disable the Motion Click on the given mikrobus index.

    Raises Exception when the underlying C call reports failure.
    """
    if _lib.motion_click_disable(mikrobus_index) < 0:
        raise Exception("motion click disable failed")
|
Python
| 0
|
@@ -15,16 +15,85 @@
python3
+%0A%22%22%22Python binding of Motion Click wrapper of LetMeCreate library.%22%22%22
%0A%0Aimport
@@ -265,173 +265,997 @@
-ret = _lib.motion_click_enable(mikrobus_index)%0A if ret %3C 0:%0A raise Exception(%22motion click enable failed%22)%0A%0A%0Adef attach_callback(mikrobus_index, callback):
+%22%22%22Enable the motion click.%0A%0A Configures the EN pin as an output and set it to high.%0A%0A mikrobus_index: must be 0 (MIKROBUS_1) or 1 (MIKROBUS_2)%0A%0A Note: An exception is thrown if it fails to enable the Motion Click.%0A %22%22%22%0A ret = _lib.motion_click_enable(mikrobus_index)%0A if ret %3C 0:%0A raise Exception(%22motion click enable failed%22)%0A%0A%0Adef attach_callback(mikrobus_index, callback):%0A %22%22%22Attach a callback triggered if an event is detected.%0A%0A Returns the callback ID. The callback must be removed by calling%0A letmecreate.core.gpio_monitor.remove_callback().%0A%0A mikrobus_index: must be 0 (MIKROBUS_1) or 1 (MIKROBUS_2)%0A%0A callback: function must have one argument which can be safely ignored. This%0A argument indicates if the GPIO is on a falling or raising edge. In this%0A case, it triggers an event only if the INT pin is raising so this argument%0A will always be equal to 1.%0A%0A Note: An exception is thrown if it fails to attach a callback.%0A %22%22%22
%0A
@@ -1493,24 +1493,140 @@
bus_index):%0A
+ %22%22%22Disable the Motion Click.%0A%0A Note: An exception is thrown if it fails to disable the Motion Click.%0A %22%22%22%0A
ret = _l
|
460b48c10461df264a30ac26630d7299370988cd
|
Support alternative URLs
|
gsl.py
|
gsl.py
|
#!/usr/bin/python
from urlparse import urlparse
import urllib
import urllib2
import click
import os
import hashlib
# Base URL of the package mirror; a trailing slash is required because
# 'urls.tsv' and checksum paths are appended by plain concatenation.
PACKAGE_SERVER = 'https://server-to-be-determined/'
@click.command()
@click.option('--package_id', help='Package ID', required=True)
@click.option('--download_location', default='./',
              help='Location for the downloaded file')
def get(package_id, download_location):
    # Look up the package in the server's tab-separated index and download
    # the content-addressed file, verifying its SHA-256 checksum.
    # (Python 2 source: print statements, urllib/urllib2.)
    package_found = False
    for line in urllib2.urlopen(PACKAGE_SERVER + 'urls.tsv'):
        # Skip blank lines and comments in the index.
        if line.strip() and not line.startswith('#'):
            iid, upstream_url, checksum = line.split('\t')
            if iid.strip() == package_id.strip():
                package_found = True
                # I worry about this being unreliable. TODO: add target filename column?
                pkg_name = urlparse(upstream_url).path.split('/')[-1]
                storage_path = os.path.join(download_location, pkg_name)
                # NOTE(review): `checksum` still carries the trailing newline
                # from split('\t') — the comparison below strips it, but this
                # URL does not; confirm the server tolerates that.
                url = PACKAGE_SERVER + checksum
                urllib.urlretrieve(url, storage_path)
                download_checksum = hashlib.sha256(open(storage_path, 'rb').read()).hexdigest()
                if checksum.strip() != download_checksum:
                    print 'Checksum does not match, something seems to be wrong.\n'
                    print checksum, '\t(expected)'
                    print download_checksum, '\t(downloaded)'
                else:
                    print 'Download sucessfull for %s.' % (pkg_name)
    if not package_found:
        print 'Package (%s) could not be found in this servive.' % (package_id)

if __name__ == '__main__':
    get()
|
Python
| 0.000001
|
@@ -570,16 +570,31 @@
checksum
+, alternate_url
= line.
@@ -623,24 +623,16 @@
if iid
-.strip()
== pack
@@ -912,24 +912,132 @@
, pkg_name)%0A
+ if alternate_url.strip():%0A url = alternate_url%0A else:%0A
@@ -1249,24 +1249,16 @@
checksum
-.strip()
!= down
@@ -1298,16 +1298,17 @@
print
+(
'Checksu
@@ -1381,100 +1381,179 @@
-print checksum, '%5Ct(expected)'%0A print download_checksum, '%5Ct(downloaded)'
+ '%7Bexpected%7D%5Ct(expected)%5Cn%7Bactual%7D%5Ct(downloaded)').format(%0A expected=checksum,%0A actual=download_checksum)
%0A
@@ -1614,15 +1614,15 @@
suc
+c
essful
-l
for
@@ -1729,11 +1729,10 @@
serv
-iv
e
+r
.' %25
|
9c012f3b5609b557b9d14059f2b2a6412283e0ed
|
support option ax='new'
|
src/pyquickhelper/helpgen/graphviz_helper.py
|
src/pyquickhelper/helpgen/graphviz_helper.py
|
"""
@file
@brief Helper about graphviz.
"""
import os
from ..loghelper import run_cmd
from .conf_path_tools import find_graphviz_dot
def plot_graphviz(dot, ax=None, temp_dot=None, temp_img=None, dpi=300):
    """
    Plots a dot graph into a :epkg:`matplotlib` plot.

    @param      dot         dot language
    @param      ax          existing ax; ``None`` to draw into the current
                            axis (*plt.gca()*); the string ``'new'`` to
                            create a fresh figure and axis
    @param      temp_dot    temporary file, if None,
                            a file is created and removed
    @param      temp_img    temporary image, if None,
                            a file is created and removed
    @param      dpi         dpi
    @return                 ax
    """
    if temp_dot is None:
        temp_dot = "temp_%d.dot" % id(dot)
        clean_dot = True
    else:
        clean_dot = False
    if temp_img is None:
        temp_img = "temp_%d.png" % id(dot)
        clean_img = True
    else:
        clean_img = False
    with open(temp_dot, "w", encoding="utf-8") as f:
        f.write(dot)
    dot_path = find_graphviz_dot()
    cmd = '"%s" -Gdpi=%d -Tpng -o "%s" "%s"' % (
        dot_path, dpi, temp_img, temp_dot)
    out, err = run_cmd(cmd, wait=True)
    if err is not None:
        err = err.strip("\r\n\t ")
    if len(err) > 0:
        # Remove temporaries before surfacing the dot failure.
        if clean_dot:
            os.remove(temp_dot)
        if clean_img and os.path.exists(temp_img):
            os.remove(temp_img)
        raise RuntimeError(
            "Unable to run command line"
            "\n---CMD---\n{}\n---OUT---\n{}"
            "\n---ERR---\n{}".format(
                cmd, out, err))
    # matplotlib is imported lazily so that failures above do not require it.
    import matplotlib.pyplot as plt
    if ax is None:
        ax = plt.gca()
    elif isinstance(ax, str) and ax == 'new':
        # New option: create a dedicated figure instead of drawing into
        # the current axis; existing callers (None or an axis) unchanged.
        _, ax = plt.subplots(1, 1)
    image = plt.imread(temp_img)
    ax.imshow(image)
    if clean_dot:
        os.remove(temp_dot)
    if clean_img and os.path.exists(temp_img):
        os.remove(temp_img)
    return ax
|
Python
| 0.000036
|
@@ -1587,16 +1587,137 @@
t.gca()%0A
+ elif isinstance(ax, str) and ax == 'new':%0A import matplotlib.pyplot as plt%0A _, ax = plt.subplots(1, 1)%0A
|
680f738739393f14a44fbb0d9093d08a0e520f65
|
Print error message if unknown generate command
|
src/hades/config/generate.py
|
src/hades/config/generate.py
|
import collections
from functools import partial
import os
import os.path
import shutil
import sys
import jinja2
import netaddr
import pkg_resources
from hades.config.loader import get_config
class ConfigGenerator(object):
    """Renders the Jinja2 config templates shipped with hades.

    Files ending in TEMPLATE_SUFFIX are rendered with the loaded config;
    every other file is copied verbatim.
    """
    # Suffix marking a file as a Jinja2 template; stripped from the output name.
    TEMPLATE_SUFFIX = ".j2"

    def __init__(self, template_dir, config):
        """
        :param template_dir: root directory containing the templates
        :param config: mapping expanded into the template context (**config)
        """
        self.config = config
        self.template_dir = template_dir
        self.env = jinja2.Environment(
            loader=jinja2.FileSystemLoader(self.template_dir),
            auto_reload=False, autoescape=False, keep_trailing_newline=True,
            undefined=jinja2.StrictUndefined,
            extensions=['jinja2.ext.do', 'jinja2.ext.loopcontrols',
                        'jinja2.ext.with_'],
        )
        self.env.globals.update({
            'netaddr': netaddr,
        })

    def from_directory(self, name, target_dir):
        """Recursively render/copy the template tree ``name`` into target_dir."""
        source_base = os.path.join(self.template_dir, name)
        sources = collections.deque()
        sources.append(source_base)
        # Iterative depth-first walk over the template tree.
        while sources:
            source = sources.pop()
            relpath = os.path.relpath(source, source_base)
            target = os.path.normpath(os.path.join(target_dir, relpath))
            if os.path.isdir(source):
                sources.extend(map(partial(os.path.join, source),
                                   os.listdir(source)))
                if not os.path.exists(target):
                    os.mkdir(target)
            else:
                if source.endswith(self.TEMPLATE_SUFFIX):
                    # Render *.j2 files; the output file drops the suffix.
                    template_name = os.path.relpath(source, self.template_dir)
                    template = self.env.get_template(template_name)
                    target = target[:-len(self.TEMPLATE_SUFFIX)]
                    with open(target, 'w', encoding='UTF-8') as f:
                        stream = template.stream(BASE_DIRECTORY=target_dir,
                                                 TARGET=target, **self.config)
                        f.writelines(stream)
                else:
                    # Non-template files are copied as-is.
                    shutil.copy(source, target)

    def from_file(self, name, output):
        """Render the single template ``name`` and write it to ``output``."""
        target = os.path.join(self.template_dir, name)
        base_directory = os.path.dirname(target)
        stream = self.env.get_template(name).stream(
            BASE_DIRECTORY=base_directory, TARGET=target, **self.config)
        output.writelines(stream)
def write_single_file_config(generator, name, args):
    """Render template `name`; write to the file named by args[2] when
    given, otherwise to stdout. Always returns 0."""
    if len(args) >= 3:
        with open(args[2], 'w', encoding='utf-8') as fp:
            generator.from_file(name, fp)
    else:
        generator.from_file(name, sys.stdout)
    return 0
def write_directory_config(generator, name, args):
    """Render the template directory `name` into the target given by
    args[2]; returns os.EX_USAGE when no target was supplied, else 0."""
    if len(args) < 3:
        return os.EX_USAGE
    generator.from_directory(name, args[2])
    return 0
def write_arping_config(generator, args):
    """Render the arping config (single file)."""
    return write_single_file_config(generator, 'arping.ini.j2', args)


def write_freeradius_config(generator, args):
    """Render the freeradius config tree (directory)."""
    return write_directory_config(generator, 'freeradius', args)


def write_iptables_config(generator, args):
    """Render the iptables rules (single file)."""
    return write_single_file_config(generator, "iptables.j2", args)


def write_nginx_config(generator, args):
    """Render the nginx config tree (directory)."""
    return write_directory_config(generator, "nginx", args)


def write_postgresql_schema(generator, args):
    """Render the PostgreSQL schema SQL (single file)."""
    return write_single_file_config(generator, "schema.sql.j2", args)


def write_regular_dnsmasq_config(generator, args):
    """Render the regular dnsmasq config (single file)."""
    return write_single_file_config(generator, "regular-dnsmasq.conf.j2", args)


def write_unauth_dnsmasq_config(generator, args):
    """Render the unauth dnsmasq config (single file)."""
    return write_single_file_config(generator, "unauth-dnsmasq.conf.j2", args)


def write_unbound_config(generator, args):
    """Render the unbound config (single file)."""
    return write_single_file_config(generator, "unbound.conf.j2", args)


def write_uwsgi_config(generator, args):
    """Render the uwsgi config (single file)."""
    return write_single_file_config(generator, "uwsgi.ini.j2", args)
# Maps the CLI sub-command name (argv[1]) to its generator function.
commands = {
    "arping": write_arping_config,
    "freeradius": write_freeradius_config,
    "iptables": write_iptables_config,
    "nginx": write_nginx_config,
    "postgresql-schema": write_postgresql_schema,
    "regular-dnsmasq": write_regular_dnsmasq_config,
    "unauth-dnsmasq": write_unauth_dnsmasq_config,
    "unbound": write_unbound_config,
    "uwsgi": write_uwsgi_config,
}
def main(args):
    """Entry point: dispatch args[1] to a config generation command.

    Returns the command's exit status, or os.EX_USAGE for a missing or
    unknown command name.
    """
    if len(args) < 2:
        return os.EX_USAGE
    command = commands.get(args[1])
    if command is None:
        # Previously an unknown name exited with EX_USAGE silently; tell
        # the user what was wrong.
        print("Unknown config generation command {}".format(args[1]),
              file=sys.stderr)
        return os.EX_USAGE
    config = get_config()
    template_dir = pkg_resources.resource_filename('hades.config', 'templates')
    generator = ConfigGenerator(template_dir, config)
    return command(generator, args)


if __name__ == '__main__':
    sys.exit(main(sys.argv))
|
Python
| 0.000101
|
@@ -4476,22 +4476,25 @@
ig)%0A
-return
+command =
command
@@ -4510,35 +4510,180 @@
s%5B1%5D
-, lambda g, a: os.EX_USAGE)
+)%0A if command is None:%0A print(%22Unknown config generation command %7B%7D%22.format(args%5B1%5D),%0A file=sys.stderr)%0A return os.EX_USAGE%0A return command
(gen
|
cbe379efeb7592e9c918fc4d092098b74a3b8c1a
|
Update Deck.py - Add shuffle method to shuffle the deck and then return the shuffled cards.
|
Deck.py
|
Deck.py
|
#Deck
class Deck:
    '''Definition of a card deck.

    Cards are (value, suit) tuples: values 2-9, T, J, Q, K, A and suits
    H, D, S, C; two ('W', 'W') jokers are appended when requested.
    '''

    def __init__(self, hasJoker=False):
        """Assemble an ordered 52-card deck (54 with jokers)."""
        self.suits = ['H', 'D', 'S', 'C']
        self.values = [str(x) for x in range(2, 10)]  # 2-9 cards
        self.values.extend(['T', 'J', 'Q', 'K', 'A'])  # Face cards (including the 10s)
        # Assemble deck
        self.cards = [(v, s) for v in self.values for s in self.suits]
        # Add Joker cards (2) as 'WW' if needed
        if hasJoker:
            self.cards.extend([('W', 'W'), ('W', 'W')])

    def draw(self, fromTop=True):
        """Remove and return one card from the top (front) or, when
        fromTop is False, from the bottom (back) of the deck."""
        if fromTop:
            return self.cards.pop(0)
        return self.cards.pop()

    def sizeOf(self):
        """Return how many cards are left in the deck."""
        return len(self.cards)

    def shuffle(self):
        """Shuffle the deck in place and return the shuffled card list."""
        import random  # local import: keeps the module free of new top-level deps
        random.shuffle(self.cards)
        return self.cards
|
Python
| 0
|
@@ -44,16 +44,56 @@
deck.'''
+%0A%09from random import shuffle as rShuffle
%0A%0A%09def _
@@ -781,8 +781,154 @@
f.cards)
+%0A%0A%09#Shuffle deck and return the newly shuffled deck%0A%09def shuffle(self):%0A%09%09#Use random.shuffle() method%0A%09%09rShuffle(self.cards)%0A%09%09return self.cards%0A
|
3bd37ff8b91787da22f925ab858157bffa5698d7
|
Remove unnecessary import
|
Fibo.py
|
Fibo.py
|
import math
import sys
def Fibo(num):
    """Return the num-th Fibonacci number (F(1) = F(2) = 1).

    Iterative O(num) replacement for the original double recursion,
    which was exponential in num. Inputs <= 2 (including 0 and
    negatives) still return 1, matching the original base case.
    """
    if num <= 2:
        return 1
    a, b = 1, 1
    for _ in range(num - 2):
        a, b = b, a + b
    return b
# CLI entry: `python Fibo.py N` prints F(N); raises IndexError with no arg.
print(Fibo(int(sys.argv[1])))
|
Python
| 0
|
@@ -1,16 +1,4 @@
-import math%0A
impo
|
6855564716827546a5b68c154b0d95daba969119
|
add more user tests
|
src/inventory/tests/tests.py
|
src/inventory/tests/tests.py
|
from django.test import TestCase
from inventory.models import *
class UserTests(TestCase):
    # Round-trips a User through the ORM and checks every persisted field.

    def test_for_fields(self):
        """ saving and loading users"""
        # NOTE(review): save() on Django-style models returns None, so
        # `initial_user` holds None, not the instance — confirm against
        # inventory.models.User.save() and drop the assignment if so.
        initial_user = User(username="user", password="pass", email="email",
                            f_name="fname", l_name="lname", active=True).save()
        loaded_user = User.objects.get(username="user")
        self.assertEqual(loaded_user.username, "user")
        self.assertEqual(loaded_user.password, "pass")
        self.assertEqual(loaded_user.email, "email")
        self.assertEqual(loaded_user.f_name, "fname")
        self.assertEqual(loaded_user.l_name, "lname")
        self.assertEqual(loaded_user.active, True)
        # __unicode__ of a user is its username.
        self.assertEqual(unicode(loaded_user), "user")
class CardTests(TestCase):
    """test cards"""
    # TODO(review): placeholder — no test methods implemented yet.
|
Python
| 0
|
@@ -185,16 +185,23 @@
= User(
+id=10,
username
@@ -263,16 +263,17 @@
_name=%22f
+_
name%22, l
@@ -280,16 +280,17 @@
_name=%22l
+_
name%22, a
@@ -351,25 +351,60 @@
get(
-username=%22user%22)%0A
+id=10)%0A%0A self.assertEqual(loaded_user.id, 10)
%0A
@@ -610,16 +610,17 @@
name, %22f
+_
name%22)%0A
@@ -665,16 +665,17 @@
name, %22l
+_
name%22)%0A
@@ -780,16 +780,18 @@
user%22)%0A%0A
+#
class Ca
@@ -805,24 +805,26 @@
(TestCase):%0A
+#
%22%22%22test
@@ -827,15 +827,272 @@
est
-cards%22%22%22
+saving and loading cards%22%22%22%0A%0A# initial_card = Card(id=1, repo_base=%22repo_base%22, repo_name=%22repo_name%22,%0A# card_name=%22card_name%22, query=%22query%22).save()%0A# loaded_card=Card.objects.get(id=1)%0A%0A# self.assertEqual(loaded_card.card_name, %22card_name%22)%0A%0A%0A
%0A%0A%0A
|
237b9d4577f004401c2385163b060c785692c8b6
|
add when_over and when_over_guessed fields to Event (db change)
|
src/knesset/events/models.py
|
src/knesset/events/models.py
|
from datetime import datetime
from django.db import models
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
from knesset.persons.models import Person
class Event(models.Model):
''' hold the when, who, what, where and which fields of events
and allows the users to contribute resources (through links)
and discuss upcoming events.
'''
when = models.DateTimeField()
who = models.ManyToManyField(Person)
what = models.TextField()
where = models.TextField()
which_type = models.ForeignKey(ContentType,
verbose_name=_('content type'),
related_name="event_for_%(class)s", null=True)
which_pk = models.TextField(_('object ID'), null=True)
which_object = generic.GenericForeignKey(ct_field="which_type", fk_field="which_pk")
@property
def is_future(self):
return self.when > datetime.now()
@property
def which(self):
return self.which_objects and unicode(self.which_object) or self.what
|
Python
| 0
|
@@ -496,24 +496,241 @@
TimeField()%0A
+ when_over = models.DateTimeField(null=True)%0A # KNESSET_TODO the end time of a committee meeting is not recorded anywhere,%0A # so we are left to guess%0A when_over_guessed = models.BooleanField(default=True)%0A
who = mo
|
ad8fd4ca9947716cf24fd9dd5f6427e390ec9cc7
|
change file dowload file
|
Main.py
|
Main.py
|
import json
import os
import re
import subprocess
import urllib2
import zipfile
def get_last_release(http_address):
http_stream = urllib2.urlopen(http_address)
res = http_stream.read()
res_json = json.loads(res)
print res_json["tag_name"], res_json["zipball_url"]
return res_json["tag_name"], res_json["zipball_url"]
def downLoad_last_release(version, zip_url):
path = r'F:'
file_name = version + r'.zip'
f = urllib2.urlopen(zip_url)
data = f.read()
with open(path + "/" + file_name, "wb") as code:
code.write(data)
print path + "/" + file_name + " Finish"
return path, path + "/" + file_name
def un_zip(zip_path, ext_path):
r = zipfile.is_zipfile(zip_path)
first_dic = False
filename = ""
if r:
fz = zipfile.ZipFile(zip_path, 'r')
for file in fz.namelist():
if not first_dic:
filename = file
first_dic = True
fz.extract(file, ext_path)
else:
print('This file is not zip file')
return ""
print filename + 'zip finish'
return ext_path + filename
def shell_exec(cmd):
child = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
info = child.stdout.read().decode('gb2312')
print "message:", info
return info
def set_environ(file_path):
os.chdir(file_path)
os.environ["GOPATH"] = file_path
# os.environ["path"] = os.environ["path"] + ";" + rice_path
print os.getcwd()
return
def get_file_path(owner, repo):
ves, zip_url = get_last_release("https://api.github.com/repos/" + owner + "/" + repo + "/releases/latest")
path, zip_path = downLoad_last_release(ves, zip_url)
file_path = un_zip(zip_path, path + "/")
return file_path
def go_build(file_path):
shell_info = shell_exec("go build -o win32.exe main.go")
if shell_info != "":
packs = re.findall("cannot find package \"(.*)\"", shell_info)
for pack in packs:
shell_exec("go get " + pack)
shell_exec("go build -o win32.exe main.go")
print "build success " + file_path + "win32.exe"
return file_path
def start(owner, repo, is_rice):
file_path = os.getcwd()
rice_path = ""
if is_rice:
set_environ(file_path)
shell_exec("go get github.com/GeertJohan/go.rice/rice")
rice_path = file_path + "/bin/rice.exe"
os.chdir(file_path)
repo_file = get_file_path(owner, repo)
if is_rice:
os.chdir(repo_file)
shell_exec(rice_path + " embed-go")
os.chdir(repo_file)
set_environ(repo_file)
repo_path = go_build(repo_file)
return
start("jacoblai", "Coolpy5Sub", True)
# get_last_release_zip("jacoblai", "Coolpy5Sub")
# for parent,dirnames,_ in os.walk("F:/"):
# for dirname in dirnames:
# print "parent is:" + parent
# print "dirname is" + dirname
# os.environ["GOPATH"] = "F:/" + filename
# print os.environ["GOPATH"]
# os.chdir("F:/" + filename)
# print os.getcwd()
# res = re.findall("cannot find package \"(.*)\"", info)
# print res
# for pack in res:
# child = subprocess.Popen('go get ' + pack, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
# info = child.stdout.read().decode('gb2312')
|
Python
| 0.000001
|
@@ -74,16 +74,41 @@
ipfile%0A%0A
+file_path = os.getcwd()%0A%0A
%0Adef get
@@ -419,13 +419,17 @@
h =
-r'F:'
+file_path
%0A
@@ -2229,36 +2229,8 @@
e):%0A
- file_path = os.getcwd()%0A
|
d53dc67fc002448c7b94758843223a17d4623483
|
Allow IP to be blank
|
lingcod/bookmarks/models.py
|
lingcod/bookmarks/models.py
|
from django.contrib.gis.db import models
from lingcod.features import register
from lingcod.features.models import Feature
from django.utils.html import escape
from django.conf import settings
class Bookmark(Feature):
description = models.TextField(default="", null=True, blank=True)
latitude = models.FloatField()
longitude = models.FloatField()
altitude = models.FloatField()
heading = models.FloatField(default=0)
tilt = models.FloatField(default=0)
roll = models.FloatField(default=0)
altitudeMode = models.FloatField(default=1)
ip = models.IPAddressField(default="0.0.0.0")
publicstate = models.TextField(default="{}")
@property
def kml(self):
camera = "<Camera>\n"
camera_params = ["latitude", "longitude", "altitude", "heading", "tilt", "roll", "altitudeMode"]
for p in camera_params:
val = self.__dict__[p]
if val is not None:
camera += " <%s>%s</%s>\n" % (p, val, p)
camera += " </Camera>\n"
return """
<Placemark id="%s">
<visibility>1</visibility>
<name>%s</name>
<description>%s</description>
<styleUrl>#%s-default</styleUrl>
%s
</Placemark>
""" % (self.uid, escape(self.name), escape(self.description), self.model_uid(),
camera)
@property
def kml_style(self):
return """
<Style id="%s-default">
<!-- invisible -->
<IconStyle>
<scale>0.0</scale>
</IconStyle>
<LabelStyle>
<scale>0.0</scale>
</LabelStyle>
</Style>
""" % (self.model_uid())
class Options:
manipulators = []
optional_manipulators = [ ]
verbose_name = 'Bookmark'
form = 'lingcod.bookmarks.forms.BookmarkForm'
icon_url = 'bookmarks/images/bookmark.png'
form_template = 'bookmarks/form.html'
show_template = 'bookmarks/show.html'
if settings.BOOKMARK_FEATURE:
Bookmark = register(Bookmark)
|
Python
| 0.000009
|
@@ -607,16 +607,39 @@
0.0.0.0%22
+, null=True, blank=True
)%0A pu
|
b293abaa52bffa705f4012d9726296c21db482c8
|
check if 'enabled' field exist in the Identity document
|
src/leap/mx/couchdbhelper.py
|
src/leap/mx/couchdbhelper.py
|
# -*- encoding: utf-8 -*-
# couchdb.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for working with CouchDB or BigCouch instances which store email alias
maps, user UUIDs, and GPG keyIDs.
"""
from paisley import client
from twisted.internet import defer
from twisted.python import log
from leap.soledad.common.couch import CouchDatabase
class ConnectedCouchDB(client.CouchDB):
"""
Connect to a CouchDB instance.
CouchDB document for testing is '_design', and the view is simply
a preconfigured set of mapped responses.
"""
def __init__(self, host, port=5984, dbName=None, username=None,
password=None, *args, **kwargs):
"""
Connect to a CouchDB instance.
:param host: A hostname string for the CouchDB server.
:type host: str
:param port: The port of the CouchDB server.
:type port: int
:param dbName: (optional) The default database to bind queries to.
:type dbName: str
:param username: (optional) The username for authorization.
:type username: str
:param str password: (optional) The password for authorization.
:type password: str
"""
self._mail_couch_url = "http://%s:%s@%s:%s" % (username,
password,
host,
port)
client.CouchDB.__init__(self,
host,
port=port,
dbName=dbName,
username=username,
password=password,
*args, **kwargs)
self._cache = {}
def createDB(self, dbName):
"""
Overrides ``paisley.client.CouchDB.createDB``.
"""
pass
def deleteDB(self, dbName):
"""
Overrides ``paisley.client.CouchDB.deleteDB``.
"""
pass
def getUuidAndPubkey(self, address):
"""
Query couch and return a deferred that will fire with the uuid and pgp
public key for address.
:param address: A string representing the email or alias to check.
:type address: str
:return: A deferred that will fire with the user's uuid and pgp public
key.
:rtype twisted.defer.Deferred
"""
# TODO: Cache results
d = self.openView(docId="Identity",
viewId="by_address/",
key=address,
reduce=False,
include_docs=True)
def _get_uuid_and_pubkey_cbk(result):
uuid = None
pubkey = None
if result["rows"]:
doc = result["rows"][0]["doc"]
if doc["enabled"]:
uuid = doc["user_id"]
if "keys" in doc:
pubkey = doc["keys"]["pgp"]
return uuid, pubkey
d.addCallback(_get_uuid_and_pubkey_cbk)
return d
def getPubkey(self, uuid):
"""
Query couch and return a deferred that will fire with the pgp public
key for user with given uuid.
:param uuid: The uuid of a user
:type uuid: str
:return: A deferred that will fire with the pgp public key for
the user.
:rtype: Deferred
"""
d = self.openView(docId="Identity",
viewId="by_user_id/",
key=uuid,
reduce=False,
include_docs=True)
def _get_pubkey_cbk(result):
pubkey = None
try:
doc = result["rows"][0]["doc"]
pubkey = doc["keys"]["pgp"]
except (KeyError, IndexError):
pass
return pubkey
d.addCallbacks(_get_pubkey_cbk, log.err)
return d
def getCertExpiry(self, fingerprint):
"""
Query couch and return a deferred that will fire with the expiration
date for the cert with the given fingerprint.
:param fingerprint: The cert fingerprint
:type fingerprint: str
:return: A deferred that will fire with the cert expiration date as a
str.
:rtype: Deferred
"""
d = self.openView(docId="Identity",
viewId="cert_expiry_by_fingerprint/",
key=fingerprint,
reduce=False,
include_docs=True)
def _get_cert_expiry_cbk(result):
try:
expiry = result["rows"][0]["value"]
except (KeyError, IndexError):
expiry = None
return expiry
d.addCallback(_get_cert_expiry_cbk)
return d
def put_doc(self, uuid, doc):
"""
Update a document.
If the document currently has conflicts, put will fail.
If the database specifies a maximum document size and the document
exceeds it, put will fail and raise a DocumentTooBig exception.
:param uuid: The uuid of a user
:type uuid: str
:param doc: A Document with new content.
:type doc: leap.soledad.common.couch.CouchDocument
:return: A deferred which fires with the new revision identifier for
the document if the Document object has being updated, or
which fails with CouchDBError if there was any error.
"""
# TODO: that should be implemented with paisley
url = self._mail_couch_url + "/user-%s" % (uuid,)
try:
db = CouchDatabase.open_database(url, create=False)
return defer.succeed(db.put_doc(doc))
except Exception as e:
return defer.fail(CouchDBError(e.message))
class CouchDBError(Exception):
pass
|
Python
| 0
|
@@ -3520,16 +3520,40 @@
if
+%22enabled%22 not in doc or
doc%5B%22ena
@@ -3769,24 +3769,69 @@
pubkey_cbk)%0A
+ d.addErrback(lambda _: (None, None))%0A
retu
|
a77f7028b326aaa52a3b159206ecb57356f05b7f
|
Replace assertEqual(None, *) with assertIsNone in tests
|
tempest/tests/test_glance_http.py
|
tempest/tests/test_glance_http.py
|
# Copyright 2014 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslotest import mockpatch
import six
from six.moves import http_client as httplib
from tempest.common import glance_http
from tempest import exceptions
from tempest.tests import base
from tempest.tests import fake_auth_provider
from tempest.tests import fake_http
class TestGlanceHTTPClient(base.TestCase):
def setUp(self):
super(TestGlanceHTTPClient, self).setUp()
self.fake_http = fake_http.fake_httplib2(return_type=200)
# NOTE(maurosr): using http here implies that we will be using httplib
# directly. With https glance_client would use an httpS version, but
# the real backend would still be httplib anyway and since we mock it
# that there is no reason to care.
self.endpoint = 'http://fake_url.com'
self.fake_auth = fake_auth_provider.FakeAuthProvider()
self.fake_auth.base_url = mock.MagicMock(return_value=self.endpoint)
self.useFixture(mockpatch.PatchObject(
httplib.HTTPConnection,
'request',
side_effect=self.fake_http.request(self.endpoint)[1]))
self.client = glance_http.HTTPClient(self.fake_auth, {})
def _set_response_fixture(self, header, status, resp_body):
resp = fake_http.fake_httplib(header, status=status,
body=six.StringIO(resp_body))
self.useFixture(mockpatch.PatchObject(httplib.HTTPConnection,
'getresponse', return_value=resp))
return resp
def test_raw_request(self):
self._set_response_fixture({}, 200, 'fake_response_body')
resp, body = self.client.raw_request('GET', '/images')
self.assertEqual(200, resp.status)
self.assertEqual('fake_response_body', body.read())
def test_raw_request_with_response_chunked(self):
self._set_response_fixture({}, 200, 'fake_response_body')
self.useFixture(mockpatch.PatchObject(glance_http,
'CHUNKSIZE', 1))
resp, body = self.client.raw_request('GET', '/images')
self.assertEqual(200, resp.status)
self.assertEqual('fake_response_body', body.read())
def test_raw_request_chunked(self):
self.useFixture(mockpatch.PatchObject(glance_http,
'CHUNKSIZE', 1))
self.useFixture(mockpatch.PatchObject(httplib.HTTPConnection,
'endheaders'))
self.useFixture(mockpatch.PatchObject(httplib.HTTPConnection,
'send'))
self._set_response_fixture({}, 200, 'fake_response_body')
req_body = six.StringIO('fake_request_body')
resp, body = self.client.raw_request('PUT', '/images', body=req_body)
self.assertEqual(200, resp.status)
self.assertEqual('fake_response_body', body.read())
call_count = httplib.HTTPConnection.send.call_count
self.assertEqual(call_count - 1, req_body.tell())
def test_get_connection_class_for_https(self):
conn_class = self.client._get_connection_class('https')
self.assertEqual(glance_http.VerifiedHTTPSConnection, conn_class)
def test_get_connection_class_for_http(self):
conn_class = (self.client._get_connection_class('http'))
self.assertEqual(httplib.HTTPConnection, conn_class)
def test_get_connection_http(self):
self.assertTrue(isinstance(self.client._get_connection(),
httplib.HTTPConnection))
def test_get_connection_https(self):
endpoint = 'https://fake_url.com'
self.fake_auth.base_url = mock.MagicMock(return_value=endpoint)
self.client = glance_http.HTTPClient(self.fake_auth, {})
self.assertTrue(isinstance(self.client._get_connection(),
glance_http.VerifiedHTTPSConnection))
def test_get_connection_url_not_fount(self):
self.useFixture(mockpatch.PatchObject(self.client, 'connection_class',
side_effect=httplib.InvalidURL()
))
self.assertRaises(exceptions.EndpointNotFound,
self.client._get_connection)
def test_get_connection_kwargs_default_for_http(self):
kwargs = self.client._get_connection_kwargs('http')
self.assertEqual(600, kwargs['timeout'])
self.assertEqual(1, len(kwargs.keys()))
def test_get_connection_kwargs_set_timeout_for_http(self):
kwargs = self.client._get_connection_kwargs('http', timeout=10,
ca_certs='foo')
self.assertEqual(10, kwargs['timeout'])
# nothing more than timeout is evaluated for http connections
self.assertEqual(1, len(kwargs.keys()))
def test_get_connection_kwargs_default_for_https(self):
kwargs = self.client._get_connection_kwargs('https')
self.assertEqual(600, kwargs['timeout'])
self.assertEqual(None, kwargs['ca_certs'])
self.assertEqual(None, kwargs['cert_file'])
self.assertEqual(None, kwargs['key_file'])
self.assertEqual(False, kwargs['insecure'])
self.assertEqual(True, kwargs['ssl_compression'])
self.assertEqual(6, len(kwargs.keys()))
def test_get_connection_kwargs_set_params_for_https(self):
kwargs = self.client._get_connection_kwargs('https', timeout=10,
ca_certs='foo',
cert_file='/foo/bar.cert',
key_file='/foo/key.pem',
insecure=True,
ssl_compression=False)
self.assertEqual(10, kwargs['timeout'])
self.assertEqual('foo', kwargs['ca_certs'])
self.assertEqual('/foo/bar.cert', kwargs['cert_file'])
self.assertEqual('/foo/key.pem', kwargs['key_file'])
self.assertEqual(True, kwargs['insecure'])
self.assertEqual(False, kwargs['ssl_compression'])
self.assertEqual(6, len(kwargs.keys()))
class TestResponseBodyIterator(base.TestCase):
def test_iter_default_chunk_size_64k(self):
resp = fake_http.fake_httplib({}, six.StringIO(
'X' * (glance_http.CHUNKSIZE + 1)))
iterator = glance_http.ResponseBodyIterator(resp)
chunks = list(iterator)
self.assertEqual(chunks, ['X' * glance_http.CHUNKSIZE, 'X'])
|
Python
| 0.000002
|
@@ -5631,36 +5631,31 @@
self.assert
-Equal(
+Is
None
-,
+(
kwargs%5B'ca_c
@@ -5677,36 +5677,31 @@
self.assert
-Equal(
+Is
None
-,
+(
kwargs%5B'cert
@@ -5732,20 +5732,15 @@
sert
-Equal(
+Is
None
-,
+(
kwar
|
d8d6ce50c6fef9157f76e1dfefef24d15532a4d9
|
Add missing contexts to integration tests
|
test/integration/ggrc/__init__.py
|
test/integration/ggrc/__init__.py
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Base test case for all ggrc integration tests."""
import logging
from sqlalchemy import exc
from flask.ext.testing import TestCase as BaseTestCase
from ggrc import db
from ggrc.app import app
# Hide errors during testing. Errors are still displayed after all tests are
# done. This is for the bad request error messages while testing the api calls.
logging.disable(logging.CRITICAL)
class TestCase(BaseTestCase):
# because it's required by unittests.
"""Base test case for all ggrc integration tests."""
maxDiff = None
@classmethod
def clear_data(cls):
"""Remove data from ggrc tables.
This is a helper function to remove any data that might have been generated
during a test. The ignored tables are the ones that don't exist or have
constant data in them, that was populated with migrations.
Note:
This is a hack because db.metadata.sorted_tables does not sort by
dependencies. The events table is given before Person table and reversed
order in then incorrect.
"""
ignore_tables = (
"categories",
"notification_types",
"object_types",
"options",
"relationship_test_mock_model",
"roles",
"test_model",
)
tables = set(db.metadata.tables).difference(ignore_tables)
for _ in range(len(tables)):
if len(tables) == 0:
break # stop the loop once all tables have been deleted
for table in reversed(db.metadata.sorted_tables):
if table.name in tables:
try:
db.engine.execute(table.delete())
tables.remove(table.name)
except exc.IntegrityError:
pass
db.session.commit()
def setUp(self):
self.clear_data()
def tearDown(self): # pylint: disable=no-self-use
db.session.remove()
@staticmethod
def create_app():
"""Flask specific function for running an app instance."""
app.config["SERVER_NAME"] = "localhost"
app.testing = True
app.debug = False
return app
def _check_response(self, response, expected_errors):
"""Test that response contains all expected errors and warnigs.
Args:
response: api response object.
expected_errors: dict of all expected errors by object type.
Raises:
AssertionError if an expected error or warning is not found in the
proper response block.
"""
messages = ("block_errors", "block_warnings", "row_errors", "row_warnings")
for block in response:
for message in messages:
expected = expected_errors.get(block["name"], {}).get(message, set())
self.assertEqual(set(expected), set(block[message]))
|
Python
| 0.000057
|
@@ -937,16 +937,316 @@
tions.%0A%0A
+ This function is used to speed up resetting of the database for each test.%0A the proper way would be to run all migrations on a fresh database, but that%0A would take too much time. This function should act as if the database was%0A just created, with the exception of autoincrement indexes.%0A
%0A Not
@@ -1252,26 +1252,34 @@
te:%0A Th
-is
+e deletion
is a hack b
@@ -1638,24 +1638,44 @@
est_model%22,%0A
+ %22contexts%22,%0A
)%0A ta
@@ -2098,16 +2098,119 @@
pass%0A
+ contexts = db.metadata.tables%5B%22contexts%22%5D%0A db.engine.execute(contexts.delete(contexts.c.id %3E 1))
%0A db.
|
7ca1b61cbe391093b1ffe1825d79b3e49fe9b989
|
handle exceptions during mapping to skip other tests
|
scripts/automation/regression/stateless_tests/stl_general_test.py
|
scripts/automation/regression/stateless_tests/stl_general_test.py
|
import os, sys
import unittest
from trex import CTRexScenario
from stateful_tests.trex_general_test import CTRexGeneral_Test
from trex_stl_lib.api import *
import time
from nose.tools import nottest
class CStlGeneral_Test(CTRexGeneral_Test):
"""This class defines the general stateless testcase of the TRex traffic generator"""
def setUp(self):
self.stl_trex = CTRexScenario.stl_trex if CTRexScenario.stl_trex else 'mock'
CTRexGeneral_Test.setUp(self)
# check basic requirements, should be verified at test_connectivity, here only skip test
if CTRexScenario.stl_init_error:
self.skip(CTRexScenario.stl_init_error)
def connect(self, timeout = 100):
# need delay and check only because TRex process might be still starting
sys.stdout.write('Connecting')
for i in range(timeout):
try:
sys.stdout.write('.')
sys.stdout.flush()
self.stl_trex.connect()
print('')
return True
except:
time.sleep(0.1)
print('')
return False
def map_ports(self, timeout = 100):
sys.stdout.write('Mapping ports')
for i in range(timeout):
sys.stdout.write('.')
sys.stdout.flush()
CTRexScenario.stl_ports_map = stl_map_ports(self.stl_trex)
if self.verify_bidirectional(CTRexScenario.stl_ports_map):
print('')
return True
time.sleep(0.1)
print('')
return False
# verify all the ports are bidirectional
@staticmethod
def verify_bidirectional(mapping_dict):
if len(mapping_dict['unknown']):
return False
if len(mapping_dict['bi']) * 2 == len(mapping_dict['map']):
return True
return False
@staticmethod
def get_port_count():
return CTRexScenario.stl_trex.get_port_count()
@staticmethod
def is_connected():
return CTRexScenario.stl_trex.is_connected()
class STLBasic_Test(CStlGeneral_Test):
# will run it first explicitly, check connectivity and configure routing
@nottest
def test_connectivity(self):
if not self.is_loopback:
try:
sys.stdout.flush()
sys.stdout.write('Configuring DUT... ')
start_time = time.time()
if CTRexScenario.router_cfg['forceCleanConfig']:
CTRexScenario.router.load_clean_config()
CTRexScenario.router.configure_basic_interfaces()
CTRexScenario.router.config_pbr(mode = "config")
CTRexScenario.router.config_ipv6_pbr(mode = "config")
sys.stdout.write('done. (%ss)\n' % int(time.time() - start_time))
except Exception as e:
print('')
CTRexScenario.stl_init_error = 'Could not configure device, err: %s' % e
self.fail(CTRexScenario.stl_init_error)
return
try:
sys.stdout.write('Starting TRex... ')
start_time = time.time()
cores = self.configuration.trex.get('trex_cores', 1)
if self.is_virt_nics and cores > 1:
raise Exception('Number of cores should be 1 with virtual NICs')
if not CTRexScenario.no_daemon:
self.trex.start_stateless(c = cores)
self.stl_trex = STLClient(username = 'TRexRegression',
server = self.configuration.trex['trex_name'],
verbose_level = CTRexScenario.json_verbose)
CTRexScenario.stl_trex = self.stl_trex
sys.stdout.write('done. (%ss)\n' % int(time.time() - start_time))
except Exception as e:
print('')
CTRexScenario.stl_init_error = 'Could not start stateless TRex, err: %s' % e
self.fail(CTRexScenario.stl_init_error)
return
if not self.connect():
CTRexScenario.stl_init_error = 'Client could not connect'
self.fail(CTRexScenario.stl_init_error)
return
print('Connected')
if not self.map_ports():
CTRexScenario.stl_init_error = 'Client could not map ports'
self.fail(CTRexScenario.stl_init_error)
return
print('Got ports mapping: %s' % CTRexScenario.stl_ports_map)
|
Python
| 0
|
@@ -681,36 +681,33 @@
nect(self, t
-imeout
+ries
= 10
-0
):%0A #
@@ -833,38 +833,36 @@
for i in range(t
-imeout
+ries
):%0A t
@@ -1073,33 +1073,33 @@
time.sleep(0.
-1
+5
)%0A print(
@@ -1153,20 +1153,17 @@
f, t
-imeout
+ries
= 10
-0
):%0A
@@ -1231,14 +1231,12 @@
ge(t
-imeout
+ries
):%0A
@@ -1291,32 +1291,53 @@
.stdout.flush()%0A
+ try:%0A
CTRe
@@ -1391,16 +1391,20 @@
l_trex)%0A
+
@@ -1474,32 +1474,36 @@
+
print('')%0A
@@ -1504,32 +1504,36 @@
+
+
return True%0A
@@ -1520,32 +1520,156 @@
return True%0A
+ except Exception as e:%0A print('%5CnException during mapping: %25s' %25 e)%0A return False%0A
time
@@ -1677,17 +1677,17 @@
sleep(0.
-1
+5
)%0A
|
d137005229e180b509f0a2f83f5d2472b40d8890
|
Set up Sentry if we're configured for it (so I don't lose this code again)
|
run.py
|
run.py
|
import os
from os.path import abspath, dirname, join
from makerbase import app
if 'MAKERBASE_SETTINGS' not in os.environ:
os.environ['MAKERBASE_SETTINGS'] = join(dirname(abspath(__file__)), 'settings.py')
app.config.from_envvar('MAKERBASE_SETTINGS')
if __name__ == '__main__':
app.run(debug=True)
|
Python
| 0
|
@@ -252,16 +252,146 @@
INGS')%0A%0A
+if 'SENTRY_DSN' in app.config:%0A from raven.contrib.flask import Sentry%0A sentry = Sentry(app, dsn=app.config%5B'SENTRY_DSN'%5D)%0A%0A
if __nam
|
a4656021f6a97bf5ffccb3d6e522515769ba0d21
|
Remove unnecessary calls to disable_continuous_mode
|
run.py
|
run.py
|
import argparse
import serial
import threading
from io import BufferedRWPair, TextIOWrapper
from time import sleep
temp_usb = '/dev/ttyAMA0'
BAUD_RATE = 9600
parser = argparse.ArgumentParser()
parser.add_argument('oxygen', help='The USB port of the oxygen sensor.')
parser.add_argument('salinity', help='The USB port of the salinity sensor.')
parser.add_argument('server_ip', help='The IP address of the lighthouse node.')
parser.add_argument('port', help='The port of the lighthouse node.')
def init_db():
# TODO: initialize the sqlite database.
pass
def create_connection(usb_port):
print('Creating connection on {}'.format(usb_port))
ser = serial.Serial(usb_port, BAUD_RATE)
# disable_continuous_mode(ser)
return TextIOWrapper(BufferedRWPair(ser, ser), newline='\r', encoding='ascii', line_buffering=True)
def disable_continuous_mode(conn: serial.Serial):
# TODO: research if we need to send this command every time we connect to the sensors, or if it only
# needs to be sent once to disable continuous mode. If only once we should move this code into a
# separate python file.
print('Disabling continuous mode...')
conn.write(bytes('E\r', 'ascii'))
if conn.inWaiting() > 0:
# clear the buffer if there is anything waiting.
print('Clearing buffer...')
conn.read(conn.inWaiting())
def save_data(temperature, salinity, oxygen):
# TODO save data to database (sqlite)
pass
def push_data(temperature, salinity, oxygen, server_ip, server_port):
payload = {'temperature': temperature, 'salinity': salinity, 'oxygen': oxygen}
# TODO push data to lighthouse node.
def initialize_serial_connections(oxy_usb, sal_usb):
temp_conn = create_connection(temp_usb)
sal_conn = create_connection(sal_usb)
oxy_conn = create_connection(oxy_usb)
disable_continuous_mode(temp_conn)
disable_continuous_mode(sal_conn)
disable_continuous_mode(oxy_conn)
return temp_conn, sal_conn, oxy_conn
def run_loop(oxy_usb, sal_usb, server_ip, server_port):
temp_conn, sal_conn, oxy_conn = initialize_serial_connections()
# TODO: Catch serial.serialutil.SerialException on read?
while True:
temp.write('R\r')
temp = temp_conn.readline()
sal.write('R\r')
sal = sal_conn.readline()
# TODO: send temp and sal to oxy sensor first, then retrieve oxy value.
# oxy.write(<salinity command here>)
# oxy.write(<temp command here>)
oxy.write('R\r')
oxy = oxy_conn.readline()
print('Temperature: {}, Dissolved Oxygen: {}, Salinity: {}'.format(temp, oxy, sal))
save_data(temp, oxy, sal)
push_data(temp, oxy, sal, server_ip, server_port)
# TODO: Determine how often we should be grabbing data from sensors and pushing to other pi node.
time.sleep(5)
if __name__ == '__main__':
# TODO: Create supervisord script to keep run.py running.
# TODO: Parse command line args for database connection info.
args = parser.parse_args()
run_loop(args.oxygen, args.salinity, args.server_ip, args.port)
|
Python
| 0.000003
|
@@ -1765,115 +1765,8 @@
b)%0A%0A
-%09disable_continuous_mode(temp_conn)%0A%09disable_continuous_mode(sal_conn)%0A%09disable_continuous_mode(oxy_conn)%0A%0A
%09ret
|
68d465988378f24e74f8dd098919031d3fcfa2f4
|
fix source reinsertion bug
|
run.py
|
run.py
|
import spider
import sys
import os
import json
'''
requires spider.py be in the same directory as this module
spider.py can be found at http://github.com/shariq/notion-on-firebase
'''
def get_firebase_json_path(firebase_path):
return os.path.abspath(os.path.join(firebase_path, 'firebase.json'))
def add_to_firebase_json(firebase_path, new_rewrites):
firebase_json_path = get_firebase_json_path(firebase_path)
with open(firebase_json_path) as handle:
firebase_json = json.loads(handle.read())
if 'rewrites' not in firebase_json['hosting']:
firebase_json['hosting']['rewrites'] = []
existing_rewrites = firebase_json['hosting']['rewrites']
for new_rewrite in new_rewrites:
for existing_rewrite in existing_rewrites[:]:
if existing_rewrite['destination'] == new_rewrite['destination']:
if existing_rewrite['source'] == new_rewrite['source']:
continue
print 'warning: removing', existing_rewrite
existing_rewrites.remove(existing_rewrite)
elif existing_rewrite['source'] == new_rewrite['source']:
print 'warning: removing', existing_rewrite
existing_rewrites.remove(existing_rewrite)
existing_rewrites.append(new_rewrite)
firebase_json['hosting']['rewrites'] = existing_rewrites
dumped = json.dumps(firebase_json, indent=4)
with open(firebase_json_path, 'w') as handle:
handle.write(dumped)
def get_firebase_public_path(firebase_path):
firebase_json_path = get_firebase_json_path(firebase_path)
with open(firebase_json_path) as handle:
contents = handle.read()
relative_public = json.loads(contents)['hosting']['public']
return os.path.join(firebase_path, relative_public)
def main(root_page, firebase_path):
print 'root_page:', root_page
print 'firebase_path:', firebase_path
firebase_public_path = get_firebase_public_path(firebase_path)
print 'firebase_public_path:', firebase_public_path
print 'beginning spider...'
rewrites = spider.run(root_page, firebase_public_path)
print 'completed spider'
print 'rewrites:', rewrites
add_to_firebase_json(firebase_path, rewrites)
original_path = os.getcwd()
os.chdir(firebase_path)
print 'deploying...'
os.system('firebase deploy')
os.chdir(original_path)
if __name__ == '__main__':
if len(sys.argv) != 3:
print 'usage: python run.py <root_notion_page_id> <firebase_path>'
print 'e.g, python run.py d065149ff38a4e7a9b908aeb262b0f4f ../firebase'
sys.exit(-1)
firebase_path = sys.argv[-1]
if not os.path.exists(firebase_path):
print 'error: that firebase_path could not be found. '
print '(path evaluated to {})'.format(os.path.abspath(firebase_path))
sys.exit(-1)
firebase_public_path = get_firebase_public_path(firebase_path)
if not os.path.exists(os.path.join(firebase_public_path, 'ga.js')):
print 'warning: ga.js was not found in your firebase public path'
print 'hit enter after placing it there or if you don\'t want ga.js'
print '(hint: this is a JS file from Google Analytics)'
raw_input()
root_page = sys.argv[-2]
main(root_page, firebase_path)
|
Python
| 0
|
@@ -847,109 +847,8 @@
'%5D:%0A
- if existing_rewrite%5B'source'%5D == new_rewrite%5B'source'%5D:%0A continue%0A
|
1c8a1bfeef8206267a45562d4932cece1cbea1b4
|
Fix some pylint issues
|
Trie.py
|
Trie.py
|
#! /usr/bin/env python
# vim: set encoding=utf-8
from ctypes import *
libtrie = cdll.LoadLibrary("./libtrie.so")
libtrie.trie_load.argtypes = [c_char_p]
libtrie.trie_load.restype = c_void_p
libtrie.trie_lookup.argtypes = [ c_void_p, c_char_p, c_char_p ]
libtrie.trie_lookup.restype = c_void_p
libtrie.trie_get_last_error.restype = c_char_p
class Trie(object):
def __init__(self, filename):
self.free_func = libtrie.trie_free
self.ptr = libtrie.trie_load(filename)
if self.ptr == 0:
err = libtrie.trie_get_last_error()
raise IOError(str(err))
def __del__(self):
if self:
self.free_func(self.ptr)
def lookup(self, key):
s = create_string_buffer('\000' * 256)
res = libtrie.trie_lookup(self.ptr, key, s)
if res:
return [s.decode('utf8') for s in s.value.split('\n')]
else:
return []
def test_main():
"""
This function creates a storage backed by a file and tests it by retrieving
a couple of records.
"""
import sys
t = Trie('prijmeni5.trie')
for name in sys.stdin.readlines():
name = name.strip()
for s in t.lookup(name):
print s
if __name__ == '__main__':
test_main()
|
Python
| 0.000077
|
@@ -62,17 +62,62 @@
import
-*
+cdll, c_char_p, c_void_p, create_string_buffer
%0A%0Alibtri
@@ -262,17 +262,16 @@
ypes = %5B
-
c_void_p
@@ -290,17 +290,16 @@
c_char_p
-
%5D%0Alibtri
|
e62db9661295ff3912dbaaaff0d9f267f0b7ffe1
|
Add url callback on custom login
|
auth.py
|
auth.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bottle.ext import auth
from utils import conf
try:
auth_import = conf('auth')['engine'].split('.')[-1]
auth_from = u".".join(conf('auth')['engine'].split('.')[:-1])
auth_engine = getattr(__import__(auth_from, fromlist=[auth_import]),
auth_import)
except:
print 'Set valid auth engine'
exit(0)
callback = u"{}://{}".format(
conf('openmining')['protocol'],
conf('openmining')['domain'])
if conf('openmining')['domain_port'] not in ['80', '443']:
callback = "{}:{}".format(callback, conf('openmining')['domain_port'])
if auth_import == 'Google':
engine = auth_engine(
conf('auth')['key'], conf('auth')['secret'], callback)
elif auth_import == 'Facebook':
# Not working requered parans
engine = auth_engine()
elif auth_import == 'Twitter':
# Not working requered parans
engine = auth_engine()
else:
engine = auth_engine()
auth = auth.AuthPlugin(engine)
|
Python
| 0
|
@@ -957,16 +957,37 @@
_engine(
+callback_url=callback
)%0A%0Aauth
|
f0681b5dbe576e9a3cf3c1f5d448ffd97b4a7bba
|
Correct ID qry for put.
|
src/devilry_subjectadmin/devilry_subjectadmin/rest/relateduser.py
|
src/devilry_subjectadmin/devilry_subjectadmin/rest/relateduser.py
|
"""
Manage related users.
"""
from django.db.models import Q
from djangorestframework.resources import ModelResource
from djangorestframework.views import ListOrCreateModelView
from djangorestframework.views import InstanceModelView
from djangorestframework.permissions import IsAuthenticated
from devilry.apps.core.models import RelatedExaminer
from devilry.apps.core.models import RelatedStudent
from .auth import IsPeriodAdmin
from .mixins import SelfdocumentingMixin
from .log import logger
class IsPeriodAdminPeriodIdKwarg(IsPeriodAdmin):
ID_KWARG = 'period_id'
class RelatedUserResource(ModelResource):
fields = ('id', 'period', 'user', 'tags')
def user(self, instance):
if isinstance(instance, self.model):
user = instance.user
return {'email': user.email,
'username': user.username,
'id': user.id,
'full_name': user.devilryuserprofile.full_name,
'displayname': user.devilryuserprofile.full_name or user.username
}
def period(self, instance):
if isinstance(instance, self.model):
return instance.period_id
def validate_request(self, data, files=None):
user = data.get('user')
if user:
if isinstance(user, dict) and 'id' in user:
data['user'] = user['id']
if 'id' in data:
del data['id']
return super(RelatedUserResource, self).validate_request(data, files)
class ListRelatedUsersRestMixin(SelfdocumentingMixin):
def get_period_id(self): # Overridden in ListRelatedUsersOnAssignmentMixin
return self.kwargs['period_id']
def get_queryset(self):
period_id = self.get_period_id()
qry = self.resource.model.objects.filter(period=period_id)
qry = qry.select_related('user', 'user__devilryuserprofile')
querystring = self.request.GET.get('query', '')
if len(querystring) > 0:
qry = qry.filter(Q(user__username__icontains=querystring) |
Q(user__email__icontains=querystring) |
Q(user__devilryuserprofile__full_name__icontains=querystring) |
Q(tags__icontains=querystring))
qry = qry.order_by('user__devilryuserprofile__full_name')
return qry
def get(self, request, **kwargs): # NOTE: We take **kwargs because this method is called with period_id or assignment_id(subclass), however it only uses ``request`` (the kwarg is used by permission handlers)
"""
Without the ``query`` parameter, list all users.
## Parameters
Use the ``query`` parameter in the querystring to search for users by:
- Full name
- Username
- Email
- Tags
Uses case-ignore-contains search.
# Returns
Get a list of related users. Each entry in the list is a dict/object
with the following attributes:
{responsetable}
"""
return super(ListRelatedUsersRestMixin, self).get(request)
def postprocess_get_docs(self, docs):
responsetable = self.htmlformat_response_from_fields()
return docs.format(modelname=self.resource.model.__name__,
responsetable=responsetable)
class CreateRelatedUserRestMixin(SelfdocumentingMixin):
def post(self, request, period_id):
"""
Create a {modelname}.
# Parameters
{parameterstable}
# Returns
{responsetable}
"""
result = super(CreateRelatedUserRestMixin, self).post(request)
created = result.cleaned_content
logger.info('User=%s created %s with id=%s (user_id=%s, tags=%s)', self.user,
self.resource.model.__name__, created.id, created.user_id,
created.tags)
return result
def postprocess_post_docs(self, docs):
responsetable = self.htmlformat_response_from_fields()
parameterstable = self.htmlformat_parameters_from_form(override_helptext={'user': 'The ID of the related user.'})
return docs.format(modelname=self.resource.model.__name__,
parameterstable=parameterstable,
responsetable=responsetable)
class InstanceRelatedUserRestBaseView(SelfdocumentingMixin, InstanceModelView):
permissions = (IsAuthenticated, IsPeriodAdminPeriodIdKwarg)
def put(self, request, period_id, id):
"""
Update the {modelname}.
# Parameters
{parameterstable}
# Returns
{responsetable}
"""
result = super(InstanceRelatedUserRestBaseView, self).put(request, id)
logger.info('User=%s updated %s with id=%s (user_id=%s, tags=%s)', self.user,
self.resource.model.__name__, id, result.user_id, result.tags)
return result
def delete(self, request, period_id, id):
"""
Delete the {modelname}.
# Response
Status 204, with empty body on success.
"""
userid = self.get_instance(id=id).user_id
result = super(InstanceRelatedUserRestBaseView, self).delete(request, id=id)
logger.info('User=%s deleted %s with id=%s (user_id=%s)', self.user,
self.resource.model.__name__, id, userid)
return result
def postprocess_docs(self, docs):
responsetable = self.htmlformat_response_from_fields()
parameterstable = self.htmlformat_parameters_from_form()
return docs.format(modelname=self.resource.model.__name__,
parameterstable=parameterstable,
responsetable=responsetable)
#############################
# Examiner
#############################
class RelatedExaminerResource(RelatedUserResource):
model = RelatedExaminer
class ListOrCreateRelatedExaminerRest(CreateRelatedUserRestMixin, ListRelatedUsersRestMixin,
ListOrCreateModelView):
resource = RelatedExaminerResource
permissions = (IsAuthenticated, IsPeriodAdminPeriodIdKwarg)
class InstanceRelatedExaminerRest(InstanceRelatedUserRestBaseView):
resource = RelatedExaminerResource
#############################
# Student
#############################
class RelatedStudentResource(RelatedUserResource):
model = RelatedStudent
fields = RelatedUserResource.fields + ('candidate_id',)
class ListOrCreateRelatedStudentRest(CreateRelatedUserRestMixin, ListRelatedUsersRestMixin,
ListOrCreateModelView):
resource = RelatedStudentResource
permissions = (IsAuthenticated, IsPeriodAdminPeriodIdKwarg)
class InstanceRelatedStudentRest(InstanceRelatedUserRestBaseView):
"""
Read, update and delete a single related student.
"""
resource = RelatedStudentResource
|
Python
| 0
|
@@ -4730,16 +4730,19 @@
equest,
+id=
id)%0A
|
95723719050aa08119ed2478c0bb40253a2b0b3e
|
Remove methods with unnecessary super delegation.
|
libqtile/layout/max.py
|
libqtile/layout/max.py
|
# Copyright (c) 2008, Aldo Cortesi. All rights reserved.
# Copyright (c) 2017, Dirk Hartmann.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from libqtile.layout.base import _SimpleLayoutBase
class Max(_SimpleLayoutBase):
"""Maximized layout
A simple layout that only displays one window at a time, filling the
screen_rect. This is suitable for use on laptops and other devices with
small screens. Conceptually, the windows are managed as a stack, with
commands to switch to next and previous windows in the stack.
"""
defaults = [("name", "max", "Name of this layout.")]
def __init__(self, **config):
super().__init__(**config)
self.add_defaults(Max.defaults)
def clone(self, group):
return super().clone(group)
def add(self, client):
return super().add(client, 1)
def configure(self, client, screen_rect):
if self.clients and client is self.clients.current_client:
client.place(
screen_rect.x,
screen_rect.y,
screen_rect.width,
screen_rect.height,
0,
None
)
client.unhide()
else:
client.hide()
cmd_previous = _SimpleLayoutBase.previous
cmd_next = _SimpleLayoutBase.next
cmd_up = cmd_previous
cmd_down = cmd_next
|
Python
| 0
|
@@ -1723,73 +1723,8 @@
s)%0A%0A
- def clone(self, group):%0A return super().clone(group)%0A%0A
|
d57bddf609ad84131b2f1e5cd46ebcc8798a1075
|
Version 0.12.2
|
starlette/__init__.py
|
starlette/__init__.py
|
__version__ = "0.12.1"
|
Python
| 0
|
@@ -13,11 +13,11 @@
= %220.12.
-1
+2
%22%0A
|
b5ee1f3dfccd3a18698ada03442854479e406d37
|
Update expression-add-operators.py
|
Python/expression-add-operators.py
|
Python/expression-add-operators.py
|
# Time: O(3^n)
# Space: O(n)
#
# Given a string that contains only digits 0-9
# and a target value, return all possibilities
# to add operators +, -, or * between the digits
# so they evaluate to the target value.
#
# Examples:
# "123", 6 -> ["1+2+3", "1*2*3"]
# "232", 8 -> ["2*3+2", "2+3*2"]
# "00", 0 -> ["0+0", "0-0", "0*0"]
# "3456237490", 9191 -> []
#
class Solution(object):
def addOperators(self, num, target):
"""
:type num: str
:type target: int
:rtype: List[str]
"""
result, expr = [], []
self.addOperatorsDFS(num, target, 0, 0, 0, expr, result)
return result
def addOperatorsDFS(self, s, target, pos, operand1, operand2, expr, result):
# Base Case 1
if pos == len(s):
if operand1 + operand2 == target:
e = "".join(expr)
e = e[1:] if e[0] == '+' else e
result.append(e)
return True
return False
num, i = 0, pos
num_str = ""
while i < len(s):
num_str += s[i]
num = num * 10 + ord(s[i]) - ord('0')
# Case '+':
expr.append("+"), expr.append(num_str)
self.addOperatorsDFS(s, target, i + 1, operand1 + operand2, num, expr, result)
expr.pop(), expr.pop()
# '-' and '*' could be used only if the expression is not empty.
if expr:
# Case '-':
expr.append("-"), expr.append(num_str)
self.addOperatorsDFS(s, target, i + 1, operand1 + operand2, -num, expr, result)
expr.pop(), expr.pop()
# Case '*':
expr.append("*"), expr.append(num_str)
self.addOperatorsDFS(s, target, i + 1, operand1, operand2 * num, expr, result)
expr.pop(), expr.pop()
# Char is '0'.
if num == 0:
break
i += 1
|
Python
| 0.000002
|
@@ -1333,29 +1333,16 @@
pr.pop()
-%0A
%0A%0A
|
5ebf34e1c572e5db9012af4228eaca2a8461b8d9
|
add some extra debug logging to smr-reduce
|
smr/reduce.py
|
smr/reduce.py
|
#!/usr/bin/env python
import sys
from .shared import get_config, configure_logging
def main():
if len(sys.argv) < 2:
sys.stderr.write("usage: smr-reduce config.py\n")
sys.exit(1)
config = get_config(sys.argv[1])
configure_logging(config)
try:
for result in iter(sys.stdin.readline, ""):
config.REDUCE_FUNC(result.rstrip()) # remove trailing linebreak
except (KeyboardInterrupt, SystemExit):
# we want to output results even if user aborted
config.OUTPUT_RESULTS_FUNC()
else:
config.OUTPUT_RESULTS_FUNC()
|
Python
| 0
|
@@ -340,27 +340,17 @@
-config.REDUCE_FUNC(
+result =
resu
@@ -360,17 +360,16 @@
rstrip()
-)
# remov
@@ -389,16 +389,110 @@
nebreak%0A
+ logging.debug(%22smr-reduce got %25s%22, result)%0A config.REDUCE_FUNC(result)%0A
exce
|
af4d3317bc9af09a5ea7feb069aff3a05cc32c67
|
Version bump to 0.4.0b9
|
navbar/__init__.py
|
navbar/__init__.py
|
VERSION = (0, 4, 0, "b8")
def get_version():
if VERSION[3] != "final":
return "%s.%s.%s%s" % (VERSION[0], VERSION[1], VERSION[2], VERSION[3])
else:
return "%s.%s.%s" % (VERSION[0], VERSION[1], VERSION[2])
__version__ = get_version()
|
Python
| 0
|
@@ -19,9 +19,9 @@
, %22b
-8
+9
%22)%0A%0A
@@ -239,20 +239,21 @@
on__ = get_version()
+%0A
|
dd020b279f011ff78a6a41571a839e4c57333e93
|
Rename username field to userspec (#196).
|
devilry/apps/core/models/relateduser.py
|
devilry/apps/core/models/relateduser.py
|
import re
from django.db import models
from django.db.models import Q
from django.core.exceptions import ValidationError
from period import Period
from node import Node
from abstract_is_admin import AbstractIsAdmin
class RelatedUserBase(models.Model, AbstractIsAdmin):
"""
Base class for :cls:`RelatedExaminer` and cls:`RelatedStudent`.
This is used to generate AssignmentGroups and
.. attribute:: username
One or more usernames followed by optional tags. Format: usernameA, ...., usernameN (tag1, tag2, ..., tagN).
For RelatedExaminer, only a single username is allowed.
"""
usersandtags_patt = r'((?:\w+\s*,\s*)*\w+)\s*\(((?:\w+\s*,\s*)*\w+)\)$'
username = models.CharField(max_length=200,
help_text='One or more usernames followed by optional tags. Format: usernameA, ...., usernameN (tag1, tag2, ..., tagN). For RelatedExaminer, only a single username is allowed.')
class Meta:
abstract = True # This model will then not be used to create any database table. Instead, when it is used as a base class for other models, its fields will be added to those of the child class.
unique_together = ('period', 'username')
app_label = 'core'
@classmethod
def q_is_admin(cls, user_obj):
return Q(period__admins=user_obj) | \
Q(period__parentnode__admins=user_obj) | \
Q(period__parentnode__parentnode__pk__in=Node._get_nodepks_where_isadmin(user_obj))
def clean(self, *args, **kwargs):
super(RelatedUserBase, self).clean(*args, **kwargs)
if not self.patt.match(self.username):
raise ValidationError('Invaid related user.')
def __unicode__(self):
return '{0}:{1}'.format(self.period, self.username)
class RelatedExaminer(RelatedUserBase):
"""
.. attribute:: period
A django.db.models.ForeignKey_ that points to the `Period`_.
"""
patt = re.compile('^' + RelatedUserBase.usersandtags_patt)
period = models.ForeignKey(Period, related_name='relatedexaminers',
help_text='The related period.')
class RelatedStudent(RelatedUserBase):
"""
.. attribute:: period
A django.db.models.ForeignKey_ that points to the `Period`_.
"""
patt = re.compile(r'^(?:(.+?)\s*::\s*)?' + RelatedUserBase.usersandtags_patt)
period = models.ForeignKey(Period, related_name='relatedstudents',
help_text='The related period.')
|
Python
| 0
|
@@ -417,20 +417,20 @@
e:: user
-name
+spec
%0A%0A
@@ -696,20 +696,20 @@
user
-name
+spec
= model
@@ -1207,20 +1207,20 @@
', 'user
-name
+spec
')%0A
@@ -1638,20 +1638,20 @@
elf.user
-name
+spec
):%0A
@@ -1785,20 +1785,20 @@
elf.user
-name
+spec
)%0A%0A%0Aclas
|
4b330755edab7a57de6d39a7e365c5f79df81065
|
Update config.py
|
blaspy/config.py
|
blaspy/config.py
|
"""
Copyright (c) 2014, The University of Texas at Austin.
All rights reserved.
This file is part of BLASpy and is available under the 3-Clause
BSD License, which can be found in the LICENSE file at the top-level
directory or at http://opensource.org/licenses/BSD-3-Clause
"""
from .errors import raise_blas_os_error
from ctypes import cdll
from os import chdir, path
from platform import system
from struct import calcsize
# The name of the BLAS .so or .dll file. By default this is the OpenBLAS reference
# implementation bundled with BLASpy. Only modify if you wish to use a different version of BLAS
# or if your operating system is not supported by BLASpy out of the box.
BLAS_NAME = "" # default is ""
# True if the BLAS .so or .dll file is in the blaspy/lib subdirectory,
# False if Python should search for it.
IN_BLASPY_SUBDIRECTORY = True # default is True
###############################
# DO NOT EDIT BELOW THIS LINE #
###############################
# find the appropriate BLAS to use
if BLAS_NAME == "": # try to use included OpenBLAS
if system() == "Windows":
if calcsize("P") == 8: # 64-bit
BLAS_NAME = "libopenblas-0.2.13-win64-int32.dll"
SUB_DIRECTORY = "win64"
else: # 32-bit
BLAS_NAME = "libopenblas-0.2.13-win32.dll"
SUB_DIRECTORY = "win32"
elif system() == "Linux":
if calcsize("P") == 8: # 64-bit
BLAS_NAME = "libopenblas-0.2.13-linux64.so"
SUB_DIRECTORY = "linux64"
else: # 32-bit
BLAS_NAME = "libopenblas-0.2.13-linux32.so"
SUB_DIRECTORY = "linux32"
else: # no appropriate OpenBLAS included, BLAS_NAME_OVERRIDE must be used
raise_blas_os_error()
else:
SUB_DIRECTORY = ""
# Change the directory and load the library
if IN_BLASPY_SUBDIRECTORY:
chdir(str(path.dirname(__file__))[:-6] + "lib/" + SUB_DIRECTORY)
_libblas = cdll.LoadLibrary(BLAS_NAME)
|
Python
| 0
|
@@ -1073,16 +1073,72 @@
penBLAS%0A
+ PREPEND = str(path.dirname(__file__))%5B:-6%5D + %22lib/%22%0A
if s
@@ -1159,16 +1159,16 @@
ndows%22:%0A
-
@@ -1277,31 +1277,31 @@
-SUB_DIRECTORY =
+chdir(PREPEND +
%22win64%22
%0A
@@ -1296,16 +1296,17 @@
%22win64%22
+)
%0A
@@ -1393,30 +1393,52 @@
-SUB_DIRECTORY = %22win32
+chdir(PREPEND + %22win32%22)%0A PREPEND = %22
%22%0A
@@ -1570,38 +1570,33 @@
-SUB_DIRECTORY
+PREPEND +
= %22linux64%22%0A
@@ -1593,16 +1593,17 @@
%22linux64
+/
%22%0A
@@ -1688,30 +1688,25 @@
-SUB_DIRECTORY
+PREPEND +
= %22linux
@@ -1707,16 +1707,17 @@
%22linux32
+/
%22%0A el
@@ -1828,30 +1828,23 @@
lse:%0A
- SUB_DIRECTORY
+PREPEND
= %22%22%0A%0A#
@@ -1890,104 +1890,8 @@
ary%0A
-if IN_BLASPY_SUBDIRECTORY:%0A chdir(str(path.dirname(__file__))%5B:-6%5D + %22lib/%22 + SUB_DIRECTORY)%0A
_lib
@@ -1914,16 +1914,26 @@
Library(
+PREPEND +
BLAS_NAM
|
7549ba00495ec3d2561cf8fbc02f55728d6020ac
|
Bump version number
|
VMEncryption/main/Common.py
|
VMEncryption/main/Common.py
|
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
class CommonVariables:
utils_path_name = 'Utils'
extension_name = 'AzureDiskEncryptionForLinux'
extension_version = '0.1.0.999111'
extension_type = extension_name
extension_media_link = 'https://amextpaas.blob.core.windows.net/prod/' + extension_name + '-' + str(extension_version) + '.zip'
extension_label = 'Windows Azure VMEncryption Extension for Linux IaaS'
extension_description = extension_label
"""
disk/file system related
"""
sector_size = 512
luks_header_size = 4096 * 512
default_block_size = 52428800
min_filesystem_size_support = 52428800 * 3
#TODO for the sles 11, we should use the ext3
default_file_system = 'ext4'
default_mount_name = 'encrypted_disk'
dev_mapper_root = '/dev/mapper/'
disk_by_id_root = '/dev/disk/by-id'
BekVolumeFileSystem = 'vfat'
"""
parameter key names
"""
PassphraseFileNameKey = 'BekFileName'
KeyEncryptionKeyURLKey = 'KeyEncryptionKeyURL'
KeyVaultURLKey = 'KeyVaultURL'
AADClientIDKey = 'AADClientID'
KeyEncryptionAlgorithmKey = 'KeyEncryptionAlgorithm'
DiskFormatQuerykey = "DiskFormatQuery"
PassphraseKey = 'Passphrase'
BekVolumeFileSystemKey = "BekVolumeFileSystem"
"""
value for VolumeType could be OS or Data
"""
VolumeTypeKey = 'VolumeType'
AADClientSecretKey = 'AADClientSecret'
SecretUriKey = 'SecretUri'
"""
command types
"""
EnableEncryption = 'EnableEncryption'
EnableEncryptionFormat = 'EnableEncryptionFormat'
DisableEncryption = 'DisableEncryption'
"""
encryption config keys
"""
EncryptionEncryptionOperationKey = 'EncryptionOperation'
EncryptionDecryptionOperationKey = 'DecryptionOperation'
EncryptionVolumeTypeKey = 'VolumeType'
EncryptionDiskFormatQueryKey = 'DiskFormatQuery'
"""
crypt ongoing item config keys
"""
OngoingItemMapperNameKey = 'MapperName'
OngoingItemHeaderFilePathKey = 'HeaderFilePath'
OngoingItemOriginalDevNamePathKey = 'DevNamePath'
OngoingItemOriginalDevPathKey = 'DevicePath'
OngoingItemPhaseKey = 'Phase'
OngoingItemHeaderSliceFilePathKey = 'HeaderSliceFilePath'
OngoingItemFileSystemKey = 'FileSystem'
OngoingItemMountPointKey = 'MountPoint'
OngoingItemDeviceSizeKey = 'Size'
OngoingItemCurrentSliceIndexKey = 'CurrentSliceIndex'
OngoingItemFromEndKey = 'FromEnd'
OngoingItemCurrentDestinationKey = 'CurrentDestination'
OngoingItemCurrentTotalCopySizeKey = 'CurrentTotalCopySize'
OngoingItemCurrentLuksHeaderFilePathKey = 'CurrentLuksHeaderFilePath'
OngoingItemCurrentSourcePathKey = 'CurrentSourcePath'
OngoingItemCurrentBlockSizeKey = 'CurrentBlockSize'
"""
encryption phase devinitions
"""
EncryptionPhaseBackupHeader = 'BackupHeader'
EncryptionPhaseCopyData = 'EncryptingData'
EncryptionPhaseRecoverHeader = 'RecoverHeader'
EncryptionPhaseEncryptDevice = 'EncryptDevice'
EncryptionPhaseDone = 'Done'
"""
decryption phase constants
"""
DecryptionPhaseCopyData = 'DecryptingData'
DecryptionPhaseDone = 'Done'
"""
logs related
"""
InfoLevel = 'Info'
WarningLevel = 'Warning'
ErrorLevel = 'Error'
"""
error codes
"""
extension_success_status = 'success'
extension_error_status = 'error'
process_success = 0
success = 0
os_not_supported = 1
luks_format_error = 2
scsi_number_not_found = 3
device_not_blank = 4
environment_error = 5
luks_open_error = 6
mkfs_error = 7
folder_conflict_error = 8
mount_error = 9
mount_point_not_exists = 10
passphrase_too_long_or_none = 11
parameter_error = 12
create_encryption_secret_failed = 13
encrypttion_already_enabled = 14
passphrase_file_not_found = 15
command_not_support = 16
volue_type_not_support = 17
copy_data_error = 18
encryption_failed = 19
tmpfs_error = 20
backup_slice_file_error = 21
unknown_error = 100
class TestHooks:
search_not_only_ide = False
use_hard_code_passphrase = False
hard_code_passphrase = "Quattro!"
class DeviceItem(object):
def __init__(self):
#NAME,TYPE,FSTYPE,MOUNTPOINT,LABEL,UUID,MODEL
self.name = None
self.type = None
self.file_system = None
self.mount_point = None
self.label = None
self.uuid = None
self.model = None
self.size = None
def __str__(self):
return "name:" + str(self.name) + " type:" + str(self.type) + " fstype:" + str(self.file_system) + " mountpoint:" + str(self.mount_point) + " label:" + str(self.label) + " model:" + str(self.model)
class CryptItem(object):
def __init__(self):
self.mapper_name = None
self.dev_path = None
self.mount_point = None
self.file_system = None
self.luks_header_path = None
self.uses_cleartext_key = None
def __str__(self):
return ("name: " + str(self.mapper_name) + " dev_path:" + str(self.dev_path) +
" mount_point:" + str(self.mount_point) + " file_system:" + str(self.file_system) +
" luks_header_path:" + str(self.luks_header_path) +
" uses_cleartext_key:" + str(self.uses_cleartext_key))
|
Python
| 0.000002
|
@@ -794,17 +794,17 @@
.0.99911
-1
+2
'%0A ex
|
290a1f0cb301a6a4f4be2e218e8d97a5644cc2d3
|
Remove old Ensembl domain
|
rnacentral_pipeline/databases/ensembl/metadata/karyotypes.py
|
rnacentral_pipeline/databases/ensembl/metadata/karyotypes.py
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import csv
import json
import itertools as it
import requests
from retry import retry
from ratelimiter import RateLimiter
try:
from functools import lru_cache
except ImportError:
from functools32 import lru_cache
import six
DOMAINS = {
'ensemblgenomes',
'ensembl',
}
@lru_cache()
@retry(requests.HTTPError, tries=5, delay=1)
@RateLimiter(max_calls=15, period=1)
def find_species(domain):
response = requests.get(
'http://rest.%s.org/info/species' % domain,
headers={'Content-Type': 'application/json'}
)
response.raise_for_status()
species = []
raw = response.json()
for entry in raw['species']:
species.append(entry['name'])
return species
@lru_cache()
@retry(requests.HTTPError, tries=5, delay=1)
@RateLimiter(max_calls=15, period=1)
def fetch(species, domain):
response = requests.get(
'http://rest.%s.org/info/assembly/%s?bands=1' % (domain, species),
headers={'Content-Type': 'application/json'}
)
response.raise_for_status()
return response.json()
def default_bands(entry):
return {
"size": entry["length"],
"bands": [{
"start": 1,
"end": entry["length"]
}]
}
def process_chromosome(entry):
if 'bands' not in entry:
return default_bands(entry)
bands = []
for band in entry["bands"]:
bands.append({
"id": band["id"],
"start": band["start"],
"end": band["end"],
"type": band["stain"]
})
return {
"size": entry["length"],
"bands": bands
}
def process(raw):
result = {}
for entry in raw["top_level_region"]:
result[entry["name"]] = default_bands(entry)
if entry["coord_system"] == "chromosome":
result[entry['name']] = process_chromosome(entry)
return raw['default_coord_system_version'], result
def for_domain(domain, allowed=None):
for species in find_species(domain):
if not species or (allowed and species in allowed):
raw_data = fetch(species, domain)
yield process(raw_data)
def data(species=None):
results = six.moves.map(lambda d: for_domain(d, allowed=species), DOMAINS)
return it.chain.from_iterable(results)
def write(output, species=None):
writer = csv.writer(output)
for (assembly_id, bands) in data(species=species):
writer.writerow([assembly_id, json.dumps(bands)])
|
Python
| 0
|
@@ -858,30 +858,8 @@
= %7B%0A
- 'ensemblgenomes',%0A
|
34b5739216f5145ca0e6707bc4666138e2493308
|
Remove the executable flag from uploadhandler.py, it wasn't. Thanks to Florian for the report.
|
django/core/files/uploadhandler.py
|
django/core/files/uploadhandler.py
|
"""
Base file upload handler classes, and the built-in concrete subclasses
"""
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.uploadedfile import TemporaryUploadedFile, InMemoryUploadedFile
from django.utils import importlib
__all__ = ['UploadFileException','StopUpload', 'SkipFile', 'FileUploadHandler',
'TemporaryFileUploadHandler', 'MemoryFileUploadHandler',
'load_handler', 'StopFutureHandlers']
class UploadFileException(Exception):
"""
Any error having to do with uploading files.
"""
pass
class StopUpload(UploadFileException):
"""
This exception is raised when an upload must abort.
"""
def __init__(self, connection_reset=False):
"""
If ``connection_reset`` is ``True``, Django knows will halt the upload
without consuming the rest of the upload. This will cause the browser to
show a "connection reset" error.
"""
self.connection_reset = connection_reset
def __unicode__(self):
if self.connection_reset:
return u'StopUpload: Halt current upload.'
else:
return u'StopUpload: Consume request data, then halt.'
class SkipFile(UploadFileException):
"""
This exception is raised by an upload handler that wants to skip a given file.
"""
pass
class StopFutureHandlers(UploadFileException):
"""
Upload handers that have handled a file and do not want future handlers to
run should raise this exception instead of returning None.
"""
pass
class FileUploadHandler(object):
    """
    Abstract base class for streaming upload handlers.

    Subclasses must implement ``receive_data_chunk`` and ``file_complete``;
    the remaining hooks are optional no-ops.
    """
    chunk_size = 64 * 2 ** 10  #: The default chunk size is 64 KB.

    def __init__(self, request=None):
        self.request = request
        self.file_name = None
        self.content_type = None
        self.content_length = None
        self.charset = None

    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Hook called once with the raw request input before parsing begins.

        :input_data: an object that supports reading via ``.read()``.
        :META: ``request.META``.
        :content_length: integer value of the client's Content-Length header.
        :boundary: the boundary from the Content-Type header, with two '--'
            prepended.
        """
        pass

    def new_file(self, field_name, file_name, content_type, content_length, charset=None):
        """
        Record metadata for a file that has just started.

        As with any client-supplied data, ``content_length`` must not be
        trusted (and is sometimes absent).
        """
        self.field_name = field_name
        self.file_name = file_name
        self.content_type = content_type
        self.content_length = content_length
        self.charset = charset

    def receive_data_chunk(self, raw_data, start):
        """
        Consume one chunk from the streamed upload parser; ``start`` is the
        chunk's byte offset within the file. Must be provided by subclasses.
        """
        raise NotImplementedError()

    def file_complete(self, file_size):
        """
        Called when a file finishes; ``file_size`` is the size accumulated
        over all chunks. Subclasses must return a valid ``UploadedFile``.
        """
        raise NotImplementedError()

    def upload_complete(self):
        """
        Hook for any cleanup this handler needs once the upload is done.
        """
        pass
class TemporaryFileUploadHandler(FileUploadHandler):
    """
    Upload handler that streams data into a temporary file.
    """
    def __init__(self, *args, **kwargs):
        # No extra state; delegate straight to FileUploadHandler.
        super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs)
    def new_file(self, file_name, *args, **kwargs):
        """
        Create the file object to append to as data is coming in.
        """
        super(TemporaryFileUploadHandler, self).new_file(file_name, *args, **kwargs)
        # Size starts at 0 here and is fixed up in file_complete().
        self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset)
    def receive_data_chunk(self, raw_data, start):
        # Chunks arrive in order, so a plain append is sufficient.
        self.file.write(raw_data)
    def file_complete(self, file_size):
        """Rewind the temp file, record its final size and return it."""
        self.file.seek(0)
        self.file.size = file_size
        return self.file
class MemoryFileUploadHandler(FileUploadHandler):
    """
    File upload handler to stream uploads into memory (used for small files).
    """
    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Use the content_length to signal whether or not this handler should be in use.
        """
        # Check the content-length header to see if we should
        # If the post is too large, we cannot use the Memory handler.
        if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
            self.activated = False
        else:
            self.activated = True
    def new_file(self, *args, **kwargs):
        # Raising StopFutureHandlers claims the file for this handler so no
        # later handler in the chain processes it.
        super(MemoryFileUploadHandler, self).new_file(*args, **kwargs)
        if self.activated:
            self.file = StringIO()
            raise StopFutureHandlers()
    def receive_data_chunk(self, raw_data, start):
        """
        Add the data to the StringIO file.
        """
        if self.activated:
            self.file.write(raw_data)
        else:
            # Returning the chunk passes it through to the next handler.
            return raw_data
    def file_complete(self, file_size):
        """
        Return a file object if we're activated.
        """
        if not self.activated:
            return
        self.file.seek(0)
        return InMemoryUploadedFile(
            file = self.file,
            field_name = self.field_name,
            name = self.file_name,
            content_type = self.content_type,
            size = file_size,
            charset = self.charset
        )
def load_handler(path, *args, **kwargs):
    """
    Given a path to a handler, return an instance of that handler.
    E.g.::
        >>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request)
        <TemporaryFileUploadHandler object at 0x...>
    """
    # Split "pkg.module.ClassName" into its module path and attribute name.
    i = path.rfind('.')
    module, attr = path[:i], path[i+1:]
    try:
        mod = importlib.import_module(module)
    except ImportError, e:
        raise ImproperlyConfigured('Error importing upload handler module %s: "%s"' % (module, e))
    except ValueError, e:
        # ValueError typically means ``path`` had no dot, so import_module
        # was handed an empty module name.
        raise ImproperlyConfigured('Error importing upload handler module. Is FILE_UPLOAD_HANDLERS a correctly defined list or tuple?')
    try:
        cls = getattr(mod, attr)
    except AttributeError:
        raise ImproperlyConfigured('Module "%s" does not define a "%s" upload handler backend' % (module, attr))
    return cls(*args, **kwargs)
|
Python
| 0.000003
| |
e4b2f5eed4c169792812ee82fa1f65cdc9516fb0
|
Add first/lastname to project search
|
lims/projects/views.py
|
lims/projects/views.py
|
import django_filters
from rest_framework import viewsets
from rest_framework.validators import ValidationError
from rest_framework.filters import (OrderingFilter,
SearchFilter,
DjangoFilterBackend)
from guardian.shortcuts import get_group_perms
from lims.shared.filters import ListFilter
from lims.permissions.permissions import (IsInAdminGroupOrRO,
ViewPermissionsMixin,
ExtendedObjectPermissions,
ExtendedObjectPermissionsFilter)
from .models import (Product, ProductStatus, Project)
from .serializers import (ProjectSerializer, ProductSerializer,
DetailedProductSerializer, ProductStatusSerializer)
from .parsers import DesignFileParser
class ProjectViewSet(ViewPermissionsMixin, viewsets.ModelViewSet):
    """
    View all projects the user has permissions for

    Projects are filtered by permissions and users cannot see any
    projects they do not have permissions for.
    """
    queryset = Project.objects.all()
    serializer_class = ProjectSerializer
    permission_classes = (ExtendedObjectPermissions,)
    # ExtendedObjectPermissionsFilter restricts the queryset to objects the
    # requesting user holds object-level permissions on.
    filter_backends = (SearchFilter, DjangoFilterBackend,
                       OrderingFilter, ExtendedObjectPermissionsFilter,)
    search_fields = ('project_identifier', 'name', 'primary_lab_contact__username')
    def perform_create(self, serializer):
        """
        Save a new project for the requesting user.

        Group permissions are extracted from the serializer payload before
        saving and then assigned to the freshly created instance.
        """
        serializer, permissions = self.clean_serializer_of_permissions(serializer)
        instance = serializer.save(created_by=self.request.user)
        self.assign_permissions(instance, permissions)
class ProductFilter(django_filters.FilterSet):
    """Query-string filtering for products: exact/in on id, project, status."""
    # on_workflow_as = django_filters.MethodFilter()
    id__in = ListFilter(name='id')
    def filter_on_workflow_as(self, queryset, value):
        # Filter by presence on a workflow. Only effective if the
        # MethodFilter above is re-enabled; query values arrive as the
        # strings 'True'/'False'.
        if value == 'False':
            return queryset.filter(on_workflow_as__isnull=True)
        elif value == 'True':
            return queryset.filter(on_workflow_as__isnull=False)
        return queryset
    class Meta:
        model = Product
        fields = {
            'id': ['exact', 'in'],
            'project': ['exact'],
            'status': ['exact'],
            # 'on_workflow_as': ['exact'],
        }
class ProductViewSet(ViewPermissionsMixin, viewsets.ModelViewSet):
    """
    Provides a list of all products
    """
    queryset = Product.objects.all()
    serializer_class = ProductSerializer
    permission_classes = (ExtendedObjectPermissions,)
    filter_backends = (SearchFilter, DjangoFilterBackend,
                       OrderingFilter, ExtendedObjectPermissionsFilter,)
    search_fields = ('product_identifier', 'name',)
    filter_class = ProductFilter
    def _parse_design(self, instance):
        """
        Takes a design file and extracts the necessary info
        out to add inventory items or other things.
        """
        if instance.design is not None:
            items = []
            parser = DesignFileParser(instance.design)
            # Only 'csv' and 'gb' design formats are understood; anything
            # else yields no linked items.
            if instance.design_format == 'csv':
                items = parser.parse_csv()
            elif instance.design_format == 'gb':
                items = parser.parse_gb()
            for i in items:
                instance.linked_inventory.add(i)
    def get_serializer_class(self):
        # Use a more compact serializer when listing.
        # This makes things run more efficiently; the detailed serializer is
        # only needed for a single-object retrieve.
        if self.action == 'retrieve':
            return DetailedProductSerializer
        return ProductSerializer
    def perform_create(self, serializer):
        # Ensure the user has the correct permissions on the Project
        # to add a product to it. Members of the 'admin' group bypass the
        # per-project permission check.
        project = serializer.validated_data['project']
        if ('change_project' in get_group_perms(self.request.user, project)
                or self.request.user.groups.filter(name='admin').exists()):
            instance = serializer.save(created_by=self.request.user)
            # New products inherit the group permissions of their project.
            self.clone_group_permissions(instance.project, instance)
        else:
            raise ValidationError('You do not have permission to create this')
        # Does it have a design?
        # If so, parse the design to extract info to get parts from
        # inventory.
        self._parse_design(instance)
class ProductStatusViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for product statuses.

    Access is governed by IsInAdminGroupOrRO (presumably admin-only writes,
    read-only for everyone else -- confirm against the permission class).
    """
    queryset = ProductStatus.objects.all()
    serializer_class = ProductStatusSerializer
    permission_classes = (IsInAdminGroupOrRO,)
|
Python
| 0
|
@@ -1457,16 +1457,142 @@
sername'
+,%0A 'crm_project__account__user__first_name',%0A 'crm_project__account__user__last_name',
)%0A%0A d
|
1809df6d5886ac6c0c35c8e879d9eda334606f4e
|
Simplify handling from_db_value across django versions
|
django_unixdatetimefield/fields.py
|
django_unixdatetimefield/fields.py
|
import datetime
import time
import django
import django.db.models as models
class UnixDateTimeField(models.DateTimeField):
    """Store a datetime as an integer Unix timestamp column.

    Values are exposed to Python as ``datetime.datetime`` objects and
    persisted as (positive integer) seconds since the epoch.
    """
    # TODO(niklas9):
    # * should we take care of transforming between time zones in any way here ?
    # * get default datetime format from settings ?
    DEFAULT_DATETIME_FMT = '%Y-%m-%d %H:%M:%S'
    TZ_CONST = '+'
    description = "Unix timestamp integer to datetime object"

    def get_internal_type(self):
        # Timestamps live in a plain unsigned-integer column.
        return 'PositiveIntegerField'

    def to_python(self, val):
        """Coerce ``val`` (None, datetime, date, string or number) to datetime."""
        if val is None or isinstance(val, datetime.datetime):
            return val
        if isinstance(val, datetime.date):
            return datetime.datetime(val.year, val.month, val.day)
        elif self._is_string(val):
            # TODO(niklas9):
            # * not addressing time zone support as todo above for now
            if self.TZ_CONST in val:
                val = val.split(self.TZ_CONST)[0]
            return datetime.datetime.strptime(val, self.DEFAULT_DATETIME_FMT)
        else:
            return datetime.datetime.fromtimestamp(float(val))

    def _is_string(self, val):
        # Fix: the instance parameter was previously misnamed ``value``;
        # it is the implicit ``self``. ``unicode`` only exists on Python 2,
        # hence the NameError fallback to ``str`` on Python 3.
        try:
            return isinstance(val, unicode)
        except NameError:
            return isinstance(val, str)

    def get_db_prep_value(self, val, *args, **kwargs):
        """Convert a datetime to the integer stored in the database.

        ``None`` maps to the field default (or None when no default is set).
        """
        if val is None:
            if self.default == models.fields.NOT_PROVIDED:
                return None
            return self.default
        return int(time.mktime(val.timetuple()))

    def value_to_string(self, obj):
        # Fix: Field._get_val_from_obj() was removed in Django 2.0, which this
        # class otherwise supports (see from_db_value below); use the public
        # value_from_object() accessor, available on all supported versions.
        val = self.value_from_object(obj)
        return self.to_python(val).strftime(self.DEFAULT_DATETIME_FMT)

    # Django 2.0 updates the signature of from_db_value.
    # https://docs.djangoproject.com/en/2.0/releases/2.0/#context-argument-of-field-from-db-value-and-expression-convert-value
    if django.VERSION < (2,):
        def from_db_value(self, val, expression, connection, context):
            return self.to_python(val)
    else:
        def from_db_value(self, val, expression, connection):
            return self.to_python(val)
|
Python
| 0.000017
|
@@ -26,22 +26,8 @@
me%0A%0A
-import django%0A
impo
@@ -1742,404 +1742,55 @@
-# Django 2.0 updates the signature of from_db_value.%0A # https://docs.djangoproject.com/en/2.0/releases/2.0/#context-argument-of-field-from-db-value-and-expression-convert-value%0A if django.VERSION %3C (2,):%0A def from_db_value(self, val, expression, connection, context):%0A return self.to_python(val)%0A else:%0A def from_db_value(self, val, expression, connection):%0A
+def from_db_value(self, val, *args, **kwargs):%0A
|
20053951b3036d0ae49f7f1ae25d600848872c82
|
Bump version
|
lintreview/__init__.py
|
lintreview/__init__.py
|
__version__ = '2.36.1'
|
Python
| 0
|
@@ -17,7 +17,7 @@
.36.
-1
+2
'%0A
|
f426d44f82a4f1855cb180b5aff98221c14537f1
|
Update version.py
|
nltools/version.py
|
nltools/version.py
|
"""Specifies current version of nltools to be used by setup.py and __init__.py
"""
__version__ = '0.3.6'
|
Python
| 0.000001
|
@@ -100,7 +100,7 @@
0.3.
-6
+7
'%0A
|
eb9a370dc361caff7e1917c07f1711fb8ad60cf2
|
Create article link admin
|
opps/articles/admin.py
|
opps/articles/admin.py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Post, Album, Article, ArticleSource, ArticleImage
from .models import ArticleBox, ArticleBoxArticles, ArticleConfig
from opps.core.admin import PublishableAdmin
from redactor.widgets import RedactorEditor
from django_thumbor import generate_url
class ArticleImageInline(admin.TabularInline):
model = ArticleImage
fk_name = 'article'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class ArticleSourceInline(admin.TabularInline):
model = ArticleSource
fk_name = 'article'
raw_id_fields = ['source']
actions = None
extra = 1
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('source', 'order')})]
class ArticleBoxArticlesInline(admin.TabularInline):
model = ArticleBoxArticles
fk_name = 'articlebox'
raw_id_fields = ['article']
actions = None
extra = 1
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('article', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class ArticleAdmin(PublishableAdmin):
prepopulated_fields = {"slug": ["title"]}
readonly_fields = ['get_http_absolute_url', 'short_url']
raw_id_fields = ['main_image', 'channel']
class PostAdmin(ArticleAdmin):
form = PostAdminForm
inlines = [ArticleImageInline, ArticleSourceInline]
raw_id_fields = ['main_image', 'channel', 'album']
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'title', 'slug', 'get_http_absolute_url',
'short_url')}),
(_(u'Content'), {
'fields': ('short_title', 'headline', 'content', 'main_image',
'tags')}),
(_(u'Relationships'), {
'fields': ('channel', 'album',)}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available')}),
)
class AlbumAdminForm(forms.ModelForm):
class Meta:
model = Album
class AlbumAdmin(ArticleAdmin):
form = AlbumAdminForm
inlines = [ArticleImageInline]
fieldsets = (
(_(u'Identification'), {
'fields': ('title', 'slug', 'get_http_absolute_url',
'short_url',)}),
(_(u'Content'), {
'fields': ('short_title', 'headline', 'main_image')}),
(_(u'Relationships'), {
'fields': ('channel',)}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available')}),
)
class ArticleBoxAdmin(PublishableAdmin):
prepopulated_fields = {"slug": ["name"]}
list_display = ['name', 'date_available', 'published']
list_filter = ['date_available', 'published']
inlines = [ArticleBoxArticlesInline]
raw_id_fields = ['channel', 'article']
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'name', 'slug')}),
(_(u'Relationships'), {
'fields': ('channel', 'article')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available')}),
)
class HideArticleAdmin(PublishableAdmin):
list_display = ['image_thumb', 'title', 'channel_name', 'date_available',
'published']
readonly_fields = ['image_thumb']
def image_thumb(self, obj):
if obj.main_image:
return u'<img width="60px" height="60px" src="{0}" />'.format(
generate_url(obj.main_image.image.url, width=60, height=60))
return _(u'No Image')
image_thumb.short_description = _(u'Thumbnail')
image_thumb.allow_tags = True
def get_model_perms(self, *args, **kwargs):
return {}
def has_add_permission(self, request):
return False
class ArticleConfigAdmin(PublishableAdmin):
list_display = ['key', 'key_group', 'channel', 'date_insert',
'date_available', 'published']
list_filter = ["key", 'key_group', "channel", "published"]
search_fields = ["key", "key_group", "value"]
admin.site.register(Article, HideArticleAdmin)
admin.site.register(Post, PostAdmin)
admin.site.register(Album, AlbumAdmin)
admin.site.register(ArticleBox, ArticleBoxAdmin)
admin.site.register(ArticleConfig, ArticleConfigAdmin)
|
Python
| 0
|
@@ -173,16 +173,22 @@
Article,
+ Link,
Article
@@ -2790,24 +2790,517 @@
%7D),%0A )%0A%0A%0A
+class LinkAdmin(ArticleAdmin):%0A fieldsets = (%0A (_(u'Identification'), %7B%0A 'fields': ('title', 'slug', 'get_http_absolute_url',%0A 'short_url',)%7D),%0A (_(u'Content'), %7B%0A 'fields': ('short_title', 'headline', 'url', 'main_image')%7D),%0A (_(u'Relationships'), %7B%0A 'fields': ('channel',)%7D),%0A (_(u'Publication'), %7B%0A 'classes': ('extrapretty'),%0A 'fields': ('published', 'date_available')%7D),%0A )%0A%0A%0A%0A
class Articl
@@ -4932,24 +4932,61 @@
AlbumAdmin)%0A
+admin.site.register(Link, LinkAdmin)%0A
admin.site.r
|
fcf5d1f33026069d69690c67f7ddcc8c77f15626
|
add exception handingling for debug
|
opreturnninja/views.py
|
opreturnninja/views.py
|
import json
import random
from pyramid.view import view_config
from .constants import ELECTRUM_SERVERS
from bitcoin.rpc import RawProxy, DEFAULT_USER_AGENT
import socket
@view_config(route_name='api', renderer='json')
def api_view(request):
    """JSON API endpoint. Only 'sendrawtransaction' is implemented: it
    broadcasts a raw transaction via a randomly chosen Electrum server and
    returns the server's reply; any other method yields an error payload."""
    global rpc  # NOTE(review): 'rpc' is never used in this view -- confirm
    # NOTE(review): assert is stripped under "python -O"; real request
    # validation should raise an HTTP error response instead.
    assert hasattr(request, 'json_body')
    assert 'method' in request.json_body and 'params' in request.json_body
    method = request.json_body['method']
    params = request.json_body['params']
    assert type(params) == list
    if method == 'sendrawtransaction':
        assert len(params) == 1
        # 'sent' is never set True: the loop can only exit via the return
        # below, retrying a different random server on connection errors.
        sent = False
        while not sent:
            try:
                s = socket.create_connection(random.choice(list(ELECTRUM_SERVERS.items())))
                s.send(b'{"id":"0", "method":"blockchain.transaction.broadcast", "params":["' + params[0].encode() + b'"]}\n')
                r = {'result': s.recv(1024)[:-1].decode(), 'error': None, 'id': request.json_body['id']} # the slice is to remove the trailing new line
                print(r)
                return r
            except ConnectionRefusedError as e:
                print(e)
            except socket.gaierror as e:
                print(e)
    # Unknown method (or, unreachable in practice, a fallen-through loop).
    return {
        'result': None,
        'error': 'RPC Request Unknown',
        'id': request.json_body['id'],
    }
@view_config(route_name='index', renderer='templates/index.pt')
def index_view(request):
    """Render the landing page; the template needs no dynamic context."""
    return dict()
|
Python
| 0
|
@@ -640,36 +640,16 @@
s
- = socket.create_connection(
+erver =
rand
@@ -689,16 +689,68 @@
tems()))
+%0A s = socket.create_connection(server
)%0A
@@ -1140,24 +1140,32 @@
print(e
+, server
)%0A
@@ -1218,16 +1218,92 @@
print(e
+, server)%0A except Exception as e:%0A print(e, server
)%0A re
|
7b62f43f0a286c2aa40abb3f383d7cbecc45f242
|
Fix flaky failure: TestHuberLoss
|
tests/chainer_tests/functions_tests/loss_tests/test_huber_loss.py
|
tests/chainer_tests/functions_tests/loss_tests/test_huber_loss.py
|
import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer import utils
@testing.parameterize(*testing.product_dict(
[{'dtype': numpy.float16,
'forward_options': {'rtol': 5e-3, 'atol': 5e-3},
'backward_options': {'eps': 1e-1, 'rtol': 1e-1, 'atol': 1e-1},
'double_backward_options': {'eps': 1e-1, 'rtol': 1e-1, 'atol': 1e-1}},
{'dtype': numpy.float32,
'forward_options': {},
'backward_options': {'eps': 1e-3, 'rtol': 1e-2, 'atol': 1e-2},
'double_backward_options': {'eps': 1e-3, 'rtol': 1e-3, 'atol': 1e-3}},
{'dtype': numpy.float64,
'forward_options': {},
'backward_options': {'eps': 1e-3, 'rtol': 1e-2, 'atol': 1e-2},
'double_backward_options': {'eps': 1e-3, 'rtol': 1e-3, 'atol': 1e-3}},
],
testing.product({
'shape': [(), (3,)],
'reduce': ['no'],
}) + testing.product({
'shape': [(4, 10), (2, 5, 3, 3)],
'reduce': ['no', 'sum_along_second_axis'],
}),
))
class TestHuberLoss(unittest.TestCase):
def setUp(self):
self._config_user = chainer.using_config('dtype', self.dtype)
self._config_user.__enter__()
self.x = utils.force_array(
(numpy.random.random(self.shape) - 0.5) * 4, self.dtype)
self.t = utils.force_array(numpy.random.random(self.shape), self.dtype)
if self.reduce == 'sum_along_second_axis':
gy_shape = self.shape[:1] + self.shape[2:]
else:
gy_shape = self.shape
self.gy = utils.force_array(numpy.random.random(gy_shape), self.dtype)
self.ggx = utils.force_array(
numpy.random.uniform(-1, 1, self.x.shape), self.dtype)
self.ggt = utils.force_array(
numpy.random.uniform(-1, 1, self.t.shape), self.dtype)
def tearDown(self):
self._config_user.__exit__(None, None, None)
def check_forward(self, x_data, t_data):
x = chainer.Variable(x_data)
t = chainer.Variable(t_data)
loss = functions.huber_loss(x, t, delta=1, reduce=self.reduce)
self.assertEqual(loss.data.dtype, self.dtype)
loss_value = cuda.to_cpu(loss.data)
diff_data = cuda.to_cpu(x_data) - cuda.to_cpu(t_data)
loss_expect = numpy.zeros(self.shape)
mask = numpy.abs(diff_data) < 1
loss_expect[mask] = 0.5 * diff_data[mask] ** 2
loss_expect[~mask] = numpy.abs(diff_data[~mask]) - 0.5
if self.reduce == 'sum_along_second_axis':
loss_expect = numpy.sum(loss_expect, axis=1)
testing.assert_allclose(
loss_value, loss_expect, **self.forward_options)
def test_forward_cpu(self):
self.check_forward(self.x, self.t)
@attr.gpu
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
def check_backward(self, x_data, t_data, y_grad):
def f(x, t):
return functions.huber_loss(x, t, delta=1, reduce=self.reduce)
gradient_check.check_backward(
f, (x_data, t_data), y_grad, **self.backward_options)
def test_backward_cpu(self):
self.check_backward(self.x, self.t, self.gy)
@attr.gpu
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.t),
cuda.to_gpu(self.gy))
def check_double_backward(self, x_data, t_data, y_grad, x_grad_grad,
t_grad_grad):
delta = 1
eps = self.double_backward_options['eps'] * 2
xp = chainer.backend.get_array_module(x_data)
mask = xp.abs(xp.abs(x_data - t_data) - delta) < eps
x_data[mask] = 0
t_data[mask] = 0
def f(x, t):
return functions.huber_loss(x, t, delta=delta, reduce=self.reduce)
gradient_check.check_double_backward(
f, (x_data, t_data), y_grad, (x_grad_grad, t_grad_grad),
**self.double_backward_options)
def test_double_backward_cpu(self):
self.check_double_backward(self.x, self.t, self.gy, self.ggx, self.ggt)
@attr.gpu
def test_double_backward_gpu(self):
self.check_double_backward(
cuda.to_gpu(self.x), cuda.to_gpu(self.t), cuda.to_gpu(self.gy),
cuda.to_gpu(self.ggx), cuda.to_gpu(self.ggt))
class TestHuberLossInvalidReductionOption(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (4, 10)).astype(numpy.float32)
self.t = numpy.random.uniform(-1, 1, (4, 10)).astype(numpy.float32)
def check_invalid_option(self, xp):
x = xp.asarray(self.x)
t = xp.asarray(self.t)
with self.assertRaises(ValueError):
functions.huber_loss(x, t, 1, 'invalid_option')
def test_invalid_option_cpu(self):
self.check_invalid_option(numpy)
@attr.gpu
def test_invalid_option_gpu(self):
self.check_invalid_option(cuda.cupy)
testing.run_module(__name__, __file__)
|
Python
| 0.999961
|
@@ -1390,17 +1390,17 @@
0.5) *
-4
+3
, self.d
@@ -1432,32 +1432,46 @@
ils.force_array(
+%0A (
numpy.random.ran
@@ -1481,24 +1481,31 @@
(self.shape)
+ - 0.5)
, self.dtype
|
43d7850403e1e98951909bcb0c441098c3221bde
|
Update ipc_lista1.4.py
|
lista1/ipc_lista1.4.py
|
lista1/ipc_lista1.4.py
|
#ipc_lista1.4
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um Programa que peça as 4 notas bimestrais e mostre a media
nota1 = int(input("Digite a primeira nota do bimestre: "))
nota2 = int(input("Digite a segunda nota do bimestre: "))
nota3 = int(input("Digite a terceira nota do bismestre: "))
nota4 - int(input("Digite a quarta note do bismestre: "))
print
media = (nota1+nota2+nota3+nota4)/4.0
print" A sua média é: %s" %media
|
Python
| 0
|
@@ -421,16 +421,17 @@
+nota4)/
+
4.0%0A%0Apri
|
fb772e5e597082a119348efa68f70e60c11506cd
|
clean up
|
lists/gift_exchange.py
|
lists/gift_exchange.py
|
import random
import itertools
givers = [('tim', 'shirt'), ('jim', 'shoe'), ('joe', 'fruit'), ('john', 'ball')]
def valid(a, b):
    # A pairing arrangement is valid only when the two orderings differ.
    return a != b
if len(givers) < 2:
    print "must have more than 1 givers"
else:
    # Shuffle two copies of the giver list until they differ.
    # NOTE(review): valid(a, b) compares the whole lists, so the loop only
    # rejects the case where both shuffles are identical element-for-element;
    # an individual self-pairing inside otherwise-different lists is NOT
    # rejected -- confirm whether someone gifting themselves is acceptable.
    a = list(givers)
    b = list(givers)
    while not valid(a, b):
        random.shuffle(a)
        random.shuffle(b)
    # Each pair is (name, gift); giver i hands their gift to receiver j.
    for i, j in itertools.izip(a, b):
        print '%s gives %s to %s.' % (i[0], i[1], j[0])
|
Python
| 0.000001
|
@@ -79,26 +79,26 @@
('jo
-e
+hn
', '
-fruit
+ball
'), ('jo
hn',
@@ -97,105 +97,21 @@
('jo
-hn
+e
', '
-ball')%5D%0A%0Adef valid(a, b):%0A if a == b:%0A return False%0A else:%0A return True
+fruit')%5D
%0A%0Aif
@@ -236,23 +236,14 @@
ile
-not valid(a,
+a ==
b
-)
:%0A
|
e3d148aec20b8a496ae353916597040c9fa237af
|
add support for mode bits
|
litespi/phy/generic.py
|
litespi/phy/generic.py
|
from migen import *
from migen.genlib.fsm import FSM, NextState
from litespi.clkgen import LiteSPIClkGen
from litespi.common import *
from litex.soc.interconnect import stream
# Output enable masks for the tri-state buffers, data mode mask is not included as oe pins default to 0
cmd_oe_mask = 0b00000001
soft_oe_mask = 0b00000001
addr_oe_mask = {
1: 0b00000001,
2: 0b00000011,
4: 0b00001111,
8: 0b11111111,
}
def GetConfig(flash=None):
    """Return the default flash configuration when no module is supplied.

    Tuple layout: (addr_bits, dummy_bits, cmd_width, addr_width, data_width,
    command). A non-None ``flash`` module currently yields ``None``
    (TODO: replace with a named tuple / configuration object).
    """
    if flash is not None:
        return None
    # 0x0b: read command byte -- presumably the flash fast-read opcode,
    # confirm against the target device's datasheet.
    return (24, 8, 1, 1, 1, 0x0b)
class LiteSPIPHY(Module):
"""Generic LiteSPI PHY
The ``LiteSPIPHY`` class provides a generic PHY that can be connected to the ``LiteSPICore``.
It supports single/dual/quad/octal output reads from the flash chips.
Parameters
----------
pads : Object
SPI pads description.
flash : FlashModule
FlashModule configuration object or None, if None is provided then the default configuration is used.
device : str
Device type for use by the ``LiteSPIClkGen``.
Attributes
----------
source : Endpoint(spi_phy_data_layout), out
Data stream.
sink : Endpoint(spi_phy_ctl_layout), in
Control stream.
cs_n : Signal(), in
Flash CS signal.
"""
def shift_out(self, width, bits, next_state, negedge_op=None, posedge_op=None):
res = [
self.clkgen.en.eq(1),
If(self.clkgen.negedge,
NextValue(self.fsm_cnt, self.fsm_cnt+width),
If(self.fsm_cnt == (bits-width),
NextValue(self.fsm_cnt, 0),
NextState(next_state),
),
),
]
if negedge_op is not None:
res += [If(self.clkgen.negedge, *negedge_op)]
if posedge_op is not None:
res += [If(self.clkgen.posedge, *posedge_op)]
return res
def __init__(self, pads, flash=None, device="xc7"):
self.source = source = stream.Endpoint(spi_phy_data_layout)
self.sink = sink = stream.Endpoint(spi_phy_ctl_layout)
self.cs_n = Signal()
self.submodules.clkgen = clkgen = LiteSPIClkGen(pads, device)
addr_bits, dummy_bits, cmd_width, addr_width, data_width, command = GetConfig(flash)
data_bits = 32
cmd_bits = 8
self.comb += [
clkgen.div.eq(2), # should be SoftCPU configurable
pads.cs_n.eq(self.cs_n),
pads.clk.eq(clkgen.clk),
]
if hasattr(pads, "miso"):
bus_width = 1
pads.dq = [pads.mosi, pads.miso]
else:
bus_width = len(pads.dq)
assert bus_width in [1, 2, 4, 8]
dq_o = Signal(len(pads.dq))
dq_i = Signal(len(pads.dq))
dq_oe = Signal(len(pads.dq))
for i in range(len(pads.dq)):
t = TSTriple()
self.specials += t.get_tristate(pads.dq[i])
self.comb += [
dq_i[i].eq(t.i),
t.o.eq(dq_o[i]),
t.oe.eq(dq_oe[i]),
]
self.fsm_cnt = Signal(max=31)
addr = Signal(addr_bits)
data = Signal(data_bits)
cmd = Signal(cmd_bits)
self.submodules.fsm = fsm = FSM(reset_state="IDLE")
fsm.act("IDLE",
sink.ready.eq(1),
If(sink.ready & sink.valid,
If(sink.cmd, # command request
NextValue(addr, sink.addr),
NextValue(cmd, command),
NextState("CMD"),
).Else( # data request
NextState("DATA"),
),
),
)
fsm.act("CMD",
dq_oe.eq(cmd_oe_mask),
dq_o.eq(cmd[-1] if cmd_width == 1 else cmd[-cmd_width:]),
self.shift_out(cmd_width, cmd_bits, "ADDR", negedge_op=[NextValue(cmd, cmd<<cmd_width)]),
)
fsm.act("ADDR",
dq_oe.eq(addr_oe_mask[addr_width]),
dq_o.eq(addr[-1] if addr_width == 1 else addr[-addr_width:]),
self.shift_out(addr_width, addr_bits, "DUMMY", negedge_op=[NextValue(addr, addr<<addr_width)]),
)
fsm.act("DUMMY",
self.shift_out(addr_width, dummy_bits, "IDLE"),
)
fsm.act("DATA",
self.shift_out(data_width, data_bits, "SEND_DATA", posedge_op=[NextValue(data, Cat(dq_i[1] if data_width == 1 else dq_i[0:data_width], data))]),
)
fsm.act("SEND_DATA",
source.valid.eq(1),
source.data.eq(data),
If(source.ready & source.valid,
NextState("IDLE"),
)
)
|
Python
| 0
|
@@ -4343,16 +4343,124 @@
DUMMY%22,%0A
+ If(self.fsm_cnt %3C 8, dq_oe.eq(addr_oe_mask%5Baddr_width%5D)), # output 0's for the first dummy byte%0A
|
93db6df0b89d3fa5ff248ffc9d48040c795dde4d
|
update info
|
framework/SupervisedLearning/ScikitLearn/Ensemble/VotingRegressor.py
|
framework/SupervisedLearning/ScikitLearn/Ensemble/VotingRegressor.py
|
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Created on Jan 21, 2020
@author: alfoa, wangc
ExtraTreeRegressor
An extremely randomized tree regressor.
"""
#Internal Modules (Lazy Importer)--------------------------------------------------------------------
#Internal Modules (Lazy Importer) End----------------------------------------------------------------
#External Modules------------------------------------------------------------------------------------
import numpy as np
#External Modules End--------------------------------------------------------------------------------
#Internal Modules------------------------------------------------------------------------------------
from SupervisedLearning.ScikitLearn import ScikitLearnBase
from utils import InputData, InputTypes
#Internal Modules End--------------------------------------------------------------------------------
class VotingRegressor(ScikitLearnBase):
"""
Prediction voting regressor for unfitted estimators.
A voting regressor is an ensemble meta-estimator that fits several base regressors, each on the whole dataset.
Then it averages the individual predictions to form a final predictions.
"""
info = {'problemtype':'regression', 'normalize':False}
def __init__(self):
"""
Constructor that will appropriately initialize a supervised learning object
@ In, None
@ Out, None
"""
super().__init__()
self.multioutputWrapper = False
import sklearn
import sklearn.ensemble
self.model = sklearn.ensemble.VotingRegressor
self.settings = None #
@classmethod
def getInputSpecification(cls):
"""
Method to get a reference to a class that specifies the input data for
class cls.
@ In, cls, the class for which we are retrieving the specification
@ Out, inputSpecification, InputData.ParameterInput, class to use for
specifying input of cls.
"""
specs = super().getInputSpecification()
specs.description = r"""The \xmlNode{VotingRegressor}
\zNormalizationPerformed{VotingRegressor}
"""
estimatorInput = InputData.assemblyInputFactory("estimator", contentType=InputTypes.StringType,
descr=r"""name of a ROM that can be used as an estimator""", default='no-default')
#TODO: Add more inputspecs for estimator
specs.addSub(estimatorInput)
specs.addSub(InputData.parameterInputFactory("weights", contentType=InputTypes.FloatListType,
descr=r"""Sequence of weights (float or int) to weight the occurrences of predicted
values before averaging. Uses uniform weights if None.""", default=None))
return specs
def _handleInput(self, paramInput):
"""
Function to handle the common parts of the distribution parameter input.
@ In, paramInput, ParameterInput, the already parsed input.
@ Out, None
"""
super()._handleInput(paramInput)
## TODO extend to handle multi-output in train and evaluate methods
if len(self.target) != 1:
self.raiseAnError(IOError, self.name, 'can only handle single target variable, but found {}'.format(','.join(self.target)))
settings, notFound = paramInput.findNodesAndExtractValues(['weights'])
# notFound must be empty
assert(not notFound)
self.settings = settings
def __returnInitialParametersLocal__(self):
"""
Returns a dictionary with the parameters and their initial values
@ In, None
@ Out, params, dict, dictionary of parameter names and initial values
"""
params = self.settings
return params
def setEstimator(self, estimatorList):
"""
Initialization method
@ In, estimatorList, list of ROM instances/estimators used by ROM
@ Out, None
"""
estimators = []
for estimator in estimatorList:
interfaceRom = estimator._interfaceROM
if interfaceRom.info['problemtype'] != 'regression':
self.raiseAnError(IOError, 'estimator:', estimator.name, 'with problem type', interfaceRom.info['problemtype'],
'can not be used for VotingRegressor')
if not isinstance(interfaceRom, ScikitLearnBase):
self.raiseAnError(IOError, 'ROM', estimator.name, 'can not be used as estimator for ROM', self.name)
if not callable(getattr(interfaceRom.model, "fit", None)):
self.raiseAnError(IOError, 'estimator:', estimator.name, 'can not be used! Please change to a different estimator')
else:
self.raiseADebug('A valid estimator', estimator.name, 'is provided!')
estimators.append((estimator.name, interfaceRom.model))
self.settings['estimators'] = estimators
self.initializeModel(self.settings)
def __evaluateLocal__(self, featureVals):
    """
    Evaluates a point.
    This method need to be re-implemented because:
      1. Current implementation in SciKitLearn version 1.0, VotingRegressor predict method can not handle
         "mutioutput" wrapper correctly
      2. tranform method will return predictions for each estimator, which can be used to replace predict method.
      3. Current fit function can only accept single target, we may need to extend the fit method in future.
    @ In, featureVals, np.array, list of values at which to evaluate the ROM
    @ Out, returnDict, dict, dict of all the target results
    """
    if self.uniqueVals is not None:
        # presumably cached constant predictions set elsewhere when the
        # training targets were all identical -- TODO confirm
        outcomes = self.uniqueVals
    else:
        # transform() returns one prediction per sub-estimator along the last axis
        transformOuts = self.model.transform(featureVals)
        if self.settings['weights'] is not None:
            # weighted average over the sub-estimators' predictions
            outcomes = np.average(transformOuts, axis=-1, weights=self.settings['weights'])
        else:
            outcomes = np.average(transformOuts, axis=-1)
    outcomes = np.atleast_1d(outcomes)
    if len(outcomes.shape) == 1:
        # single realization: pair each target with its scalar prediction
        returnDict = {key: value for (key, value) in zip(self.target, outcomes)}
    else:
        # multiple realizations: slice the per-target columns
        returnDict = {key: outcomes[:, i] for i, key in enumerate(self.target)}
    return returnDict
|
Python
| 0
|
@@ -603,20 +603,21 @@
on
-Jan 21
+Nov. 16
, 202
-0
+1
%0A%0A
@@ -628,15 +628,8 @@
hor:
- alfoa,
wan
@@ -637,17 +637,14 @@
c%0A
-ExtraTree
+Voting
Regr
@@ -656,34 +656,77 @@
%0A A
-n extremely randomized tre
+ voting regressor is an ensemble meta-estimator that fits several bas
e re
@@ -732,17 +732,17 @@
egressor
-.
+s
%0A%0A%22%22%22%0A#I
|
b76e1697b92565ca3fc8a7ee2961adf894095e04
|
Add User as foreign key in Bill
|
billing/models.py
|
billing/models.py
|
from django.db import models
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.db.models.signals import pre_save, pre_init
import datetime
class Bill(models.Model):
    # Sequential invoice number (e.g. 'F201501001'); filled in automatically
    # by the ``define_number`` pre_save handler below, hence blank=True.
    number = models.CharField(max_length=10, unique=True, blank=True)
    # Whether the bill has been settled.
    isPaid = models.BooleanField(default=False)
    # Date the bill was issued.
    billing_date = models.DateField()
class Service(models.Model):
    # Short code identifying the service.
    reference = models.CharField(max_length=5)
    name = models.CharField(max_length=128)
    description = models.CharField(max_length=1024)
    # Unit price; used by ``compute_total`` to price bill lines.
    price = models.FloatField()

    def __unicode__(self):
        """ Return name as object representation """
        return self.name
class BillLine(models.Model):
    # One line of a bill: a service, a quantity, and the resulting total.
    bill = models.ForeignKey(Bill)
    service = models.ForeignKey(Service)
    quantity = models.SmallIntegerField(default=1)
    # Computed as price * quantity by the ``compute_total`` pre_save handler
    # when left unset, hence blank=True.
    total = models.FloatField(blank=True)
class UserProfile(models.Model):
    """ extend User class """
    # One profile per Django auth user, carrying billing-specific data.
    user = models.OneToOneField(User)
    billing_address = models.CharField(max_length=1024)
@receiver(pre_save, sender=BillLine)
def compute_total(sender, instance, **kwargs):
    """ set total of line automatically """
    # Only fill in the total when it has never been set: testing ``is None``
    # instead of truthiness keeps a deliberately-set total of 0.0 intact
    # (the previous ``if not instance.total`` would have recomputed it).
    if instance.total is None:
        instance.total = instance.service.price * instance.quantity
@receiver(pre_save, sender=Bill)
def define_number(sender, instance, **kwargs):
    """ set bill number incrementally """
    # only when we create record for the first time
    if not instance.number:
        today = datetime.date.today()
        # get last id in base, we assume it's the last record
        # NOTE(review): this assumes the row with the highest id also carries
        # the highest bill number and that the counter never resets per
        # month -- confirm against the billing rules.
        try:
            last_record = sender.objects.latest('id')
            # get last bill number and increment it
            # NOTE(review): '%03d' widens beyond 3 digits past 999, which will
            # eventually overflow the 10-char ``number`` field -- confirm.
            last_num = '%03d' % (int(last_record.number[-3:]) + 1)
        # no Bill in db
        except sender.DoesNotExist:
            last_num = '001'
        # final shape: 'F' + YYYYMM + 3-digit counter, e.g. 'F201501001'
        instance.number = 'F%s%s' % (today.strftime('%Y%m'), last_num)
|
Python
| 0.000001
|
@@ -200,24 +200,59 @@
ls.Model):%0A%0A
+ user = models.ForeignKey(User)%0A
number =
|
17d3d63564798cd03788ce579227d5425cd866c0
|
Make fake uploader use zlib compression
|
bin/fake_order.py
|
bin/fake_order.py
|
#!/usr/bin/env python
"""
A fake order upload script, used to manually test the whole stack.

Posts a hard-coded "unified" market-order message to a locally running
upload endpoint and prints the HTTP response body.
"""
import simplejson
import requests

# Sample payload in the Unified Uploader format: two rowsets of market
# orders, one per (regionID, typeID) pair.
data = """
{
  "resultType" : "orders",
  "version" : "0.1alpha",
  "uploadKeys" : [
    { "name" : "emk", "key" : "abc" },
    { "name" : "ec" , "key" : "def" }
  ],
  "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
  "currentTime" : "2011-10-22T15:46:00+00:00",
  "columns" : ["price","volRemaining","range","orderID","volEntered","minVolume","bid","issueDate","duration","stationID","solarSystemID"],
  "rowsets" : [
    {
      "generatedAt" : "2011-10-22T15:43:00+00:00",
      "regionID" : 10000065,
      "typeID" : 11134,
      "rows" : [
        [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
        [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
        [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
      ]
    },
    {
      "generatedAt" : "2011-10-22T15:42:00+00:00",
      "regionID" : null,
      "typeID" : 11135,
      "rows" : [
        [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
        [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
        [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
      ]
    }
  ]
}
"""

# Round-trip through simplejson: validates the literal above and
# normalises the whitespace before uploading.
data = simplejson.loads(data)
data = simplejson.dumps(data)

r = requests.post(
    'http://localhost:8080/upload/unified/',
    data=data,
)
print "RESPONSE"
print r.text
|
Python
| 0.000003
|
@@ -123,16 +123,28 @@
requests
+%0Aimport zlib
%0A%0Adata =
@@ -1463,32 +1463,46 @@
ds(data)%0Adata =
+zlib.compress(
simplejson.dumps
@@ -1511,116 +1511,226 @@
ata)
-%0A%0Ar = requests.post(%0A 'http://localhost:8080/upload/unified/',%0A data=data,%0A)%0Aprint %22RESPONSE%22%0Aprint r.text
+)%0Aheaders = %7B%0A 'Content-Encoding': 'gzip',%0A%7D%0A%0Ar = requests.post(%0A 'http://eve-emdr.local/upload/unified/',%0A #'http://localhost:8080/upload/unified/',%0A data=data,%0A headers=headers,%0A)%0Aprint %22Sent fake order.%22%0A
|
f7a86cec72e4b5ff017013561f4fd3f3f59bfde5
|
Fix typos
|
AutoSetNewFileSyntax.py
|
AutoSetNewFileSyntax.py
|
import sublime
import sublime_plugin
import sys
import os
import logging
sys.path.insert(0, os.path.dirname(__file__))
from SyntaxMappings import *
PLUGIN_NAME = 'AutoSetNewFileSyntax'
LOG_LEVEL = logging.INFO
LOG_FORMAT = "%(name)s: [%(levelname)s] %(message)s"
settings = None
syntaxMappings = None
loggingStreamHandler = None
logger = None
def plugin_unloaded():
    """Sublime Text hook: undo what ``plugin_loaded`` set up."""
    global settings, loggingStreamHandler, logger
    # stop listening for settings changes and detach our log handler
    settings.clear_on_change("syntax_mapping")
    logger.removeHandler(loggingStreamHandler)
def plugin_loaded():
    """Sublime Text hook: configure logging, load the plugin settings and
    build the first-line-to-syntax mappings."""
    global settings, syntaxMappings, loggingStreamHandler, logger
    # create logger stream handler
    loggingStreamHandler = logging.StreamHandler()
    loggingStreamHandler.setFormatter(logging.Formatter(LOG_FORMAT))
    # config logger
    logger = logging.getLogger(PLUGIN_NAME)
    logger.setLevel(LOG_LEVEL)
    logger.addHandler(loggingStreamHandler)
    settings = sublime.load_settings(PLUGIN_NAME + ".sublime-settings")
    syntaxMappings = SyntaxMappings(settings=settings, logger=logger)
    # rebuilt syntax mappings if there is an user settings update
    settings.add_on_change("syntax_mapping", syntaxMappings.rebuildSyntaxMappings)
class AutoSetNewFileSyntax(sublime_plugin.EventListener):
    """Assign a syntax to plain-text views whose first line matches one of
    the configured regexes, on the events enabled in the settings."""

    global settings, syntaxMappings

    def on_activated_async(self, view):
        if (
            self.isEventListenerEnabled('on_activated_async') and
            self.isScopePlainText(view)
        ):
            self.matchAndSetSyntax(view)

    def on_clone_async(self, view):
        if (
            self.isEventListenerEnabled('on_clone_async') and
            self.isScopePlainText(view)
        ):
            self.matchAndSetSyntax(view)

    def on_load_async(self, view):
        if (
            self.isEventListenerEnabled('on_load_async') and
            self.isScopePlainText(view)
        ):
            self.matchAndSetSyntax(view)

    def on_modified_async(self, view):
        if (
            self.isEventListenerEnabled('on_modified_async') and
            self.isOnlyOneCursor(view) and
            self.isFirstCursorNearBeginning(view) and
            self.isScopePlainText(view)
        ):
            self.matchAndSetSyntax(view)

    def on_pre_save_async(self, view):
        if (
            self.isEventListenerEnabled('on_pre_save_async') and
            self.isOnlyOneCursor(view) and
            self.isFirstCursorNearBeginning(view) and
            self.isScopePlainText(view)
        ):
            self.matchAndSetSyntax(view)

    def isEventListenerEnabled(self, event):
        """ check whether an event listener is enabled in the settings """
        # "event_listeners" may be missing (None -> TypeError on subscript)
        # or lack the key (KeyError); treat both as "disabled".  The previous
        # bare ``except:`` also swallowed unrelated errors such as
        # KeyboardInterrupt -- catch only the expected failures.
        try:
            return settings.get("event_listeners", None)[event]
        except (TypeError, KeyError):
            return False

    def isOnlyOneCursor(self, view):
        """ check there is only one cursor """
        return len(view.sel()) == 1

    def isFirstCursorNearBeginning(self, view):
        """ check the cursor is at first few lines """
        return view.rowcol(view.sel()[0].a)[0] < 2

    def isScopePlainText(self, view):
        """ check the scope of the first line is plain text """
        return view.scope_name(0).strip() == 'text.plain'

    def matchAndSetSyntax(self, view):
        """ match the first line against the mappings and set the syntax """
        firstLine = self.getPartialFirstLine(view)
        for syntaxMapping in syntaxMappings.value():
            syntaxFile, firstLineMatchRegexes = syntaxMapping
            for firstLineMatchRegex in firstLineMatchRegexes:
                if firstLineMatchRegex.search(firstLine) is not None:
                    view.set_syntax_file(syntaxFile)
                    return

    def getPartialFirstLine(self, view):
        """ return the first line, truncated to first_line_length_max """
        region = view.line(0)
        firstLineLengthMax = settings.get('first_line_length_max')
        if firstLineLengthMax >= 0:
            # if the first line is longer than the max line length,
            # then use the max line length
            # otherwise use the actual length of the first line
            region = sublime.Region(0, min(region.end(), firstLineLengthMax))
        return view.substr(region)
|
Python
| 0.999999
|
@@ -2329,105 +2329,8 @@
and%0A
- self.isOnlyOneCursor(view) and%0A self.isFirstCursorNearBeginning(view) and%0A
|
348896e6f9318755d9bbefdf94de18ed32b17d1d
|
Update item.py
|
item.py
|
item.py
|
import pygame
class Item(pygame.sprite.Sprite):
    """A collectible item sprite (gem, axe or sammich) on the level map."""

    def __init__(self, level, *groups):
        """Create an item using the level's animator for its artwork.

        level  -- the game level (supplies the animator and tile sizes)
        groups -- sprite groups this item should join
        """
        super(Item, self).__init__(*groups)
        # the game level
        self.level = level
        # base image; red (255, 0, 0) is the transparency colorkey
        self.level.animator.set_Img(6, 0)
        self.image = self.level.animator.get_Img().convert()
        self.image.set_colorkey((255, 0, 0))
        # type: flavor names indexed by the itype passed to set_type()
        self.flavor_saver = ['gem', 'axe', 'sammich']
        self.flavor = 'gem'
        # location: screen tile (scrnx/scrny) and original map tile (mapx/mapy)
        self.firstflag = True
        self.scrnx = 0
        self.scrny = 0
        self.mapx = 0
        self.mapy = 0

    def spawn(self, x, y):
        """Place the item at tile (x, y); the first placement is also
        remembered as its map position."""
        self.scrnx = x
        self.scrny = y
        if self.firstflag:
            self.mapx = x
            self.mapy = y
            self.firstflag = False
        self.rect = pygame.rect.Rect((x * self.level.tilex, y * self.level.tiley), self.image.get_size())

    def set_type(self, itype):
        """Switch the item's flavor and artwork; itype must be 0, 1 or 2."""
        # sprite-sheet (x, y) indices per item type
        sheet = {0: (6, 0), 1: (6, 5), 2: (6, 4)}
        if itype not in sheet:
            # previously an out-of-range itype crashed with an IndexError /
            # UnboundLocalError; fail with a clear message instead
            raise ValueError('unknown item type: %r' % (itype,))
        self.flavor = self.flavor_saver[itype]
        xind, yind = sheet[itype]
        self.level.animator.set_Img(xind, yind)
        self.image = self.level.animator.get_Img().convert()
        self.image.set_colorkey((255, 0, 0))

    def set_Index(self, x, y):
        """Move the item to screen tile (x, y), updating its pixel rect."""
        self.scrnx = x
        self.rect.x = x * self.level.tilex
        self.scrny = y
        self.rect.y = y * self.level.tiley

    def get_Index(self, axis):
        """Return the screen tile index for axis 'X' or 'Y'; -1 otherwise."""
        if axis == 'X':
            return self.scrnx
        if axis == 'Y':
            return self.scrny
        return -1
|
Python
| 0
|
@@ -204,11 +204,11 @@
Img(
-6,0
+0,5
)%0A%09%09
@@ -297,16 +297,155 @@
5,0,0))%0A
+%09%09self.level.animator.set_Img(6,0)%0A%09%09self.secretimage = self.level.animator.get_Img().convert()%0A%09%09self.secretimage.set_colorkey((255,0,0))%0A
%09%09%0A%09%09#ty
@@ -493,16 +493,29 @@
sammich'
+, 'telescope'
%5D%0A%09%09self
@@ -1066,16 +1066,57 @@
ind = 4%0A
+%09%09if itype == 3:%0A%09%09%09xind = 6%0A%09%09%09yind = 3%0A
%09%09%09%0A%09%09se
@@ -1151,32 +1151,38 @@
nd,yind)%0A%09%09self.
+secret
image = self.lev
@@ -1212,32 +1212,38 @@
onvert()%0A%09%09self.
+secret
image.set_colork
@@ -1257,16 +1257,68 @@
,0,0))%0A%0A
+%09def reveal(self):%0A%09%09self.image = self.secretimage%0A%0A
%09def set
|
e74420b90e83ade7956023eaf4ef2613e441a9ca
|
Fix linter error with ambiguous variable name 'l'.
|
bitfield/forms.py
|
bitfield/forms.py
|
from __future__ import absolute_import
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
try:
from django.utils.encoding import force_text
except ImportError:
from django.utils.encoding import force_unicode as force_text
from bitfield.types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
    """Checkbox widget for BitField values.

    Accepts the current value either as a BitHandler, a raw integer
    bitmask, or a list of flag names.
    """

    def render(self, name, value, attrs=None, choices=()):
        if isinstance(value, BitHandler):
            # keep only the names of the flags that are set
            value = [k for k, v in value if v]
        elif isinstance(value, int):
            # decode the integer bitmask into the list of set flag names,
            # walking the choices in bit order
            real_value = []
            div = 2
            for (k, v) in self.choices:
                if value % div != 0:
                    real_value.append(k)
                    value -= (value % div)
                div *= 2
            value = real_value
        return super(BitFieldCheckboxSelectMultiple, self).render(
            name, value, attrs=attrs)

    def _has_changed(self, initial, data):
        """Report whether the submitted selection differs from the initial one."""
        if initial is None:
            initial = []
        if data is None:
            data = []
        # Compare as sets of strings so ordering and int-vs-str differences
        # do not register as a change.  The previous early
        # ``if initial != data: return True`` made this comparison dead code
        # (it could only run when the lists were already equal) and wrongly
        # flagged reordered or differently-typed but identical selections.
        initial_set = set([force_text(value) for value in initial])
        data_set = set([force_text(value) for value in data])
        return data_set != initial_set
class BitFormField(IntegerField):
    """Form field for BitField columns, rendered as checkboxes.

    ``initial`` may be given either as a list of flag names or as a raw
    integer bitmask; an integer is decoded into flag names here.
    """

    def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
        # Decode an integer initial bitmask into a list of choice keys.
        # ``kwargs.get`` is used so a missing 'initial' kwarg no longer
        # raises KeyError (the old code subscripted unconditionally).
        if isinstance(kwargs.get('initial'), int):
            iv = kwargs['initial']
            iv_list = []
            # only the first 63 bits can be represented
            for i in range(0, min(len(choices), 63)):
                if (1 << i) & iv > 0:
                    iv_list += [choices[i][0]]
            kwargs['initial'] = iv_list
        self.widget = widget
        super(BitFormField, self).__init__(widget=widget, *args, **kwargs)
        self.choices = self.widget.choices = choices

    def clean(self, value):
        """Convert the submitted list of flag names back into an integer mask."""
        if not value:
            return 0

        # Assume an iterable which contains an item per flag that's enabled
        result = BitHandler(0, [k for k, v in self.choices])
        for k in value:
            try:
                setattr(result, str(k), True)
            except AttributeError:
                raise ValidationError('Unknown choice: %r' % (k,))
        return int(result)
|
Python
| 0
|
@@ -1499,17 +1499,23 @@
-l
+iv_list
= %5B%5D%0A
@@ -1624,17 +1624,23 @@
-l
+iv_list
+= %5Bcho
@@ -1683,17 +1683,23 @@
ial'%5D =
-l
+iv_list
%0A
|
5fc8258c4d3819b6a4b23819fd3c4578510dd633
|
Allow www.lunahealing.ca as a domain
|
lunahealing/site_settings/prod.py
|
lunahealing/site_settings/prod.py
|
# Django settings for quotations project.
import os

from lunahealing.site_settings.common import *

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY')

# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES = {
    'default': dj_database_url.config()
}

# Store uploaded media and collected static files on S3, each under its
# own top-level folder inside the bucket.
DEFAULT_FILE_STORAGE = 's3_folder_storage.s3.DefaultStorage'
DEFAULT_S3_PATH = 'media'
STATICFILES_STORAGE = 's3_folder_storage.s3.StaticStorage'
STATIC_S3_PATH = 'static'

# AWS credentials and bucket name come from the environment
# (e.g. Heroku config vars).
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
# Serve public URLs without signed query strings.
AWS_QUERYSTRING_AUTH = False

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '//s3.amazonaws.com/%s/media/' % AWS_STORAGE_BUCKET_NAME

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '//s3.amazonaws.com/%s/static/' % AWS_STORAGE_BUCKET_NAME

INSTALLED_APPS.extend([
    's3_folder_storage',
    'storages',
])

ALLOWED_HOSTS = ['lunahealing.herokuapp.com', '*.lunahealing.ca']
|
Python
| 0
|
@@ -1403,17 +1403,19 @@
.com', '
-*
+www
.lunahea
|
ed472902f71f39cf09eca5ee9193bcf99283b566
|
Remove unused code
|
room.py
|
room.py
|
# Each PS room joined creates an object here.
# Objects control settings on a room-per-room basis, meaning every room can
# be treated differently.
from plugins.tournaments import Tournament
class Room:
    """State for a single PS room the bot has joined.

    Tracks the userlist and per-room settings (moderation, games) plus any
    running tournament or game, so every room can be treated differently.
    """

    def __init__(self, room, data):
        if not data:
            # This is a hack to support both strings and dicts as input to the class
            data = {'moderate': False, 'allow games': False}
        # users maps username -> rank character (e.g. '@', '+')
        self.users = {}
        self.loading = True
        self.title = room
        self.moderate = data['moderate']
        self.allowGames = data['allow games']
        self.tour = None
        self.game = None

    def doneLoading(self):
        """Mark the initial room join (userlist download) as finished."""
        self.loading = False

    def addUserlist(self, users):
        """Parse the joined userlist: comma-separated 'rank+name' entries."""
        self.users = {u[1:]: u[0] for u in users.split(',')}

    def addUser(self, user, auth):
        if user not in self.users:
            self.users[user] = auth

    def removeUser(self, user):
        if user in self.users:
            self.users.pop(user)

    def renamedUser(self, old, new):
        # a rename is a removal followed by an add with the new rank+name
        self.removeUser(old)
        self.addUser(new[1:], new[0])

    # NOTE: the former ``allowGames(self, yesNo)`` method was removed; the
    # same-named instance attribute assigned in __init__ shadowed it, so it
    # was unreachable on instances.  Assign ``room.allowGames`` directly.

    def createTour(self, ws):
        self.tour = Tournament(ws, self.title)

    def endTour(self):
        self.tour = None
|
Python
| 0.000006
|
@@ -1075,70 +1075,8 @@
%5D)%0A%0A
- def allowGames(self, yesNo):%0A %09self.allowGames = yesNo%0A
|
bd313ff4ce69e7b9a9765672442ef6cf9fa00dba
|
Fix parameter validation tests
|
tests/core/parameter_validation/test_parameter_clone.py
|
tests/core/parameter_validation/test_parameter_clone.py
|
# -*- coding: utf-8 -*-
from ..test_countries import tax_benefit_system
import os
from openfisca_core.parameters import ParameterNode
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
year = 2016
def test_clone():
    """Cloning a ParameterNode yields an equal but fully independent tree."""
    path = os.path.join(BASE_DIR, 'filesystem_hierarchy')
    parameters = ParameterNode('', directory_path = path)

    original_at_instant = parameters('2016-01-01')
    assert original_at_instant.node1.param == 1.0

    duplicate = parameters.clone()
    duplicate_at_instant = duplicate('2016-01-01')
    assert duplicate_at_instant.node1.param == 1.0

    # the clone shares no objects with the original tree
    assert id(duplicate) != id(parameters)
    assert id(duplicate.node1) != id(parameters.node1)
    assert id(duplicate.node1.param) != id(parameters.node1.param)
def test_clone_parameter():
    """A cloned Parameter duplicates its values_list and each of its items."""
    original = tax_benefit_system.parameters.taxes.income_tax_rate
    duplicate = original.clone()

    assert duplicate is not original
    assert duplicate.values_list is not original.values_list
    assert duplicate.values_list[0] is not original.values_list[0]
    # ... while staying value-equal
    assert duplicate.values_list == original.values_list
def test_clone_parameter_node():
    """A cloned ParameterNode duplicates its children, not just itself."""
    original = tax_benefit_system.parameters.taxes
    duplicate = original.clone()

    assert duplicate is not original
    assert duplicate.income_tax_rate is not original.income_tax_rate
    assert duplicate.children['income_tax_rate'] is not original.children['income_tax_rate']
def test_clone_scale():
    """A cloned scale duplicates its brackets and their rate parameters."""
    original = tax_benefit_system.parameters.taxes.social_security_contribution
    duplicate = original.clone()

    assert duplicate.brackets[0] is not original.brackets[0]
    assert duplicate.brackets[0].rate is not original.brackets[0].rate
def test_deep_edit():
    """Editing a clone's leaf values never touches the original parameters."""
    parameters = tax_benefit_system.parameters
    duplicate = parameters.clone()

    # mutate a simple parameter on the clone only
    param = parameters.taxes.income_tax_rate
    duplicate_param = duplicate.taxes.income_tax_rate
    original_value = param.values_list[0].value
    duplicate_param.values_list[0].value = 100
    assert param.values_list[0].value == original_value

    # mutate a scale bracket on the clone only
    scale = parameters.taxes.social_security_contribution
    duplicate_scale = duplicate.taxes.social_security_contribution
    original_scale_value = scale.brackets[0].rate.values_list[0].value
    duplicate_scale.brackets[0].rate.values_list[0].value = 10
    assert scale.brackets[0].rate.values_list[0].value == original_scale_value
|
Python
| 0.000003
|
@@ -1,77 +1,4 @@
-# -*- coding: utf-8 -*-%0Afrom ..test_countries import tax_benefit_system%0A%0A
impo
@@ -3,16 +3,17 @@
port os%0A
+%0A
from ope
@@ -56,16 +56,17 @@
terNode%0A
+%0A
BASE_DIR
@@ -665,16 +665,34 @@
rameter(
+tax_benefit_system
):%0A%0A
@@ -1005,16 +1005,34 @@
er_node(
+tax_benefit_system
):%0A n
@@ -1297,16 +1297,34 @@
e_scale(
+tax_benefit_system
):%0A s
@@ -1561,16 +1561,34 @@
ep_edit(
+tax_benefit_system
):%0A p
|
9bbef1ca463f0f83841c6b61ea8aa56c5454dadc
|
increment stop_id.....
|
ipa_db.py
|
ipa_db.py
|
import sqlite3
class Db:
    """Thin sqlite3 wrapper storing trains and their stop schedules."""

    def __init__(self, db_name):
        self.conn = sqlite3.connect(db_name)

    def __del__(self):
        self.conn.close()

    def __execute(self, sql, args = tuple()):
        # run a single statement and hand back the cursor for iteration
        c = self.conn.cursor()
        c.execute(sql, args)
        return c

    def __commit(self):
        self.conn.commit()

    def remove(self):
        """Drop both tables if they exist."""
        self.__execute('DROP TABLE IF EXISTS trains')
        self.__execute('DROP TABLE IF EXISTS schedule')
        self.__commit()

    def create(self):
        """Create the trains and schedule tables."""
        self.__execute('''CREATE TABLE trains(
            train_id integer PRIMARY KEY,
            train_number text,
            train_operator text,
            train_date text,
            train_relation text
            )''')
        self.__execute('''CREATE TABLE schedule(
            train_id integer,
            stop_id integer,
            stop_name text,
            sched_arrive_time text,
            sched_arrive_delay text,
            sched_departure_time text,
            sched_departure_delay text
            )''')
        self.__commit()

    def get_trains(self):
        """Yield the distinct train numbers present in the database."""
        for row in self.__execute('SELECT DISTINCT train_number FROM trains'):
            yield row[0]

    def update_train(self, id, number, operator, date, relation):
        """Replace the row describing train ``id``."""
        self.__execute('DELETE FROM trains WHERE train_id = ?', (id,))
        self.__execute('''INSERT INTO trains VALUES (
            ?, ?, ?, ?, ?)''',
            (id, number, operator, date, relation)
        )
        self.__commit()

    def update_schedule(self, id, schedule):
        """Replace the stop list of train ``id``.

        Each stop is a 5-tuple: (name, arrive_time, arrive_delay,
        departure_time, departure_delay).
        """
        self.__execute('DELETE FROM schedule WHERE train_id = ?', (id,))
        stop_id = 1
        for stop in schedule:
            self.__execute('''INSERT INTO schedule VALUES (
                ?, ?, ?, ?, ?, ?, ?)''',
                (id, stop_id, stop[0], stop[1], stop[2], stop[3], stop[4])
            )
            # BUG FIX: stop_id was never incremented, so every stop of a
            # train shared stop_id 1 and 'ORDER BY stop_id' was meaningless.
            stop_id += 1
        self.__commit()

    def get_train_schedules(self, name):
        """Yield the train_ids of every run of train number ``name``."""
        for row in self.__execute('SELECT train_id FROM trains WHERE train_number = ? ORDER BY train_id', (name,)):
            yield row[0]

    def get_schedule_info(self, id):
        """Yield the schedule rows of train ``id`` in stop order."""
        for row in self.__execute('SELECT * FROM schedule WHERE train_id = ? ORDER BY stop_id', (id,)):
            yield row
|
Python
| 0
|
@@ -1848,32 +1848,57 @@
%5D)%0A )
+%0A stop_id += 1
%0A%0A self._
|
cdcb0fc8104646e96e20b62e4333d805cddeb704
|
Update test_cygwinccompiler.py
|
distutils/tests/test_cygwinccompiler.py
|
distutils/tests/test_cygwinccompiler.py
|
"""Tests for distutils.cygwinccompiler."""
import unittest
import sys
import os
from io import BytesIO
from test.support import run_unittest
from distutils import cygwinccompiler
from distutils.cygwinccompiler import (check_config_h,
CONFIG_H_OK, CONFIG_H_NOTOK,
CONFIG_H_UNCERTAIN, get_versions,
get_msvcr)
from distutils.tests import support
class FakePopen(object):
    """Stand-in for Popen that serves canned command output.

    The active test case registers itself on ``test_class``; commands whose
    executable is found in its ``_exes`` mapping get their canned bytes
    (on Python 3 Popen yields bytes, see issue #6438), anything else falls
    through to a real ``os.popen`` call.
    """
    test_class = None

    def __init__(self, cmd, shell, stdout):
        executable = cmd.split()[0]
        self.cmd = executable
        canned = self.test_class._exes.get(executable)
        if canned is not None:
            self.stdout = BytesIO(canned)
        else:
            self.stdout = os.popen(cmd, 'r')
class CygwinCCompilerTestCase(support.TempdirManager,
                              unittest.TestCase):
    """Tests for distutils.cygwinccompiler helpers.

    setUp/tearDown swap out sys.version, sysconfig.get_config_h_filename,
    cygwinccompiler.find_executable and cygwinccompiler.Popen so the
    helpers can be driven with canned data; every patched name is restored
    afterwards.
    """

    def setUp(self):
        super(CygwinCCompilerTestCase, self).setUp()
        # remember everything we are about to monkeypatch
        self.version = sys.version
        self.python_h = os.path.join(self.mkdtemp(), 'python.h')
        from distutils import sysconfig
        self.old_get_config_h_filename = sysconfig.get_config_h_filename
        sysconfig.get_config_h_filename = self._get_config_h_filename
        self.old_find_executable = cygwinccompiler.find_executable
        cygwinccompiler.find_executable = self._find_executable
        # canned per-executable output used by FakePopen
        self._exes = {}
        self.old_popen = cygwinccompiler.Popen
        FakePopen.test_class = self
        cygwinccompiler.Popen = FakePopen

    def tearDown(self):
        # restore every patched name in reverse
        sys.version = self.version
        from distutils import sysconfig
        sysconfig.get_config_h_filename = self.old_get_config_h_filename
        cygwinccompiler.find_executable = self.old_find_executable
        cygwinccompiler.Popen = self.old_popen
        super(CygwinCCompilerTestCase, self).tearDown()

    def _get_config_h_filename(self):
        # patched in for sysconfig.get_config_h_filename
        return self.python_h

    def _find_executable(self, name):
        # patched in for cygwinccompiler.find_executable: an executable
        # "exists" iff canned output was registered for it in self._exes
        if name in self._exes:
            return name
        return None

    def test_check_config_h(self):
        # check_config_h looks for "GCC" in sys.version first
        # returns CONFIG_H_OK if found
        sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC '
                       '4.0.1 (Apple Computer, Inc. build 5370)]')
        self.assertEqual(check_config_h()[0], CONFIG_H_OK)

        # then it tries to see if it can find "__GNUC__" in pyconfig.h
        sys.version = 'something without the *CC word'

        # if the file doesn't exist it returns CONFIG_H_UNCERTAIN
        self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN)

        # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK
        self.write_file(self.python_h, 'xxx')
        self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK)

        # and CONFIG_H_OK if __GNUC__ is found
        self.write_file(self.python_h, 'xxx __GNUC__ xxx')
        self.assertEqual(check_config_h()[0], CONFIG_H_OK)

    def test_get_versions(self):
        # get_versions calls distutils.spawn.find_executable on
        # 'gcc', 'ld' and 'dllwrap'
        self.assertEqual(get_versions(), (None, None, None))

        # Let's fake we have 'gcc' and it returns '3.4.5'
        self._exes['gcc'] = b'gcc (GCC) 3.4.5 (mingw special)\nFSF'
        res = get_versions()
        self.assertEqual(str(res[0]), '3.4.5')

        # and let's see what happens when the version
        # doesn't match the regular expression
        # (\d+\.\d+(\.\d+)*)
        self._exes['gcc'] = b'very strange output'
        res = get_versions()
        self.assertEqual(res[0], None)

        # same thing for ld
        self._exes['ld'] = b'GNU ld version 2.17.50 20060824'
        res = get_versions()
        self.assertEqual(str(res[1]), '2.17.50')
        self._exes['ld'] = b'@(#)PROGRAM:ld PROJECT:ld64-77'
        res = get_versions()
        self.assertEqual(res[1], None)

        # and dllwrap
        self._exes['dllwrap'] = b'GNU dllwrap 2.17.50 20060824\nFSF'
        res = get_versions()
        self.assertEqual(str(res[2]), '2.17.50')
        self._exes['dllwrap'] = b'Cheese Wrap'
        res = get_versions()
        self.assertEqual(res[2], None)

    def test_get_msvcr(self):
        # get_msvcr maps the MSC version in sys.version to runtime lib names
        # none
        sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) '
                       '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]')
        self.assertEqual(get_msvcr(), None)

        # MSVC 7.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1300 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr70'])

        # MSVC 7.1
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1310 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr71'])

        # VS2005 / MSVC 8.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1400 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr80'])

        # VS2008 / MSVC 9.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1500 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr90'])

        # modern MSVC (VS2019+) uses the universal CRT
        sys.version = '3.10.0 (tags/v3.10.0:b494f59, Oct 4 2021, 18:46:30) [MSC v.1929 32 bit (Intel)]'
        self.assertEqual(get_msvcr(), ['ucrt', 'vcruntime140'])

        # unknown
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1999 32 bits (Intel)]')
        self.assertRaises(ValueError, get_msvcr)
def test_suite():
    """Return the suite of tests in this module."""
    # unittest.makeSuite is deprecated (and removed in Python 3.13);
    # TestLoader.loadTestsFromTestCase is the long-standing equivalent.
    return unittest.TestLoader().loadTestsFromTestCase(CygwinCCompilerTestCase)
# Allow running this test module directly: python test_cygwinccompiler.py
if __name__ == '__main__':
    run_unittest(test_suite())
|
Python
| 0.000002
|
@@ -5605,12 +5605,12 @@
C v.
-1999
+2000
32
|
5e1fd7769a8911415824ce29177b057d33ab789a
|
update to latest dials.integrate interface
|
Wrappers/Dials/Integrate.py
|
Wrappers/Dials/Integrate.py
|
#!/usr/bin/env python
# Integrate.py
#
# Copyright (C) 2014 Diamond Light Source, Richard Gildea, Graeme Winter
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
#
# Integration using DIALS.
from __future__ import division
from __init__ import _setup_xia2_environ
_setup_xia2_environ()
from Handlers.Flags import Flags
from Schema.Interfaces.FrameProcessor import FrameProcessor
def Integrate(DriverType = None):
    '''A factory for IntegrateWrapper classes.'''

    from Driver.DriverFactory import DriverFactory
    DriverInstance = DriverFactory.Driver(DriverType)

    class IntegrateWrapper(DriverInstance.__class__,
                           FrameProcessor):
        '''Wrapper around the dials.integrate command.'''

        def __init__(self):
            DriverInstance.__class__.__init__(self)
            FrameProcessor.__init__(self)

            self.set_executable('dials.integrate')

            # inputs of the integration job
            self._experiments_filename = None
            self._reflections_filename = None
            # algorithm passed as intensity.algorithm=
            self._integration_algorithm = "fitrs"
            # optional background outlier rejection algorithm
            self._outlier_algorithm = None
            # optional extra PHIL parameter file appended to the command line
            self._phil_file = None
            # mosaicity ("Sigma M") parsed from the program output by run()
            self._mosaic = None
            return

        def set_experiments_filename(self, experiments_filename):
            self._experiments_filename = experiments_filename
            return

        def get_experiments_filename(self):
            return self._experiments_filename

        def set_reflections_filename(self, reflections_filename):
            self._reflections_filename = reflections_filename
            return

        def get_reflections_filename(self):
            return self._reflections_filename

        def set_intensity_algorithm(self, algorithm):
            self._integration_algorithm = algorithm
            return

        def get_intensity_algorithm(self):
            return self._integration_algorithm

        def set_background_outlier_algorithm(self, algorithm):
            self._outlier_algorithm = algorithm
            return

        def get_background_outlier_algorithm(self):
            return self._outlier_algorithm

        def set_phil_file(self, phil_file):
            self._phil_file = phil_file
            return

        def get_integrated_filename(self):
            # dials.integrate writes its integrated reflections here
            import os
            return os.path.join(self.get_working_directory(), 'integrated.pickle')

        def get_mosaic(self):
            return self._mosaic

        def run(self):
            '''Assemble the command line, run dials.integrate and parse the
            refined mosaicity estimate from its output.'''
            from Handlers.Streams import Debug
            Debug.write('Running dials.integrate')

            self.clear_command_line()
            self.add_command_line(self._experiments_filename)
            self.add_command_line(("-r", self._reflections_filename))
            self.add_command_line(
                'intensity.algorithm=%s' % self._integration_algorithm)
            if self._outlier_algorithm is not None:
                self.add_command_line(
                    'outlier.algorithm=%s' % self._outlier_algorithm)
            if self._phil_file is not None:
                self.add_command_line("%s" % self._phil_file)
            self.start()
            self.close_wait()
            self.check_for_errors()

            # pick the refined mosaicity ("Sigma M") out of the log; the
            # value is the last token on the matching line
            for record in self.get_all_output():
                if 'Sigma M' in record:
                    self._mosaic = float(record.split()[-1])
            return

    return IntegrateWrapper()
if __name__ == '__main__':
    # Smoke test: run the import -> spotfind -> index -> refine ->
    # integrate pipeline on an image given on the command line, with one
    # or more "start,end" scan ranges as the remaining arguments.
    import sys
    image_file = sys.argv[1]
    scan_ranges = [(int(token.split(',')[0]), int(token.split(',')[1]))
                   for token in sys.argv[2:]]

    from Wrappers.Dials.Import import Import
    from Wrappers.Dials.Spotfinder import Spotfinder
    from Wrappers.Dials.Index import Index
    from Wrappers.Dials.RefineBravaisSettings import RefineBravaisSettings

    print "Begin importing"
    importer = Import()
    importer.setup_from_image(image_file)
    importer.set_image_range(scan_ranges[0])
    importer.run()
    print ''.join(importer.get_all_output())
    print "Done importing"

    print "Begin spotfinding"
    spotfinder = Spotfinder()
    spotfinder.set_sweep_filename(importer.get_sweep_filename())
    spotfinder.set_scan_ranges(scan_ranges)
    spotfinder.run()
    print ''.join(spotfinder.get_all_output())
    print "Done spotfinding"

    print "Begin indexing"
    indexer = Index()
    indexer.set_spot_filename(spotfinder.get_spot_filename())
    indexer.set_sweep_filename(importer.get_sweep_filename())
    indexer.run('fft3d')
    print ''.join(indexer.get_all_output())
    print "Done indexing"

    print "Begin refining"
    rbs = RefineBravaisSettings()
    rbs.set_experiments_filename(indexer.get_experiments_filename())
    rbs.set_indexed_filename(indexer.get_indexed_filename())
    rbs.run()
    print ''.join(rbs.get_all_output())
    print "Done refining"

    print "Begin integrating"
    integrater = Integrate()
    integrater.set_experiments_filename(indexer.get_experiments_filename())
    integrater.set_reflections_filename(indexer.get_indexed_filename())
    integrater.run()
    print ''.join(integrater.get_all_output())
    print "Done integrating"
|
Python
| 0
|
@@ -2435,13 +2435,23 @@
e((%22
--r%22,
+reference=%25s%22 %25
self
|
f00b60342c82f30670dde60f42df53f165f08270
|
tweak Selector to be more ascii and close on escape
|
subiquitycore/ui/interactive.py
|
subiquitycore/ui/interactive.py
|
# Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Re-usable input widgets
"""
from urwid import (
ACTIVATE,
AttrWrap,
connect_signal,
Edit,
Filler,
IntEdit,
LineBox,
Pile,
PopUpLauncher,
SelectableIcon,
TOP,
WidgetWrap,
)
import logging
import re
log = logging.getLogger("subiquitycore.ui.input")
class StringEditor(WidgetWrap):
    """ Edit input class
    Initializes and Edit object and attachs its result
    to the `value` accessor.
    """
    def __init__(self, caption, **kwargs):
        self._edit = Edit(caption=caption, **kwargs)
        # last error message shown inside the edit box, or None
        self.error = None
        super().__init__(self._edit)

    def keypress(self, size, key):
        # an error message is rendered inside the edit box itself; clear it
        # on the next keypress so the user can start typing again
        if self.error:
            self._edit.set_edit_text("")
            self.error = None
        return super().keypress(size, key)

    def set_error(self, msg):
        """Display *msg* inside the edit box until the next keypress."""
        self.error = msg
        return self._edit.set_edit_text(msg)

    @property
    def value(self):
        return self._edit.get_edit_text()

    @value.setter  # NOQA
    def value(self, value):
        self._edit.set_edit_text(value)
class PasswordEditor(StringEditor):
    """ Password input prompt with masking
    """
    def __init__(self, caption, mask="*"):
        # every typed character is rendered as ``mask``
        super().__init__(caption, mask=mask)
class RealnameEditor(StringEditor):
    """ Username input prompt with input rules
    """
    def keypress(self, size, key):
        ''' restrict what chars we allow for username '''
        # NOTE(review): urwid also delivers multi-character key names
        # ('enter', 'backspace', ...); re.match only tests the first
        # character, so e.g. 'backspace' passes because 'b' matches --
        # presumably intended, confirm.
        realname = r'[a-zA-Z0-9_\- ]'
        if re.match(realname, key) is None:
            # swallow the key: a falsy return marks it handled
            return False
        return super().keypress(size, key)
class EmailEditor(StringEditor):
    """ Email input prompt with input rules
    """
    def keypress(self, size, key):
        ''' restrict what chars we allow for username '''
        # characters permitted in an email address
        realname = r'[-a-zA-Z0-9_.@+=]'
        if re.match(realname, key) is None:
            # swallow the key: a falsy return marks it handled
            return False
        return super().keypress(size, key)
class UsernameEditor(StringEditor):
    """ Username input prompt with input rules
    """
    def keypress(self, size, key):
        ''' restrict what chars we allow for username '''
        userlen = len(self.value)
        if userlen == 0:
            # first character: usernames must start with a letter or '_'
            username = r'[a-z_]'
        else:
            username = r'[a-z0-9_-]'
        # don't allow non username chars
        if re.match(username, key) is None:
            return False
        return super().keypress(size, key)
class MountEditor(StringEditor):
    """Text input restricted to characters valid in a mount point path."""

    def keypress(self, size, key):
        # allow path-ish characters only
        ok = re.match(r'[a-zA-Z0-9_/\.\-]', key) is not None
        if not ok:
            return False
        return super().keypress(size, key)
class IntegerEditor(WidgetWrap):
    """Numeric input built on urwid's IntEdit."""

    def __init__(self, caption, default=0):
        self._edit = IntEdit(caption=caption, default=default)
        super().__init__(self._edit)

    @property
    def value(self):
        # NOTE: returns the raw edit text (a string), not an int.
        return self._edit.get_edit_text()
class _PopUpButton(SelectableIcon):
    """Looks like a radio button, but simply emits 'click' on activation."""

    signals = ['click']
    # Prefix rendered before the option text: selected vs. unselected.
    states = {
        True: " ▸ ",
        False: " ",
    }

    def __init__(self, option, state):
        prefix = self.states[state]
        # cursor position sits just after the prefix
        super().__init__(prefix + option, len(prefix))

    def keypress(self, size, key):
        if self._command_map[key] != ACTIVATE:
            return key  # not an activation key; let it propagate
        self._emit('click')
class _PopUpSelectDialog(WidgetWrap):
    """A boxed vertical list of _PopUpButtons, one per selector option."""

    def __init__(self, parent, cur_index):
        self.parent = parent
        buttons = []
        for idx, option in enumerate(self.parent._options):
            button = _PopUpButton(option, state=(idx == cur_index))
            connect_signal(button, 'click', self.click, idx)
            buttons.append(AttrWrap(button, 'menu_button', 'menu_button focus'))
        stack = Pile(buttons)
        # start with the currently selected option focused
        stack.set_focus(buttons[cur_index])
        super().__init__(LineBox(Filler(stack, valign=TOP)))

    def click(self, btn, index):
        # Record the chosen option on the parent Selector and dismiss.
        self.parent.index = index
        self.parent.close_pop_up()
class Selector(PopUpLauncher):
    """Lets the user choose one of several options from a pop-up list
    (roughly an HTML <select>).
    """

    _prefix = " ▾ "

    def __init__(self, opts, index=0):
        self._options = opts
        self._button = SelectableIcon(self._prefix, len(self._prefix))
        self.index = index  # also renders the initial label
        super().__init__(self._button)

    def keypress(self, size, key):
        if self._command_map[key] == ACTIVATE:
            self.open_pop_up()
        else:
            return key

    @property
    def index(self):
        """Position of the currently selected option."""
        return self._index

    @index.setter
    def index(self, val):
        # Refresh the visible label, then record the new position.
        self._button.set_text(self._prefix + self._options[val])
        self._index = val

    @property
    def value(self):
        """Text of the currently selected option."""
        return self._options[self._index]

    def create_pop_up(self):
        return _PopUpSelectDialog(self, self.index)

    def get_pop_up_parameters(self):
        # +3 columns: line on left, space, line on right
        widest = max(len(o) for o in self._options)
        return {
            'left': -1,
            'top': -self.index - 1,
            'overlay_width': widest + len(self._prefix) + 3,
            'overlay_height': len(self._options) + 2,
        }
class YesNo(Selector):
    """Binary Yes/No selector."""

    def __init__(self):
        super().__init__(['Yes', 'No'])
|
Python
| 0
|
@@ -3778,16 +3778,22 @@
t looks
+a bit
like a r
@@ -3909,10 +3909,11 @@
e: %22
- %E2%96%B8
+(+)
%22,%0A
@@ -3928,18 +3928,19 @@
False: %22
-
+( )
%22,%0A
@@ -4881,24 +4881,184 @@
e_pop_up()%0A%0A
+ def keypress(self, size, key):%0A if key == 'esc':%0A self.parent.close_pop_up()%0A else:%0A return super().keypress(size, key)%0A
%0Aclass Selec
@@ -5247,10 +5247,11 @@
= %22
- %E2%96%BE
+(+)
%22%0A%0A
|
175a8007ef06bbf3a01943c161a162adbf23d7fd
|
Use tf.gfile instead of os.path in sequence_generator.py for internal compatibility. (#178)
|
magenta/lib/sequence_generator.py
|
magenta/lib/sequence_generator.py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Abstract class for sequence generators.
Provides a uniform interface for interacting with generators for any model.
"""
import abc
import os
# internal imports
import tensorflow as tf
class SequenceGeneratorException(Exception):
    """Generic exception raised for sequence generation errors."""
class BaseSequenceGenerator(object):
  """Abstract base class for sequence generators.

  Concrete subclasses implement graph construction (_initialize),
  teardown (_close) and generation (_generate); this class handles
  checkpoint resolution and lazy one-time initialization.
  """

  __metaclass__ = abc.ABCMeta

  def __init__(self, details, checkpoint):
    """Constructs a BaseSequenceGenerator.

    Args:
      details: A generator_pb2.GeneratorDetails for this generator.
      checkpoint: Where to look for the most recent model checkpoint. Either a
          directory to be used with tf.train.latest_checkpoint or the path to
          a single checkpoint file.
    """
    self._details = details
    self._checkpoint = checkpoint
    self._initialized = False

  def get_details(self):
    """Returns a GeneratorDetails description of this generator."""
    return self._details

  @abc.abstractmethod
  def _initialize(self, checkpoint_file):
    """Builds the TF graph from the given checkpoint file path."""
    pass

  @abc.abstractmethod
  def _close(self):
    """Closes the TF session."""
    pass

  @abc.abstractmethod
  def _generate(self, generate_sequence_request):
    """Generates a sequence; _initialize is guaranteed to have run first.

    Args:
      generate_sequence_request: The request for generating a sequence.

    Returns:
      A GenerateSequenceResponse proto.
    """
    pass

  def initialize(self):
    """Builds the TF graph and loads the checkpoint (idempotent).

    Raises:
      SequenceGeneratorException: If the checkpoint cannot be found.
    """
    if self._initialized:
      return
    if not os.path.exists(self._checkpoint):
      raise SequenceGeneratorException(
          'Checkpoint path does not exist: %s' % (self._checkpoint))
    checkpoint_file = self._checkpoint
    # A directory means "use the newest checkpoint inside it".
    if os.path.isdir(checkpoint_file):
      checkpoint_file = tf.train.latest_checkpoint(checkpoint_file)
    if not os.path.isfile(checkpoint_file):
      raise SequenceGeneratorException(
          'Checkpoint path is not a file: %s (supplied path: %s)' % (
              checkpoint_file, self._checkpoint))
    self._initialize(checkpoint_file)
    self._initialized = True

  def close(self):
    """Closes the TF session if it is open (idempotent)."""
    if self._initialized:
      self._close()
      self._initialized = False

  def __enter__(self):
    """When used as a context manager, initializes the TF session."""
    self.initialize()
    return self

  def __exit__(self, *args):
    """When used as a context manager, closes the TF session."""
    self.close()

  def generate(self, generate_sequence_request):
    """Initializes the graph if needed, then generates a sequence.

    Args:
      generate_sequence_request: The request for generating a sequence.

    Returns:
      A GenerateSequenceResponse proto.
    """
    self.initialize()
    return self._generate(generate_sequence_request)
|
Python
| 0
|
@@ -727,18 +727,8 @@
abc
-%0Aimport os
%0A%0A#
@@ -2563,17 +2563,18 @@
not
-os.path.e
+tf.gfile.E
xist
@@ -2839,21 +2839,28 @@
if
-os.path.isdir
+tf.gfile.IsDirectory
(che
@@ -2957,26 +2957,231 @@
if
-not os.path.isfile
+checkpoint_file is None:%0A raise SequenceGeneratorException(%0A 'No checkpoint file found in directory: %25s' %25 self._checkpoint)%0A if (not tf.gfile.Exists(checkpoint_file) or%0A tf.gfile.IsDirectory
(che
@@ -3185,32 +3185,33 @@
checkpoint_file)
+)
:%0A raise
|
1f752237d83c486b94ddcc7f5e3b42eb5951a60b
|
remove unused imports
|
pabot/SharedLibrary.py
|
pabot/SharedLibrary.py
|
from robot.libraries.BuiltIn import BuiltIn
from robot.libraries.Remote import Remote
from robot.api import logger
from robot.running.testlibraries import TestLibrary
from robot.running.context import EXECUTION_CONTEXTS
from robot.running.model import Keyword
from robotremoteserver import RemoteLibraryFactory
from .pabotlib import PABOT_QUEUE_INDEX
class SharedLibrary(object):
    # Proxy library: forwards keyword calls either to a locally imported
    # library instance (plain Robot Framework run) or to a remote server
    # process shared between parallel pabot executions.

    ROBOT_LIBRARY_SCOPE = 'GLOBAL'

    def __init__(self, name):
        """
        Import a library so that the library instance is shared between executions.
        [https://pabot.org/PabotLib.html?ref=log#import-shared-library|Open online docs.]
        """
        # FIXME: RELATIVE IMPORTS WITH FILE NAME
        self._remote = None
        # The pabot queue index variable is only set when running under pabot;
        # without it, fall back to a plain in-process import.
        if BuiltIn().get_variable_value('${%s}' % PABOT_QUEUE_INDEX) is None:
            logger.debug("Not currently running pabot. Importing library for this process.")
            self._lib = RemoteLibraryFactory(TestLibrary(name).get_instance())
            return
        # Running under pabot: ask the central PabotLib server to import the
        # library once and report the port its remote server listens on.
        uri = BuiltIn().get_variable_value('${PABOTLIBURI}')
        logger.debug('PabotLib URI %r' % uri)
        remotelib = Remote(uri) if uri else None
        if remotelib:
            try:
                port = remotelib.run_keyword("import_shared_library", [name], {})
            except RuntimeError:
                logger.error('No connection - is pabot called with --pabotlib option?')
                raise
            # All subsequent keyword calls go through this per-library endpoint.
            self._remote = Remote("http://127.0.0.1:%s" % port)
            logger.debug("Lib imported with name %s from http://127.0.0.1:%s" % (name, port))
        else:
            logger.error('No connection - is pabot called with --pabotlib option?')
            raise AssertionError('No connection to pabotlib')

    def get_keyword_names(self):
        # Delegate to whichever backend __init__ set up.
        if self._remote:
            return self._remote.get_keyword_names()
        return self._lib.get_keyword_names()

    def run_keyword(self, name, args, kwargs):
        if self._remote:
            return self._remote.run_keyword(name, args, kwargs)
        # The local backend returns a remote-server style result dict;
        # translate FAIL status into a raised assertion.
        result = self._lib.run_keyword(name, args, kwargs)
        if result['status'] == 'FAIL':
            raise AssertionError(result['error'])
        return result['return']
|
Python
| 0.000001
|
@@ -164,101 +164,8 @@
ary%0A
-from robot.running.context import EXECUTION_CONTEXTS%0Afrom robot.running.model import Keyword%0A
from
|
1f4ef496f932ec2a12d348b0c90b1f57d6ef9e20
|
update version number
|
nutils/__init__.py
|
nutils/__init__.py
|
import numpy
from distutils.version import LooseVersion
# Fail fast at import time on unsupported numpy versions.
assert LooseVersion(numpy.version.version) >= LooseVersion('1.8'), 'nutils requires numpy 1.8 or higher, got %s' % numpy.version.version
version = '1.dev'
# Shorthand used throughout nutils for inserting an axis in slice expressions.
_ = numpy.newaxis
# Public submodules (plus the '_' and 'numpy' shorthands) re-exported by the package.
__all__ = [ '_', 'numpy', 'core', 'numeric', 'element', 'function',
  'mesh', 'plot', 'library', 'topology', 'util', 'matrix', 'parallel', 'log',
  'debug', 'cache', 'transform', 'rational' ]
|
Python
| 0.000002
|
@@ -203,13 +203,15 @@
= '
-1.dev
+2.0beta
'%0A%0A_
|
4659be5638ba4f3a898da16df805254dfab0986e
|
Fix backed-out subclassing foul
|
comment_utils/templatetags/comment_utils.py
|
comment_utils/templatetags/comment_utils.py
|
"""
Template tags designed to work with applications which use comment
moderation.
"""
from django import template
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import get_model
from django.contrib.comments.models import Comment, FreeComment
from django.contrib.comments.templatetags import comments
from django.contrib.contenttypes.models import ContentType
class PublicCommentCountNode(comments.CommentCountNode):
    # Variant of Django's CommentCountNode that counts only comments with
    # is_public=True, so moderated-away comments are excluded from the count.
    def iter_render(self, context):
        from django.conf import settings
        # Comment requires registration, FreeComment does not; self.free
        # selects which manager to count against.
        manager = self.free and FreeComment.objects or Comment.objects
        if self.context_var_name is not None:
            # object id was supplied as a template variable
            object_id = template.resolve_variable(self.context_var_name, context)
        else:
            object_id = self.obj_id
        comment_count = manager.filter(object_id__exact=object_id,
                                       content_type__app_label__exact=self.package,
                                       content_type__model__exact=self.module,
                                       site__id__exact=settings.SITE_ID,
                                       is_public__exact=True).count()
        # Store the count in the context rather than rendering any output.
        context[self.var_name] = comment_count
        return ''
class DoPublicCommentList(comments.DoGetCommentList):
    """
    Retrieves comments for a particular object and stores them in a
    context variable.

    The difference between this tag and Django's built-in comment list
    tags is that this tag will only return comments with
    ``is_public=True``. If your application uses any sort of comment
    moderation which sets ``is_public=False``, you'll probably want to
    use this tag, as it makes the template logic simpler by only
    returning approved comments.

    Syntax::

        {% get_public_comment_list for [app_name].[model_name] [object_id] as [varname] %}

    or::

        {% get_public_free_comment_list for [app_name].[model_name] [object_id] as [varname] %}

    When called as ``get_public_comment_list``, this tag retrieves
    instances of ``Comment`` (comments which require
    registration). When called as ``get_public_free_comment_list``,
    this tag retrieves instances of ``FreeComment`` (comments which do
    not require registration).

    To retrieve comments in reverse order (e.g., newest comments
    first), pass 'reversed' as an extra argument after ``varname``.

    So, for example, to retrieve registered comments for a flatpage
    with ``id`` 12, use like this::

        {% get_public_comment_list for flatpages.flatpage 12 as comment_list %}

    To retrieve unregistered comments for the same object::

        {% get_public_free_comment_list for flatpages.flatpage 12 as comment_list %}

    To retrieve in reverse order (newest comments first)::

        {% get_public_free_comment_list for flatpages.flatpage 12 as comment_list reversed %}

    """
    def __call__(self, parser, token):
        # Expected form:
        #   {% tag for app.model <id_or_var> as varname [reversed] %}
        bits = token.contents.split()
        if len(bits) not in (6, 7):
            raise template.TemplateSyntaxError("'%s' tag takes 5 or 6 arguments" % bits[0])
        if bits[1] != 'for':
            raise template.TemplateSyntaxError("first argument to '%s' tag must be 'for'" % bits[0])
        try:
            app_name, model_name = bits[2].split('.')
        except ValueError:
            raise template.TemplateSyntaxError("second argument to '%s' tag must be in the form 'app_name.model_name'" % bits[0])
        model = get_model(app_name, model_name)
        if model is None:
            raise template.TemplateSyntaxError("'%s' tag got invalid model '%s.%s'" % (bits[0], app_name, model_name))
        content_type = ContentType.objects.get_for_model(model)
        var_name, object_id = None, None
        # A numeric third argument is a literal object id (validated now);
        # anything else is treated as a context variable name.
        if bits[3].isdigit():
            object_id = bits[3]
            try:
                content_type.get_object_for_this_type(pk=object_id)
            except ObjectDoesNotExist:
                raise template.TemplateSyntaxError("'%s' tag got reference to %s object with id %s, which doesn't exist" % (bits[0], content_type.name, object_id))
        else:
            var_name = bits[3]
        if bits[4] != 'as':
            raise template.TemplateSyntaxError("fourth argument to '%s' tag must be 'as'" % bits[0])
        # Optional trailing 'reversed' flips the ordering.
        if len(bits) == 7:
            if bits[6] != 'reversed':
                raise template.TemplateSyntaxError("sixth argument to '%s' tag, if given, must be 'reversed'" % bits[0])
            ordering = '-'
        else:
            ordering = ''
        # extra_kwargs restricts the list to approved (public) comments.
        return comments.CommentListNode(app_name, model_name, var_name, object_id, bits[5], self.free, ordering, extra_kwargs={ 'is_public__exact': True })
class DoPublicCommentCount(comments.DoCommentCount):
    """
    Retrieves the number of comments attached to a particular object
    and stores them in a context variable.

    The difference between this tag and Django's built-in comment
    count tags is that this tag will only count comments with
    ``is_public=True``. If your application uses any sort of comment
    moderation which sets ``is_public=False``, you'll probably want to
    use this tag, as it gives an accurate count of the comments which
    will be publicly displayed.

    Syntax::

        {% get_public_comment_count for [app_name].[model_name] [object_id] as [varname] %}

    or::

        {% get_public_free_comment_count for [app_name].[model_name] [object_id] as [varname] %}

    Example::

        {% get_public_comment_count for weblog.entry entry.id as comment_count %}

    When called as ``get_public_comment_list``, this tag counts
    instances of ``Comment`` (comments which require
    registration). When called as ``get_public_free_comment_count``,
    this tag counts instances of ``FreeComment`` (comments which do
    not require registration).

    """
    def __call__(self, parser, token):
        # Expected form: {% tag for app.model <id_or_var> as varname %}
        bits = token.contents.split()
        if len(bits) != 6:
            raise template.TemplateSyntaxError("'%s' tag takes five arguments" % bits[0])
        if bits[1] != 'for':
            raise template.TemplateSyntaxError("first argument to '%s' tag must be 'for'" % bits[0])
        try:
            app_name, model_name = bits[2].split('.')
        except ValueError:
            # NOTE(review): the quoting in this message is malformed (the
            # opening quote around "app_name.model_name" is missing) — kept
            # byte-identical here; fix in a behavior-change commit.
            raise template.TemplateSyntaxError("second argument to '%s tag must be in the format app_name.model_name'" % bits[0])
        model = get_model(app_name, model_name)
        if model is None:
            raise template.TemplateSyntaxError("'%s' tag got invalid model '%s.%s'" % (bits[0], app_name, model_name))
        content_type = ContentType.objects.get_for_model(model)
        var_name, object_id = None, None
        # A numeric third argument is a literal object id (validated now);
        # anything else is treated as a context variable name.
        if bits[3].isdigit():
            object_id = bits[3]
            try:
                content_type.get_object_for_this_type(pk=object_id)
            except ObjectDoesNotExist:
                raise template.TemplateSyntaxError("'%s' tag got reference to %s object with id %s, which doesn't exist" % (bits[0], content_type.name, object_id))
        else:
            var_name = bits[3]
        if bits[4] != 'as':
            raise template.TemplateSyntaxError("fourth argument to '%s' tag must be 'as'" % bits[0])
        return PublicCommentCountNode(app_name, model_name, var_name, object_id, bits[5], self.free)
register = template.Library()
# Register registered-comment (False) and free-comment (True) variants of
# each tag under their public template names.
register.tag('get_public_comment_list', DoPublicCommentList(False))
register.tag('get_public_free_comment_list', DoPublicCommentList(True))
register.tag('get_public_comment_count', DoPublicCommentCount(False))
register.tag('get_public_free_comment_count', DoPublicCommentCount(True))
|
Python
| 0.002044
|
@@ -456,13 +456,8 @@
def
-iter_
rend
|
577697301f8682293a00a793807687df9d0ce679
|
Fix fetch_ceph_keys to run in python3
|
docker/ceph/ceph-mon/fetch_ceph_keys.py
|
docker/ceph/ceph-mon/fetch_ceph_keys.py
|
#!/usr/bin/python
# Copyright 2015 Sam Yaple
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This is a stripped down version of an ansible module I wrote in Yaodu to
# achieve the same goals we have for Kolla. I have relicensed it for Kolla
# https://github.com/SamYaple/yaodu/blob/master/ansible/library/bslurp
# Basically this module will fetch the admin and mon keyrings as well as the
# monmap file. It then hashes the content, compresses them, and finally it
# converts them to base64 to be safely transported around with ansible
import base64
import hashlib
import json
import os
import sys
import zlib
def json_exit(msg=None, failed=False, changed=False):
    """Print an Ansible-style JSON result document to stdout and exit.

    Args:
        msg: Result payload. A dict is emitted as-is (plus status keys);
            any other value is wrapped as ``{'msg': str(value)}``.
        failed: Whether the task should be reported as failed.
        changed: Whether the task should be reported as changed.

    Raises:
        SystemExit: always, after printing the document.
    """
    # isinstance (rather than an exact type() check) also accepts dict
    # subclasses such as OrderedDict.
    if not isinstance(msg, dict):
        msg = {'msg': str(msg)}
    # The status flags are always present in the emitted document.
    msg.update({'failed': failed, 'changed': changed})
    print(json.dumps(msg))
    sys.exit()
def read_file(filename):
    """Read a file from /etc/ceph and return its encoded payload.

    The result carries the zlib-compressed, base64-encoded file content
    plus sha1/sha256 digests of the raw bytes for integrity checking.
    Exits the process with a JSON error document if the file is missing
    or unreadable.
    """
    path = os.path.join('/etc/ceph', filename)
    if not os.path.exists(path):
        json_exit("file not found: {}".format(path), failed=True)
    if not os.access(path, os.R_OK):
        json_exit("file not readable: {}".format(path), failed=True)
    with open(path, 'rb') as handle:
        raw = handle.read()
    # TODO(mnasiadka): Remove sha1 in U
    result = {'filename': filename}
    result['content'] = base64.b64encode(zlib.compress(raw))
    result['sha1'] = hashlib.sha1(raw).hexdigest()
    result['sha256'] = hashlib.sha256(raw).hexdigest()
    return result
def main():
    """Emit all mon bootstrap artifacts as one JSON document on stdout."""
    filenames = [
        'ceph.client.admin.keyring',
        'ceph.client.mon.keyring',
        'ceph.client.radosgw.keyring',
        'ceph.monmap',
    ]
    json_exit({name: read_file(name) for name in filenames})


if __name__ == '__main__':
    main()
|
Python
| 0.000016
|
@@ -1780,16 +1780,17 @@
ntent':
+(
base64.b
@@ -1822,16 +1822,26 @@
w_data))
+).decode()
,%0A
|
b4acd028b613a721ffbe5a3136700f190635f7c9
|
Fix import.
|
tests/basics/class_store_class.py
|
tests/basics/class_store_class.py
|
# Inspired by urlparse.py from CPython 3.3 stdlib
# There was a bug in MicroPython that under some conditions class stored
# in instance attribute later was returned "bound" as if it was a method,
# which caused class constructor to receive extra argument.
from collections import namedtuple
# Base record type shared by the str and bytes result classes below.
_DefragResultBase = namedtuple('DefragResult', 'foo bar')
class _ResultMixinStr(object):
def encode(self):
return self._encoded_counterpart(*(x.encode() for x in self))
class _ResultMixinBytes(object):
def decode(self):
return self._decoded_counterpart(*(x.decode() for x in self))
class DefragResult(_DefragResultBase, _ResultMixinStr):
    """str-valued defrag result."""


class DefragResultBytes(_DefragResultBase, _ResultMixinBytes):
    """bytes-valued defrag result."""


# Wire each class to its encode/decode counterpart; these attributes are the
# classes that encode()/decode() construct.
DefragResult._encoded_counterpart = DefragResultBytes
DefragResultBytes._decoded_counterpart = DefragResult
# Due to differences in type and native subclass printing,
# the best thing we can do here is to just test that no exceptions
# happen
#print(DefragResult, DefragResult._encoded_counterpart)
#print(DefragResultBytes, DefragResultBytes._decoded_counterpart)

# Exercise construction and encoding; the original bug returned the class
# stored in an instance attribute "bound" like a method, so the constructor
# received an extra argument and this would raise.
o1 = DefragResult("a", "b")
#print(o1, type(o1))
o2 = DefragResultBytes("a", "b")
#print(o2, type(o2))
#print(o1._encoded_counterpart)
_o1 = o1.encode()
print(_o1[0], _o1[1])
#print(_o1, type(_o1))
print("All's ok")
|
Python
| 0
|
@@ -255,16 +255,17 @@
t.%0Afrom
+_
collecti
|
1a80097aeb00d13c878bc9563e880287602c5833
|
Fix PEP8: E112 expected an indented block
|
pavelib/paver_tests/test_paver_quality.py
|
pavelib/paver_tests/test_paver_quality.py
|
import os
import tempfile
import unittest
from mock import patch
from ddt import ddt, file_data
import pavelib.quality
import paver.easy
from paver.easy import BuildFailure
@ddt
class TestPaverQualityViolations(unittest.TestCase):
    # Unit tests for the violation-counting helpers in pavelib.quality.

    def setUp(self):
        # Temp file used as a fake report; delete=False so it survives close()
        # and can be reopened by each test. Removed again in tearDown.
        self.f = tempfile.NamedTemporaryFile(delete=False)
        self.f.close()

    def test_pylint_parser_other_string(self):
        # Arbitrary non-violation text must not be counted.
        with open(self.f.name, 'w') as f:
            f.write("hello")
        num = pavelib.quality._count_pylint_violations(f.name)
        self.assertEqual(num, 0)

    def test_pylint_parser_pep8(self):
        # Pep8 violations should be ignored.
        with open(self.f.name, 'w') as f:
            f.write("foo/hello/test.py:304:15: E203 whitespace before ':'")
        num = pavelib.quality._count_pylint_violations(f.name)
        self.assertEqual(num, 0)

    @file_data('pylint_test_list.json')
    def test_pylint_parser_count_violations(self, value):
        # Tests:
        # * Different types of violations
        # * One violation covering multiple lines
        with open(self.f.name, 'w') as f:
            f.write(value)
        num = pavelib.quality._count_pylint_violations(f.name)
        self.assertEqual(num, 1)

    def test_pep8_parser(self):
        # Each non-empty line in a pep8 report is one violation.
        with open(self.f.name, 'w') as f:
            f.write("hello\nhithere")
        num = pavelib.quality._count_pep8_violations(f.name)
        self.assertEqual(num, 2)

    def tearDown(self):
        os.remove(self.f.name)
class TestPaverRunQuality(unittest.TestCase):
    """
    For testing the paver run_quality task
    """

    def setUp(self):
        # mock the @needs decorator to skip it
        self._needs_patcher = patch.object(pavelib.quality.run_quality, 'needs')
        self._mock_paver_needs = self._needs_patcher.start()
        self._mock_paver_needs.return_value = 0
        self._sh_patcher = patch('pavelib.quality.sh')
        self._mock_paver_sh = self._sh_patcher.start()
        # BUG FIX: register the *patchers'* stop methods as cleanups.
        # The previous code called addCleanup(self._mock_paver_sh.stop()):
        # .stop() on the started MagicMock is just an auto-created child mock,
        # so it ran immediately, registered a useless mock as the cleanup,
        # and the patches were never undone — leaking the patched
        # pavelib.quality.sh into every later test case.
        self.addCleanup(self._sh_patcher.stop)
        self.addCleanup(self._needs_patcher.stop)

    def test_failure_on_diffquality_pep8(self):
        """
        If pep8 diff-quality fails due to the percentage threshold, pylint
        should still be run
        """
        # Underlying sh call must fail when it is running the pep8 diff-quality task
        self._mock_paver_sh.side_effect = CustomShMock().fail_on_pep8
        with self.assertRaises(SystemExit):
            pavelib.quality.run_quality("")
        # (The former trailing self.assertRaises(BuildFailure) line was dead
        # code: it sat after the SystemExit-raising call and, with a single
        # argument, merely built an unused context manager.)
        # Test that both pep8 and pylint were called by counting the calls
        self.assertEqual(self._mock_paver_sh.call_count, 2)

    def test_failure_on_diffquality_pylint(self):
        """
        If diff-quality fails on pylint, the paver task should also fail
        """
        # Underlying sh call must fail when it is running the pylint diff-quality task
        self._mock_paver_sh.side_effect = CustomShMock().fail_on_pylint
        with self.assertRaises(SystemExit):
            pavelib.quality.run_quality("")
        # Test that both pep8 and pylint were called by counting the calls
        self.assertEqual(self._mock_paver_sh.call_count, 2)

    def test_other_exception(self):
        """
        If diff-quality fails for an unknown reason on the first run (pep8), then
        pylint should not be run
        """
        self._mock_paver_sh.side_effect = [Exception('unrecognized failure!'), 0]
        with self.assertRaises(Exception):
            pavelib.quality.run_quality("")
        # Test that pylint is NOT called by counting calls
        self.assertEqual(self._mock_paver_sh.call_count, 1)

    def test_no_diff_quality_failures(self):
        # Assert nothing is raised when both sh calls succeed (mocked to no-op).
        pavelib.quality.run_quality("")
        self.assertEqual(self._mock_paver_sh.call_count, 2)
class CustomShMock(object):
    """
    Diff-quality makes a number of sh calls. None of those calls should be made during tests; however, some
    of them need to have certain responses.
    """

    def fail_on_pep8(self, arg):
        """
        Simulate diff-quality exiting non-zero only for the pep8 invocation,
        as happens when a percentage ("p") requirement is not met.
        """
        if "pep8" not in arg:
            return
        # Essentially mock diff-quality exiting with 1
        paver.easy.sh("exit 1")

    def fail_on_pylint(self, arg):
        """
        Simulate diff-quality exiting non-zero only for the pylint invocation,
        as happens when a percentage ("p") requirement is not met.
        """
        if "pylint" not in arg:
            return
        # Essentially mock diff-quality exiting with 1
        paver.easy.sh("exit 1")
|
Python
| 0.000001
|
@@ -944,25 +944,39 @@
value):%0A
-#
+ %22%22%22%0A
Tests:%0A
@@ -974,24 +974,22 @@
sts:%0A
- #
-*
+-
Differe
@@ -1018,16 +1018,14 @@
%0A
- #
-*
+-
One
@@ -1059,16 +1059,28 @@
e lines%0A
+ %22%22%22%0A
|
5b5e00aaa9563e1d0a6fc1825e7f4dc05bac1c5e
|
add verson 3.11.0 (#11186)
|
var/spack/repos/builtin/packages/py-petsc4py/package.py
|
var/spack/repos/builtin/packages/py-petsc4py/package.py
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPetsc4py(PythonPackage):
    """This package provides Python bindings for the PETSc package.
    """

    homepage = "https://bitbucket.org/petsc/petsc4py"
    url = "https://bitbucket.org/petsc/petsc4py/get/3.10.0.tar.gz"
    git = "https://bitbucket.org/petsc/petsc4py.git"

    # Released tarballs, identified by sha256 checksum.
    version('develop', branch='master')
    version('3.10.1', sha256='4eae5eaf459875b1329cae36fa1e5e185f603e8b01a4e05b59b0983c02b5a174')
    version('3.10.0', sha256='737e7812ccc54b1e0d6e8de4bdcd886c8ce287129297831f1f0e33089fa352f2')
    version('3.9.1', sha256='9bad0bab69a19bbceb201b9203708104a0bbe0ee19c0fa839b6ea6aa55dc238c')
    version('3.9.0', sha256='034d097b88ae874de712785f39f9d9a06329da071479c0dd834704dc6885dc97')
    version('3.8.1', sha256='da07ffef7da61164ad75b23af59860fea467ae47532302d91b7b4ec561aa0f9c')
    version('3.8.0', sha256='b9b728e39245213cd8e74cf4724be9bb48bd295f99634135e37dbbdbec275244')
    version('3.7.0', sha256='fb78b50c596c3ba6a097751dd9a379e7acaf57edd36311a3afa94caa4312ee08')

    # Cython is only needed to regenerate C sources from a git checkout.
    depends_on('py-cython', type='build', when='@develop')
    depends_on('python@2.6:2.8,3.3:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-mpi4py', type=('build', 'run'))
    # Each petsc4py release series is pinned to the matching PETSc series.
    depends_on('petsc+mpi')
    depends_on('petsc@develop+mpi', when='@develop')
    depends_on('petsc@3.10.3:3.10.99+mpi', when='@3.10.1:3.10.99')
    depends_on('petsc@3.10:3.10.2+mpi', when='@3.10.0')
    depends_on('petsc@3.9:3.9.99+mpi', when='@3.9:3.9.99')
    depends_on('petsc@3.8:3.8.99+mpi', when='@3.8:3.8.99')
    depends_on('petsc@3.7:3.7.99+mpi', when='@3.7:3.7.99')
    depends_on('petsc@3.6:3.6.99+mpi', when='@3.6:3.6.99')
|
Python
| 0
|
@@ -516,41 +516,177 @@
-version('develop', branch='master
+maintainers = %5B'dalcin', 'balay'%5D%0A%0A version('develop', branch='master')%0A version('3.11.0', sha256='50a7bbca76000da287d5b18969ddf4743b360bda1f6ee3b43b5829095569cc46
')%0A
@@ -1705,32 +1705,95 @@
hen='@develop')%0A
+ depends_on('petsc@3.11:3.11.99+mpi', when='@3.11:3.11.99')%0A
depends_on('
|
7a331edf955d914c82751eb7ec1dd20896e25f83
|
Use SequenceEqual because we care about maintaining order.
|
tests/cases/stats/tests/kmeans.py
|
tests/cases/stats/tests/kmeans.py
|
import os
from django.test import TestCase
from avocado.stats import cluster, kmeans
from scipy.cluster import vq
import numpy
from itertools import chain
__all__ = ('KmeansTestCase',)
# Fixture data loaded once at import time: a flat list of floats and a list
# of 3-D points parsed from comma-separated lines.
# NOTE(review): these handles are never closed and xreadlines() is Python 2
# only — consider context managers if this module is ever modernized.
random_points_file = open(os.path.join(os.path.dirname(__file__), '../fixtures/random_points.txt'))
random_points_3d_file = open(os.path.join(os.path.dirname(__file__), '../fixtures/random_points_3d.txt'))
random_points = [float(x.strip()) for x in random_points_file.xreadlines()]
random_points_3d = [[float(x) for x in l.strip().split(",")] for l in random_points_3d_file.xreadlines()]
class KmeansTestCase(TestCase):
    # Compares avocado's pure-python stats helpers against the numpy/scipy
    # reference implementations on the fixture data.

    def test_std_dev(self):
        numpy_std_dev = numpy.std(numpy.array(random_points))
        our_std_dev = kmeans.std_dev(random_points)
        # Exact (not approximate) equality is expected here.
        self.assertEqual(numpy_std_dev, our_std_dev)

    def test_whiten(self):
        # 1-D case: element-wise comparison against scipy's whiten.
        scipy_whiten = vq.whiten(numpy.array(random_points))
        our_whiten = kmeans.whiten(random_points)
        self.assertEqual(len(scipy_whiten), len(our_whiten))
        comp_whiten = zip(scipy_whiten, our_whiten)
        [self.assertEqual(*comp) for comp in comp_whiten]

        # 3-D case: compare row by row.
        scipy_whiten = vq.whiten(numpy.array(random_points_3d))
        our_whiten = kmeans.whiten(random_points_3d)
        self.assertEqual(len(scipy_whiten), len(our_whiten))
        comp_whiten = zip(scipy_whiten, our_whiten)
        [self.assertListEqual(scipy_list.tolist(), our_list) for scipy_list, our_list in comp_whiten]
|
Python
| 0
|
@@ -1392,12 +1392,16 @@
sert
-List
+Sequence
Equa
|
268914e7a29231da882457a6e4744c9661526a73
|
Add latest version of py-tabulate (#14138)
|
var/spack/repos/builtin/packages/py-tabulate/package.py
|
var/spack/repos/builtin/packages/py-tabulate/package.py
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyTabulate(PythonPackage):
    """Pretty-print tabular data"""

    homepage = "https://bitbucket.org/astanin/python-tabulate"
    url = "https://pypi.io/packages/source/t/tabulate/tabulate-0.7.7.tar.gz"

    # sha256 checksum of the released sdist on PyPI.
    version('0.7.7', sha256='83a0b8e17c09f012090a50e1e97ae897300a72b35e0c86c0b53d3bd2ae86d8c6')

    depends_on('py-setuptools', type='build')
|
Python
| 0
|
@@ -416,19 +416,19 @@
ulate-0.
-7.7
+8.6
.tar.gz%22
@@ -429,16 +429,208 @@
ar.gz%22%0A%0A
+ version('0.8.6', sha256='5470cc6687a091c7042cee89b2946d9235fe9f6d49c193a4ae2ac7bf386737c8')%0A version('0.8.3', sha256='8af07a39377cee1103a5c8b3330a421c2d99b9141e9cc5ddd2e3263fea416943')%0A
vers
|
11c6d99285946a4558cad642c5298982776acba1
|
Fix int division in docs.
|
docs/_scripts/generate_dynamic_pages.py
|
docs/_scripts/generate_dynamic_pages.py
|
#!/usr/bin/env python
# coding: utf-8
"""
Script for creating dynamic documentation pages.
"""
import os
import re
import sys
from collections import OrderedDict
thisdir = os.path.dirname(os.path.abspath(__file__))
docsdir = os.path.dirname(thisdir)
basedir = os.path.dirname(docsdir)
sys.path.insert(0, basedir)
import law
def create_py_ref(s):
    """Build a Sphinx ``:py:...:`` cross-reference for the dotted path *s*.

    The referenced object is resolved at runtime to pick the role:
    "mod" for modules, "meth" for callables whose parent object has a
    ``__module__`` (i.e. class attributes), and "class" otherwise —
    "class" is also the fallback when the path cannot be resolved.
    """
    ref_text = s
    identifier = s
    ref_type = "class"
    try:
        # BUG FIX: the previous implementation used exec("obj = ...")
        # which cannot rebind function locals in Python 3, so obj stayed
        # None and every reference was emitted as :py:class:. eval()
        # resolves the expression in both Python 2 and 3.
        obj = eval(s)
        parent_obj = eval(s.rsplit(".", 1)[0])
        if getattr(obj, "__file__", None):
            ref_type = "mod"
        elif callable(obj) and getattr(parent_obj, "__module__", None):
            ref_type = "meth"
        elif getattr(obj, "__module__", None):
            ref_type = "class"
    except Exception:
        # Unresolvable names keep the default "class" role.
        pass
    return ":py:{}:`{} <{}>`".format(ref_type, ref_text, identifier)
def replace_py_refs(text):
    """Replace every double-quoted "law.*" path in *text* with a py ref."""
    pattern = r"\"(law\.[^\"]+)\""
    return re.sub(pattern, lambda match: create_py_ref(match.group(1)), text)
def create_slug(text):
    """Derive an anchor slug: drop quotes/brackets, lowercase, dashify."""
    cleaned = re.sub(r"(\"|\[|\])", "", text).strip().lower()
    # collapse runs of whitespace (and underscores) into single dashes
    return re.sub(r"(\s+|_)", "-", cleaned)
def create_heading(text, delim, slug_text=None, no_slug=False):
    """Render an RST heading, optionally preceded by a ``.. _slug:`` anchor.

    The slug is computed from *slug_text* (or *text* when absent) before
    quoted law.* paths in *text* are turned into Sphinx references.
    """
    slug = create_slug(slug_text or text)
    text = replace_py_refs(text)
    underline = delim * len(text)
    if no_slug:
        return "\n\n{}\n{}\n".format(text, underline)
    return "\n.. _{}:\n\n{}\n{}\n".format(slug, text, underline)
def create_note(text):
    """Render *text* as an inline bold note (the directive form is disabled)."""
    # return ".. note::\n\n    {}".format(text)
    return "**Note:** " + text
def create_option(name, description, type=None, default=None):
opt = ""
opt += ", ".join(law.util.make_list(name)) + "\n"
opt += " - **Description:** {}\n".format(" ".join(law.util.make_list(description)))
if type is not None:
opt += " - **Type:** {}\n".format(" ".join(law.util.make_list(type)))
if default is not None:
opt += " - **Default:** {}\n".format(" ".join(law.util.make_list(default)))
return opt
def create_config_page():
"""
Reads the configuration example in law.cfg.example, parses it and creates config.rst.
"""
# we need all contrib packages loaded here
law.contrib.load_all()
# read the example config
input_lines = []
with open(os.path.join(basedir, "law.cfg.example"), "r") as f:
for line in f.readlines():
line = line.rstrip()
if line in ("", ";"):
line = ""
elif line.startswith("; "):
line = line[2:]
input_lines.append(line)
input_lines.append("")
# helper to get the next non-empty lines
def get_next_lines(i):
lines = []
for j in range(i + 1, len(input_lines)):
if not input_lines[j]:
break
lines.append(input_lines[j])
return lines
output_lines = [
"Configuration",
"=============",
"",
]
# parse input lines
started = False
within_toc = False
within_options = False
current_note = None
skip_lines = []
for i, line in enumerate(input_lines):
if i in skip_lines:
continue
if not started:
if line == "Table of contents:":
started = True
else:
continue
# line identification
h2_match = re.match(r"^===\s(.+)\s=+$", line)
h3_match = re.match(r"^---\s(.+)\s-+$", line)
h4_match = re.match(r"^---\s(.+)$", line)
listing_match = re.match(r"^(\s*)- (.+)$", line)
section_heading_match = re.match(r"^---\s(.+)\ssection\s-+$", line)
section_marker_match = re.match(r"^\[([^\s]+)\]$", line)
note_match = re.match(r"^Note\:$", line)
# set "within" flags
if not within_toc and line == "Table of contents:":
within_toc = True
continue
elif within_toc and line and not listing_match:
within_toc = False
if not within_options and h2_match and h2_match.group(1) == "law configuration":
within_options = True
# convert headings
if h2_match:
line = create_heading(h2_match.group(1), "*")
elif h3_match:
text = h3_match.group(1)
# highlight section headings
if within_options and section_heading_match:
text = "[{}]".format(section_heading_match.group(1))
line = create_heading(text, "^", slug_text=h3_match.group(1))
elif h4_match:
line = create_heading(h4_match.group(1), "-", no_slug=within_options)
# fix indentation in listings
if listing_match:
n_indent = len(listing_match.group(1))
if n_indent % 2 != 0:
raise Exception("uneven indentation found in line {}".format(i + 1))
line = "{}- {}".format(" " * (n_indent / 2), listing_match.group(2))
# handle toc links
if within_toc and listing_match:
link_text = listing_match.group(2)
link_target = create_slug(link_text)
if link_text.startswith("[") and link_text.endswith("]"):
link_target += "-section"
line = "{}- :ref:`{}<{}>`".format(line[:line.index("-")], link_text, link_target)
# skip section markers
if section_marker_match:
continue
# parse options
if within_options:
# handle notes
if current_note is None and note_match:
current_note = ""
continue
elif current_note is not None:
if not line:
output_lines.append(create_note(current_note))
current_note = None
continue
else:
line = replace_py_refs(line)
current_note = line if not current_note else (current_note + " " + line)
continue
# handle actual options
if line:
# read the next lines ahead
next_lines = get_next_lines(i)
if any(next_line.startswith("Description: ") for next_line in next_lines):
skip_lines.extend(list(range(i + 1, i + 1 + len(next_lines))))
option = OrderedDict()
for _line in [line] + next_lines:
_line = replace_py_refs(_line)
if _line.startswith("Description: "):
option["description"] = [_line[13:]]
elif _line.startswith("Type: "):
option["type"] = [_line[6:]]
elif _line.startswith("Default: "):
option["default"] = [_line[9:]]
elif "description" not in option:
option.setdefault("name", []).append(_line)
else:
option[list(option.keys())[-1]].append(_line)
output_lines.append(create_option(**option))
continue
output_lines.append(line)
with open(os.path.join(docsdir, "config.rst"), "w") as f:
for line in output_lines:
f.write(str(line) + "\n")
def main():
create_config_page()
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -4983,16 +4983,19 @@
%22 %22 *
+int
(n_inden
|
b286e10d7d7c43ceea80cd4025105851ebb9bd8f
|
Comment out save statement
|
s4v3.py
|
s4v3.py
|
from s4v2 import *
import openpyxl
from openpyxl import Workbook
from openpyxl.writer.excel import ExcelWriter
from openpyxl.cell import get_column_letter
def save_spreadsheet(filename, data_sample):
wb = Workbook() # shortcut for typing Workbook function
ws = wb.active # shortcut for typing active workbook function and also, for finding the sheet in the workbook that we're working on, the active one.
row_index = 1 # set the row index to 1, the starting point for excel, i.e. the upper left-hand corner
for rows in data_sample: # iterate through the rows in the spreadsheet
col_index = 1 # set the col index to 1 (starting point for excel, i.e. the upper left-hand corner)
for field in rows:
col_letter = get_column_letter(col_index) # use the imported get column letter function to get the letter of the column that we're working in.
ws.cell('{}{}'.format(col_letter, row_index)).value = field # I'm not entirely sure what we're doing here because I haven't worked with these function before, but my guess is that we're writing the values in the field of the data sample into the current cell of the new workbook
col_index += 1 # increase column index
row_index += 1 # increase row index
wb.save(filename)
kiton_ties = filter_col_by_string(data_from_csv, "brandName", "Kiton")
save_spreadsheet("_data/s4-kiton.xlsx", kiton_ties)
|
Python
| 0
|
@@ -1297,16 +1297,18 @@
Kiton%22)%0A
+#
save_spr
|
5dd61d20f14ecbe1bc20fe8db3fd73a78707485a
|
Refactor partition.
|
lazy.py
|
lazy.py
|
import operator as op
import itertools as it
from functools import partial
class Wrapper(object):
def __init__(self, data):
self.data = data
def __lt__(self, other):
print 'comparing', self.data, other.data
return self.data < other.data
def partition(predicate, iterable):
pack = partial(it.imap, lambda i: (predicate(i), i))
new_pred = op.itemgetter(0)
unpack = partial(it.imap, op.itemgetter(1))
packed = pack(iterable)
first, second = it.tee(packed)
passing = it.ifilter(new_pred, first)
failing = it.ifilterfalse(new_pred, second)
return map(unpack, (passing, failing))
def isorted(xs):
xs = iter(xs)
pivot = next(xs)
below, above = partition(lambda y: y < pivot, i)
for x in isorted(below):
yield x
yield pivot
for x in isorted(above):
yield x
def imin(xs):
return next(isorted(xs))
def insmallest(n, xs):
return it.islice(isorted(xs), 0, n)
|
Python
| 0
|
@@ -67,16 +67,46 @@
partial
+%0Afrom collections import deque
%0A%0Aclass
@@ -342,320 +342,371 @@
pa
-ck = partial(it.imap, lambda i: (predicat
+ssing, failing = dequ
e(
-i
),
-i)
+deque(
)%0A%0A
-new_pred = op.itemgetter(0)%0A unpack = partial(it.imap, op.itemgetter(1))%0A%0A packed = pack(iterable)%0A first, second = it.tee(packed)%0A%0A passing = it.ifilter(new_pred, first)%0A failing = it.ifilterfalse(new_pred, second)%0A%0A return map(unpack
+def gen(f, mine, other):%0A while True:%0A if mine:%0A yield mine.popleft()%0A else:%0A newval = next(iterable)%0A if f(newval):%0A yield newval%0A else:%0A other.append(newval)%0A%0A return (%0A gen(predicate
,
-(
pass
@@ -718,18 +718,103 @@
failing)
-)%0A
+,%0A gen(lambda i: not(predicate(i)), failing, passing)%0A )%0A
%0A%0Adef is
@@ -914,17 +914,18 @@
pivot,
-i
+xs
)%0A%0A f
|
14f0afc20c9d6c200c6e9fa52a4121c98d349be7
|
Set version 0.2.5
|
pages/__init__.py
|
pages/__init__.py
|
# -*- coding: utf-8 -*-
VERSION = (0, 2, 4)
__version__ = '.'.join(map(str, VERSION))
|
Python
| 0.000001
|
@@ -38,9 +38,9 @@
2,
-4
+5
)%0A__
|
d282d5525c4d965dbe0a6ee4967a14f1f412f2b4
|
update version number from 1.4 to 1.5
|
oauth2/_version.py
|
oauth2/_version.py
|
# This is the version of this source code.
manual_verstr = "1.4"
auto_build_num = "143"
verstr = manual_verstr + "." + auto_build_num
try:
from pyutil.version_class import Version as pyutil_Version
__version__ = pyutil_Version(verstr)
except (ImportError, ValueError):
# Maybe there is no pyutil installed.
from distutils.version import LooseVersion as distutils_Version
__version__ = distutils_Version(verstr)
|
Python
| 0.000009
|
@@ -56,17 +56,17 @@
tr = %221.
-4
+5
%22%0A%0A%0A%0Aaut
|
7bfc2287d15198d9e37b4def4632481c8446a932
|
bump version
|
bread/__init__.py
|
bread/__init__.py
|
VERSION = '0.5.1'
|
Python
| 0
|
@@ -10,9 +10,9 @@
'0.
-5.1
+6.0
'%0A
|
928c3bb38f4fa24d082ea18db09ff4542b78466c
|
remove units from x gt 1 example
|
docs/source/examples/x_greaterthan_1.py
|
docs/source/examples/x_greaterthan_1.py
|
from gpkit import Variable, GP
# Decision variable
x = Variable("x", "m", "A really useful variable called x with units of meters")
# Constraint
constraint = [1/x <= 1]
# Objective (to minimize)
objective = x
# Formulate the GP
gp = GP(objective, constraint)
# Solve the GP
sol = gp.solve()
# Print results table
print sol.table()
|
Python
| 0
|
@@ -62,74 +62,11 @@
ble(
-%22x%22, %22m%22, %22A really useful variable called x with units of meters%22
+'x'
)%0A%0A#
@@ -91,17 +91,16 @@
aint
+s
= %5B
-1/
x
-%3C
+%3E
= 1%5D
@@ -190,16 +190,17 @@
nstraint
+s
)%0A%0A# Sol
@@ -266,8 +266,9 @@
.table()
+%0A
|
5d30c02f9adb7de3ce9eebef5178466711d96c64
|
Remove unused import: `RelatedField`
|
rest_framework_json_api/utils.py
|
rest_framework_json_api/utils.py
|
from django.utils.encoding import force_text
from django.utils.text import slugify
from rest_framework.serializers import RelatedField
try:
from rest_framework.serializers import ManyRelatedField
except ImportError:
ManyRelatedField = type(None)
try:
from rest_framework.serializers import ListSerializer
except ImportError:
ListSerializer = type(None)
def get_related_field(field):
if isinstance(field, ManyRelatedField):
return field.child_relation
if isinstance(field, ListSerializer):
return field.child
return field
def is_related_many(field):
if hasattr(field, "many"):
return field.many
if isinstance(field, ManyRelatedField):
return True
if isinstance(field, ListSerializer):
return True
return False
def model_from_obj(obj):
model = getattr(obj, "model", None)
if model is not None:
return model
queryset = getattr(obj, "queryset", None)
if queryset is not None:
return queryset.model
return None
def model_to_resource_type(model):
'''Return the verbose plural form of a model name, with underscores
Examples:
Person -> "people"
ProfileImage -> "profile_image"
'''
if model is None:
return "data"
return force_text(model._meta.verbose_name_plural)
#
# String conversion
#
def camelcase(string):
'''Return a string in lowerCamelCase
Examples:
"people" -> "people"
"profile images" -> "profileImages"
'''
out = slug(string).replace('-', ' ').title().replace(' ', '')
return out[0].lower() + out[1:]
def slug(string):
'''Return a string where words are connected with hyphens'''
return slugify(force_text(string))
def snakecase(string):
'''Return a string where words are connected with underscores
Examples:
"people" -> "people"
"profile images" -> "profile_images"
'''
return slug(string).replace('-', '_')
|
Python
| 0
|
@@ -81,61 +81,8 @@
fy%0A%0A
-from rest_framework.serializers import RelatedField%0A%0A
try:
|
07ac69ef3f722ae57bc0cc61c30a2378c8c53c2e
|
Fix mutable default argument problem
|
live.py
|
live.py
|
""" Parses http://www.live-footballontv.com for info about live matches """
import re
import requests
from bs4 import BeautifulSoup
from datetime import datetime, timedelta
url = 'http://www.live-footballontv.com'
headers = {'User-Agent': 'Football Push Notifications'}
def convert_date(date):
"""Returns datetime object
This will allow the script to calculate timedeltas and reformat the date easily"""
regex_date = re.compile(r'(Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday)+ \d{1,31}(th|rd|nd|st) +\w* \d\d\d\d')
if not regex_date.match(date):
raise Exception('Date was not the correct format')
date = date.split(' ')
date[1] = date[1][:-2]
if len(date[1]) == 1:
date[1] = '0'+date[1]
date = ' '.join(date)
date_format = '%A %d %B %Y'
date_object = datetime.strptime(date, date_format)
return date_object
def register_match(match, date):
"""Parses the match item into a simple dict"""
kotime = match[2].text
if kotime == 'TBC':
kotime = '12:00'
kotime = kotime.split(':')
# Date of match plus the kick off time
kotime = date + timedelta(hours=int(kotime[0]), minutes=int(kotime[1]))
match_dict = {
"matchfixture": match[0].text,
"competition": match[1].text,
"kickofftime": kotime,
"channels": match[3].text
}
return match_dict
def search_matches(match_list, search_list, ignore_list=[]):
"""Return list of football matches that match search"""
search = re.compile('|'.join(search_list))
my_matches = [matches for matches in match_list if search.search(matches['matchfixture'])]
if ignore_list:
ignore = re.compile('|'.join(ignore_list))
my_matches = [match for match in my_matches if not ignore.search(match["matchfixture"])]
return my_matches
def gather_data():
"""Returns the list of matches"""
soup = BeautifulSoup(requests.get(url, headers=headers).text, "html.parser")
# Get rid of <hr> cruft
for node in soup.findAll('hr'):
node.replaceWithChildren()
# Get the date nodes
result = soup.find_all('div', class_='span12 matchdate')
dates = []
for item in result:
dates.append(item.parent)
# Holds the list of dictionaries
matches = []
for item in dates:
date = convert_date(item.text)
cursor = item.findNextSibling()
while True:
try:
if cursor.next.attrs == {u'class': [u'span12', u'matchdate']}:
break
else:
matches.append(register_match(cursor.contents, date))
cursor = cursor.findNextSibling()
except Exception:
break
return matches
|
Python
| 0.000007
|
@@ -1448,18 +1448,20 @@
re_list=
-%5B%5D
+None
):%0A %22
@@ -1514,16 +1514,69 @@
earch%22%22%22
+%0A if ignore_list is None:%0A ignore_list = %5B%5D
%0A%0A se
|
8157af3da0e535074b18c76f0e5391d8cac806e8
|
Add error field to expected JSON
|
whats_fresh/whats_fresh_api/tests/views/test_stories.py
|
whats_fresh/whats_fresh_api/tests/views/test_stories.py
|
from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class StoriesTestCase(TestCase):
fixtures = ['whats_fresh_api/tests/testdata/test_fixtures.json']
def setUp(self):
self.expected_json = """
{
"story": "These are the voyages of the Starfish Enterblub; her five year mission -- to seek out new fish and new fishilizations..."
}"""
def test_url_endpoint(self):
url = reverse('story-details', kwargs={'id': '1'})
self.assertEqual(url, '/stories/1')
def test_json_equals(self):
c = Client()
response = c.get(reverse('story-details', kwargs={'id': '1'})).content
parsed_answer = json.loads(response)
expected_answer = json.loads(self.expected_json)
self.assertTrue(parsed_answer == expected_answer)
|
Python
| 0.000001
|
@@ -361,11 +361,158 @@
%22%22%22
+%22
%0A%7B%0A
+ error%22: %7B%0A %22error_status%22: false,%0A %22error_name%22: null,%0A %22error_text%22: null,%0A %22error_level%22: null%0A %7D,%0A %7B%0A
@@ -647,14 +647,21 @@
..%22%0A
+ %7D%0A
%7D%22%22%22%0A%0A
+%0A
|
549fd62e94080dc1f0e83c1777440c47fcc0c8a1
|
Update to released F19
|
broot/builder.py
|
broot/builder.py
|
# Copyright 2013 Daniel Narvaez
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import textwrap
import urllib2
from subprocess import check_call
class FedoraBuilder:
def __init__(self, root):
self._root = root
def _setup_yum(self):
yum_etc_path = os.path.join(self._root.path, "etc", "yum")
os.makedirs(yum_etc_path)
yum_plugins_path = os.path.join(self._root.path, "usr", "lib",
"yum-plugins")
os.makedirs(yum_plugins_path)
plugin_conf_path = os.path.join(yum_etc_path, "pluginconf.d")
os.makedirs(plugin_conf_path)
yum_conf = """
[main]
cachedir=/var/cache/yum
keepcache=1
debuglevel=2
logfile=/var/log/yum.log
exactarch=1
obsoletes=1
pluginconfpath=%s
plugins=1""" % plugin_conf_path
yum_conf_path = os.path.join(self._root.path, "etc", "yum", "yum.conf")
with open(yum_conf_path, "w") as f:
f.write(textwrap.dedent(yum_conf))
response = urllib2.urlopen("https://raw.github.com/toomasp/" \
"yum-plugin-ignoreos/master/" \
"yum_ignoreos.conf")
ignoreos_conf_path = os.path.join(plugin_conf_path,
"yum_ignoreos.conf")
with open(ignoreos_conf_path, "w") as f:
f.write(response.read())
response = urllib2.urlopen("https://raw.github.com/toomasp/" \
"yum-plugin-ignoreos/master/" \
"yum_ignoreos.py")
ignoreos_py_path = os.path.join(yum_plugins_path, "yum_ignoreos.conf")
with open(ignoreos_py_path, "w") as f:
f.write(response.read())
repo_config = """
[fedora]
name=Fedora 19 - i386
failovermethod=priority
baseurl=ftp://mirrors.kernel.org/fedora/development/19/i386/os
enabled=1
gpgcheck=0
ignore_os=1"""
repos_d_path = os.path.join(self._root.path, "etc", "yum.repos.d")
with open(os.path.join(repos_d_path, "fedora.repo"), "w") as f:
f.write(textwrap.dedent(repo_config))
for repo_name in "fedora-updates", "fedora-updates-testing":
os.unlink(os.path.join(repos_d_path, "%s.repo" % repo_name))
def create(self):
root_path = self._root.path
release_rpm = "ftp://mirrors.kernel.org/fedora/development/" \
"19/i386/os/Packages/f/fedora-release-19-0.5.noarch.rpm"
try:
check_call(["rpm", "--root", root_path, "--initdb"])
check_call(["rpm", "--root", root_path, "--ignoreos", "-i",
release_rpm])
self._setup_yum()
check_call(["yum", "-y", "--installroot", root_path, "install",
"yum"])
except (Exception, KeyboardInterrupt):
shutil.rmtree(root_path)
raise
def install_packages(self, packages):
self._root.run("yum -y update")
self._root.run("yum -v -y install %s" % " ".join(packages))
class DebianBuilder:
def __init__(self, root):
self._root = root
def create(self):
root_path = self._root.path
try:
check_call(["debootstrap", "wheezy", root_path])
except (Exception, KeyboardInterrupt):
shutil.rmtree(root_path)
raise
def install_packages(self, packages):
self._root.run("apt-get update")
self._root.run("apt-get dist-upgrade")
self._root.run("apt-get -y --no-install-recommends install %s" %
" ".join(packages))
|
Python
| 0
|
@@ -2448,32 +2448,139 @@
sponse.read())%0A%0A
+ base_url = %22ftp://mirrors.kernel.org/fedora/releases/19/Fedora/%22 %5C%0A %22x86_64/os%22%0A%0A
repo_con
@@ -2585,24 +2585,24 @@
onfig = %22%22%22%0A
-
@@ -2704,61 +2704,9 @@
url=
-ftp://mirrors.kernel.org/fedora/development/19/i386/o
+%25
s%0A
@@ -2748,16 +2748,16 @@
check=0%0A
-
@@ -2774,16 +2774,27 @@
_os=1%22%22%22
+ %25 base_url
%0A%0A
@@ -3247,19 +3247,26 @@
ora/
-development
+releases/19/Fedora
/%22 %5C
@@ -3293,15 +3293,14 @@
%22
-19/i386
+x86_64
/os/
@@ -3332,11 +3332,9 @@
-19-
-0.5
+2
.noa
|
feab9b1067a42a6d5d8586361ab1d02f1844aa7e
|
Remove unused imports
|
tests/integration/api/conftest.py
|
tests/integration/api/conftest.py
|
"""
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
API-specific fixtures
"""
import pytest
from tests.conftest import CONFIG_PATH_DATA_KEY
from tests.helpers import create_admin_app
from .helpers import assemble_authorization_header
API_TOKEN = 'just-say-PLEASE!'
@pytest.fixture(scope='package')
# `admin_app` fixture is required because it sets up the database.
def api_app(admin_app, make_admin_app):
config_overrides = {
'API_TOKEN': API_TOKEN,
'SERVER_NAME': 'api.acmecon.test',
}
app = make_admin_app(**config_overrides)
with app.app_context():
yield app
@pytest.fixture(scope='package')
def api_client(api_app):
"""Provide a test HTTP client against the API."""
return api_app.test_client()
@pytest.fixture(scope='package')
def api_client_authz_header():
"""Provide a test HTTP client against the API."""
return assemble_authorization_header(API_TOKEN)
|
Python
| 0.000001
|
@@ -142,100 +142,8 @@
st%0A%0A
-from tests.conftest import CONFIG_PATH_DATA_KEY%0Afrom tests.helpers import create_admin_app%0A%0A
from
|
f2139cad673ee50f027164bda80d86979d5ce7a0
|
Add more imports for further functionality
|
passenger_wsgi.py
|
passenger_wsgi.py
|
import os
import sys
try:
from flask import Flask, render_template, send_file, Response
import requests
except ImportError:
INTERP = "venv/bin/python"
if os.path.relpath(sys.executable, os.getcwd()) != INTERP:
try:
os.execl(INTERP, INTERP, *sys.argv)
except OSError:
sys.exit("Could not find virtual environment. Run `:~$ ./setup.sh`")
else:
sys.exit("Could not find requirements. Are they all included in requirements.txt? Run `:~$ ./setup.sh`")
application = Flask(__name__)
@application.route("/")
def index():
return "Hello, world!"
|
Python
| 0
|
@@ -51,50 +51,117 @@
lask
-, render_template, send_file, Response
+%0A import flask_login%0A from flask_restless import APIManager%0A from flask_sqlalchemy import SQLAlchemy
%0A
|
62ec46d6dddf1eb0054861d886ab6493d56670d5
|
Switch `open()` for `salt.utils.fopen()`
|
tests/integration/shell/syndic.py
|
tests/integration/shell/syndic.py
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`
tests.integration.shell.syndic
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
# Import python libs
import os
import yaml
import signal
import shutil
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
import salt.utils
class SyndicTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
_call_binary_ = 'salt-syndic'
def test_issue_7754(self):
old_cwd = os.getcwd()
config_dir = os.path.join(integration.TMP, 'issue-7754')
if not os.path.isdir(config_dir):
os.makedirs(config_dir)
os.chdir(config_dir)
for fname in ('master', 'minion'):
pid_path = os.path.join(config_dir, '{0}.pid'.format(fname))
with salt.utils.fopen(self.get_config_file_path(fname), 'r') as fhr:
config = yaml.load(fhr.read())
config['log_file'] = config['syndic_log_file'] = 'file:///tmp/log/LOG_LOCAL3'
config['root_dir'] = config_dir
if 'ret_port' in config:
config['ret_port'] = int(config['ret_port']) + 10
config['publish_port'] = int(config['publish_port']) + 10
with salt.utils.fopen(os.path.join(config_dir, fname), 'w') as fhw:
fhw.write(
yaml.dump(config, default_flow_style=False)
)
ret = self.run_script(
self._call_binary_,
'--config-dir={0} --pid-file={1} -l debug'.format(
config_dir,
pid_path
),
timeout=5,
catch_stderr=True,
with_retcode=True
)
# Now kill it if still running
if os.path.exists(pid_path):
try:
os.kill(int(open(pid_path).read()), signal.SIGKILL)
except OSError:
pass
try:
self.assertFalse(os.path.isdir(os.path.join(config_dir, 'file:')))
self.assertIn(
'Failed to setup the Syslog logging handler', '\n'.join(ret[1])
)
self.assertEqual(ret[2], 2)
finally:
os.chdir(old_cwd)
if os.path.isdir(config_dir):
shutil.rmtree(config_dir)
if __name__ == '__main__':
from integration import run_tests
run_tests(SyndicTest)
|
Python
| 0
|
@@ -1893,16 +1893,72 @@
_path):%0A
+ with salt.utils.fopen(pid_path) as fhr:%0A
@@ -1958,32 +1958,36 @@
try:%0A
+
@@ -1998,30 +1998,19 @@
ill(int(
-open(pid_path)
+fhr
.read())
@@ -2039,16 +2039,20 @@
+
except O
@@ -2059,16 +2059,20 @@
SError:%0A
+
|
e019a2b5de66dbbc0ed76942824ec3d33bcac6fd
|
Add integration test for @returns
|
tests/integration/test_returns.py
|
tests/integration/test_returns.py
|
# Standard library imports
import collections
# Local imports.
import uplink
# Constants
BASE_URL = "https://api.github.com/"
# Schemas
Repo = collections.namedtuple("Repo", "owner name")
# Converters
@uplink.loads.from_json(Repo)
def repo_loader(cls, json):
return cls(**json)
@uplink.dumps.to_json(Repo)
def repo_dumper(_, repo):
return {"owner": repo.owner, "name": repo.name}
# Service
class GitHub(uplink.Consumer):
@uplink.returns.from_json(type=Repo)
@uplink.get("/users/{user}/repos/{repo}")
def get_repo(self, user, repo):
pass
@uplink.returns.from_json(type=uplink.types.List[Repo], key="data")
@uplink.get("/users/{user}/repos")
def get_repos(self, user):
pass
@uplink.json
@uplink.post("/users/{user}/repos", args={"repo": uplink.Body(Repo)})
def create_repo(self, user, repo):
pass
# Tests
def test_returns_json_with_type(mock_client, mock_response):
# Setup
mock_response.with_json({"owner": "prkumar", "name": "uplink"})
mock_client.with_response(mock_response)
github = GitHub(
base_url=BASE_URL, client=mock_client, converters=repo_loader
)
# Run
repo = github.get_repo("prkumar", "uplink")
# Verify
assert Repo(owner="prkumar", name="uplink") == repo
def test_returns_json_with_list(mock_client, mock_response):
# Setup
mock_response.with_json(
{
"data": [
{"owner": "prkumar", "name": "uplink"},
{"owner": "prkumar", "name": "uplink-protobuf"},
],
"errors": [],
}
)
mock_client.with_response(mock_response)
github = GitHub(
base_url=BASE_URL, client=mock_client, converters=repo_loader
)
# Run
repo = github.get_repos("prkumar")
# Verify
assert [
Repo(owner="prkumar", name="uplink"),
Repo(owner="prkumar", name="uplink-protobuf"),
] == repo
def test_post_json(mock_client):
# Setup
github = GitHub(
base_url=BASE_URL, client=mock_client, converters=repo_dumper
)
github.create_repo("prkumar", Repo(owner="prkumar", name="uplink"))
request = mock_client.history[0]
assert request.json == {"owner": "prkumar", "name": "uplink"}
|
Python
| 0
|
@@ -132,16 +132,65 @@
Schemas%0A
+User = collections.namedtuple(%22User%22, %22id name%22)%0A
Repo = c
@@ -249,16 +249,104 @@
rters%0A%0A%0A
+@uplink.loads(User)%0Adef user_reader(cls, response):%0A return cls(**response.json())%0A%0A%0A
@uplink.
@@ -372,26 +372,31 @@
o)%0Adef repo_
-lo
+json_re
ader(cls, js
@@ -462,20 +462,25 @@
ef repo_
-dump
+json_writ
er(_, re
@@ -577,24 +577,127 @@
.Consumer):%0A
+ @uplink.returns(User)%0A @uplink.get(%22/users/%7Buser%7D%22)%0A def get_user(self, user):%0A pass%0A%0A
@uplink.
@@ -1132,16 +1132,399 @@
Tests%0A%0A%0A
+def test_returns_with_type(mock_client, mock_response):%0A # Setup%0A mock_response.with_json(%7B%22id%22: 123, %22name%22: %22prkumar%22%7D)%0A mock_client.with_response(mock_response)%0A github = GitHub(%0A base_url=BASE_URL, client=mock_client, converters=user_reader%0A )%0A%0A # Run%0A user = github.get_user(%22prkumar%22)%0A%0A # Verify%0A assert User(id=123, name=%22prkumar%22) == user%0A%0A%0A
def test
@@ -1777,34 +1777,39 @@
converters=repo_
-lo
+json_re
ader%0A )%0A%0A
@@ -2375,18 +2375,23 @@
rs=repo_
-lo
+json_re
ader%0A
@@ -2716,20 +2716,25 @@
rs=repo_
-dump
+json_writ
er%0A )
|
f4e6f2c6eb77876b646da14805ee496b0b25f0bc
|
Support PortOpt from oslo.cfg
|
dragonflow/common/common_params.py
|
dragonflow/common/common_params.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from neutron.i18n import _
df_opts = [
cfg.StrOpt('remote_db_ip',
default='127.0.0.1',
help=_('The remote db server ip address')),
cfg.IntOpt('remote_db_port',
default=4001,
help=_('The remote db server port')),
cfg.StrOpt('nb_db_class',
default='dragonflow.db.drivers.etcd_db_driver.EtcdDbDriver',
help=_('The driver class for the NB DB driver')),
cfg.StrOpt('local_ip',
default='127.0.0.1',
help=_('Local host IP')),
cfg.StrOpt('tunnel_type',
default='geneve',
help=_('The encapsulation type for the tunnel')),
cfg.StrOpt('apps_list',
default='l2_app.L2App,l3_app.L3App',
help=_('List of openflow applications classes to load')),
cfg.BoolOpt('use_centralized_ipv6_DHCP',
default=False,
help=_("Enable IPv6 DHCP by using DHCP agent"))
]
|
Python
| 0.000001
|
@@ -773,10 +773,11 @@
cfg.
-In
+Por
tOpt
@@ -802,32 +802,33 @@
%0A
+
default=4001,%0A
@@ -825,16 +825,17 @@
t=4001,%0A
+
|
4ec09eb10aa352175769cc00f189ece719802ea6
|
remove temperature for now
|
lled.py
|
lled.py
|
#!/usr/bin/env python
"""Mookfist LimitlessLED Control
This tool can be used to control your LimitlessLED based lights.
Usage:
lled.py fade <start> <end> (--group=<GROUP>)... [options]
lled.py fadec <start> <end> (--group=<GROUP>)... [options]
lled.py fadeb <startb> <endb> <startc> <endc> (--group=<GROUP>)... [options]
lled.py on (--group=<group>)... [options]
lled.py off (--group=<group>)... [options]
lled.py color <color> (--group=<GROUP>)... [options]
lled.py colorcycle (--group=<GROUP>)... [options]
lled.py rgb <r> <g> <b> (--group=<GROUP>)... [options]
lled.py white (--group=<GROUP>)... [options]
lled.py brightness <brightness> (--group=<GROUP>)... [options]
lled.py temperature <temp> (--group=<GROUP>)... [options]
lled.py scan [options]
Options:
-h --bridge-ip=HOST IP / Hostname of the bridge
-p --bridge-port=PORT Port number of the bridge (defaults to 8899 or 5987)
--bridge-version=VERSION Bridge version (defaults to 4)
-g GROUP --group=GROUP Group number (defaults to 1)
--bulb=BULB Bulb type
-r RC --repeat=RC Number of times to repeat a command
--pause=PAUSE Number of milliseconds to wait between commands
--debug Enable debugging output
-h --help Show this help
--help-bulbtypes Display possible bulb type values
"""
import logging
from docopt import docopt
from mookfist_lled_controller.cli import configure_logger
from mookfist_lled_controller.cli import Main
def main():
"""Main function!"""
arguments = docopt(__doc__, version='Mookfist LimitlessLED Control 0.0.1')
configure_logger(arguments['--debug'])
log = logging.getLogger('lled')
log.info('Welcome to the Mookfist LimitlessLED Controller')
try:
m = Main(arguments)
m.run()
except KeyboardInterrupt:
log.warning('Stopping')
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -711,70 +711,8 @@
ns%5D%0A
- lled.py temperature %3Ctemp%3E (--group=%3CGROUP%3E)... %5Boptions%5D%0A
|
a324e8de7dc0bcb1676a8ae506d139f05751b233
|
fix lint for tests
|
tests/test_relation_identifier.py
|
tests/test_relation_identifier.py
|
from __future__ import absolute_import
import pytest
from catpy.client import ConnectorRelation, CatmaidClient
from catpy.applications import RelationIdentifier
from tests.common import relation_identifier, connectors_types # noqa
def test_from_id(relation_identifier): # noqa
assert relation_identifier.from_id(0) == ConnectorRelation.presynaptic_to
def test_to_id(relation_identifier): # noqa
assert relation_identifier.to_id(ConnectorRelation.presynaptic_to) == 0
@pytest.fixture
def real_relation_identifier(credentials):
return RelationIdentifier(CatmaidClient(**credentials))
def populate_relid(relation_identifier):
relation_identifier._get_dict(False, None)
relation_identifier._get_dict(True, None)
def test_from_id_real(real_relation_identifier):
populate_relid(real_relation_identifier)
assert real_relation_identifier.id_to_relation
def test_to_id_real(real_relation_identifier):
populate_relid(real_relation_identifier)
assert real_relation_identifier.relation_to_id
|
Python
| 0.000001
|
@@ -627,50 +627,22 @@
(rel
-ation_identifier):%0A relation_identifier
+id):%0A relid
._ge
@@ -668,32 +668,18 @@
%0A rel
-ation_identifier
+id
._get_di
|
a9119708d93b1aaf046ec65d8a077e5314820b45
|
fix test cond
|
rsqueakvm/test/test_squeakssl.py
|
rsqueakvm/test/test_squeakssl.py
|
import os
import py
import time
import base64
import socket as pysocket
import sys
from rsqueakvm import constants
from rsqueakvm.model.compiled_methods import W_PreSpurCompiledMethod
from rsqueakvm.model.variable import W_BytesObject
from rsqueakvm.primitives import prim_table
from rsqueakvm.primitives.constants import EXTERNAL_CALL
from rsqueakvm.error import PrimitiveFailedError
from rsqueakvm.plugins import socket_plugin as socket
from .util import create_space, copy_to_module, cleanup_module
from .test_primitives import mock
from . import squeakssl_data
from rpython.rlib import ropenssl
ropenssl.init_ssl()
ropenssl.init_digests()
def setup_module():
space = create_space(bootstrap = True)
space.set_system_attribute(constants.SYSTEM_ATTRIBUTE_IMAGE_NAME_INDEX, "IMAGENAME")
w = space.w
bootstrap_class = space.bootstrap_class
new_frame = space.make_frame
copy_to_module(locals(), __name__)
def teardown_module():
cleanup_module(__name__)
IMAGENAME = "anImage.image"
def _prim(space, name, module, stack, context = None):
interp, w_frame, argument_count = mock(space, stack, context)
orig_stack = list(w_frame.as_context_get_shadow(space).stack())
prim_meth = W_PreSpurCompiledMethod(space, 0, header=17045052)
prim_meth._primitive = EXTERNAL_CALL
prim_meth.argsize = argument_count - 1
descr = space.wrap_list([space.wrap_string(module), space.wrap_string(name)])
prim_meth.literalatput0(space, 1, descr)
def call():
prim_table[EXTERNAL_CALL](interp, w_frame.as_context_get_shadow(space), argument_count-1, prim_meth)
return w_frame, orig_stack, call
def prim(name, module=None, stack = None, context = None):
if module is None: module = "SqueakSSL"
if stack is None:
stack = [space.w_nil]
else:
stack = [space.w_nil] + stack
w_frame, orig_stack, call = _prim(space, name, module, stack, context)
call()
res = w_frame.as_context_get_shadow(space).pop()
s_frame = w_frame.as_context_get_shadow(space)
assert not s_frame.stackdepth() - s_frame.tempsize() # check args are consumed
return res
def prim_fails(name, module, stack):
w_frame, orig_stack, call = _prim(name, module, stack)
with py.test.raises(PrimitiveFailedError):
call()
assert w_frame.as_context_get_shadow(space).stack() == orig_stack
def fix(name):
f = getattr(squeakssl_data, name)
return w(base64.standard_b64decode(f))
@py.test.mark.skipif(os.name == 'nt' or sys.platform == 'darwin')
def test_https_connect():
import os
w_handle = prim("primitiveCreate")
prim("primitiveSetIntProperty", stack=[w_handle, w(1), w(2)])
assert w_handle.loglevel == 2
prim("primitiveSetStringProperty", stack=[w_handle, w(2), w("www.google.com")])
assert w_handle.servername == "www.google.com"
w_result = prim("primitiveConnect",
stack=[
w_handle,
w(""),
w(1),
w(0),
fix("outbuf1")])
assert w_result.value == 320
w_result = prim("primitiveConnect",
stack=[
w_handle,
fix("inbuf2"),
w(1),
w(3501),
fix("outbuf2")])
assert w_result.value == -1
return
# those cannot be mocked
assert w_result.value == 126
w_result = prim("primitiveConnect",
stack=[
w_handle,
fix("inbuf3"),
w(1),
w(262),
fix("outbuf3")])
assert w_result.value == 0
@py.test.mark.skipif(sys.platform == 'darwin')
def test_http_real():
s = pysocket.socket(pysocket.AF_INET, pysocket.SOCK_STREAM)
s.connect((pysocket.gethostbyname("www.google.com"), 443))
w_handle = prim("primitiveCreate")
prim("primitiveSetIntProperty", stack=[w_handle, w(1), w(2)])
prim("primitiveSetStringProperty", stack=[w_handle, w(2), w("localhost:4443")])
w_out = fix("outbuf1")
w_result = prim("primitiveConnect",
stack=[
w_handle,
w(""),
w(1),
w(0),
w_out])
assert w_result.value > 0
s.sendall(space.unwrap_string(w_out)[0:w_result.value])
inbuf = s.recv(4096)
w_result = prim("primitiveConnect",
stack=[
w_handle,
w(inbuf),
w(1),
w(len(inbuf)),
w_out])
assert w_result.value > 0
s.sendall(space.unwrap_string(w_out)[0:w_result.value])
inbuf = s.recv(4096)
w_result = prim("primitiveConnect",
stack=[
w_handle,
w(inbuf),
w(1),
w(len(inbuf)),
w_out])
assert w_result.value == 0
_get = """GET / HTTP/1.1
User-Agent: WebClient/1.5
Accept-Encoding: gzip
Host: www.google.com
""".replace("\n", "\r\n")
w_result = prim("primitiveEncrypt",
stack=[
w_handle,
w(_get),
w(1),
w(len(_get)),
w_out])
assert w_result.value > 0
s.sendall(space.unwrap_string(w_out)[0:w_result.value])
inbuf = s.recv(4096)
w_result = prim("primitiveDecrypt",
stack=[
w_handle,
w(inbuf),
w(1),
w(len(inbuf)),
w_out])
assert w_result.value > 0
http_response = space.unwrap_string(w_out)[0:w_result.value]
assert http_response.startswith("HTTP/1.1")
|
Python
| 0.000001
|
@@ -2512,32 +2512,62 @@
form == 'darwin'
+, reason=%22Only works on Linux%22
)%0Adef test_https
@@ -3660,16 +3660,48 @@
'darwin'
+, reason=%22Not working on darwin%22
)%0Adef te
|
e0cf4deb8d2cc07db5781b1a25b58132551027d1
|
Clean up unused imports, use explicit imports
|
openelex/models.py
|
openelex/models.py
|
import datetime
from mongoengine import *
from mongoengine.fields import (
BooleanField,
DateTimeField,
DictField,
EmbeddedDocumentField,
IntField,
ListField,
StringField,
)
from openelex.us import STATE_POSTALS
class Office(EmbeddedDocument):
state = StringField(choices=STATE_POSTALS)
name = StringField()
district = StringField()
class Contest(DynamicDocument):
created = DateTimeField()
updated = DateTimeField()
source = StringField(required=True, help_text="Name of data source (preferably from datasource.py). NOTE: this could be a single file among many for a given state, if results are split into different files by reporting level")
election_id = StringField(required=True, help_text="election id, e.g. md-2012-11-06-general")
slug = StringField(required=True, help_text="Slugified office name, plus district and party if relevant")
state = StringField(required=True, choices=STATE_POSTALS)
start_date = DateTimeField(required=True)
end_date = DateTimeField(required=True)
election_type = StringField(help_text="general, primary, etc. from OpenElex metadata")
result_type = StringField(required=True)
special = BooleanField(default=False)
raw_office = StringField(required=True)
raw_district = StringField()
raw_party = StringField(help_text="This should only be assigned for closed primaries, where voters must be registered in party to vote in the contest")
# FIELDS FOR TRANSFORMED/LINKED DATA
office = EmbeddedDocumentField(Office)
district = StringField()
party = StringField(help_text="This should only be assigned for closed primaries, where voters must be registered in party to vote in the contest")
def __unicode__(self):
return u'%s-%s' % self.key
@property
def key(self):
return (self.election_id, self.slug)
class Candidate(DynamicDocument):
"""
State is included because in nearly all cases, a candidate is unique to a state (presidential candidates run in multiple states,
but hail from a single state). This would help with lookups and prevent duplicates. Identifiers is a DictField because a candidate
may have 0 or more identifiers, including state-level IDs.
parties = ['Democratic', 'Republican']
identifiers = {
'bioguide' : <bioguide_id>,
'fec' : [<fecid_1>, <fecid_2>, ...],
'votesmart' : <votesmart_id>,
...
}
"""
source = StringField(required=True, help_text="Name of data source (preferably from datasource.py). NOTE: this could be a single file among many for a given state, if results are split into different files by reporting level")
election_id = StringField(required=True, help_text="election id, e.g. md-2012-11-06-general")
contest = ReferenceField(Contest, reverse_delete_rule=CASCADE, required=True)
contest_slug = StringField(required=True, help_text="Denormalized contest slug for easier querying and obj repr")
state = StringField(required=True, choices=STATE_POSTALS)
#TODO: Add validation to require raw_full_name or raw_family_name
raw_full_name = StringField(max_length=300)
slug = StringField(max_length=300, required=True, help_text="Slugified name for easier querying and obj repr")
raw_given_name = StringField(max_length=200)
raw_family_name = StringField(max_length=200)
raw_suffix = StringField(max_length=200)
raw_additional_name = StringField(max_length=200, help_text="For middle names, nicknames, etc")
raw_parties = ListField(StringField(), default=list)
# FIELDS FOR TRANSFORMED/CLEANED DATA or LINKS TO OTHER DATA SETS
#name = StringField(max_length=300, required=True)
given_name = StringField(max_length=200)
family_name = StringField(max_length=200)
suffix = StringField(max_length=200)
additional_name = StringField(max_length=200, help_text="For middle names, nicknames, etc")
other_names = ListField(StringField(), default=list)
parties = ListField(StringField(), default=list) # normalized? abbreviations?
identifiers = DictField()
def __unicode__(self):
name = u'%s - %s' % (self.contest_slug, self.name)
parties = ""
if self.raw_parties:
parties = ", ".join([party for party in self.raw_parties])
if parties:
name += " (%s)" % parties
return name
@property
def name(self):
if self.raw_full_name:
name = self.raw_full_name
else:
name = "%s, %s" % (self.raw_family_name, self.raw_given_name)
if self.raw_suffix:
name += " %s" % self.raw_suff
return name
@property
def key(self):
return (self.election_id, self.contest_slug, self.slug)
class Result(DynamicDocument):
REPORTING_LEVEL_CHOICES = (
'state',
'congressional_district',
'state_legislative',
'county',
'precinct',
'parish',
)
source = StringField(required=True, help_text="Name of data source for this file, preferably standardized filename from datasource.py")
election_id = StringField(required=True, help_text="election id, e.g. md-2012-11-06-general")
state = StringField(required=True, choices=STATE_POSTALS)
reporting_level = StringField(required=True, choices=REPORTING_LEVEL_CHOICES)
contest = ReferenceField(Contest, reverse_delete_rule=CASCADE, required=True)
contest_slug = StringField(required=True, help_text="Denormalized contest slug for easier querying and obj repr")
candidate = ReferenceField(Candidate, reverse_delete_rule=CASCADE, required=True)
candidate_slug = StringField(required=True, help_text="Denormalized candidate slug for easier querying and obj repr")
ocd_id = StringField()
#TODO: Add validation to require raw_jurisdiction or jurisdiction
raw_jurisdiction = StringField(help_text="Political geography from raw results, if present. E.g. county name, congressional district, precinct number.")
raw_total_votes = IntField(required=True)
raw_winner = StringField()
raw_write_in = StringField()
raw_vote_breakdowns = DictField(help_text="If provided, store vote totals for election day, absentee, provisional, etc.")
jurisdiction = StringField(help_text="Derived/standardized political geography, typically when not found in raw results.")
total_votes = IntField()
winner = BooleanField(help_text="Winner as determined by OpenElex, if not provided natively in data")
write_in = BooleanField()
#vote_breakdowns = DictField(help_text="If provided, store vote totals for election day, absentee, provisional, etc.")
def __unicode__(self):
bits = (
self.election_id,
self.contest_slug,
self.candidate_slug,
self.reporting_level,
self.raw_jurisdiction,
self.raw_total_votes,
)
return u'%s-%s-%s-%s-%s (%s)' % bits
|
Python
| 0.000001
|
@@ -1,20 +1,4 @@
-import datetime%0A
from
@@ -21,9 +21,41 @@
ort
-*
+EmbeddedDocument, DynamicDocument
%0Afro
@@ -209,17 +209,79 @@
gField,%0A
-)
+ ReferenceField,%0A)%0Afrom mongoengine.queryset import CASCADE%0A
%0Afrom op
|
ad4b9ffb7292a5b810df033088008cd503bc1169
|
Add pre-fabricated fake PyPI envs at the top.
|
tests/unit/test_spec_resolving.py
|
tests/unit/test_spec_resolving.py
|
import unittest
from piptools.datastructures import SpecSet
from piptools.package_manager import FakePackageManager
def print_specset(specset, round):
print('After round #%s:' % (round,))
for spec in specset:
print(' - %s' % (spec.description(),))
class TestDependencyResolving(unittest.TestCase):
def test_find_dependencies_simple(self):
"""A simple scenario for finding dependencies."""
content = {
'foo-0.1': ['bar'],
'bar-1.2': ['qux', 'simplejson'],
'qux-0.1': ['simplejson<2.6'],
'simplejson-2.4.0': [],
'simplejson-2.6.2': [],
}
pkgmgr = FakePackageManager(content)
spec_set = SpecSet()
spec_set.add_spec('foo')
round = 1
print_specset(spec_set, round)
while True:
round += 1
new_deps = []
for spec in spec_set.normalize():
name, version = pkgmgr.find_best_match(spec)
new_deps += pkgmgr.get_dependencies(name, version)
if not new_deps:
break
# TODO: We should detect whether adding the new_deps really
# "changes anything" to the spec set. In order words: if no
# significant new constraints are added, we're done
# XXX: FIXME: Current, we "just stop" after X rounds (to prevent
# endless loops), but obviously this is not the correct impl!
if round > 4:
break
spec_set.add_specs(new_deps)
print_specset(spec_set, round)
# Print the final result:
print_specset(spec_set.normalize(), 'final')
spec_set = spec_set.normalize()
self.assertItemsEqual(['foo', 'qux', 'bar', 'simplejson<2.6'], map(str, spec_set))
|
Python
| 0
|
@@ -266,189 +266,19 @@
)%0A%0A%0A
-class TestDependencyResolving(unittest.TestCase):%0A def test_find_dependencies_simple(self):%0A %22%22%22A simple scenario for finding dependencies.%22%22%22%0A content = %7B%0A
+simple = %7B%0A
@@ -293,32 +293,24 @@
': %5B'bar'%5D,%0A
-
'bar-1.2
@@ -335,24 +335,16 @@
json'%5D,%0A
-
'qux
@@ -371,24 +371,16 @@
2.6'%5D,%0A%0A
-
'sim
@@ -399,24 +399,16 @@
0': %5B%5D,%0A
-
'sim
@@ -431,18 +431,164 @@
%5B%5D,%0A
- %7D%0A
+%7D%0A%0A%0Aclass TestDependencyResolving(unittest.TestCase):%0A def test_find_dependencies_simple(self):%0A %22%22%22A simple scenario for finding dependencies.%22%22%22
%0A
@@ -624,15 +624,14 @@
ger(
-content
+simple
)%0A%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.