blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 257 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
126a20fb489533be6f9eb87fb82a6b0b6be1efa9 | ca010e22f1cf5238a3529a43536960193054370c | /game_2048/main.py | b099d76e807ea858df663ef3c6368e31d45d7b79 | [] | no_license | acoderboy/code | e916a83160a6c9ccc12bf16b3f08e4c73abb5867 | 4f32f84525c12da48b2fc2c785a1a2ddbf5678f7 | refs/heads/master | 2020-07-15T20:38:49.139795 | 2019-09-01T08:18:12 | 2019-09-01T08:18:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | """
游戏入口
10:40 上课
"""
from ui import GameConsoleView
# Program entry point: build the console UI and run the game.
if __name__ == "__main__":
    view = GameConsoleView()
    view.start()   # presumably runs the interactive game loop -- TODO confirm in ui.py
    view.update()  # presumably a final redraw after the loop exits -- TODO confirm in ui.py
| [
"396871178@qq.com"
] | 396871178@qq.com |
e564cbb6e5bd4a5146b48e57490b98887aa49bcc | bde6ed092b7b29703737e11c5a5ff90934af3d74 | /AtCoder/tkppc/c.py | c001eb99b71d1c90cfe2d44eb70b9b13d6f44518 | [] | no_license | takecian/ProgrammingStudyLog | 2ab7ea601e0996b3fa502b81ec141bc3772442b6 | 94485d131c0cc9842f1f4799da2d861dbf09b12a | refs/heads/master | 2023-04-28T16:56:18.943574 | 2023-04-18T06:34:58 | 2023-04-18T06:34:58 | 128,525,713 | 4 | 0 | null | 2022-12-09T06:15:19 | 2018-04-07T12:21:29 | Python | UTF-8 | Python | false | false | 321 | py | # https://tkppc.contest.atcoder.jp/tasks/tkppc2015_c
# N events and threshold M, read from the first input line.
N, M = map(int, input().split())
# S: number of time slots to scan (only the first S-1 are summed below).
S = int(input())
# T[t] holds the amount k reported for time t (1-based in the input, 0-based here).
# 10000 is an upper bound on the time values -- assumed from the problem limits.
T = [0 for _ in range(10000)]
for _ in range(N):
    t, k = map(int, input().split())
    T[t-1] = k
# print(T)
total = 0
d = 0
# Count how many prefixes of T[0..S-2] have a cumulative sum >= M.
# NOTE(review): `total` is never reset, so once it reaches M every later
# slot also counts -- assumed intentional per the problem statement; verify.
for i in range(S - 1):
    total += T[i]
    if total >= M:
        d += 1
print(d)
| [
"takecian@gmail.com"
] | takecian@gmail.com |
b141c7a18efb2baa9b0e19dbd1d9477614e8ad49 | 6061701df1acc2cae24fdadd8054b96b10f96f56 | /tests/test_matrix.py | 64a731af58583c1d85ee14307776fea81f2c0fed | [
"Apache-2.0"
] | permissive | gf2crypto/blincodes | f1b0615ca5455743ad097a884fc261dc4ab7119b | c1471168c89f7f395f45e5844d1c04780518fcfa | refs/heads/master | 2022-02-19T04:49:38.040674 | 2020-04-25T18:21:02 | 2020-04-25T18:21:02 | 231,413,664 | 0 | 3 | Apache-2.0 | 2022-01-21T20:18:47 | 2020-01-02T15:56:54 | Python | UTF-8 | Python | false | false | 39,034 | py | """Unit tests for matrix module."""
import unittest
from blincodes import matrix
from blincodes.vector import Vector
class InitMatrixTestCase(unittest.TestCase):
    """Test construction of Matrix objects.

    Covers the default constructor, construction from integer row values,
    from Vector rows, from separator-delimited strings (with custom
    zero/one filler characters) and from arbitrary iterables.
    """
    def test_init_default(self):
        """Init by default values."""
        # A Matrix built with no arguments is empty: shape (0, 0), no rows.
        matr = matrix.Matrix()
        self.assertEqual(matr.shapes, (0, 0))
        self.assertEqual(list(matr), [])
    def test_init_by_integers(self):
        """Init by list of integers."""
        # Without ncolumns the row values are discarded and the matrix is empty.
        matr = matrix.Matrix((0, 0b0011, 0b1011))
        self.assertEqual(matr.shapes, (0, 0))
        self.assertEqual(list(matr), [])
        # ncolumns fixes the row width; each integer becomes a Vector of that size.
        matr = matrix.Matrix((0, 0b0011, 0b1011), ncolumns=2)
        self.assertEqual(matr.shapes, (3, 2))
        self.assertEqual(list(matr),
                         [Vector(0, 2), Vector(0b0011, 2), Vector(0b1011, 2)])
        matr = matrix.Matrix((0, 0b0011, 0b1011), ncolumns=4)
        self.assertEqual(matr.shapes, (3, 4))
        self.assertEqual(list(matr),
                         [Vector(0, 4), Vector(0b0011, 4), Vector(0b1011, 4)])
        matr = matrix.Matrix((0, 0b0011, 0b1011), ncolumns=10)
        self.assertEqual(matr.shapes, (3, 10))
        self.assertEqual(list(matr),
                         [Vector(0, 10),
                          Vector(0b0011, 10),
                          Vector(0b1011, 10)])
    def test_init_by_vectors(self):
        """Init by list of vectors."""
        # Vectors of different lengths are padded to the widest one (4 here).
        matr = matrix.from_vectors([Vector(0b011, 3),
                                    Vector(0b1110, 4),
                                    Vector(0b01, 2)])
        self.assertEqual(matr.shapes, (3, 4))
        self.assertEqual(list(matr), [Vector(0b011, 4),
                                      Vector(0b1110, 4),
                                      Vector(0b01, 4)])
    def test_init_by_string(self):
        """Init Matrix object by string."""
        # Rows separated by ';' (default); shorter rows are left-padded with zeros.
        matr = matrix.from_string(
            '10000101;'
            '01001;'
            '00011100101;'
            '0101001'
        )
        self.assertEqual(matr.shapes, (4, 11))
        self.assertEqual(list(matr), [Vector(0b00010000101, 11),
                                      Vector(0b00000001001, 11),
                                      Vector(0b00011100101, 11),
                                      Vector(0b00000101001, 11)])
        matr = matrix.from_string('')
        self.assertEqual(matr.shapes, (0, 0))
        self.assertEqual(list(matr), [])
        # Custom filler characters ('*', '$', '-' as zeros; '|', '.' as ones)
        # and a custom row separator ('\\') must decode to the same matrix.
        matr = matrix.from_string(
            '100**101\\'
            '0100|\\'
            '00-..10-101\\'
            '01$1-01',
            zerofillers='*$-',
            onefillers='|.',
            row_sep='\\'
        )
        self.assertEqual(matr.shapes, (4, 11))
        self.assertEqual(list(matr), [Vector(0b00010000101, 11),
                                      Vector(0b00000001001, 11),
                                      Vector(0b00011100101, 11),
                                      Vector(0b00000101001, 11)])
    def test_init_by_iterable(self):
        """Init Matrix object by iterable."""
        # Each row element may itself be an int, a digit string, or a filler;
        # multi-character strings contribute several bits.
        matr = matrix.from_iterable(
            [
                ('*', 1, '&', 1, 1, '-', '0', '1', 1, 0, '|*1-'),
                ('*', 1, '&', '-', '0', '1', 1, 0, '|*1-'),
                ('*', 1, '&', 1, 1, '-', '0', '1', 1, 0, '|*1-00111101'),
            ],
            onefillers='&|',
            zerofillers='*-'
        )
        self.assertEqual(matr.shapes, (3, 22))
        self.assertEqual(list(matr), [Vector(0b0000000001111001101010, 22),
                                      Vector(0b0000000000011001101010, 22),
                                      Vector(0b0111100110101000111101, 22)])
        # Empty outer or inner iterables yield the empty matrix.
        matr = matrix.from_iterable([])
        self.assertEqual(matr.shapes, (0, 0))
        self.assertEqual(list(matr), [])
        matr = matrix.from_iterable([[], []])
        self.assertEqual(matr.shapes, (0, 0))
        self.assertEqual(list(matr), [])
class AriphmeticsAndComparingMatrixTestCase(unittest.TestCase):
    """Test arithmetic (GF(2)) operators and comparison of Matrix objects.

    Covers truthiness, row iteration, indexing/slicing, row assignment,
    equality and the *, +, ^, |, & operators (including their in-place
    forms) on matrices of matching and mismatching shapes.
    """
    def test_bool(self):
        """Test comparing with True and False."""
        # The empty matrix is falsy; any non-empty matrix is truthy.
        self.assertFalse(matrix.Matrix())
        self.assertTrue(matrix.Matrix([0b1], ncolumns=1))
    def test_iterator(self):
        """Test iteration over matrix rows."""
        self.assertEqual(list(matrix.Matrix()), [])
        self.assertTrue(list(matrix.Matrix([0b1], ncolumns=1)),
                        [Vector(0b1, 1)])
        self.assertTrue(list(matrix.Matrix(
            [0b0011, 0b1010, 0b0111], ncolumns=4)),
                        [Vector(0b0011, 4),
                         Vector(0b1010, 4),
                         Vector(0b0111, 4)])
    def test_getitem(self):
        """Test getting the item."""
        matr = matrix.Matrix(
            [
                0b11110000101,
                0b01100001001,
                0b00011100101,
                0b10100101001,
            ],
            ncolumns=11)
        # Single index (positive or negative) yields a row Vector.
        self.assertIsInstance(matr[2], Vector)
        self.assertEqual(matr[2], Vector(0b00011100101, 11))
        self.assertEqual(matr[-2], Vector(0b00011100101, 11))
        # Slicing (including reversed) yields a sub-Matrix of the selected rows.
        self.assertIsInstance(matr[0:4:2], matrix.Matrix)
        self.assertEqual(list(matr[0:4:2]),
                         [Vector(0b11110000101, 11),
                          Vector(0b00011100101, 11)])
        self.assertIsInstance(matr[::-1], matrix.Matrix)
        self.assertEqual(list(matr[::-1]),
                         [Vector(0b10100101001, 11),
                          Vector(0b00011100101, 11),
                          Vector(0b01100001001, 11),
                          Vector(0b11110000101, 11)])
    def test_setitem(self):
        """Test setting the item."""
        matr_values = [
            0b11110000101,
            0b01100001001,
            0b00011100101,
            0b10100101001,
        ]
        # A row may be assigned from an int, a Vector, a bit string, or a
        # mixed tuple of ints/strings/bools; all are coerced to a row Vector.
        matr = matrix.Matrix(matr_values, ncolumns=11)
        matr[0] = 0
        self.assertEqual([x.value for x in matr], [0] + matr_values[1:])
        matr = matrix.Matrix(matr_values, ncolumns=11)
        matr[0] = Vector(0b11, 3)
        self.assertEqual([x.value for x in matr], [0b011] + matr_values[1:])
        matr = matrix.Matrix(matr_values, ncolumns=11)
        matr[0] = '1011'
        self.assertEqual([x.value for x in matr], [0b1011] + matr_values[1:])
        matr = matrix.Matrix(matr_values, ncolumns=11)
        matr[0] = (1, 0, '11', '001', True)
        self.assertEqual([x.value for x in matr],
                         [0b10110011] + matr_values[1:])
        matr = matrix.Matrix(matr_values, ncolumns=11)
        matr[2] = 0
        self.assertEqual([x.value for x in matr],
                         matr_values[:2] + [0] + matr_values[3:])
        # Negative indices address rows from the end, like lists.
        matr[-2] = 0
        self.assertEqual([x.value for x in matr],
                         matr_values[:2] + [0] + matr_values[3:])
    def test_equality(self):
        """Test matrix's equality."""
        matr_values = [
            0b11110000101,
            0b01100001001,
            0b00011100101,
            0b10100101001,
        ]
        matr = matrix.Matrix(matr_values, ncolumns=11)
        self.assertEqual(matr, matrix.Matrix(matr_values, ncolumns=11))
        self.assertNotEqual(matr, matrix.Matrix())
        self.assertEqual(matrix.Matrix(), matrix.Matrix())
        # Mutating one row breaks equality with the pristine copy.
        matr[0] = 0
        self.assertNotEqual(matrix.Matrix(matr_values, ncolumns=11), matr)
        self.assertNotEqual(matrix.Matrix(matr_values, ncolumns=11),
                            matrix.Matrix([0b11, 0b01], 2))
    def test_multiplication(self):
        """Test multiply of matrices."""
        matr_values = [
            0b11110000101,
            0b01100001001,
            0b00011100101,
            0b10100101001,
        ]
        matr_a_values = [
            0b1001,
            0b1100,
            0b1101,
            0b1010,
            0b0010,
            0b0011,
            0b0000,
            0b0101,
            0b1010,
            0b0000,
            0b1111,
        ]
        # Expected GF(2) product of the 4x11 and 11x4 matrices above.
        matr_result = [
            0b0111,
            0b1011,
            0b1110,
            0b1101,
        ]
        self.assertEqual(
            matrix.Matrix(matr_values, 11) * matrix.Matrix(matr_a_values, 4),
            matrix.Matrix(matr_result, 4))
        self.assertEqual(matrix.Matrix() * matrix.Matrix(), matrix.Matrix())
        # Incompatible shapes (11 columns vs 0 rows) must raise.
        with self.assertRaises(ValueError):
            matrix.Matrix(matr_values, 11) * matrix.Matrix()
        # In-place form behaves like the binary operator.
        matr_a = matrix.Matrix(matr_values, 11)
        matr_a *= matrix.Matrix(matr_a_values, 4)
        self.assertEqual(matr_a, matrix.Matrix(matr_result, 4))
    def test_addition(self):
        """Test add of matrices."""
        matr_values = [
            0b11110000101,
            0b01100001001,
            0b00011100101,
            0b10100101001,
        ]
        matr_a_values = [
            0b1001,
            0b1100,
            0b1101,
            0b1010,
            0b0010,
            0b0011,
            0b0000,
            0b0101,
            0b1010,
            0b0000,
            0b1111,
        ]
        matr_result = [
            0b11110001100,
            0b01100000101,
            0b00011101000,
            0b10100100011,
        ]
        # In GF(2) a matrix added to itself is the zero matrix.
        self.assertEqual(
            matrix.Matrix(matr_values, 11) + matrix.Matrix(matr_values, 11),
            matrix.Matrix([0] * 4, 11))
        self.assertEqual(matrix.Matrix() + matrix.Matrix(), matrix.Matrix())
        # Adding an empty matrix yields the empty matrix.
        self.assertEqual(
            matrix.Matrix(matr_values, 11) + matrix.Matrix(),
            matrix.Matrix())
        # Mismatched widths: only the first 4 rows of the 11x4 operand take part.
        self.assertEqual(
            matrix.Matrix(matr_values, 11) + matrix.Matrix(matr_a_values, 4),
            matrix.Matrix(matr_result, 11))
        matr_a = matrix.Matrix(matr_values, 11)
        matr_a += matrix.Matrix(matr_a_values, 11)
        self.assertEqual(matr_a, matrix.Matrix(matr_result, 11))
    def test_xor(self):
        """Test xor of matrices."""
        # Over GF(2) XOR is identical to addition, so reuse that test.
        self.test_addition()
    def test_or(self):
        """Test OR operation under matrices."""
        matr_values = [
            0b11110000101,
            0b01100001001,
            0b00011100101,
            0b10100101001,
        ]
        matr_a_values = [
            0b1001,
            0b1100,
            0b1101,
            0b1010,
            0b0010,
            0b0011,
            0b0000,
            0b0101,
            0b1010,
            0b0000,
            0b1111,
        ]
        matr_result = [
            0b11110001101,
            0b01100001101,
            0b00011101101,
            0b10100101011,
        ]
        # OR with itself is the identity operation.
        self.assertEqual(
            matrix.Matrix(matr_values, 11) | matrix.Matrix(matr_values, 11),
            matrix.Matrix(matr_values, 11))
        self.assertEqual(matrix.Matrix() | matrix.Matrix(), matrix.Matrix())
        self.assertEqual(
            matrix.Matrix(matr_values, 11) | matrix.Matrix(),
            matrix.Matrix())
        self.assertEqual(
            matrix.Matrix(matr_values, 11) | matrix.Matrix(matr_a_values, 4),
            matrix.Matrix(matr_result, 11))
        matr_a = matrix.Matrix(matr_values, 11)
        matr_a |= matrix.Matrix(matr_a_values, 11)
        self.assertEqual(matr_a, matrix.Matrix(matr_result, 11))
    def test_and(self):
        """Test AND operation under matrices."""
        matr_values = [
            0b11110000101,
            0b01100001001,
            0b00011100101,
            0b10100101001,
        ]
        matr_a_values = [
            0b1001,
            0b1100,
            0b1101,
            0b1010,
            0b0010,
            0b0011,
            0b0000,
            0b0101,
            0b1010,
            0b0000,
            0b1111,
        ]
        matr_result = [
            0b0001,
            0b1000,
            0b0101,
            0b1000,
        ]
        # AND with itself is the identity operation.
        self.assertEqual(
            matrix.Matrix(matr_values, 11) & matrix.Matrix(matr_values, 11),
            matrix.Matrix(matr_values, 11))
        self.assertEqual(matrix.Matrix() & matrix.Matrix(), matrix.Matrix())
        self.assertEqual(
            matrix.Matrix(matr_values, 11) & matrix.Matrix(),
            matrix.Matrix())
        self.assertEqual(
            matrix.Matrix(matr_values, 11) & matrix.Matrix(matr_a_values, 4),
            matrix.Matrix(matr_result, 11))
        matr_a = matrix.Matrix(matr_values, 11)
        matr_a &= matrix.Matrix(matr_a_values, 11)
        self.assertEqual(matr_a, matrix.Matrix(matr_result, 11))
class StringRepresentationMatrixTestCase(unittest.TestCase):
    """Test string/LaTeX rendering of Matrix objects.

    Despite the name, this case also covers several structural helpers:
    shapes, copy, submatrix selection, transposition, concatenation and
    the is_zero/is_identity predicates.
    """
    def test_to_str(self):
        """Test representation as customisable string."""
        matr_values = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
            0b000000000001
        ]
        matr_str = (
            '111111111111\n'
            '011111111111\n'
            '001111111111\n'
            '000111111111\n'
            '000011111111\n'
            '000001111111\n'
            '000000111111\n'
            '000000011111\n'
            '000000001111\n'
            '000000000111\n'
            '000000000011\n'
            '000000000001')
        # With numbered=True each row is prefixed by its right-aligned index.
        matr_str_numbered = (
            ' 0: 111111111111\n'
            ' 1: 011111111111\n'
            ' 2: 001111111111\n'
            ' 3: 000111111111\n'
            ' 4: 000011111111\n'
            ' 5: 000001111111\n'
            ' 6: 000000111111\n'
            ' 7: 000000011111\n'
            ' 8: 000000001111\n'
            ' 9: 000000000111\n'
            '10: 000000000011\n'
            '11: 000000000001')
        self.assertEqual(
            matrix.Matrix(matr_values, 12).to_str(), matr_str)
        # Custom filler characters substitute for '0' and '1' respectively.
        self.assertEqual(
            matrix.Matrix(matr_values, 12).to_str(zerofillers='*'),
            matr_str.replace('0', '*'))
        self.assertEqual(
            matrix.Matrix(matr_values, 12).to_str(onefillers='*'),
            matr_str.replace('1', '*'))
        self.assertEqual(
            matrix.Matrix().to_str(onefillers='*'),
            '')
        self.assertEqual(
            matrix.Matrix(matr_values, 12).to_str(numbered=True),
            matr_str_numbered)
    def test_to_str_default(self):
        """Test representation as string to print it."""
        matr_values = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
            0b000000000001
        ]
        matr_str = (
            '111111111111\n'
            '011111111111\n'
            '001111111111\n'
            '000111111111\n'
            '000011111111\n'
            '000001111111\n'
            '000000111111\n'
            '000000011111\n'
            '000000001111\n'
            '000000000111\n'
            '000000000011\n'
            '000000000001')
        # str() must match to_str() with default fillers; empty matrix -> ''.
        self.assertEqual(
            str(matrix.Matrix(matr_values, 12)), matr_str)
        self.assertEqual(
            str(matrix.Matrix()),
            '')
    def test_to_str_repr(self):
        """Test representation as string."""
        matr_values = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
            0b000000000001
        ]
        # Wide matrices elide the middle of each row; tall ones elide rows.
        matr_str = (
            'Matrix(shapes=(12, 12), ['
            '0: 1111...1111, '
            '1: 0111...1111, '
            '..., '
            '11: 0000...0001])')
        matr_str8 = (
            'Matrix(shapes=(12, 8), ['
            '0: 11111111, '
            '1: 11111111, '
            '..., '
            '11: 00000001])')
        # Small matrices are shown in full.
        matr_str48 = (
            'Matrix(shapes=(3, 8), ['
            '0: 11111111, '
            '1: 11111111, '
            '2: 11111111])')
        self.assertEqual(
            repr(matrix.Matrix(matr_values, 12)), matr_str)
        self.assertEqual(
            repr(matrix.Matrix(matr_values, 8)), matr_str8)
        self.assertEqual(
            repr(matrix.Matrix(matr_values, 8)[:3]), matr_str48)
        self.assertEqual(
            repr(matrix.Matrix()),
            'Matrix(shapes=(0, 0), [])')
    def test_to_latex_str(self):
        """Test representation Matrix object as LaTeX string."""
        matr_values = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
        ]
        # LaTeX tabular body: '&'-separated cells, '\\\\' row terminators
        # (no terminator after the last row).
        matr_str = (
            '1&1&1&1&1&1&1&1&1&1&1&1\\\\\n'
            '0&1&1&1&1&1&1&1&1&1&1&1\\\\\n'
            '0&0&1&1&1&1&1&1&1&1&1&1\\\\\n'
            '0&0&0&1&1&1&1&1&1&1&1&1\\\\\n'
            '0&0&0&0&1&1&1&1&1&1&1&1\\\\\n'
            '0&0&0&0&0&1&1&1&1&1&1&1\\\\\n'
            '0&0&0&0&0&0&1&1&1&1&1&1\\\\\n'
            '0&0&0&0&0&0&0&1&1&1&1&1\\\\\n'
            '0&0&0&0&0&0&0&0&1&1&1&1\\\\\n'
            '0&0&0&0&0&0&0&0&0&1&1&1\\\\\n'
            '0&0&0&0&0&0&0&0&0&0&1&1')
        matr = matrix.Matrix(matr_values, 12)
        self.assertEqual(matr.to_latex_str(), matr_str)
        self.assertEqual(matrix.Matrix().to_latex_str(), '')
    def test_shapes(self):
        """Test get shapes."""
        matr_values = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
        ]
        matr = matrix.Matrix(matr_values, 12)
        # shapes is (nrows, ncolumns); the scalar accessors agree with it.
        self.assertEqual(matr.shapes, (11, 12))
        self.assertEqual(matr.nrows, 11)
        self.assertEqual(matr.ncolumns, 12)
        matr_empty = matrix.Matrix()
        self.assertEqual(matr_empty.shapes, (0, 0))
        self.assertEqual(matr_empty.ncolumns, 0)
        self.assertEqual(matr_empty.nrows, 0)
    def test_make_copy(self):
        """Test make copy of Matrix object."""
        matr_values = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
        ]
        matr = matrix.Matrix(matr_values, 12)
        self.assertEqual(matr.copy(), matr)
        self.assertEqual(matrix.Matrix().copy(), matrix.Matrix())
    def test_submatrix(self):
        """Test choice submatrix."""
        matr_values = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
        ]
        # Columns 1, 3, 4, 7, 8, 9 and -1 (= 11) of the matrix above.
        submatr_values = [
            0b1111111,
            0b1111111,
            0b0111111,
            0b0111111,
            0b0011111,
            0b0001111,
            0b0001111,
            0b0001111,
            0b0000111,
            0b0000011,
            0b0000001,
        ]
        # Same columns plus out-of-range indices (12, 20, 17), which appear
        # to be kept as zero columns -- see the duplicate/out-of-range asserts.
        submatr_values2 = [
            0b1111111111,
            0b1111111011,
            0b0111111011,
            0b0111111011,
            0b0011111011,
            0b0001111011,
            0b0001111010,
            0b0001111010,
            0b0000111010,
            0b0000011000,
            0b0000001000,
        ]
        matr = matrix.Matrix(matr_values, 12)
        self.assertEqual(matr.submatrix([1, 3, 4, 7, 8, 9, -1]),
                         matrix.Matrix(submatr_values, 7))
        # No column list selects the whole matrix.
        self.assertEqual(matr.submatrix(),
                         matr)
        self.assertEqual(matr.submatrix([1, 3, 4, 7, 8, 9, -1, 12, 20, 17]),
                         matrix.Matrix(submatr_values2, 10))
        self.assertEqual(matrix.Matrix().submatrix([0, 7, 8]),
                         matrix.Matrix())
    def test_transpose(self):
        """Test matrix transposition."""
        matr_values = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
        ]
        matr_transpose_values = [
            0b10000000000,
            0b11000000000,
            0b11100000000,
            0b11110000000,
            0b11111000000,
            0b11111100000,
            0b11111110000,
            0b11111111000,
            0b11111111100,
            0b11111111110,
            0b11111111111,
            0b11111111111
        ]
        matr = matrix.Matrix(matr_values, 12)
        self.assertEqual(matr.transpose(),
                         matrix.Matrix(matr_transpose_values, 11))
        # Transposition is an involution.
        self.assertEqual(matr.transpose().transpose(), matr)
        self.assertEqual(matrix.Matrix().transpose(),
                         matrix.Matrix())
        # The .T property is an alias for transpose().
        self.assertEqual(matr.T, matr.transpose())
    def test_concatenate(self):
        """Test matrix concatenation."""
        matr_values1 = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
        ]
        matr_values2 = [
            0b10000000000,
            0b11000000000,
            0b11100000000,
            0b11110000000,
            0b11111000000,
            0b11111100000,
            0b11111110000,
            0b11111111000,
            0b11111111100,
            0b11111111110,
            0b11111111111,
            0b11111111111
        ]
        # Column-wise concatenation: 12 + 11 = 23 columns, rows paired up.
        matr_concat_columns = [
            0b11111111111110000000000,
            0b01111111111111000000000,
            0b00111111111111100000000,
            0b00011111111111110000000,
            0b00001111111111111000000,
            0b00000111111111111100000,
            0b00000011111111111110000,
            0b00000001111111111111000,
            0b00000000111111111111100,
            0b00000000011111111111110,
            0b00000000001111111111111,
        ]
        # Row-wise concatenation: second operand's 11-bit rows are padded
        # to the first operand's 12-column width.
        matr_concat_rows = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
            0b010000000000,
            0b011000000000,
            0b011100000000,
            0b011110000000,
            0b011111000000,
            0b011111100000,
            0b011111110000,
            0b011111111000,
            0b011111111100,
            0b011111111110,
            0b011111111111,
            0b011111111111
        ]
        matr1 = matrix.Matrix(matr_values1, 12)
        matr2 = matrix.Matrix(matr_values2, 11)
        self.assertEqual(matrix.concatenate(matr1, matr2),
                         matrix.Matrix(matr_concat_columns, 23))
        self.assertEqual(matr1.concatenate(matr2),
                         matrix.Matrix(matr_concat_columns, 23))
        matr1 = matrix.Matrix(matr_values1, 12)
        self.assertEqual(matrix.concatenate(matr1, matr2, by_rows=True),
                         matrix.Matrix(matr_concat_rows, 12))
        self.assertEqual(matr1.concatenate(matr2, by_rows=True),
                         matrix.Matrix(matr_concat_rows, 12))
        # Edge cases with the empty matrix: column-wise concatenation with an
        # empty operand yields the empty matrix (except the method form with
        # an empty right operand); row-wise it acts as the identity.
        matr1 = matrix.Matrix(matr_values1, 12)
        self.assertEqual(matrix.concatenate(matrix.Matrix(), matr1),
                         matrix.Matrix())
        self.assertEqual(matrix.Matrix().concatenate(matr1),
                         matrix.Matrix())
        self.assertEqual(matrix.concatenate(matr1, matrix.Matrix()),
                         matrix.Matrix())
        self.assertEqual(matr1.concatenate(matrix.Matrix()),
                         matr1)
        matr1 = matrix.Matrix(matr_values1, 12)
        self.assertEqual(matrix.concatenate(matrix.Matrix(),
                                            matr1, by_rows=True),
                         matr1)
        self.assertEqual(matrix.Matrix().concatenate(matr1, by_rows=True),
                         matr1)
        self.assertEqual(matrix.concatenate(matr1,
                                            matrix.Matrix(), by_rows=True),
                         matr1)
        self.assertEqual(matr1.concatenate(matrix.Matrix(), by_rows=True),
                         matr1)
    def test_is_zero(self):
        """Test matrix comparing with zero."""
        matr_values = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
        ]
        self.assertFalse(matrix.Matrix(matr_values, 12).is_zero())
        self.assertTrue(matrix.Matrix([0] * 15, 12).is_zero())
        # The empty matrix counts as zero.
        self.assertTrue(matrix.Matrix().is_zero())
    def test_is_identity(self):
        """Test matrix comparing with identity matrix."""
        matr_values = [
            0b111111111111,
            0b011111111111,
            0b001111111111,
            0b000111111111,
            0b000011111111,
            0b000001111111,
            0b000000111111,
            0b000000011111,
            0b000000001111,
            0b000000000111,
            0b000000000011,
        ]
        self.assertFalse(matrix.Matrix(matr_values, 12).is_identity())
        # 1 << (11 - i) puts a single one on the diagonal of each row.
        self.assertTrue(
            matrix.Matrix([1 << (11 - i)
                           for i in range(12)], 12).is_identity())
        # Unlike is_zero(), the empty matrix is NOT considered identity.
        self.assertFalse(matrix.Matrix().is_identity())
class MatrixLinearTransformationsTestCase(unittest.TestCase):
    """Test linear transformations of a matrix and solving linear equations.

    Covers echelon/diagonal forms, rank, is_max_rank, inverse, the maximal
    orthogonal matrix, solving A*x = b over GF(2) and partial/full Gaussian
    elimination.  Shared fixtures are built in setUp.

    Bug fixed here: the empty-matrix check in test_is_max_rank previously
    asserted the truthiness of the *bound method* (``...is_max_rank`` without
    parentheses), which is always true and therefore tested nothing; the
    method is now actually called, matching the is_zero()/is_identity()
    checks in the sibling test case.
    """
    def setUp(self):
        """Build the shared fixture matrices (as lists of row values)."""
        # 4x4 upper-triangular, full rank.
        self.matr_upper = [
            0b1111,
            0b0111,
            0b0011,
            0b0001,
        ]
        # 4x4 full rank but not triangular.
        self.matr_max_rank = [
            0b0111,
            0b1000,
            0b1100,
            0b1110,
        ]
        # 4x5, rank 3 (rank-deficient).
        self.matr_non_max_rank1 = [
            0b01110,
            0b00101,
            0b11001,
            0b11100,
        ]
        # 7x5, rank 5 (maximal for its shape despite redundant rows).
        self.matr_non_max_rank2 = [
            0b01110,
            0b00101,
            0b11001,
            0b11100,
            0b10010,
            0b11111,
            0b01010,
        ]
    def test_echelon_form(self):
        """Test evaluating of matrix echelon form."""
        matr_max_rank_echelon = [
            0b1000,
            0b0111,
            0b0011,
            0b0001,
        ]
        # Zero rows sink to the bottom for rank-deficient matrices.
        matr_non_max_rank1_echelon = [
            0b10010,
            0b01110,
            0b00101,
            0b00000,
        ]
        matr_non_max_rank2_echelon = [
            0b10010,
            0b01110,
            0b00101,
            0b00011,
            0b00001,
            0b00000,
            0b00000,
        ]
        # An upper-triangular matrix is already in echelon form.
        self.assertEqual(matrix.Matrix(self.matr_upper, 4).echelon_form,
                         matrix.Matrix(self.matr_upper, 4))
        self.assertEqual(matrix.Matrix(self.matr_max_rank, 4).echelon_form,
                         matrix.Matrix(matr_max_rank_echelon, 4))
        self.assertEqual(matrix.Matrix(self.matr_non_max_rank1,
                                       5).echelon_form,
                         matrix.Matrix(matr_non_max_rank1_echelon, 5))
        self.assertEqual(matrix.Matrix(self.matr_non_max_rank2,
                                       5).echelon_form,
                         matrix.Matrix(matr_non_max_rank2_echelon, 5))
        self.assertEqual(matrix.Matrix().echelon_form,
                         matrix.Matrix())
    def test_rank(self):
        """Test evaluating of matrix rank."""
        self.assertEqual(matrix.Matrix(self.matr_upper, 4).rank, 4)
        self.assertEqual(matrix.Matrix(self.matr_max_rank, 4).rank, 4)
        self.assertEqual(matrix.Matrix(self.matr_non_max_rank1,
                                       5).rank, 3)
        self.assertEqual(matrix.Matrix(self.matr_non_max_rank2,
                                       5).rank, 5)
        # The empty matrix has rank 0.
        self.assertEqual(matrix.Matrix().rank, 0)
    def test_is_max_rank(self):
        """Test check if matrix has maximal rank."""
        self.assertTrue(matrix.Matrix(self.matr_upper, 4).is_max_rank())
        self.assertTrue(matrix.Matrix(self.matr_max_rank, 4).is_max_rank())
        self.assertFalse(matrix.Matrix(self.matr_non_max_rank1,
                                       5).is_max_rank())
        self.assertTrue(matrix.Matrix(self.matr_non_max_rank2,
                                      5).is_max_rank())
        # FIX: the original asserted the bound method object itself
        # (always truthy); call it so the empty-matrix case is tested.
        # rank 0 == min(0, 0), so the empty matrix has maximal rank.
        self.assertTrue(matrix.Matrix().is_max_rank())
    def test_diagonal_form(self):
        """Test evaluating of matrix diagonal form."""
        matr_non_max_rank1_diagonal = [
            0b10010,
            0b01011,
            0b00101,
            0b00000,
        ]
        matr_non_max_rank2_diagonal = [
            0b10000,
            0b01000,
            0b00100,
            0b00010,
            0b00001,
            0b00000,
            0b00000,
        ]
        # Full-rank square matrices diagonalise to the identity.
        self.assertTrue(
            matrix.Matrix(self.matr_upper, 4).diagonal_form.is_identity())
        self.assertTrue(
            matrix.Matrix(self.matr_max_rank, 4).diagonal_form.is_identity())
        self.assertEqual(matrix.Matrix(self.matr_non_max_rank1,
                                       5).diagonal_form,
                         matrix.Matrix(matr_non_max_rank1_diagonal, 5))
        self.assertEqual(matrix.Matrix(self.matr_non_max_rank2,
                                       5).diagonal_form,
                         matrix.Matrix(matr_non_max_rank2_diagonal, 5))
        self.assertEqual(matrix.Matrix().diagonal_form,
                         matrix.Matrix())
    def test_inverse(self):
        """Test evaluating of inverse matrix."""
        # For singular matrices `inverse` is a pseudo-inverse: multiplying by
        # it yields the diagonal form rather than the identity.
        matr_non_max_rank1_diagonal = [
            0b10010,
            0b01011,
            0b00101,
            0b00000,
        ]
        matr_non_max_rank2_diagonal = [
            0b10000,
            0b01000,
            0b00100,
            0b00010,
            0b00001,
            0b00000,
            0b00000,
        ]
        self.assertTrue(
            (matrix.Matrix(self.matr_upper,
                           4).inverse * matrix.Matrix(self.matr_upper,
                                                      4)).is_identity())
        self.assertTrue(
            (matrix.Matrix(self.matr_max_rank,
                           4).inverse * matrix.Matrix(self.matr_max_rank,
                                                      4)).is_identity())
        self.assertEqual(
            matrix.Matrix(self.matr_non_max_rank1,
                          5).inverse * matrix.Matrix(self.matr_non_max_rank1,
                                                     5),
            matrix.Matrix(matr_non_max_rank1_diagonal, 5))
        self.assertEqual(
            matrix.Matrix(self.matr_non_max_rank2,
                          5).inverse * matrix.Matrix(self.matr_non_max_rank2,
                                                     5),
            matrix.Matrix(matr_non_max_rank2_diagonal, 5))
        self.assertEqual(matrix.Matrix().inverse, matrix.Matrix())
        self.assertTrue(matrix.Matrix([0] * 10, 20).inverse.is_identity())
        # Randomised check: any non-singular matrix times its inverse is I.
        for _ in range(10):
            matr = matrix.nonsingular(20)
            self.assertTrue((matr * matr.inverse).is_identity())
    def test_othogonal(self):
        """Test evaluating of maximal orthogonal matrix."""
        # Full-rank matrices have the trivial (zero) orthogonal complement.
        matr_upper_ort = matrix.Matrix(self.matr_upper, 4).orthogonal
        self.assertTrue(matr_upper_ort.is_zero())
        self.assertTrue(
            (matrix.Matrix(self.matr_upper, 4) * matr_upper_ort.T).is_zero())
        matr_max_rank = matrix.Matrix(self.matr_max_rank, 4)
        matr_max_rank_ort = matr_max_rank.orthogonal
        self.assertTrue(matr_max_rank_ort.is_zero())
        self.assertTrue((matr_max_rank * matr_max_rank_ort.T).is_zero())
        # Rank-deficient matrix: complement dimension = ncolumns - rank.
        matr_non_max_rank1 = matrix.Matrix(self.matr_non_max_rank1, 5)
        matr_non_max_rank1_ort = matr_non_max_rank1.orthogonal
        self.assertEqual(
            matr_non_max_rank1_ort.shapes,
            (matr_non_max_rank1.ncolumns - matr_non_max_rank1.rank,
             matr_non_max_rank1.ncolumns))
        self.assertTrue(
            (matr_non_max_rank1 * matr_non_max_rank1_ort.T).is_zero())
        matr_non_max_rank2 = matrix.Matrix(self.matr_non_max_rank2, 5)
        matr_non_max_rank2_ort = matr_non_max_rank2.orthogonal
        self.assertTrue(matr_non_max_rank2_ort.is_zero())
        self.assertTrue(
            (matr_non_max_rank2 * matr_non_max_rank2_ort.T).is_zero())
        self.assertEqual(matrix.Matrix().orthogonal, matrix.Matrix())
    def test_solving_linear_equation(self):
        """Test solving of linear equation."""
        # Unique solution: fundamental system is empty, A * x == b holds.
        vec = Vector(0b1010, 4)
        matr_max_rank = matrix.Matrix(self.matr_max_rank, 4)
        fundamental, vec_solve = matr_max_rank.solve(vec)
        self.assertFalse(fundamental)
        self.assertEqual(
            matr_max_rank * matrix.from_vectors([vec_solve]).transpose(),
            matrix.from_vectors([vec]).transpose())
        # Inconsistent system: both results are empty/falsy.
        vec = Vector(0b1010, 4)
        matr_non_max_rank1 = matrix.Matrix(self.matr_non_max_rank1, 5)
        fundamental, vec_solve = matr_non_max_rank1.solve(vec)
        self.assertFalse(fundamental)
        self.assertFalse(vec_solve)
        # Underdetermined consistent system: non-trivial fundamental system
        # plus one particular solution satisfying A * x == b.
        vec = Vector(0b1110, 4)
        fundamental, vec_solve = matr_non_max_rank1.solve(vec)
        self.assertEqual(fundamental,
                         matrix.Matrix([0b11010, 0b01101], 5))
        self.assertEqual(
            matr_non_max_rank1 * matrix.from_vectors([vec_solve]).transpose(),
            matrix.from_vectors([vec]).transpose())
    def test_gaussian_elimination(self):
        """Test evaluating of Gaussian elimination."""
        matr_gauss_full_non_sort = [
            0b01011,
            0b00101,
            0b10010,
            0b00000,
        ]
        matr_gauss_full_sort = [
            0b10010,
            0b01011,
            0b00101,
            0b00000,
        ]
        # Elimination restricted to columns 1, 3 and 4 only.
        matr_gauss_partial_non_sort = [
            0b11100,
            0b00101,
            0b10010,
            0b00000,
        ]
        matr_gauss_partial_sort = [
            0b11100,
            0b10010,
            0b00101,
            0b00000,
        ]
        matr = matrix.Matrix(self.matr_non_max_rank1, 5)
        self.assertEqual(matr.gaussian_elimination(),
                         matrix.Matrix(matr_gauss_full_sort, 5))
        self.assertEqual(matr.gaussian_elimination(sort=False),
                         matrix.Matrix(matr_gauss_full_non_sort, 5))
        self.assertEqual(matr.gaussian_elimination([1, 3, 4], sort=False),
                         matrix.Matrix(matr_gauss_partial_non_sort, 5))
        self.assertEqual(matr.gaussian_elimination([1, 3, 4]),
                         matrix.Matrix(matr_gauss_partial_sort, 5))
        # Out-of-range and duplicate column indices must be ignored.
        self.assertEqual(matr.gaussian_elimination([1, 3, 4, -1, 7, 9, 10]),
                         matrix.Matrix(matr_gauss_partial_sort, 5))
        self.assertEqual(matr.gaussian_elimination([1, 3, 4, 4]),
                         matrix.Matrix(matr_gauss_partial_sort, 5))
        self.assertEqual(matrix.Matrix().gaussian_elimination(),
                         matrix.Matrix())
class GenerateMatrixTestCase(unittest.TestCase):
    """Test the module-level generators of special matrices.

    Covers zero, identity, random (optionally max-rank), non-singular
    and permutation matrix construction.
    """
    def test_generate_zero(self):
        """Test generating zero matrix."""
        # One argument -> square zero matrix.
        zero1 = matrix.zero(10)
        self.assertTrue(zero1.is_zero())
        self.assertEqual(zero1.shapes, (10, 10))
        # Two arguments -> (nrows, ncolumns), both tall and wide shapes.
        zero1 = matrix.zero(20, 10)
        self.assertTrue(zero1.is_zero())
        self.assertEqual(zero1.shapes, (20, 10))
        zero1 = matrix.zero(5, 10)
        self.assertTrue(zero1.is_zero())
        self.assertEqual(zero1.shapes, (5, 10))
    def test_generate_identity(self):
        """Test generating identity matrix."""
        ident1 = matrix.identity(10)
        self.assertTrue(ident1.is_identity())
        self.assertEqual(ident1.shapes, (10, 10))
        # The row count appears to be clamped to min(nrows, ncolumns)
        # so the result stays an identity block -- see shapes (10, 10).
        ident1 = matrix.identity(20, 10)
        self.assertTrue(ident1.is_identity())
        self.assertEqual(ident1.shapes, (10, 10))
        ident1 = matrix.identity(5, 10)
        self.assertTrue(ident1.is_identity())
        self.assertEqual(ident1.shapes, (5, 10))
    def test_generate_random(self):
        """Test generating random matrix."""
        matr = matrix.random(10)
        self.assertEqual(matr.shapes, (10, 10))
        matr = matrix.random(10, 21)
        self.assertEqual(matr.shapes, (10, 21))
        matr = matrix.random(42, 21)
        self.assertEqual(matr.shapes, (42, 21))
        # With max_rank=True the rank must equal min(nrows, ncolumns);
        # repeat to reduce the chance of a lucky random draw masking a bug.
        for _ in range(10):
            self.assertEqual(matrix.random(10, max_rank=True).rank, 10)
            self.assertEqual(matrix.random(10, 20, max_rank=True).rank, 10)
            self.assertEqual(matrix.random(20, 10, max_rank=True).rank, 10)
    def test_generate_nonsingular(self):
        """Test generating random non-singular matrix."""
        for _ in range(10):
            self.assertEqual(matrix.nonsingular(20).rank, 20)
    def test_generate_permutation(self):
        """Test generating permutation matrix."""
        # Column-permutation matrix for the permutation (0 3 6 1 5 4 2 7).
        perm_matrix = [
            0b10000000,
            0b00010000,
            0b00000010,
            0b01000000,
            0b00000100,
            0b00001000,
            0b00100000,
            0b00000001,
        ]
        self.assertEqual(matrix.permutation([]), matrix.Matrix())
        perm = matrix.permutation([0, 3, 6, 1, 5, 4, 2, 7])
        self.assertEqual(perm, matrix.Matrix(perm_matrix, 8))
        # by_rows=True produces the transpose (row-permutation) matrix.
        perm = matrix.permutation([0, 3, 6, 1, 5, 4, 2, 7], by_rows=True)
        self.assertEqual(perm, matrix.Matrix(perm_matrix, 8).transpose())
# Allow running this test module directly: `python test_matrix.py`.
if __name__ == "__main__":
    unittest.main()
| [
"ivchizhov@gmail.com"
] | ivchizhov@gmail.com |
2484e18a908970de9c105cff0b254bb207d90b5e | 9e6564ecbe82a89bb61b0edaa1ece34508d118eb | /tests/client_tests/test_client_callback.py | f7ae5d00c854711b7d65393eaf782f48e74e1473 | [
"MIT"
] | permissive | richmahn/tx-manager | 4cae2d68b31a7da1af50ab4da76fd86ebacf081a | a01f8c425c7f9634dea00e331fac7e20595f3aa5 | refs/heads/do-not-use | 2020-12-25T17:45:56.395142 | 2017-07-06T17:20:35 | 2017-07-06T17:20:35 | 68,339,644 | 0 | 0 | null | 2016-09-15T23:34:19 | 2016-09-15T23:34:19 | null | UTF-8 | Python | false | false | 6,876 | py | from __future__ import absolute_import, unicode_literals, print_function
import json
import tempfile
import os
import shutil
from libraries.aws_tools.s3_handler import S3Handler
from libraries.general_tools import file_utils
from mock import patch
from unittest import TestCase
from libraries.client.client_callback import ClientCallback
from moto import mock_s3
@mock_s3
class TestClientCallback(TestCase):
    """Unit tests for ClientCallback.process_callback().

    S3 traffic is faked via moto's @mock_s3 plus stubbed handler methods,
    and file downloads are intercepted through a patched download_file,
    so the tests run without network access.
    """

    base_temp_dir = os.path.join(tempfile.gettempdir(), 'tx-manager')
    resources_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'resources')
    source_zip = ''
    build_log_json = ''
    project_json = ''
    transfered_files = []  # for keeping track of file transfers to cdn

    def setUp(self):
        """Create a fresh temp working dir and reset the transfer log."""
        try:
            os.makedirs(TestClientCallback.base_temp_dir)
        except OSError:
            # Directory already exists.  (This used to be a bare
            # ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt.)
            pass
        self.temp_dir = tempfile.mkdtemp(dir=TestClientCallback.base_temp_dir, prefix='callbackTest_')
        TestClientCallback.transfered_files = []

    def tearDown(self):
        """Remove everything created under the shared temp dir."""
        shutil.rmtree(TestClientCallback.base_temp_dir, ignore_errors=True)

    @patch('libraries.client.client_callback.download_file')
    def test_clientCallbackSimpleJob(self, mock_download_file):
        """A single-part job callback processes successfully."""
        # given
        self.source_zip = os.path.join(self.resources_dir, "raw_sources/en-ulb.zip")
        identifier = 'tx-manager-test-data/en-ulb/22f3d09f7a'
        mock_ccb = self.mockClientCallback(identifier, mock_download_file)
        # when
        results = mock_ccb.process_callback()
        # then
        self.assertIsNotNone(results)

    @patch('libraries.client.client_callback.download_file')
    def test_clientCallbackMultipleJobPartial(self, mock_download_file):
        """A multi-part job with parts still outstanding is handled."""
        # given
        self.source_zip = os.path.join(self.resources_dir, "raw_sources/en-ulb.zip")
        identifier = 'tx-manager-test-data/en-ulb/22f3d09f7a/2/1/01-GEN.usfm'
        self.generate_parts_completed(1, 2)
        mock_ccb = self.mockClientCallback(identifier, mock_download_file)
        # when
        results = mock_ccb.process_callback()
        # then
        self.assertIsNotNone(results)

    @patch('libraries.client.client_callback.download_file')
    def test_clientCallbackMultipleJobComplete(self, mock_download_file):
        """A multi-part job whose parts are all finished is handled."""
        # given
        self.source_zip = os.path.join(self.resources_dir, "raw_sources/en-ulb.zip")
        identifier = 'tx-manager-test-data/en-ulb/22f3d09f7a/2/0/01-GEN.usfm'
        self.generate_parts_completed(0, 2)
        mock_ccb = self.mockClientCallback(identifier, mock_download_file)
        # when
        results = mock_ccb.process_callback()
        # then
        self.assertIsNotNone(results)

    @patch('libraries.client.client_callback.download_file')
    def test_clientCallbackMultipleJobCompleteError(self, mock_download_file):
        """A completed multi-part job that reported a conversion error is handled."""
        # given
        self.source_zip = os.path.join(self.resources_dir, "raw_sources/en-ulb.zip")
        identifier = 'tx-manager-test-data/en-ulb/22f3d09f7a/2/0/01-GEN.usfm'
        self.generate_parts_completed(0, 2)
        mock_ccb = self.mockClientCallback(identifier, mock_download_file, 'conversion failed')
        # when
        results = mock_ccb.process_callback()
        # then
        self.assertIsNotNone(results)

    @patch('libraries.client.client_callback.download_file')
    def test_clientCallbackMultipleNoJobsComplete(self, mock_download_file):
        """A multi-part job with no finished parts is handled."""
        # given
        self.source_zip = os.path.join(self.resources_dir, "raw_sources/en-ulb.zip")
        identifier = 'tx-manager-test-data/en-ulb/22f3d09f7a/2/0/01-GEN.usfm'
        self.generate_parts_completed(0, 0)
        mock_ccb = self.mockClientCallback(identifier, mock_download_file)
        # when
        results = mock_ccb.process_callback()
        # then
        self.assertIsNotNone(results)

    #
    # helpers
    #

    def mockClientCallback(self, identifier, mock_download_file, error=None):
        """Build a ClientCallback wired to stubbed CDN/download helpers.

        :param identifier: job identifier, optionally with part information
        :param mock_download_file: the patched download_file mock
        :param error: optional error message to record in the build log
        """
        mock_download_file.side_effect = self.mock_download_file
        self.build_log_json = {
            'dummy_data': 'stuff',
            'commit_id': '123456ff',
            'created_at': '2017-05-22T13:39:15Z',
            'started_at': '2017-05-22T13:39:16Z',
            'repo_owner': 'repo_owner1',
            'repo_name': 'repo_name2',
            'resource_type': 'resource_type3'
        }
        if error:
            self.build_log_json['errors'] = [error]
        self.build_log_json = json.dumps(self.build_log_json)
        self.project_json = '{}'

        # Renamed from ``vars`` so the builtin of that name is not shadowed.
        callback_args = {
            'job_data': {
                'created_at': '2017-05-22T13:39:15Z',
                'identifier': ('%s' % identifier),
                'output': 'https://test-cdn.door43.org/tx/job/6864ae1b91195f261ba5cda62d58d5ad9333f3131c787bb68f20c27adcc85cad.zip',
                'ended_at': '2017-05-22T13:39:17Z',
                'started_at': '2017-05-22T13:39:16Z',
                'status': 'started',
                'success': 'success'
            },
            'gogs_url': 'https://git.example.com',
            'cdn_bucket': 'cdn_test_bucket'
        }
        ccb = ClientCallback(**callback_args)
        ccb.cdn_handler = S3Handler("test_cdn")
        ccb.cdn_handler.create_bucket()
        ccb.cdn_handler.get_objects = self.mock_cdn_get_objects
        ccb.cdn_handler.upload_file = self.mock_cdn_upload_file
        ccb.cdn_handler.get_json = self.mock_cdn_get_json
        return ccb

    def mock_download_file(self, url, target):
        """Fake download_file: serve the fixture zip or canned JSON payloads."""
        file_name = os.path.basename(url)
        if '.zip' in file_name:
            shutil.copyfile(self.source_zip, target)
        elif file_name == 'build_log.json':
            file_utils.write_file(target, self.build_log_json)
        elif file_name == 'project.json':
            file_utils.write_file(target, self.project_json)

    def mock_cdn_upload_file(self, project_file, s3_key, cache_time=600):
        """Record an upload instead of touching S3."""
        TestClientCallback.transfered_files.append({'type': 'upload', 'file': project_file,
                                                    'key': s3_key})
        return

    def mock_cdn_get_json(self, s3_key):
        """Record a download and return the canned JSON for the key."""
        TestClientCallback.transfered_files.append({'type': 'download', 'file': 'json',
                                                    'key': s3_key})
        if 'build_log.json' in s3_key:
            return json.loads(self.build_log_json)
        elif 'project.json' in s3_key:
            return json.loads(self.project_json)
        return ''

    def generate_parts_completed(self, start, end):
        """Populate the fake CDN listing with '<n>.finished' part markers."""
        TestClientCallback.parts = []
        for i in range(start, end):
            part = Part("{0}.finished".format(i))
            TestClientCallback.parts.append(part)
        return TestClientCallback.parts

    def mock_cdn_get_objects(self, prefix=None, suffix=None):
        """Return the fake CDN object listing built by generate_parts_completed."""
        return TestClientCallback.parts
class Part(object):
    """Stand-in for one S3 listing entry; only the object key matters."""

    def __init__(self, key):
        """Remember the S3 key this fake object represents."""
        self.key = key
| [
"richmahn@users.noreply.github.com"
] | richmahn@users.noreply.github.com |
3db9fbe581392cbb9ce6e18651b9b6cefe80e988 | f958555fa2ff0dbcd4d548436e581a476a416ab8 | /Day3-1.py | 5ca0b6a24815c9c6fd9795273da4595f99721a75 | [] | no_license | tony940411/python200805 | c750549f4d476637ef8a661f357d9040edaff225 | aabef929cb1cad4fa4601e63abb08018ad33ab61 | refs/heads/master | 2022-11-29T03:10:23.626631 | 2020-08-05T08:41:04 | 2020-08-05T08:41:04 | 285,170,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 195 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Aug 5 09:27:49 2020
@author: user
"""
# Print the 1..9 multiplication table, one product per line
# (e.g. "1 * 1 = 1").
for left in range(1, 10):
    for right in range(1, 10):
        print(left, '*', right, '=', left * right)
| [
"noreply@github.com"
] | tony940411.noreply@github.com |
f442f7588502633da7efa36fefb3ef833092ca60 | 50b5281dc2daa2b50fc6591cb64fc4c7db2c4ffc | /SentimentAnalysis/plotting.py | fb56314599ef9b8a73bd5777514d41e58213e3b3 | [] | no_license | vaibhavsingh007/DMTM | 5c01d8c28adb5122bb9430e658313bdcb0b38e3e | 81efce65aa134abda4f1a4e04650bdd74717fab8 | refs/heads/master | 2021-04-29T11:58:21.293755 | 2018-05-13T17:43:25 | 2018-05-13T17:43:25 | 121,720,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,667 | py | import matplotlib.pyplot as plt
def plot_avg_p_r_curves(precision, recall, average_precision):
    """Draw the micro-averaged precision-recall curve on a new figure.

    ``precision``, ``recall`` and ``average_precision`` are dicts that
    carry the special key 'micro' (plus per-class entries, unused here).
    """
    micro_recall = recall["micro"]
    micro_precision = precision["micro"]
    plt.figure()
    plt.step(micro_recall, micro_precision, color='b', alpha=0.2, where='post')
    plt.fill_between(micro_recall, micro_precision, step='post', alpha=0.2, color='b')
    plt.xlabel('Recall')
    plt.ylabel('Precision')
    plt.ylim([0.0, 1.05])
    plt.xlim([0.0, 1.0])
    plt.title('Average precision score, micro-averaged over all classes: AP={0:0.2f}'
              .format(average_precision["micro"]))
def plot_per_class_p_r_curves(precision, recall, average_precision, classes):
    """Plot the per-class precision-recall curves plus the micro-average.

    ``precision``/``recall``/``average_precision`` are dicts keyed by class
    index plus the special key 'micro'; ``classes`` supplies the display
    name for each index.
    """
    from itertools import cycle

    # A repeating palette so any number of classes gets a colour.
    palette = cycle(['navy', 'turquoise', 'darkorange', 'cornflowerblue', 'teal'])

    plt.figure(figsize=(7, 8))
    curve_handles = []
    curve_labels = []

    handle, = plt.plot(recall["micro"], precision["micro"], color='gold', lw=2)
    curve_handles.append(handle)
    curve_labels.append(
        'micro-average Precision-recall (area = {0:0.2f})'.format(
            average_precision["micro"]))

    for index, colour in zip(range(len(classes)), palette):
        handle, = plt.plot(recall[index], precision[index], color=colour, lw=2)
        curve_handles.append(handle)
        curve_labels.append(
            'Precision-recall for class {0} (area = {1:0.2f})'.format(
                classes[index], average_precision[index]))

    # Leave room under the axes for the (long) legend.
    plt.gcf().subplots_adjust(bottom=0.25)
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('Recall')
    plt.ylabel('Precision')
    plt.title('Extension of Precision-Recall curve to multi-class')
    plt.legend(curve_handles, curve_labels, loc=(0, -.38), prop=dict(size=14))
    plt.show()
| [
"vaibhav.singh21@yahoo.co.in"
] | vaibhav.singh21@yahoo.co.in |
e962467186f30fec64065c41550c2d274278e0de | 5732aa2a46c3793dd512ea0585a14d23afbabbfd | /fake_channel_app/migrations/0002_contact_responseset.py | f9f5e7c00261f4610bc56415731a4dcc9e44df1f | [] | no_license | ihor-palii/fake-channel | 4fad13eae1218e65a6e9c3d575f5df0739bb6953 | 988fd84002a372c61e4e14d7c1ceafdc968acc8c | refs/heads/main | 2023-07-08T18:43:39.813437 | 2021-08-10T16:20:29 | 2021-08-10T16:20:29 | 303,385,758 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 889 | py | # Generated by Django 3.1.2 on 2020-10-12 11:56
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django (makemigrations); creates the Contact and
    # ResponseSet tables.  Do not hand-edit an applied migration -- add a
    # new migration instead.

    dependencies = [
        ('fake_channel_app', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Contact',
            fields=[
                # ``number`` doubles as the primary key.
                ('number', models.CharField(max_length=100, primary_key=True, serialize=False)),
                ('requests_count', models.PositiveIntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='ResponseSet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('number_of_response', models.PositiveIntegerField(default=0)),
                ('text_of_response', models.TextField(max_length=256)),
            ],
        ),
    ]
| [
"ithor@communityconnectlabs.com"
] | ithor@communityconnectlabs.com |
a1d72f52c959e13a6eb7ba685ecd9da2c752d9fd | 1904cd3ef48f37810d220373e735e3d8e64a1058 | /python/avi/sdk/samples/autoscale/aws_samplescaleout.py | 6b1321e457b4ceb86ef200531cffe609064589d9 | [
"Apache-2.0"
] | permissive | avinetworks/sdk | 5d4fff95376737505bece591eed23235b06de9ef | 2998d85d5e7604b4e3223972d43899cd34b44a2d | refs/heads/master | 2022-04-28T08:11:22.394520 | 2021-04-01T16:51:28 | 2021-04-01T16:51:28 | 353,759,395 | 7 | 7 | Apache-2.0 | 2021-05-25T03:39:57 | 2021-04-01T16:21:11 | Java | UTF-8 | Python | false | false | 9,423 | py | #!/usr/bin/python
'''
Created on Apr 15, 2015
@author: Gaurav Rastogi
Avi Networks Inc.
It implements the scaleout and scalein hooks used to build scaleout and scalein workflows.
Usage:
Step 1: Create Alert with filter string to match on event SERVER_AUTOSCALE_IN or SERVER_AUTOSCALE_OUT as:
filter_string: "filter=eq(event_id,SERVER_AUTOSCALE_IN)"
filter_string: "filter=eq(event_id,SERVER_AUTOSCALE_OUT)"
Step 2: Register the scaleout and scalein hooks as alertactionscript
#action_script for scalein
#!/usr/bin/python
import sys
from avi.sdk.samples.autoscale.aws_samplescaleout import scalein
aws_setting = {
'ec2_region': 'us-west-2',
'tenant': 'Demo',
'aws_access_key_id': 'xxxxx',
'aws_secret_access_key': 'xxxxx',
'image_id': 'ami-xxxxx',
'security_group_ids': ['sg-xxxxx'],
'subnet_id': 'subnet-xxxxx',
'tag': 'avidemo',
'key_name': None
}
scalein(aws_setting, *sys.argv)
#action_script for scaleout
#!/usr/bin/python
import sys
from avi.sdk.samples.autoscale.aws_samplescaleout import scaleout
aws_setting = {
'ec2_region': 'us-west-2',
'tenant': 'Demo',
'aws_access_key_id': 'xxxxx',
'aws_secret_access_key': 'xxxxx',
'image_id': 'ami-xxxxx',
'security_group_ids': ['sg-xxxxx'],
'subnet_id': 'subnet-xxxxx',
'tag': 'avidemo',
'key_name': None
}
scaleout(aws_setting, *sys.argv)
Step 3: Monitor the output of the script as
tail -F /home/<admin_user>/<AlertName>-<pool_name>-Log
Eg. tail -F /home/admin/SERVER_AUTOSCALE_IN-p1-Log
'''
import sys
import json
from avi.sdk.avi_api import ApiSession
import time
import boto.ec2
from avi.sdk.samples.autoscale.samplescaleout import scaleout_params, \
autoscale_dump
import os
from collections import namedtuple
# Record describing a newly created EC2 instance (returned by
# create_aws_instance).
AviInstanceInfo = namedtuple(
    'AviInstanceInfo', ['instance_id', 'ip_address', 'hostname'])
def getAviApiSession(tenant='admin'):
    """Open an ApiSession against the local Avi controller.

    Credentials are taken from the API_TOKEN and USER environment
    variables.

    :param tenant: tenant to scope the session to (default 'admin').
    """
    return ApiSession.get_session(
        "localhost",
        os.environ.get('USER'),
        token=os.environ.get('API_TOKEN'),
        tenant=tenant)
def create_aws_connection(aws_settings):
    """
    Creates an AWS EC2 connection.

    :param aws_settings: dictionary of aws settings keys [aws_access_key_id,
        aws_secret_access_key, ec2_region, security_group_ids, instance_type,
        image_id]
    """
    aws_access_key_id = aws_settings['aws_access_key_id']
    aws_secret_access_key = aws_settings['aws_secret_access_key']
    ec2_region = aws_settings.get('ec2_region', 'us-west-2')
    # SECURITY FIX: never echo the secret access key -- stdout ends up in
    # logs.  Only the (non-secret) access key id and region are printed.
    print('using: ', aws_access_key_id, ec2_region)
    conn = boto.ec2.connect_to_region(
        ec2_region, aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key)
    print('connection obj', conn)
    return conn
def create_aws_instance(aws_settings):
    """
    Create an AWS instance, optionally with a public IP address.

    :param aws_settings: dictionary of aws settings keys [aws_access_key_id,
        aws_secret_access_key, ec2_region, security_group_ids, instance_type,
        image_id, subnet_id, associate_public_ip_address, key_name, tag]
    :returns: AviInstanceInfo(instance_id, ip_address, hostname)
    :raises Exception: if no instance could be created, or the created
        instance never received an IP address (it is cleaned up first).
    """
    ami_id = aws_settings['image_id']
    security_groups = aws_settings['security_group_ids']
    instance_type = aws_settings.get('instance_type', 't2.micro')
    conn = create_aws_connection(aws_settings)
    pub_ip = aws_settings.get('associate_public_ip_address', False)
    key_name = aws_settings.get('key_name', None)
    if pub_ip:
        # A public IP can only be requested via an explicit network
        # interface specification.
        interface = boto.ec2.networkinterface.NetworkInterfaceSpecification(
            groups=security_groups, associate_public_ip_address=True)
        interfaces = \
            boto.ec2.networkinterface.NetworkInterfaceCollection(interface)
        reservations = conn.run_instances(
            ami_id, key_name=key_name,
            instance_type=instance_type, network_interfaces=interfaces)
    else:
        reservations = conn.run_instances(ami_id, key_name=key_name,
                                          instance_type=instance_type,
                                          subnet_id=aws_settings['subnet_id'])
    if not reservations.instances:
        print('Did not get AMI')
        # BUG FIX: the %-format arguments must be a tuple.  The original
        # ``'... %s %s' % ami_id, instance_type`` bound % to ami_id alone
        # and raised TypeError instead of the intended message.
        raise Exception('Instance creation failed AMI %s %s' %
                        (ami_id, instance_type))
    instance = reservations.instances[0]
    # Poll until the instance reaches the 'running' state.
    # NOTE: 25 iterations x 10s of sleeps is ~4 minutes, not the "2mins"
    # the original comment claimed.
    rc = ''
    for _ in range(25):
        time.sleep(5)
        rc = instance.update()
        if rc == 'running':
            break
        time.sleep(5)
    if rc != 'running':
        print('instance', instance.id, ' is still not running', rc)
    tag = '-'.join([aws_settings.get('tag', 'avidemo'),
                    instance.id])
    instance.add_tag("Name", tag)
    if pub_ip:
        result = AviInstanceInfo(instance_id=instance.id,
                                 ip_address=instance.ip_address,
                                 hostname=tag)
    else:
        result = AviInstanceInfo(instance_id=instance.id,
                                 ip_address=instance.private_ip_address,
                                 hostname=tag)
    print('created instance', result)
    if not result.ip_address:
        # No IP was ever assigned: clean up the orphan instance and fail.
        instance_ids = [instance.id]
        delete_aws_instance(aws_settings, instance_ids)
        raise Exception('instance %s rc %s did not get ip %s cleaning it up' %
                        (result.instance_id, rc, instance))
    return result
def delete_aws_instance(aws_settings, instance_ids):
    """Stop and then terminate the given EC2 instances.

    :param aws_settings: connection settings dictionary with keys
        [aws_access_key_id, aws_secret_access_key, ec2_region,
        security_group_ids, instance_type, image_id]
    :param instance_ids: list of EC2 instance ids to delete.  These are
        typically stored in the external uuid of the server.
    """
    print('deleting instances ', instance_ids)
    connection = create_aws_connection(aws_settings)
    stop_rc = connection.stop_instances(instance_ids=instance_ids)
    print('stopping instances ', instance_ids, stop_rc)
    terminate_rc = connection.terminate_instances(instance_ids=instance_ids)
    print('terminating instances ', instance_ids, terminate_rc)
def scaleout(aws_settings, *args):
    """
    1. Creates an instance in AWS
    2. Registers that instance as a Pool Member
    :param aws_settings: dictionary of aws settings keys [aws_access_key_id,
    aws_secret_access_key, ec2_region, security_group_ids, instance_type,
    image_id]
    :param args: The args passed down as part of the alert.
        args[1] is expected to be the alert payload as a JSON string.
    """
    # print all the args passed down
    tenant = aws_settings.get('tenant', 'admin')
    api = getAviApiSession(tenant)
    autoscale_dump(*args)
    alert_info = json.loads(args[1])
    # Perform actual scaleout
    pool_name, pool_uuid, pool_obj, num_scaleout = \
        scaleout_params('scaleout', alert_info, api=api, tenant=tenant)
    # create AWS instance using these two ids.
    print(pool_name, 'scaleout', num_scaleout)
    # AviInstanceInfo namedtuple unpacks into (instance_id, ip, hostname).
    insid, ip_addr, hostname = create_aws_instance(aws_settings)
    new_server = {
        'ip': {'addr': ip_addr, 'type': 'V4'},
        'port': 0,
        'hostname': hostname,
        'external_uuid': insid,
        'vm_uuid': insid,
    }
    # add new server to the pool
    pool_obj['servers'].append(new_server)
    # call controller API to update the pool
    print('new pool obj', pool_obj)
    # NOTE(review): this opens a *second* session with the default 'admin'
    # tenant, discarding the tenant-scoped session above -- confirm this
    # is intentional.
    api = getAviApiSession()
    resp = api.put('pool/%s' % pool_uuid, tenant=tenant,
                   data=json.dumps(pool_obj))
    print('updated pool', pool_obj['name'], resp.status_code)
def scalein(aws_settings, *args):
    """
    Deletes an instance from AWS and removes it from the Pool
    :param aws_settings: dictionary of aws settings keys [aws_access_key_id,
    aws_secret_access_key, ec2_region, security_group_ids, instance_type,
    image_id]
    :param args: The args passed down as part of the alert.
        args[1] is expected to be the alert payload as a JSON string.
    """
    tenant = aws_settings.get('tenant', 'admin')
    api = getAviApiSession(tenant)
    autoscale_dump(*args)
    alert_info = json.loads(args[1])
    # Perform actual scalein (scaleout_params parses the alert for both
    # directions).
    pool_name, pool_uuid, pool_obj, num_autoscale = \
        scaleout_params('scalein', alert_info, api=api, tenant=tenant)
    print((pool_name, ':', pool_uuid, ' num_scaleout', num_autoscale))
    # Always scale in the most recently added server.
    scalein_server = pool_obj['servers'][-1]
    try:
        instance_ids = [scalein_server['external_uuid']]
    except KeyError:
        # Fall back to parsing the EC2 id out of the vm_ref URL, e.g.
        # https://10.130.129.34/api/vimgrvmruntime/i-08ddf0d2
        vm_ref = scalein_server['vm_ref']
        vm_uuid = vm_ref.split('/api/vimgrvmruntime/')[1].split('#')[0]
        instance_ids = [vm_uuid]
    pool_obj['servers'] = pool_obj['servers'][:-1]
    # call controller API to update the pool
    print('pool %s scalein server %s' % (pool_name, scalein_server))
    # NOTE(review): second session uses the default 'admin' tenant,
    # discarding the tenant-scoped one above -- confirm intentional.
    api = getAviApiSession()
    resp = api.put('pool/%s' % pool_uuid,
                   tenant=tenant, data=json.dumps(pool_obj))
    print('updated pool', pool_obj['name'], resp.status_code)
    # Only delete the EC2 instance once the controller accepted the
    # pool update.
    if resp.status_code in (200, 201, 204):
        print('deleting the instance from the aws - ', instance_ids)
        delete_aws_instance(aws_settings, instance_ids)
if __name__ == '__main__':
    # NOTE(review): scaleout() expects (aws_settings, *alert_args), but
    # sys.argv[0] is the script path, not a settings dict -- confirm how
    # this entry point is meant to be invoked.
    scaleout(*sys.argv)
| [
"grastogi@avinetworks.com"
] | grastogi@avinetworks.com |
103df15e530a3641af992efa603c744e6645ac74 | 73e3a93b6ce524111d57ad72e865e9e573c735f8 | /Project_3_Collaboration_and_Competition/files/agent.py | 744f0989f6de98be618c17475c0c040c21d80471 | [] | no_license | verbeemen/Udacity-Deep-Reinforcement-Learning | ffe5037132bd32e43cd7a4b9ae941aca3732570c | f50a2b5aa97a36b9845554ab1dac771143f53796 | refs/heads/master | 2022-12-30T19:25:25.367314 | 2020-11-01T19:36:20 | 2020-11-01T19:36:20 | 216,380,995 | 0 | 0 | null | 2022-12-27T15:33:49 | 2019-10-20T15:10:35 | HTML | UTF-8 | Python | false | false | 7,388 | py | import numpy as np
import random
import torch
import torch.nn.functional as F
import torch.optim as optim
from files.replayBuffer import ReplayBuffer
from ouNoise import OUNoise
from model_actor import Actor
from model_critic import Critic
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class Agent:
"""Interacts with and learns from the environment."""
def __init__(self, state_size, action_size, random_seed = 1, \
learn_interval = 4, learn_num = 1, lr_actor = 1e-4, lr_critic = 1e-3, \
gamma = 0.99, weight_decay = 0, tau = 0.001, batch_size = 128, buffer_size = 1e5):
"""Initialize an Agents object.
Params
======
state_size (int): dimension of each state
action_size (int): dimension of each action
num_agents (int): number of agents
seed (int): random_seed
"""
self.STATE_SIZE = state_size
self.ACTION_SIZE = action_size
self.seed = random.seed(random_seed)
# hyper static parameters:
self.LEARN_INTERVAL = learn_interval
self.LEARN_NUM = learn_num
self.LR_ACTOR = lr_actor
self.LR_CRITIC = lr_critic
self.GAMMA = gamma
self.WEIGHT_DECAY = weight_decay
self.TAU = tau
self.BATCH_SIZE = batch_size
self.BUFFER_SIZE = buffer_size
# Actor network with target network
self.actor_local = Actor(self.STATE_SIZE, self.ACTION_SIZE, random_seed).to(device)
self.actor_target = Actor(self.STATE_SIZE, self.ACTION_SIZE, random_seed).to(device)
self.actor_optimizer = optim.Adam(self.actor_local.parameters(), lr= self.LR_ACTOR)
# Critic network with target network
self.critic_local = Critic(self.STATE_SIZE, self.ACTION_SIZE, random_seed).to(device)
self.critic_target = Critic(self.STATE_SIZE, self.ACTION_SIZE, random_seed).to(device)
self.critic_optimizer = optim.Adam(self.critic_local.parameters(), lr= self.LR_CRITIC, weight_decay= self.WEIGHT_DECAY)
# Noise process
self.noise = OUNoise(self.ACTION_SIZE, random_seed)
# Replay memory
self.memory = ReplayBuffer(self.ACTION_SIZE, self.BUFFER_SIZE, self.BATCH_SIZE, random_seed)
def reset(self):
self.noise.reset()
def step(self, step, state, action, reward, next_state, done, agent_id):
"""Save experience in replay memory and use random sample from buffer to learn."""
self.memory.add(state, action, reward, next_state, done)
# Learn every X frames | intervals
if step % self.LEARN_INTERVAL == 0:
# Learn, if we have enough samples to learn
if len(self.memory) > self.BATCH_SIZE:
# amount of times that we want to learn
# is not the same as batch size
for _ in range(self.LEARN_NUM):
experiences = self.memory.sample()
self.learn(experiences, self.GAMMA, agent_id)
def action(self, states, add_noise=True):
"""Returns actions for given state as per current policy."""
states = torch.from_numpy(states).float().to(device)
# get out of the training environment
self.actor_local.eval()
with torch.no_grad():
# for each state, predict the next action
actions = self.actor_local(states).cpu().data.numpy()
# enter the training environment
self.actor_local.train()
# add some noise
if add_noise:
actions += self.noise.sample()
# clip the action
return np.clip(actions, -1, 1)
def learn(self, experiences, gamma, agent_id):
"""Update policy and value parameters using given batch of experience tuples.
Q_targets = r + γ * critic_target(next_state, actor_target(next_state))
where:
actor_target(state) -> action
critic_target(state, action) -> Q-value
Params
======
experiences (Tuple[torch.Tensor]): tuple of (s, a, r, s', done) tuples
gamma (float): discount factor
"""
states, actions, rewards, next_states, dones = experiences
# ---------------------------- update critic ---------------------------- #
# Get predicted next-state actions and Q values from target models
actions_next = self.actor_target(next_states)
if agent_id == 0:
actions_next_tuple = (actions_next, actions[:, self.ACTION_SIZE:])
else:
actions_next_tuple = (actions[:, :self.ACTION_SIZE], actions_next)
actions_next = torch.cat(actions_next_tuple, dim=1)
Q_targets_next = self.critic_target(next_states, actions_next)
# Compute Q targets for current states (y_i)
Q_targets = rewards + (gamma * Q_targets_next * (1 - dones))
# Compute critic loss
Q_expected = self.critic_local(states, actions)
critic_loss = F.mse_loss(Q_expected, Q_targets)
# Minimize the loss
self.critic_optimizer.zero_grad()
critic_loss.backward()
self.critic_optimizer.step()
# ---------------------------- update actor ---------------------------- #
# Compute actor loss
actions_pred = self.actor_local(states)
if agent_id == 0:
actions_pred_tuple = (actions_pred, actions[:, self.ACTION_SIZE:])
else:
actions_pred_tuple = (actions[:, :self.ACTION_SIZE], actions_pred)
actions_pred = torch.cat(actions_pred_tuple, dim=1)
actor_loss = -self.critic_local(states, actions_pred).mean()
# Minimize the loss
self.actor_optimizer.zero_grad()
actor_loss.backward()
self.actor_optimizer.step()
# ----------------------- update target networks ----------------------- #
self.soft_update(self.critic_local, self.critic_target, self.TAU)
self.soft_update(self.actor_local, self.actor_target, self.TAU)
def soft_update(self, local_model, target_model, tau):
"""Soft update model parameters.
θ_target = τ*θ_local + (1 - τ)*θ_target
Params
======
local_model: PyTorch model (weights will be copied from)
target_model: PyTorch model (weights will be copied to)
tau (float): interpolation parameter
"""
for target_param, local_param in zip(target_model.parameters(), local_model.parameters()):
target_param.data.copy_(tau * local_param.data + (1.0 - tau) * target_param.data)
# Load and Save data
def save_agent(self, checkpoint_name):
torch.save(self.actor_local.state_dict(), f'./checkpoints/{checkpoint_name}_actor.pth')
torch.save(self.critic_local.state_dict(), f'./checkpoints/{checkpoint_name}_critic.pth')
def load_agent(self, checkpoint_name):
self.actor_local.load_state_dict(torch.load(f'./checkpoints/{checkpoint_name}_actor.pth'))
self.critic_local.load_state_dict(torch.load(f'./checkpoints/{checkpoint_name}_critic.pth'))
| [
"noreply@github.com"
] | verbeemen.noreply@github.com |
466091c8d567dd766be080701fcfc16fa667e45c | 576a6ffef41be7eec4ccc1535781f6b50f720b65 | /app/get_env_app.py | cc58b6790cbe8a3764ab57a30b23d27b8e33c059 | [] | no_license | crouchr/cv19 | 05415cfe6c7e25b2b4eb55e3b4aa6c82699f8a17 | e7657a7fcf7c4a887b3dbbfd87832772bbddd27e | refs/heads/master | 2023-06-16T18:14:21.437661 | 2021-06-28T16:33:05 | 2021-06-28T16:33:05 | 350,139,961 | 0 | 0 | null | 2021-03-29T08:42:36 | 2021-03-21T22:50:50 | Python | UTF-8 | Python | false | false | 206 | py | import os
def get_poll_secs():
    """Return the polling interval.

    Taken from the POLL_SECS environment variable when set (returned as a
    string), otherwise the integer default 300 -- the same cadence as
    polling the OpenWeather API.

    NOTE(review): callers receive str or int depending on the source;
    confirm they coerce as needed.
    """
    return os.environ.get('POLL_SECS', 300)
| [
"richard.crouch100@gmail.com"
] | richard.crouch100@gmail.com |
dd226443065d6f7a1b74ccf3736fb3b9c90418d0 | 6728681b24674eb653447a15ce58509d4566b74d | /pset6/tweets/analizerfinal.py | c1c7a5014a0a171a546095749cea422339e054fc | [] | no_license | Jcgo3003/CS50-Studies | 6f23033e5113e3e604a637243b9329d8deab8947 | 89a6970c06dc5a5f32f11a3705c619f7a02aeaeb | refs/heads/master | 2020-05-14T16:47:52.199916 | 2019-04-17T11:41:28 | 2019-04-17T11:41:28 | 181,865,674 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,244 | py | import nltk
class Analyzer():
    """Implements sentiment analysis."""

    def __init__(self, positives, negatives):
        """Initialize Analyzer.

        :param positives: path to a lexicon file of positive words
        :param negatives: path to a lexicon file of negative words
        In both files, blank lines and lines starting with ';' are
        treated as comments and skipped.
        """
        self.positives = self._load_words(positives)
        self.negatives = self._load_words(negatives)

    @staticmethod
    def _load_words(path):
        """Return the set of words in a lexicon file.

        Skips blank lines and ';' comment lines.  The original code
        duplicated this loop for each file and used open()/close(),
        leaking the handle if an exception occurred mid-read; ``with``
        guarantees closure.
        """
        words = set()
        with open(path, "r") as lexicon:
            for line in lexicon:
                if not line.startswith('\n') and not line.startswith(';'):
                    words.add(line.rstrip('\n'))
        return words

    def analyze(self, text):
        """Analyze text for sentiment, returning its score."""
        score = 0
        # Each positive token adds 1, each negative subtracts 1; unknown
        # tokens contribute nothing (the original's ``score += 0`` branch
        # was a no-op and has been dropped).
        for token in nltk.word_tokenize(text):
            word = token.lower()
            if word in self.positives:
                score += 1
            elif word in self.negatives:
                score -= 1
        return score
| [
"jcgo3003@outlook.com"
] | jcgo3003@outlook.com |
be54ea3ff5b875b693eba7f37503f0dd65d88fda | 505503f0c1703d8b49fabaebcd245ae8619799df | /pymqttvideo/piomxplay.py | a62e804d4bf0a26f0320675fb62ae8abcd5f7184 | [] | no_license | erlendstav/drafts | 333ae15af3496007e22c161b48cdeb8b157acc89 | 60e23070dec5d5201e6a66d99575c3d71df456ba | refs/heads/master | 2021-11-24T08:08:03.614186 | 2021-10-28T21:27:04 | 2021-10-28T21:27:04 | 154,835,167 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,196 | py | import paho.mqtt.client as mqtt
import time
import random
from omxplayer.player import OMXPlayer
from pathlib import Path
# MQTT broker address and this node's client id.
server_address="192.168.1.50"
client_name = "Pi4Video1"

# scare levels (used as MQTT topic suffixes)
LVL_FRIENDLY = "level/friendly"
LVL_NORMAL = "level/normal"
LVL_SCARY = "level/scary"

# Paths for videos
VIDEO_PATH = Path("/home/pi/Videos/")

# videos for levels
videos = {
    LVL_FRIENDLY : ["boo_scare_1.mp4", "boo_scare_2.mp4"],
    LVL_NORMAL : ["spinster_tea_1.mp4", "spinster_tea_2.mp4"],
    LVL_SCARY : ["Twisted Twins_Startle Scare1_Holl_H.mp4", "Diabolic Debutant_Startle Scare_Win_H.mp4"]
}

# Current scare level; mutated by on_message().
scare_level = LVL_FRIENDLY

# NOTE(review): ``Video1`` is not defined anywhere in this file, so this
# line raises NameError at import time -- define it (e.g. from VIDEO_PATH
# and the videos table) before running.
player = OMXPlayer(Video1,dbus_name='org.mpris.MediaPlayer2.anyName')
#player2 = OMXPlayer(Video2,dbus_name='org.mpris.MediaPlayer2.anyName2')
def init_video_player():
    """Placeholder for player initialisation (currently a no-op).

    BUG FIX: the original ``def`` had no body at all, which is a syntax
    error; a no-op body makes the module importable.
    """
    pass


def play_video(video_name):
    """Log the name of the video being played."""
    print("Playing video " + video_name)


def play_video_from_level(level):
    """Pick a random video registered for ``level`` and play it."""
    candidates = videos[level]
    print("Playing video from level " + level)
    play_video(random.choice(candidates))
def on_message(client, userdata, message):
    """MQTT callback: start a video on movement, or switch scare level."""
    global scare_level
    topic = message.topic
    if topic.endswith("movement1"):
        print("Movement detected: starting video " + topic)
        if scare_level == LVL_SCARY:
            # For now, integrate smoke triggering here
            client.publish("garage/smoke", "3")
        play_video_from_level(scare_level)
        return
    # Level-change topics, checked in the same order as the original
    # if/elif chain.
    for level in (LVL_FRIENDLY, LVL_NORMAL, LVL_SCARY):
        if topic.endswith(level):
            scare_level = level
            print("Scare level set to " + level)
            return
    print("Unknown message " + topic)
    return
def setup_mqtt():
    """Create an MQTT client, wire the callback, connect and start its loop."""
    mqtt_client = mqtt.Client(client_name)
    mqtt_client.on_message = on_message
    mqtt_client.connect(server_address)
    mqtt_client.loop_start()
    return mqtt_client
client = setup_mqtt()
client.subscribe("garage/#")
client.publish("test/hello", "Hello from " + client_name)
# Heartbeat loop: announce liveness every 5 seconds.
# NOTE(review): publish() is called with a topic only (no payload) --
# confirm subscribers expect an empty message.
while True:
    time.sleep(5)
    client.publish("status/alive/" + client_name)
# NOTE(review): unreachable -- the loop above never exits.
client.loop_stop()
"erlend.stav@gmail.com"
] | erlend.stav@gmail.com |
71f0a9cb84e03ebdb58ea58e957eb8bad44a7181 | 19bb0960311956cdc966b7c5324419aacdb02de1 | /boutique_ado/settings.py | e625909f404d2eb80166c20709703a824190e963 | [] | no_license | joshhunt1991/boutique_ado_v1 | 18a4a6e48767eef7deadc0ca6a764ce8a26eef96 | 6a5ef34cb13e237ef3e6a79a821ed8b3c41a1a88 | refs/heads/master | 2023-04-08T07:08:13.315387 | 2021-04-13T18:09:34 | 2021-04-13T18:09:34 | 352,735,331 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,872 | py | """
Django settings for boutique_ado project.
Generated by 'django-admin startproject' using Django 3.0.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# Read from the environment, matching how the Stripe secrets below are
# handled; the hard-coded value remains only as a development fallback
# (it was previously committed unconditionally).
SECRET_KEY = os.environ.get(
    'SECRET_KEY', 'p@ei#69*b*zz3u4yie-$()@cy^l(+x9&@6ypx+r0lm(3%_9hr7')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'home',
    'products',
    'bag',
    'checkout',
    'profiles',
    # Other
    'crispy_forms',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'boutique_ado.urls'

CRISPY_TEMPLATE_PACK = 'bootstrap4'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            os.path.join(BASE_DIR, 'templates'),
            os.path.join(BASE_DIR, 'templates', 'allauth'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',  # required by allauth
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'django.template.context_processors.media',
                'bag.contexts.bag_contents',
            ],
            'builtins': [
                'crispy_forms.templatetags.crispy_forms_tags',
                'crispy_forms.templatetags.crispy_forms_field',
            ]
        },
    },
]

MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'

AUTHENTICATION_BACKENDS = (
    # Needed to login by username in Django admin, regardless of `allauth`
    'django.contrib.auth.backends.ModelBackend',

    # `allauth` specific authentication methods, such as login by e-mail
    'allauth.account.auth_backends.AuthenticationBackend',
)

SITE_ID = 1

EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

# allauth account behaviour
ACCOUNT_AUTHENTICATION_METHOD = 'username_email'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
ACCOUNT_SIGNUP_EMAIL_ENTER_TWICE = True
ACCOUNT_USERNAME_MIN_LENGTH = 4
LOGIN_URL = '/accounts/login/'
LOGIN_REDIRECT_URL = '/'

WSGI_APPLICATION = 'boutique_ado.wsgi.application'


# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}


# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/

STATIC_URL = '/static/'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)

MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')

# Stripe
FREE_DELIVERY_THRESHOLD = 50
STANDARD_DELIVERY_PERCENTAGE = 10
STRIPE_CURRENCY = 'usd'
STRIPE_PUBLIC_KEY = os.getenv('STRIPE_PUBLIC_KEY', '')
STRIPE_SECRET_KEY = os.getenv('STRIPE_SECRET_KEY', '')
STRIPE_WH_SECRET = os.getenv('STRIPE_WH_SECRET', '')

DEFAULT_FROM_EMAIL = 'boutiqueado@example.com'
"Joshhunt_1991@hotmail.co.uk"
] | Joshhunt_1991@hotmail.co.uk |
64e2c618b140ed47ae8db4922ea4a51e7fc75108 | fca4f1073b5d6f3068ddd23651a9890be59367fe | /scoreboard.py | 8acb47e350f95243ede732b91607e660af2f575b | [] | no_license | learyjk/snake-game | ea752d315b5c235910837b04e93e7d63483247af | 265a1a3e5fabf6171918a5405c44aa29458f6421 | refs/heads/master | 2023-01-12T04:08:11.221772 | 2020-11-09T19:37:09 | 2020-11-09T19:37:09 | 310,493,810 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,026 | py | from turtle import Turtle
ALIGNMENT = "center"
FONT = ("Courier", 24, "normal")


class Scoreboard(Turtle):
    """Invisible turtle that renders the live score and the saved high score."""

    def __init__(self):
        super().__init__()
        self.score = 0
        # The persisted high score lives in data.txt as a bare integer.
        with open("data.txt") as saved:
            self.high_score = int(saved.read())
        self.color("white")
        self.penup()
        self.goto(0, 270)
        self.update_scoreboard()
        self.hideturtle()

    def update_scoreboard(self):
        """Redraw the score line at the top of the screen."""
        self.clear()
        self.write(
            f"Score: {self.score} High Score: {self.high_score}",
            align=ALIGNMENT,
            font=FONT,
        )

    def reset(self):
        """Persist a freshly beaten high score, then zero the live score."""
        if self.high_score < self.score:
            self.high_score = self.score
            with open("data.txt", mode="w") as saved:
                saved.write(f"{self.high_score}")
        self.score = 0
        self.update_scoreboard()

    def increase_score(self):
        """Bump the live score by one and refresh the display."""
        self.score += 1
        self.update_scoreboard()
| [
"leary.keegan@gmail.com"
] | leary.keegan@gmail.com |
faba0987aed4324e8dd12c43cf14e4551e37d6c3 | e0351b06c6721fe11fb3f6886dd77362366c521e | /com_4.py | e39c160d989acceffbe07be1ac0100a570472cc5 | [] | no_license | sauravshaurya/Home-Automation | 88928ae9c1b0181859fa0cd779f04be692d057f5 | 282c9a22cd1f5625ac4fcadff3c57538034c2622 | refs/heads/master | 2022-07-10T07:42:36.731500 | 2018-06-23T21:41:01 | 2018-06-23T21:41:01 | 224,234,099 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 324 | py | import serial
from datetime import datetime
data = serial.Serial('/dev/ttyACM0',9600)
while True:
while (data.inWaiting()==0):
pass
arduinoString = data.readline()
print(arduinoString)
f = open('log.txt','a')
f.write('{} - {}\n'.format(datetime.now().strftime("%c"),arduinoString))
f.close()
| [
"noreply@github.com"
] | sauravshaurya.noreply@github.com |
b3a16c0fcb3f1cc1e5a2bcabca0c3d000509cfbc | 5ac48b24a04fcbb423dcee269514da57c7a02fb5 | /setup.py | 7cb463d67572d9d29a70c31f62a6c8708dd588df | [
"MIT"
] | permissive | abassel/Flask-DSwagger | c1c06bd3837de27ea5acbf8ed494c1bbeba58351 | b98c8b45ab7d45ecbe5e9d1b7e6d683f98333ae4 | refs/heads/master | 2021-05-10T14:17:09.726222 | 2019-03-16T23:13:59 | 2019-03-16T23:13:59 | 118,510,357 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,599 | py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# python setup.py build
# python setup.py sdist
# python setup.py bdist_egg
from __future__ import absolute_import
from __future__ import print_function
import io
from glob import glob
from os.path import basename
from os.path import dirname
from os.path import join
from os.path import splitext
from setuptools import find_packages
from setuptools import setup
def read(*names, **kwargs):
return io.open(
join(dirname(__file__), *names),
encoding=kwargs.get('encoding', 'utf8')
).read()
setup(
name='Flask-DSwagger',
version='0.0.3',
license='MIT license',
description='Convert Doc String OpenAPI 2.0 in a single endpoint',
author='Alexandre Bassel',
author_email='abassel@gmail.com',
url='https://github.com/abassel/Flask-DSwagger',
packages=find_packages('src'),
package_dir={'': 'src'},
py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
include_package_data=True,
zip_safe=False,
classifiers=[
# complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
# uncomment if you test on these interpreters:
# 'Programming Language :: Python :: Implementation :: IronPython',
# 'Programming Language :: Python :: Implementation :: Jython',
# 'Programming Language :: Python :: Implementation :: Stackless',
# 'Topic :: Utilities',
],
keywords=[
'OpenAPI 2.0', 'flask', 'swagger', 'docstring', 'doc-string',
# eg: 'keyword1', 'keyword2', 'keyword3',
],
install_requires=[
'Flask>=0.12.2',
'PyYAML>=3.12'
# eg: 'aspectlib==1.1.1', 'six>=1.7',
],
# extras_require={
# # eg:
# # 'rst': ['docutils>=0.11'],
# # ':python_version=="2.6"': ['argparse'],
# },
# entry_points={
# 'console_scripts': [
# 'mongosafe = mongosafe.cli:main',
# ]
# },
)
| [
"abassel@gmail.com"
] | abassel@gmail.com |
4ab92065962d53964ce2f930d220837337ee3eac | c318bd15c40063639edc95bb8419f4c0f4a2b54f | /update_s3_configuration.py | e520e1541db8ce977a5e0513f0439b48d7e25a29 | [
"MIT"
] | permissive | cwestleyj/HearthstoneJSON | 716fa1b05782d311a04c16c5917ad6e6ae15749a | ed30c943983a4ee0da3a80562655d5a274faad39 | refs/heads/master | 2021-01-19T10:36:59.554294 | 2017-02-10T16:29:13 | 2017-02-10T16:29:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,954 | py | #!/usr/bin/env python
import sys
import boto3
from pprint import pprint
API_BUCKET = "api.hearthstonejson.com"
ART_BUCKET = "art.hearthstonejson.com"
def update_website_configuration(s3, build, bucket=API_BUCKET):
print("Querying website configuration for %r" % (bucket))
orig_config = s3.get_bucket_website(Bucket=bucket)
pprint(orig_config)
if "ResponseMetadata" in orig_config:
del orig_config["ResponseMetadata"]
config = orig_config.copy()
config["RoutingRules"] = [{
"Condition": {
"KeyPrefixEquals": "v1/latest/"
},
"Redirect": {
"ReplaceKeyPrefixWith": "v1/%i/" % (build),
"HttpRedirectCode": "302",
"Protocol": "https",
},
}]
if config != orig_config:
print("Updating website configuration")
pprint(config)
s3.put_bucket_website(Bucket=bucket, WebsiteConfiguration=config)
else:
print("Website configuration up-to-date")
def update_art_404_redirects(s3, bucket=ART_BUCKET):
orig_config = s3.get_bucket_website(Bucket=bucket)
if "ResponseMetadata" in orig_config:
del orig_config["ResponseMetadata"]
config = orig_config.copy()
prefixes = [
("v1/orig/", "png", "XXX_001"),
("v1/tiles/", "png", "HERO_01"),
("v1/256x/", "jpg", "XXX_001"),
("v1/512x/", "jpg", "XXX_001"),
]
config["RoutingRules"] = []
for prefix, ext, fallback in prefixes:
config["RoutingRules"].append({
"Condition": {
"HttpErrorCodeReturnedEquals": "404",
"KeyPrefixEquals": prefix,
},
"Redirect": {
"ReplaceKeyWith": prefix + "%s.%s" % (fallback, ext),
"HttpRedirectCode": "302",
"Protocol": "https",
}
})
if config != orig_config:
print("Updating 404 redirects")
pprint(config)
s3.put_bucket_website(Bucket=bucket, WebsiteConfiguration=config)
else:
print("404 redirects up-to-date")
def main():
build = int(sys.argv[1])
s3 = boto3.client("s3")
update_website_configuration(s3, build)
update_art_404_redirects(s3)
if __name__ == "__main__":
main()
| [
"jerome@leclan.ch"
] | jerome@leclan.ch |
ef5db8ce40bc075966dac562c543c33ef9f3ee02 | ca1cc0f1cc7217900e2fc33b6e1f001cd9ba565d | /mainapp/migrations/0018_auto_20171225_1806.py | 3b022536df5077ac2005815b7f03567c8515d047 | [] | no_license | ccyjoshua/amazoncb | a592c45fc3bac2a5af54cec0417fa576fff48d41 | 7f6f8911605d163f92d25126a2180ecef3bec72f | refs/heads/master | 2021-09-06T17:42:42.779395 | 2018-02-09T06:41:22 | 2018-02-09T06:41:22 | 113,111,305 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 580 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-12-26 02:06
from __future__ import unicode_literals
from django.db import migrations, models
import mainapp.validators
class Migration(migrations.Migration):
dependencies = [
('mainapp', '0017_auto_20171225_1749'),
]
operations = [
migrations.AlterField(
model_name='product',
name='amazon_link',
field=models.URLField(blank=True, max_length=500, validators=[mainapp.validators.AmazonURLValidator(message='Domain must be Amazon')]),
),
]
| [
"ccyjoshua@gmail.com"
] | ccyjoshua@gmail.com |
02f372267f82ab0fb8dfcbd4c69bebafc4f80903 | 5a8bba80e4d0835235a3a4494db87808e7c5d0f8 | /setup.py | 54e99bab9fc25e73164eb473c01c808b6240e822 | [
"MIT"
] | permissive | JLyons1985/SmartMirrorServer | 3d562d42143c5c3d2269ea22e7c9338838534065 | 417540280dee57b7e39004e6891f13835aa15ced | refs/heads/master | 2021-01-12T03:36:37.612847 | 2017-01-06T20:35:20 | 2017-01-06T20:35:20 | 78,236,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 753 | py | #!/usr/bin/python
from distutils.core import setup
setup(name='forismatic',
version='1.0',
description='PyForismatic package',
long_description = "Getting quotes from http://forismatic.com using API",
author='Andrey Basalyha',
author_email='abasalyha@gmail.com',
url='http://ab-developer.tumblr.com/',
packages=['forismatic'],
scripts=['example.py'],
classifiers=(
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
),
license="GPL-2"
)
| [
"josh@lyonsdensoftware.com"
] | josh@lyonsdensoftware.com |
731800828469aa1b78563d3dae74e8f7ed296abf | 2bdedcda705f6dcf45a1e9a090377f892bcb58bb | /src/main/output/point/president/way/kerberos_place_part.py | a02fe413ce8add5d86a79fbfa0f688adb60943f7 | [] | no_license | matkosoric/GenericNameTesting | 860a22af1098dda9ea9e24a1fc681bb728aa2d69 | 03f4a38229c28bc6d83258e5a84fce4b189d5f00 | refs/heads/master | 2021-01-08T22:35:20.022350 | 2020-02-21T11:28:21 | 2020-02-21T11:28:21 | 242,123,053 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,105 | py | using System;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.Translator.API;
namespace CSharp_TranslateSample
{
public class Program
{
public static string traducida;
public static void Main(string[] args)
{
//TranslateAsync().Wait();
//Console.ReadKey();
}
public static void iniciar() {
TranslateAsync().Wait();
Console.ReadKey();
}
/// Demonstrates getting an access token and using the token to translate.
private static async Task TranslateAsync()
{
var translatorService = new TranslatorService.LanguageServiceClient();
var authTokenSource = new AzureAuthToken(SubscriptionKey);
var token = string.Empty;
try
{
token = await authTokenSource.GetAccessTokenAsync();
}
catch (HttpRequestException)
{
switch (authTokenSource.RequestStatusCode)
{
case HttpStatusCode.Unauthorized:
Console.WriteLine("Request to token service is not authorized (401). Check that the Azure subscription key is valid.");
break;
case HttpStatusCode.Forbidden:
Console.WriteLine("Request to token service is not authorized (403). For accounts in the free-tier, check that the account quota is not exceeded.");
break;
}
throw;
}
traducida = translatorService.Translate(token, "Hello World", "en", "fr", "text/plain", "general", string.Empty);
private const string SubscriptionKey = "a82656b8f3060cebdca7483b1bf557d2"; //Enter here the Key from your Microsoft Translator Text subscription on http://portal.azure.com
//Console.WriteLine("Translated to French: {0}", translatorService.Translate(token, "Hello World", "en", "fr", "text/plain", "general", string.Empty));
}
}
}
| [
"soric.matko@gmail.com"
] | soric.matko@gmail.com |
f5693ba9d5f5661315af0ff316348508bfffa665 | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog/optimized_42606.py | 4c7ba5374406b23f4f9662574ff7bdd96180c47f | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,833 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((480.133, 574.617, 353.855), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_0" not in marker_sets:
s=new_marker_set('Cog2_0')
marker_sets["Cog2_0"]=s
s= marker_sets["Cog2_0"]
mark=s.place_marker((472.89, 506.186, 366.499), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_1" not in marker_sets:
s=new_marker_set('Cog2_1')
marker_sets["Cog2_1"]=s
s= marker_sets["Cog2_1"]
mark=s.place_marker((461.36, 426.318, 371.916), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((463.5, 488.736, 247.655), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((445.537, 242.226, 430.618), (0.89, 0.1, 0.1), 18.4716)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((466.957, 531.167, 363.335), (1, 1, 0), 18.4716)
if "Cog3_0" not in marker_sets:
s=new_marker_set('Cog3_0')
marker_sets["Cog3_0"]=s
s= marker_sets["Cog3_0"]
mark=s.place_marker((466.273, 532.349, 363.508), (1, 1, 0.2), 17.1475)
if "Cog3_1" not in marker_sets:
s=new_marker_set('Cog3_1')
marker_sets["Cog3_1"]=s
s= marker_sets["Cog3_1"]
mark=s.place_marker((440.013, 528.239, 372.237), (1, 1, 0.2), 17.1475)
if "Cog3_2" not in marker_sets:
s=new_marker_set('Cog3_2')
marker_sets["Cog3_2"]=s
s= marker_sets["Cog3_2"]
mark=s.place_marker((425.324, 537.967, 393.901), (1, 1, 0.2), 17.1475)
if "Cog3_3" not in marker_sets:
s=new_marker_set('Cog3_3')
marker_sets["Cog3_3"]=s
s= marker_sets["Cog3_3"]
mark=s.place_marker((399.261, 527.122, 392.677), (1, 1, 0.2), 17.1475)
if "Cog3_4" not in marker_sets:
s=new_marker_set('Cog3_4')
marker_sets["Cog3_4"]=s
s= marker_sets["Cog3_4"]
mark=s.place_marker((377.08, 531.438, 376.089), (1, 1, 0.2), 17.1475)
if "Cog3_5" not in marker_sets:
s=new_marker_set('Cog3_5')
marker_sets["Cog3_5"]=s
s= marker_sets["Cog3_5"]
mark=s.place_marker((377.278, 555.174, 361.198), (1, 1, 0.2), 17.1475)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((483.905, 553.219, 369.612), (1, 1, 0.4), 18.4716)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((271.035, 559.086, 359.439), (1, 1, 0.4), 18.4716)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((314.991, 374.058, 427.888), (0, 0, 0.8), 18.4716)
if "Cog4_0" not in marker_sets:
s=new_marker_set('Cog4_0')
marker_sets["Cog4_0"]=s
s= marker_sets["Cog4_0"]
mark=s.place_marker((314.991, 374.058, 427.888), (0, 0, 0.8), 17.1475)
if "Cog4_1" not in marker_sets:
s=new_marker_set('Cog4_1')
marker_sets["Cog4_1"]=s
s= marker_sets["Cog4_1"]
mark=s.place_marker((341.953, 381.36, 436.356), (0, 0, 0.8), 17.1475)
if "Cog4_2" not in marker_sets:
s=new_marker_set('Cog4_2')
marker_sets["Cog4_2"]=s
s= marker_sets["Cog4_2"]
mark=s.place_marker((370.147, 390.326, 439.651), (0, 0, 0.8), 17.1475)
if "Cog4_3" not in marker_sets:
s=new_marker_set('Cog4_3')
marker_sets["Cog4_3"]=s
s= marker_sets["Cog4_3"]
mark=s.place_marker((394.479, 406.023, 436.611), (0, 0, 0.8), 17.1475)
if "Cog4_4" not in marker_sets:
s=new_marker_set('Cog4_4')
marker_sets["Cog4_4"]=s
s= marker_sets["Cog4_4"]
mark=s.place_marker((411.069, 429.236, 429.918), (0, 0, 0.8), 17.1475)
if "Cog4_5" not in marker_sets:
s=new_marker_set('Cog4_5')
marker_sets["Cog4_5"]=s
s= marker_sets["Cog4_5"]
mark=s.place_marker((424.036, 453.57, 419.548), (0, 0, 0.8), 17.1475)
if "Cog4_6" not in marker_sets:
s=new_marker_set('Cog4_6')
marker_sets["Cog4_6"]=s
s= marker_sets["Cog4_6"]
mark=s.place_marker((435.58, 476.678, 405.281), (0, 0, 0.8), 17.1475)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((189.494, 422.769, 348.469), (0, 0, 0.8), 18.4716)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((682.681, 550.891, 440.684), (0, 0, 0.8), 18.4716)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((457.67, 451.265, 412.51), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_0" not in marker_sets:
s=new_marker_set('Cog5_0')
marker_sets["Cog5_0"]=s
s= marker_sets["Cog5_0"]
mark=s.place_marker((457.67, 451.265, 412.51), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_1" not in marker_sets:
s=new_marker_set('Cog5_1')
marker_sets["Cog5_1"]=s
s= marker_sets["Cog5_1"]
mark=s.place_marker((477.136, 449.251, 391.197), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_2" not in marker_sets:
s=new_marker_set('Cog5_2')
marker_sets["Cog5_2"]=s
s= marker_sets["Cog5_2"]
mark=s.place_marker((489.693, 440.589, 366.536), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_3" not in marker_sets:
s=new_marker_set('Cog5_3')
marker_sets["Cog5_3"]=s
s= marker_sets["Cog5_3"]
mark=s.place_marker((475.566, 433.51, 341.909), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((497.788, 549.921, 303.974), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((447.684, 314.825, 371.61), (0.3, 0.3, 0.3), 18.4716)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((481.892, 514.219, 340.56), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_0" not in marker_sets:
s=new_marker_set('Cog6_0')
marker_sets["Cog6_0"]=s
s= marker_sets["Cog6_0"]
mark=s.place_marker((482.006, 514.319, 340.367), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_1" not in marker_sets:
s=new_marker_set('Cog6_1')
marker_sets["Cog6_1"]=s
s= marker_sets["Cog6_1"]
mark=s.place_marker((487.412, 542.546, 339.741), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_2" not in marker_sets:
s=new_marker_set('Cog6_2')
marker_sets["Cog6_2"]=s
s= marker_sets["Cog6_2"]
mark=s.place_marker((476.438, 563.475, 355.169), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_3" not in marker_sets:
s=new_marker_set('Cog6_3')
marker_sets["Cog6_3"]=s
s= marker_sets["Cog6_3"]
mark=s.place_marker((456.629, 567.891, 375.9), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_4" not in marker_sets:
s=new_marker_set('Cog6_4')
marker_sets["Cog6_4"]=s
s= marker_sets["Cog6_4"]
mark=s.place_marker((433.885, 571.824, 392.34), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_5" not in marker_sets:
s=new_marker_set('Cog6_5')
marker_sets["Cog6_5"]=s
s= marker_sets["Cog6_5"]
mark=s.place_marker((408.443, 573.851, 404.449), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_6" not in marker_sets:
s=new_marker_set('Cog6_6')
marker_sets["Cog6_6"]=s
s= marker_sets["Cog6_6"]
mark=s.place_marker((387.496, 559.47, 392.46), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((453.421, 527.079, 436.448), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((320.645, 588.786, 344.08), (0.21, 0.49, 0.72), 18.4716)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((491.899, 503.154, 427.54), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_0" not in marker_sets:
s=new_marker_set('Cog7_0')
marker_sets["Cog7_0"]=s
s= marker_sets["Cog7_0"]
mark=s.place_marker((490.244, 493.314, 402.88), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_1" not in marker_sets:
s=new_marker_set('Cog7_1')
marker_sets["Cog7_1"]=s
s= marker_sets["Cog7_1"]
mark=s.place_marker((484.724, 469.799, 350.456), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_2" not in marker_sets:
s=new_marker_set('Cog7_2')
marker_sets["Cog7_2"]=s
s= marker_sets["Cog7_2"]
mark=s.place_marker((478.939, 445.038, 298.961), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((532.383, 502.052, 278.523), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((432.292, 368.905, 244.95), (0.7, 0.7, 0.7), 18.4716)
if "Cog8_0" not in marker_sets:
s=new_marker_set('Cog8_0')
marker_sets["Cog8_0"]=s
s= marker_sets["Cog8_0"]
mark=s.place_marker((505.686, 525.311, 392.624), (1, 0.5, 0), 17.1475)
if "Cog8_1" not in marker_sets:
s=new_marker_set('Cog8_1')
marker_sets["Cog8_1"]=s
s= marker_sets["Cog8_1"]
mark=s.place_marker((505.669, 516.009, 365.984), (1, 0.5, 0), 17.1475)
if "Cog8_2" not in marker_sets:
s=new_marker_set('Cog8_2')
marker_sets["Cog8_2"]=s
s= marker_sets["Cog8_2"]
mark=s.place_marker((519.318, 495.81, 380.205), (1, 0.5, 0), 17.1475)
if "Cog8_3" not in marker_sets:
s=new_marker_set('Cog8_3')
marker_sets["Cog8_3"]=s
s= marker_sets["Cog8_3"]
mark=s.place_marker((521.203, 467.974, 383.863), (1, 0.5, 0), 17.1475)
if "Cog8_4" not in marker_sets:
s=new_marker_set('Cog8_4')
marker_sets["Cog8_4"]=s
s= marker_sets["Cog8_4"]
mark=s.place_marker((522.364, 439.958, 383.551), (1, 0.5, 0), 17.1475)
if "Cog8_5" not in marker_sets:
s=new_marker_set('Cog8_5')
marker_sets["Cog8_5"]=s
s= marker_sets["Cog8_5"]
mark=s.place_marker((514.033, 413.155, 380.166), (1, 0.5, 0), 17.1475)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((497.001, 492.839, 378.454), (1, 0.6, 0.1), 18.4716)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((531.46, 333.91, 382.133), (1, 0.6, 0.1), 18.4716)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| [
"batxes@gmail.com"
] | batxes@gmail.com |
e90ae16967de9f07884f180a93732c5449e223bb | c467be10195f4849ac1eba4c566a0e6f39af99d5 | /Ignore/DPLL P1.py | c04aee2fbba80b76e0e94e17828b4b17ea0b7fe5 | [
"Apache-2.0"
] | permissive | paxcema/cl-dpll | 5b2bac82074000310a8191a982b484e9a7004c7d | e91815779ecb9a41ecaca32e62c25d7fe7c2804e | refs/heads/master | 2021-06-01T07:36:22.846027 | 2016-06-14T15:43:08 | 2016-06-14T15:43:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,106 | py | __author__ = 'Patricio Cerda, Joaquin Moreno y Pedro Zepeda'
import os
from time import time
#Pseudocodigo de lo que se debe hacer para optimizar el algoritmo base:
#
#Unit propagation
#If a clause is a unit clause, i.e. it contains only a single unassigned literal,
#this clause can only be satisfied by assigning the necessary value to make this
#literal true. Thus, no choice is necessary. In practice, this often leads to
#deterministic cascades of units, thus avoiding a large part of the naive search space.
#
#Pure literal elimination
#If a propositional variable occurs with only one polarity in the formula, it is called
#pure. Pure literals can always be assigned in a way that makes all clauses containing
#them true. Thus, these clauses do not constrain the search anymore and can be deleted.
#
#def DPLL(lista):
# if lista (es un set de literales consistente):
# return true
# if lista (contiene una clausula vacia):
# return false
# for (cada clausula unitaria 'l') in lista:
#*** lista = unit-propagate(l, lista)
# for (cada literal 'l' que ocurre 'puro') in lista:
#*** lista = pure-literal-assign(l, lista)
# l = choose-literal(lista)
# lista1 = lista.append(l)
# lista2 = lista.append(-l)
# return DPLL(lista1) or DPLL(lista2)
#
#Con *** indicando los pasos a implementar para mejorar la rapidez. De wikipedia:
#
#unit-propagate(l, lista) and pure-literal-assign(l, lista) are functions that return the result of applying unit
#propagation and the pure literal rule, respectively, to the literal l and the formula ?. In other words, they
#replace every occurrence of l with "true" and every occurrence of not l with "false" in the formula ?, and
#simplify the resulting formula.
def DPLL(alpha, recorrido):
print('Expresion actual', alpha)
if alpha == '': print('alpha es nada, por lo tanto True'); return True
elif alpha == []: print('alpha es vacia, por lo tanto False'); return False
elif (len(alpha[0]) == 1) and (alpha[0].isalpha()):
print('alpha es clausula unitaria, por lo tanto simplificamos.')
literal_1 == alpha[2] # == True?
print('Debiese asignarse True a ', literal_1)
return DPLL(simplificacion(alpha, literal_1))
i = 0; j = 0
literal_2 = alpha[i][j]
while literal_2 in recorrido:
try:
literal_2 = alpha[i][j+1]
except:
try:
j = 0
literal_2 = alpha[i+1][j]
except: break
recorrido.append(literal_2)
print('Asigno valor de verdad a ', literal_2, 'y modifico el recorrido, que ahora es: ', recorrido)
if DPLL(simplificacion(alpha, literal_2), recorrido) == True:
return True
else:
print('No resulto la asignacion, probamos con el valor de verdad contrario para el mismo literal.')
return DPLL(simplificacion(alpha, -literal_2),recorrido)
def simplificacion(alpha,literal):
"""remover clausulas in alpha donde literal es afirmativo/positivo
remover (no literal) de clausulas donde aparece
return new alpha"""
i = 1
largo = len(alpha)
if len(alpha) == 1: largo = largo + 1
while i in range(0,largo):
i -= 1
clausula = alpha[i]
print('Alpha que entra es ', alpha)
j = 0
while j in range(0,2):
atomo = clausula[j]
print('En la clausula ', clausula, ' se tiene el atomo ', atomo)
if atomo == -literal:
print('Atomo opuesto, remuevo el atomo.')
clausula.remove(atomo)
j = 0
continue
if atomo == literal:
print('Atomo igual, remuevo clausula')
alpha.remove(clausula)
j = 1
if len(alpha) >= 2: i += 1
if len(alpha) == 0: return ''
break
j += 1
return alpha
def LeerArchivo(ruta):
lista_clausulas = []
archivo = open(ruta, 'r')
linea = archivo.readline()
while linea != '':
pos1 = linea.find(' ')
valor1 = int(linea[:pos1])
resto = linea[pos1 + 1:len(linea) - 1]
pos2 = resto.find(' ')
valor2 = int(resto[:pos2])
valor3 = int(resto[pos2 +1:len(resto)])
lista = [valor1, valor2, valor3]
lista_clausulas.append(lista)
linea = archivo.readline()
archivo.close()
return lista_clausulas
while True:
opcion = str(input('Seleccione el archivo a probar: \n'))
path = str(os.getcwd())
if opcion == 'q' or opcion == 'Q':
print("Gracias por usar nuestro SAT solver!")
break
try:
r = '%s/instancia%s.txt' % path,opcion
print('r', r)
lista_clausulas = LeerArchivo(r)
print(lista_clausulas)
except:
print("Archivo instancia%s.txt no encontrado. Intente de nuevo!" % opcion)
continue
casos_recorridos = []
timer0 = time()
evaluacion = DPLL(lista_clausulas, casos_recorridos)
t_final = time() - timer0
print(evaluacion)
print("Tiempo", (t_final*1000)//1, 'ms') | [
"pcerda2@uc.cl"
] | pcerda2@uc.cl |
daef37796dc30e504e3045c54fa5fe319374339c | 285d322cdd60997987603f263fe9749b08ffe3b6 | /venv/Scripts/fixup_firestore_v1_keywords.py | cde584e9a2cef539babe3b4e03ca6dff8bc2923d | [] | no_license | ChristianCollaguazo/flask-app | 0904cced10d9edde3580805d899e3f8434efa105 | 09e1de6242ba47286a851cb16abfd91d67c43adc | refs/heads/master | 2023-06-03T12:26:57.057395 | 2021-06-18T22:46:54 | 2021-06-18T22:46:54 | 378,279,195 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,343 | py | #!c:\users\chris\pycharmprojects\to-do-list\venv\scripts\python.exe
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
try:
import libcst as cst
except ImportError:
raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.')
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)
def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Splits *iterator* into two lists according to *predicate*, preserving
    the relative order of the elements.

    Args:
        predicate: Callable deciding which bucket each element goes into.
            Any truthy return value selects the first bucket.
        iterator: The elements to partition.

    Returns:
        A ``(true_list, false_list)`` tuple: elements for which the
        predicate was truthy, followed by those for which it was falsy.
    """
    results = ([], [])
    for item in iterator:
        # bool() guards against predicates that return truthy non-bool
        # values (e.g. len), which would make int() an out-of-range index.
        results[int(bool(predicate(item)))].append(item)
    # Returns true_list, false_list
    return results[1], results[0]
class firestoreCallTransformer(cst.CSTTransformer):
    """libcst transformer that rewrites Firestore client method calls.

    For each call whose attribute name matches a key in METHOD_TO_PARAMS,
    the positional and flattened keyword arguments are folded into a single
    ``request={...}`` dict argument, while the control-plane parameters
    (``retry``/``timeout``/``metadata``) are kept as ordinary keyword args.
    """

    # Keyword-only control parameters that must stay outside the request dict.
    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
    # Maps each API method name to the ordered tuple of its flattened
    # parameter names; positional args are matched against this order.
    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
        'batch_get_documents': ('database', 'documents', 'mask', 'transaction', 'new_transaction', 'read_time', ),
        'batch_write': ('database', 'writes', 'labels', ),
        'begin_transaction': ('database', 'options', ),
        'commit': ('database', 'writes', 'transaction', ),
        'create_document': ('parent', 'collection_id', 'document', 'document_id', 'mask', ),
        'delete_document': ('name', 'current_document', ),
        'get_document': ('name', 'mask', 'transaction', 'read_time', ),
        'list_collection_ids': ('parent', 'page_size', 'page_token', ),
        'list_documents': ('parent', 'collection_id', 'page_size', 'page_token', 'order_by', 'mask', 'transaction', 'read_time', 'show_missing', ),
        'listen': ('database', 'add_target', 'remove_target', 'labels', ),
        'partition_query': ('parent', 'structured_query', 'partition_count', 'page_token', 'page_size', ),
        'rollback': ('database', 'transaction', ),
        'run_query': ('parent', 'structured_query', 'transaction', 'new_transaction', 'read_time', ),
        'update_document': ('document', 'update_mask', 'mask', 'current_document', ),
        'write': ('database', 'stream_id', 'writes', 'stream_token', 'labels', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite a matching method call; return *updated* untouched otherwise."""
        try:
            # Only attribute-style calls (obj.method(...)) have func.attr;
            # anything else raises AttributeError and is left alone.
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Separate the control parameters from the request-dict parameters.
        kwargs, ctrl_kwargs = partition(
            lambda a: not a.keyword.value in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the named parameters can only have been the
        # control params (retry, timeout, metadata) passed positionally;
        # re-attach them as keyword arguments in that order.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        # Build the single request={...} dict argument from the remaining args.
        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )
def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=firestoreCallTransformer(),
):
    """Mirror every ``.py`` file under *in_dir* into *out_dir*, rewriting
    method calls with *transformer* along the way.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    for src_path in in_dir.rglob("*.py"):
        source_text = src_path.read_text()
        # Parse, apply the call-fixing transformer, and regenerate source.
        rewritten = cst.parse_module(source_text).visit(transformer)
        # Recreate the same relative directory layout under out_dir.
        dest_path = out_dir / src_path.relative_to(in_dir)
        dest_path.parent.mkdir(parents=True, exist_ok=True)
        dest_path.write_text(rewritten.code)
# Command-line entry point: parse arguments, validate both directories, then
# run the fixer over the whole tree.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the firestore client library.
The existing sources are NOT overwritten but are copied to output_dir with changes made.
Note: This tool operates at a best-effort level at converting positional
parameters in client method calls to keyword based parameters.
Cases where it WILL FAIL include
A) * or ** expansion in a method call.
B) Calls via function or method alias (includes free function calls)
C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
These all constitute false negatives. The tool will also detect false
positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    # Both directories must already exist, and the output directory must be
    # empty so previously generated files are never overwritten.
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)
    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)
    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)
    fix_files(input_dir, output_dir)
| [
"christian.collaguazom@gmail.com"
] | christian.collaguazom@gmail.com |
f7d13a2a878558bb3b53dd2d4ec2dd8a9005bf90 | 7d19ab25d7eaff3bbfe477c12c146b07dd4f2db3 | /stubserver.py | 17aec2a608e21255e19f7dd24f115810c80f5c64 | [
"BSD-2-Clause-Views"
] | permissive | tarttelin/pyrestmodels | 071f8910b877ef802f989637fc9eadb89334f3c0 | 0c36fb2360d503e3b8df24971edfad655836ca47 | refs/heads/master | 2021-03-12T19:45:55.391900 | 2013-01-03T15:16:44 | 2013-01-03T15:16:44 | 7,397,305 | 1 | 2 | null | 2015-09-08T13:30:49 | 2013-01-01T17:33:42 | Python | UTF-8 | Python | false | false | 10,972 | py | """
Copyright 2009 Chris Tarttelin
Redistribution and use in source and binary forms, with or without modification, are
permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list
of conditions and the following disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE FREEBSD PROJECT ``AS IS'' AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FREEBSD PROJECT OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those of the
authors and should not be interpreted as representing official policies, either expressed
or implied, of the FreeBSD Project.
"""
import BaseHTTPServer, cgi, threading, re, urllib, httplib
import unittest, urllib, urllib2, time
from unittest import TestCase
import sys
class StoppableHTTPServer(BaseHTTPServer.HTTPServer):
    """HTTPServer variant that can actually be shut down on Python 2.5.

    Python 2.5's HTTPServer does not close down properly when calling
    server_close; this implementation is based on the comments in
    http://stackoverflow.com/questions/268629/how-to-stop-basehttpserver-serveforever-in-a-basehttprequesthandler-subclass
    """

    # Flag checked by serve_forever(); flipping it ends the loop.
    stopped = False
    allow_reuse_address = True

    def __init__(self, *args, **kw):
        BaseHTTPServer.HTTPServer.__init__(self, *args, **kw)

    def serve_forever(self):
        """Handle requests one at a time until the server is stopped."""
        while not self.stopped:
            self.handle_request()

    def server_close(self):
        """Close the socket, mark the server stopped and unblock the loop."""
        BaseHTTPServer.HTTPServer.server_close(self)
        self.stopped = True
        # serve_forever() may be blocked inside handle_request(); issue one
        # throwaway request so it wakes up, notices ``stopped`` and exits.
        self._create_dummy_request()

    def _create_dummy_request(self):
        shutdown_url = "http://localhost:" + str(self.server_port) + "/__shutdown"
        response = urllib.urlopen(shutdown_url)
        response.read()
        response.close()
# Python < 2.6 needs the stoppable workaround defined above; newer versions
# shut down cleanly with the stock HTTPServer.
if sys.version_info[0] == 2 and sys.version_info[1] < 6:
    HTTPServer = StoppableHTTPServer
    print "Using stoppable server"
else:
    HTTPServer = BaseHTTPServer.HTTPServer
class StubServer(object):
_expectations = []
def __init__(self, port):
self.port = port
def run(self):
server_address = ('localhost', self.port)
self.httpd = HTTPServer(server_address, StubResponse)
t = threading.Thread(target=self._run)
t.start()
def stop(self):
self.httpd.server_close()
self.verify()
def _run(self, ):
try:
self.httpd.serve_forever()
except:
pass
def verify(self):
failures = []
for expectation in self._expectations:
if not expectation.satisfied:
failures.append(str(expectation))
self._expectations.remove(expectation)
if failures:
raise Exception("Unsatisfied expectations:\n" + "\n".join(failures))
def expect(self, method="GET", url="^UrlRegExpMatcher$", data=None, data_capture={}, file_content=None):
expected = Expectation(method, url, data, data_capture)
self._expectations.append(expected)
return expected
class Expectation(object):
    """One programmed request expectation plus its canned response."""

    def __init__(self, method, url, data, data_capture):
        self.method = method              # HTTP method to match, e.g. "GET"
        self.url = url                    # regex matched against the path
        self.data = data                  # expected request body (or None)
        self.data_capture = data_capture  # dict the handler fills with the body
        self.satisfied = False            # set True once a request matched

    def and_return(self, mime_type="text/html", reply_code=200, content="", file_content=None):
        """Define the reply: either inline *content* or a file's contents.

        Bug fix: the file handle is now closed even if the read fails
        (the original leaked it on error).  try/finally is used instead of
        ``with`` to stay compatible with the Python 2.5 support implied by
        StoppableHTTPServer above.
        """
        if file_content:
            f = open(file_content, "r")
            try:
                content = f.read()
            finally:
                f.close()
        self.response = (reply_code, mime_type, content)

    def __str__(self):
        return self.method + ":" + self.url
class StubResponse(BaseHTTPServer.BaseHTTPRequestHandler):
    """Request handler that answers with the first matching, not-yet-satisfied
    expectation registered on StubServer."""
    def __init__(self, request, clientaddress, parent):
        # Share the class-level expectation list with the server.
        self.expected = StubServer._expectations
        BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, request, clientaddress, parent)
    def _get_data(self):
        """Read and return the request body (Content-Length or chunked)."""
        # NOTE: has_key() is Python-2-only; this module targets Python 2.
        if self.headers.has_key("content-length"):
            size_remaining = int(self.headers["content-length"])
            return self._read_chunk(size_remaining)
        elif self.headers.get('Transfer-Encoding', "") == "chunked":
            # Copied from httplib ... should find a way to use httplib instead of copying...
            chunk_left = None
            value = ''
            amt = None
            # XXX This accumulates chunks by repeated string concatenation,
            # which is not efficient as the number or size of chunks gets big.
            while True:
                if chunk_left is None:
                    line = self.rfile.readline()
                    i = line.find(';')
                    if i >= 0:
                        line = line[:i] # strip chunk-extensions
                    chunk_left = int(line, 16)
                    if chunk_left == 0:
                        break
                if amt is None:
                    value += self._read_chunk(chunk_left)
                elif amt < chunk_left:
                    value += self._read_chunk(amt)
                    self.chunk_left = chunk_left - amt
                    return value
                elif amt == chunk_left:
                    value += self._read_chunk(amt)
                    self._read_chunk(2) # toss the CRLF at the end of the chunk
                    self.chunk_left = None
                    return value
                else:
                    value += self._read_chunk(chunk_left)
                    amt -= chunk_left
                # we read the whole chunk, get another
                self._read_chunk(2) # toss the CRLF at the end of the chunk
                chunk_left = None
            # read and discard trailer up to the CRLF terminator
            ### note: we shouldn't have any trailers!
            while True:
                line = self.rfile.readline()
                if not line:
                    # a vanishingly small number of sites EOF without
                    # sending the trailer
                    break
                if line == '\r\n':
                    break
            # we read everything; close the "file"
            self.rfile.close()
            return value
        else:
            return ""
    def _read_chunk(self, size_remaining):
        """Read exactly *size_remaining* bytes from rfile, in bounded pieces."""
        max_chunk_size = 10*1024*1024
        L = []
        while size_remaining:
            chunk_size = min(size_remaining, max_chunk_size)
            L.append(self.rfile.read(chunk_size))
            size_remaining -= len(L[-1])
        return ''.join(L)
    def handle_one_request(self):
        """Handle a single HTTP request.
        You normally don't need to override this method; see the class
        __doc__ string for information on how to handle specific HTTP
        commands such as GET and POST.
        """
        self.raw_requestline = self.rfile.readline()
        if not self.raw_requestline:
            self.close_connection = 1
            return
        if not self.parse_request(): # An error code has been sent, just exit
            return
        method = self.command
        # /__shutdown is the dummy request StoppableHTTPServer.server_close
        # issues to unblock the serve loop; just acknowledge it.
        if self.path == "/__shutdown":
            self.send_response(200, "Python")
        # Answer with the first unsatisfied expectation whose method matches
        # and whose url regex matches the request path.
        for exp in self.expected:
            if exp.method == method and re.search(exp.url, self.path) and not exp.satisfied:
                self.send_response(exp.response[0], "Python")
                self.send_header("Content-Type", exp.response[1])
                self.end_headers()
                self.wfile.write(exp.response[2])
                data = self._get_data()
                exp.satisfied = True
                exp.data_capture["body"] = data
                break
        self.wfile.flush()
class WebTest(TestCase):
    """Self-tests: spin up a StubServer on port 8998 and exercise it with
    real HTTP requests via urllib2 (Python 2)."""
    def setUp(self):
        self.server = StubServer(8998)
        self.server.run()
    def tearDown(self):
        self.server.stop()
        self.server.verify()
    def _make_request(self, url, method="GET", payload="", headers={}):
        # NOTE(review): ``headers={}`` is a shared mutable default; harmless
        # here because it is never mutated, but worth confirming.
        self.opener = urllib2.OpenerDirector()
        self.opener.add_handler(urllib2.HTTPHandler())
        request = urllib2.Request(url, headers=headers, data=payload)
        # Force the HTTP verb regardless of whether a payload is present.
        request.get_method = lambda: method
        response = self.opener.open(request)
        response_code = getattr(response, 'code', -1)
        return (response, response_code)
    def test_get_with_file_call(self):
        # The stub should reply with the contents of data.txt.
        f = open('data.txt', 'w')
        f.write("test file")
        f.close()
        self.server.expect(method="GET", url="/address/\d+$").and_return(mime_type="text/xml", file_content="./data.txt")
        response, response_code = self._make_request("http://localhost:8998/address/25", method="GET")
        expected = open("./data.txt", "r").read()
        try:
            self.assertEquals(expected, response.read())
        finally:
            response.close()
    def test_put_with_capture(self):
        # The request body should be captured into the supplied dict.
        capture = {}
        self.server.expect(method="PUT", url="/address/\d+$", data_capture=capture).and_return(reply_code=201)
        f, reply_code = self._make_request("http://localhost:8998/address/45", method="PUT", payload=str({"hello": "world", "hi": "mum"}))
        try:
            self.assertEquals("", f.read())
            captured = eval(capture["body"])
            self.assertEquals("world", captured["hello"])
            self.assertEquals("mum", captured["hi"])
            self.assertEquals(201, reply_code)
        finally:
            f.close()
    def test_post_with_data_and_no_body_response(self):
        self.server.expect(method="POST", url="address/\d+/inhabitant", data='<inhabitant name="Chris"/>').and_return(reply_code=204)
        f, reply_code = self._make_request("http://localhost:8998/address/45/inhabitant", method="POST", payload='<inhabitant name="Chris"/>')
        self.assertEquals(204, reply_code)
    def test_get_with_data(self):
        self.server.expect(method="GET", url="/monitor/server_status$").and_return(content="<html><body>Server is up</body></html>", mime_type="text/html")
        f, reply_code = self._make_request("http://localhost:8998/monitor/server_status", method="GET")
        try:
            self.assertTrue("Server is up" in f.read())
            self.assertEquals(200, reply_code)
        finally:
            f.close()
# Run the WebTest self-tests when executed directly.
if __name__=='__main__':
    unittest.main()
| [
"chris@tarttelin.co.uk"
] | chris@tarttelin.co.uk |
b5d7d906ca1b952f86510b73dd4b2ab3e980c6db | 283815445952a37e0124801b456844774355733f | /app/models/__init__.py | ffca30aca69e6931817c66d4933a609c5d6bf330 | [] | no_license | paulosjd/woodrecs | 7aa4bec22f2c126bd51023e141f1a113c8faf3d8 | 19a8a53c753ae0978fc092d9a2f6f560dc8644bf | refs/heads/master | 2022-07-07T08:36:06.902801 | 2020-11-08T21:59:00 | 2020-11-08T21:59:00 | 251,587,209 | 0 | 0 | null | 2022-06-22T01:36:43 | 2020-03-31T11:47:53 | Python | UTF-8 | Python | false | false | 183 | py | from .profile import Profile
from .profile_board import ProfileBoard
from .route import Route
from .user import User
# Public API of the models package.  Bug fix: ``__all__`` must contain
# *strings* naming the exported attributes; the original listed the class
# objects themselves, which makes ``from app.models import *`` raise
# TypeError ("attribute name must be string").
__all__ = [
    "Profile",
    "ProfileBoard",
    "Route",
    "User",
]
| [
"pjdavis@gmx.com"
] | pjdavis@gmx.com |
7e973f7b6ada2d4a52895cd70e33c4f1bb0ed231 | adff01ff96615108d64fcc85e8f2ac80eb564b15 | /venv/bin/pip2 | 76c93c3b5b8eff088359bc4a87879ba92d1b1c3b | [] | no_license | Drisicus/hackaton_2017_python_server | 8645c7b02d1608f7838b58a5944b6f7bb22ed1d5 | 25531d73136ac87ae41150ccdc52781556932f04 | refs/heads/master | 2021-05-04T18:42:52.094795 | 2017-10-06T02:41:11 | 2017-10-06T02:41:11 | 105,961,097 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 271 | #!/home/drisicus/Escritorio/HackatonInditex2017/colasBackend/Server/venv/bin/python2
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
# Standard setuptools console-script shim: normalise argv[0] (strip the
# "-script.pyw"/".exe" wrapper suffix), then hand control to pip's main().
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"aaasssccc@gmail.com"
] | aaasssccc@gmail.com | |
5c53ac3afeba59c03e9437ce6dfc39c59e1ca5cd | d989358ccdd0e21daf33e70742058b02f805c549 | /section1/06.py | d5fc3e8f0eb4cb286a1ef236fb4e520238489a3b | [] | no_license | kouheiszk/nltk | d8f21a3ad8aad7547f002e30638160eb81f322ca | 12046291bc7ddc6130024400c09649e8ee58a411 | refs/heads/master | 2016-09-05T15:42:03.870484 | 2012-08-04T06:37:56 | 2012-08-04T06:37:56 | 4,922,894 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 154 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import nltk
from nltk.book import *
# Dispersion plot of where the four names occur in ``text2``
# (``text2`` is provided by the ``from nltk.book import *`` star import above).
text2.dispersion_plot(["Edward", "Elinor", "Willoughby", "Marianne"])
| [
"kouhei.szk@gmail.com"
] | kouhei.szk@gmail.com |
6c4ec435613984dddbe630af2cf2b84ad8144ed3 | 9415ab209b475b670ec24ff458ba48b34134ad12 | /Week2/numpy_practice.py | 554f3936ebf3078f77bc7f2a0bf23eddfe0ae2fe | [] | no_license | jmoon34/advanced-ml-specialization | cb84201362ea267135b5e75267f7f06269209ef9 | 0b488a96eafebd0075837e3c26eed56cc4aee1e8 | refs/heads/master | 2021-02-18T21:43:42.629082 | 2020-03-05T18:57:32 | 2020-03-05T18:57:32 | 245,240,861 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
# def sinus2d(x, y):
# return np.sin(x) + np.sin(y)
#
# x = np.linspace(0, 2*np.pi, 100)
# y = np.linspace(0, 2*np.pi, 100)
#
# xx, yy = np.meshgrid(x, y)
# z = sinus2d(xx, yy)
# plt.imshow(z, origin='lower', interpolation='none')
# plt.show()
# Small 2x2 "patch" and "kernel" for experimenting with array products.
patch = np.array([[1, 1], [0, 1]])
kernel = np.array([[1, 2], [3, 4]])
# Matrix product (not element-wise): prints [[4 6], [3 4]].
print(patch @ kernel)
"jmcube@gmail.com"
] | jmcube@gmail.com |
23392d5b646b6de77cc4f67ba3f42220c8974f41 | 03b628d6103eb0a9b41e36a9521129793ab3c4d9 | /bookkeeping/migrations/0007_auto_20181119_1234.py | 9f2651a9cfbb80fe7ba5b3c81af3e15ac9aec821 | [] | no_license | McDiamund/Accuracy | dcd9b625d5c6ee5432a7349b7c6a7a393f950656 | 63eec74feec43ed7554b4cd7d8225ecc75a460e3 | refs/heads/master | 2020-04-08T04:58:17.087562 | 2018-11-26T03:41:11 | 2018-11-26T03:41:11 | 159,040,256 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 371 | py | # Generated by Django 2.1.1 on 2018-11-19 17:34
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: renames field ``accounts`` to ``profile``
    on the ``account`` model."""
    dependencies = [
        ('bookkeeping', '0006_auto_20181119_1233'),
    ]
    operations = [
        migrations.RenameField(
            model_name='account',
            old_name='accounts',
            new_name='profile',
        ),
    ]
| [
"mcdiamundtasks@gmail.com"
] | mcdiamundtasks@gmail.com |
061b46322d284653c94c803921d86a35f31c4c3a | 8c067089ac94844919c4dc37681c898c0f93819e | /jenkins-master/jobs/scripts/workspace/config.py | c477a45df06ebcddcd194c10096182da65606db8 | [] | no_license | Ramireddyashok/mozmill-ci | 9ac1a5762fa8c14c4802447a9d5878422d2e164a | 0b8c6417e596235cca403ca80947fc328bd2fe8b | refs/heads/master | 2021-04-30T01:27:42.502771 | 2017-06-14T18:24:50 | 2017-06-14T18:24:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,089 | py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
# Directory containing this script; artifact paths below are relative to it.
here = os.path.dirname(os.path.abspath(__file__))
# Per-test-type settings: which harness config/script to run and how results
# are reported to Treeherder (group/job names, symbols, tier, artifacts).
config = {
    'test_types': {
        'functional': {
            'harness_config': os.path.join('firefox_ui_tests', 'qa_jenkins.py'),
            'harness_script': os.path.join('firefox_ui_tests', 'functional.py'),
            'treeherder': {
                'group_name': 'Firefox UI Functional Tests',
                'group_symbol': 'Fxfn',
                'job_name': 'Firefox UI Functional Tests ({locale})',
                'job_symbol': '{locale}',
                'tier': 3,
                'artifacts': {
                    'log_info.log': os.path.join(here, 'build', 'upload', 'logs', 'log_info.log'),
                    'report.html': os.path.join(here, 'build', 'upload', 'reports', 'report.html'),
                },
                'log_reference': 'log_info.log',
            },
        },
        'update': {
            'harness_config': os.path.join('firefox_ui_tests', 'qa_jenkins.py'),
            'harness_script': os.path.join('firefox_ui_tests', 'update.py'),
            'treeherder': {
                'group_name': 'Firefox UI Update Tests - {update_channel}',
                'group_symbol': 'Fxup-{update_channel}',
                'job_name': 'Firefox UI Update Tests - {update_channel} {locale}-{update_number}',
                'job_symbol': '{locale}-{update_number}',
                'tier': 3,
                'artifacts': {
                    'log_info.log': os.path.join(here, 'build', 'upload', 'logs', 'log_info.log'),
                    'report.html': os.path.join(here, 'build', 'upload', 'reports', 'report.html'),
                    # TODO: Bug 1210753: Move generation of log as option to mozharness
                    'http.log': os.path.join(here, 'build', 'http.log'),
                },
                'log_reference': 'log_info.log',
            },
        },
    },
}
| [
"mail@hskupin.info"
] | mail@hskupin.info |
4a4ea706f226e5b29344a5ed46350541e6b35925 | 44d3026f538e4d63f4f9616a8041519079e1358b | /ToSpark.py | 68be7b341087d19499d5d272dd2c4ce11f7b80f2 | [] | no_license | TIM245-W16/twitter-sentiment | 29ad30207fa98fdaceec5bfcfdb0c6bdd09106ef | fbfd3fdb176bf1a76b19353562f5eb71a0551a7c | refs/heads/master | 2020-12-07T15:36:36.645728 | 2016-03-17T23:55:17 | 2016-03-17T23:55:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,908 | py | """
This program processes all 1247391 tweets I collected in my database.
It transforms each tweet into a vector by comparing its words against a wordlist,
then saves 70% of the tweets as a training set and 30% as a test set in txt files.
"""
import pymongo
import nltk
import io
try:
import json
except ImportError:
import simplejson as json
from collections import Counter
from prettytable import PrettyTable
from sklearn.naive_bayes import MultinomialNB
from nltk.corpus import stopwords
from sklearn import cross_validation
def setOfWords2Vec(vocabList, inputSet):
    """Return a binary bag-of-words vector for *inputSet* over *vocabList*.

    Element i is 1 if vocabList[i] appears in inputSet, else 0.  For a word
    that occurs more than once in vocabList only its first position is set,
    matching the original ``list.index`` behaviour.

    Performance fix: the original called ``vocabList.index(word)`` inside the
    loop (O(len(vocabList)) per word, O(n*m) overall); a word->index dict is
    built once instead, making each lookup O(1).
    """
    index_of = {}
    for i, word in enumerate(vocabList):
        # setdefault keeps the *first* index for duplicated vocab words.
        index_of.setdefault(word, i)
    returnVec = [0] * len(vocabList)
    for word in inputSet:
        if word in index_of:
            returnVec[index_of[word]] = 1
    return returnVec
def get_diversity(tokens):
    """Type-token ratio: the fraction of distinct tokens in *tokens*."""
    distinct = set(tokens)
    return float(len(distinct)) / len(tokens)
def average_words(statuses):
    """Mean number of whitespace-separated words per status."""
    word_counts = [len(status.split()) for status in statuses]
    return float(sum(word_counts)) / len(statuses)
# ---- 1. Load every distinct tweet text from the six MongoDB collections. ----
total_text = []
connection = pymongo.MongoClient()
db = connection['test-data8']
collection = ['negtive_data', 'negtive_data2', 'negtive_data3', 'positive_data', 'positive_data2', 'positive_data3']
for i in collection:
    coll = db[i]
    num = coll.count()
    print 'number of tweets', num
    text = [i for i in coll.distinct('text')]
    for j in text:
        total_text.append(j)
y = [0]*len(total_text)
print 'number of unique tweets', len(y)
words_total = [w
               for t in total_text
               for w in t.split()]
english_punctuations = [',', '.', ':', ';', '?', '(', ')', '[', ']', '&', '!', '*', '@', '#', '$', '%']
english_stopwords = stopwords.words('english')
# ---- 2. Label each tweet: contains ':)' -> positive (1.0), else 0.0. ----
for i in range(len(total_text)):
    if ':)' in total_text[i]:
        y[i] = 1.0
    else:
        y[i] = 0.0
X_train, X_test, y_train, y_test = cross_validation.train_test_split(total_text,y,test_size=0.3,random_state=0)
print 'size of train set:', len(X_train)
print 'size of test set:', len(X_test)
# ---- 3. Build the vocabulary: the 13000 most frequent lowercased words
# (longer than 2 characters) taken from the training tweets only. ----
words = [[w
         for w in document.split()] for document in X_train]
texts_filtered_stopwords = [[word for word in document if not word in english_stopwords] for document in words]
texts_filtered = [[word for word in document if not word in english_punctuations] for document in words]
all_words = []
for document in texts_filtered:
    for word in document:
        if len(word) > 2:
            all_words.append(word.lower())
all_words2 = nltk.FreqDist(w.lower() for w in all_words)
wordlist = [fpair[0] for fpair in list(all_words2.most_common(13000))]
print 'length of Wordlist', len(wordlist)
X_a= []
for i in X_test:
    X_a.append(i.lower())
X_test = X_a
# ---- 4. Split the train set into four shards and the test set into two. ----
X_traina, X_trainb, y_traina, y_trainb = cross_validation.train_test_split(X_train,y_train,test_size=0.5,random_state=0)
X_train1, X_train2, y_train1, y_train2 = cross_validation.train_test_split(X_traina,y_traina,test_size=0.5,random_state=0)
X_train3, X_train4, y_train3, y_train4 = cross_validation.train_test_split(X_trainb,y_trainb,test_size=0.5,random_state=0)
X_test1, X_test2, y_test1, y_test2 = cross_validation.train_test_split(X_test,y_test,test_size=0.5,random_state=0)
print 'size of train1 set:', len(X_train1)
print 'size of train2 set:', len(X_train2)
print 'size of train3 set:', len(X_train3)
print 'size of train4 set:', len(X_train4)
print 'size of test1 set:', len(X_test1)
print 'size of test2 set:', len(X_test2)
# ---- 5. Vectorise each shard and write it out, one "label, #0#1#..." line
# per tweet (format presumably consumed by the Spark side -- see docstring). ----
ftr1 = open('/Users/yuanjun/Desktop/train1.txt', 'w')
print "open ftr1"
train_data1 = [([0] * len(wordlist)) for i in range(len(y_train1))]
for i in range(len(train_data1)):
    words = [w
             for w in X_train1[i].split()]
    train_data1[i] = setOfWords2Vec(wordlist, words)
    s = ""
    for j in train_data1[i]:
        s = s + "#" + str(j)
    print >>ftr1, str(y_train1[i])+",",s
ftr1.close()
print "write train1 done!"
ftr2 = open('/Users/yuanjun/Desktop/train2.txt', 'w')
print "open ftr2"
train_data2 = [([0] * len(wordlist)) for i in range(len(y_train2))]
for i in range(len(train_data2)):
    words = [w
             for w in X_train2[i].split()]
    train_data2[i] = setOfWords2Vec(wordlist, words)
    s = ""
    for j in train_data2[i]:
        s = s + "#" + str(j)
    print >>ftr2, str(y_train2[i])+",",s
ftr2.close()
print "write train2 done!"
ftr3 = open('/Users/yuanjun/Desktop/train3.txt', 'w')
print "open ftr3"
train_data3 = [([0] * len(wordlist)) for i in range(len(y_train3))]
for i in range(len(train_data3)):
    words = [w
             for w in X_train3[i].split()]
    train_data3[i] = setOfWords2Vec(wordlist, words)
    s = ""
    for j in train_data3[i]:
        s = s + "#" + str(j)
    print >>ftr3, str(y_train3[i])+",",s
ftr3.close()
print "write train3 done!"
ftr4 = open('/Users/yuanjun/Desktop/train4.txt', 'w')
print "open ftr4"
train_data4 = [([0] * len(wordlist)) for i in range(len(y_train4))]
for i in range(len(train_data4)):
    words = [w
             for w in X_train4[i].split()]
    train_data4[i] = setOfWords2Vec(wordlist, words)
    s = ""
    for j in train_data4[i]:
        s = s + "#" + str(j)
    print >>ftr4, str(y_train4[i])+",",s
ftr4.close()
print "write train4 done!"
fte1 = open('/Users/yuanjun/Desktop/test1.txt', 'w')
print "open fte1"
test_data1 = [([0] * len(wordlist)) for i in range(len(y_test1))]
for i in range(len(test_data1)):
    words = [w
             for w in X_test1[i].split()]
    test_data1[i] = setOfWords2Vec(wordlist, words)
    s = ""
    for j in test_data1[i]:
        s = s + "#" + str(j)
    print >>fte1, str(y_test1[i])+",",s
fte1.close()
print "write test1 done!"
fte2 = open('/Users/yuanjun/Desktop/test2.txt', 'w')
print "open fte2"
test_data2 = [([0] * len(wordlist)) for i in range(len(y_test2))]
for i in range(len(test_data2)):
    words = [w
             for w in X_test2[i].split()]
    test_data2[i] = setOfWords2Vec(wordlist, words)
    s = ""
    for j in test_data2[i]:
        s = s + "#" + str(j)
    print >>fte2, str(y_test2[i])+",",s
fte2.close()
print "write test2 done!"
"junyuancs@gmail.com"
] | junyuancs@gmail.com |
b969aff50964ebae5ecd9541c8ed4af2b0ec93fa | 4d99350a527a88110b7bdc7d6766fc32cf66f211 | /OpenGLCffi/GLX/EXT/NV/copy_image.py | 1981ca497e85e50301da73e66cd5b08f9e4f85dd | [
"MIT"
] | permissive | cydenix/OpenGLCffi | e790ef67c2f6c9877badd5c38b7d58961c8739cd | c78f51ae5e6b655eb2ea98f072771cf69e2197f3 | refs/heads/master | 2021-01-11T07:31:10.591188 | 2017-04-17T11:04:55 | 2017-04-17T11:04:55 | 80,312,084 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | from OpenGLCffi.GLX import params
# Binding stub: the ``params`` decorator receives the GLX API name and the
# ordered parameter names; the Python body is intentionally empty
# (presumably the decorator generates the actual FFI call -- confirm
# against the OpenGLCffi implementation).
@params(api='glx', prms=['dpy', 'srcCtx', 'srcName', 'srcTarget', 'srcLevel', 'srcX', 'srcY', 'srcZ', 'dstCtx', 'dstName', 'dstTarget', 'dstLevel', 'dstX', 'dstY', 'dstZ', 'width', 'height', 'depth'])
def glXCopyImageSubDataNV(dpy, srcCtx, srcName, srcTarget, srcLevel, srcX, srcY, srcZ, dstCtx, dstName, dstTarget, dstLevel, dstX, dstY, dstZ, width, height, depth):
	pass
| [
"cdenizol@gmail.com"
] | cdenizol@gmail.com |
130f5e58a29834c93b9660f4e8a1496ca2c37af7 | 3c2a698420a70516960d6b7f939731d69d55c050 | /Python游戏开发入门/helloWorld.py | 85b2d07dfec8def39620ddf4c19c6c6a6bbeb37f | [] | no_license | qiebzps/163note | c9d7b55d81a2dd4df40bab68630d37ddb7d35160 | ae7173fcd46f782323abb7e90c7e2b7c8139dd88 | refs/heads/master | 2021-09-02T04:39:02.530279 | 2017-12-29T07:45:52 | 2017-12-29T07:45:52 | 113,682,934 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 611 | py | # Unit PYG02: Pygame Hello World Game
import pygame, sys # import the game library, and sys for exiting
pygame.init() # initialise all Pygame modules (default setup call)
screen = pygame.display.set_mode((600,400)) # window size 600x400
pygame.display.set_caption("Pygame游戏之旅")# set the window title
# main loop
while True:
    for event in pygame.event.get(): # handle pending events
        if event.type == pygame.QUIT: # window-close event
            pygame.quit()
            sys.exit()
    pygame.display.update() # refresh the display
| [
"1033239636@qq.com"
] | 1033239636@qq.com |
09038e22ac74201634b7000c44b95ec681573279 | fde51c57b2fbfbb2e33c695a2c6bb7114fbc8da6 | /2.구현/트럭_ghoon99.py | 044c3064f5813ecf404aa6d3b1bc474b751a0a6a | [] | no_license | GHooN99/2021_SJU_BOJAlgorithm.py | 6302b305518908efd7b818b373cbbecbbf352221 | fd98dde1e0c593b3f7f4287765f965ead13ec7c1 | refs/heads/main | 2023-08-07T12:26:41.527546 | 2021-04-13T12:49:46 | 2021-04-13T12:49:46 | 329,366,544 | 0 | 1 | null | 2021-01-18T08:09:39 | 2021-01-13T16:27:43 | Python | UTF-8 | Python | false | false | 456 | py | from collections import deque
# input
# n = number of trucks, w = bridge length (unit positions),
# l = maximum total weight the bridge may carry at once
n,w,l =map(int,input().split())
arr = deque(list(map(int,input().split())))
# solution
# One loop iteration == one unit of time: every slot on the bridge shifts
# forward; the next waiting truck enters only if the weight limit allows.
moved = []
bridge = deque([0]*w)
result = 0
#print(bridge)
while len(moved)!=n:
    # NOTE(review): sum(bridge) is recomputed every tick (O(w) per step);
    # a running total would be faster but gives the same answer.
    if arr and sum(bridge)+arr[0]-bridge[-1] <= l:
        bridge.appendleft(arr.popleft())
    else:
        bridge.appendleft(0)
    truck = bridge.pop()
    if truck != 0:
        moved.append(truck)
    #print(bridge,moved)
    result += 1
# output
print(result)
| [
"noreply@github.com"
] | GHooN99.noreply@github.com |
7f2c3f1106ce8ea93756f1ab4bb249953c260728 | d6dd8f1d468dff79b21039870fb9df5d9bd002a9 | /Python/exercices/week3/week3.py | a5b7323c74be17bed07463ce50d7fad81b4ea684 | [] | no_license | timdepater/periode_2-1 | 6edd3f982cfb80655f934e8a08295196bd81ef21 | f9a1a126cad32d44493c0d1ee46ff8b2f8ba8e7b | refs/heads/master | 2020-06-03T23:01:59.430115 | 2019-05-08T12:32:35 | 2019-05-08T12:32:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,162 | py |
# opgave 1
### A
class A:
    """Counter-like class holding a single integer attribute ``i``."""

    def __init__(self, i=0):
        # Starting value of the counter.
        self.i = i

    def m1(self):
        """Increase ``i`` by one."""
        self.i = self.i + 1
class B(A):
    """Subclass of A that always starts the inherited counter at 3."""

    def __init__(self, j=0):
        # The base counter is forced to 3 regardless of the caller.
        super().__init__(3)
        self.j = j

    def m1(self):
        """Increment the inherited counter (same effect as A.m1)."""
        self.i = self.i + 1
def main():
    """Demonstrate that B() starts with i == 3 and that m1 increments it."""
    b = B() # b.i = 3 b.j = 0
    print(b.i, b.j) # 3 0
    b.m1() # b.i = 4 b.j = 0
    print(b.i, b.j) # 4 0
main()
### B
class A:
    """Holds a single value ``i`` and identifies itself when printed."""

    def __init__(self, i):
        self.i = i

    def __str__(self):
        return "I am class A"
class B(A):
    """Subclass of A adding a second attribute ``j`` and its own __str__."""

    def __init__(self, i, j):
        # Let A store ``i``; keep ``j`` locally.
        super().__init__(i)
        self.j = j

    def __str__(self):
        return "I am class B"
def main():
    """Show that __str__ controls what print() emits for each class."""
    a = A(1) # a.i = 1 (the original comment wrongly said 3)
    b = B(1, 2) # b.i = 1 b.j = 2
    print(a) # "I am class A"
    print(b) # "I am class B"
    print(a.i) # 1 (the original comment wrongly said 3)
    print(b.i, b.j) # 1 2
main()
# opgave 2
# NEE
# opgave 3
class Circle():
    """Circle with radius ``straal`` (Dutch for radius)."""

    # Fixed approximation of pi shared by all instances.
    PI = 3.14159265358979

    def __init__(self, straal):
        self.straal = straal

    def area(self, persision=2):
        """Surface area, rounded to ``persision`` decimal places."""
        return round(self.PI * self.straal * self.straal, persision)

    def perimeter(self, persision=2):
        """Circumference, rounded to ``persision`` decimal places."""
        return round(2 * self.PI * self.straal, persision)
c = Circle(8)
print(c.area())
# NOTE(review): the disabled asserts below assume PI == 3.14; with the PI
# defined on Circle, area(8) rounds to 201.06 and perimeter(8) to 50.27.
# assert c.area() == 200.96
# assert c.perimeter() == 50.24
# assert c.perimeter() == 50.24
# opgave 4
class Roman():
    """Roman-numeral to integer conversion helpers (class methods only)."""

    # Value of each single roman symbol.
    rom_val = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}

    @classmethod
    def roman_to_int_me(cls, roman):
        """Convert *roman* to an int by scanning right to left.

        Fix: the original recursively built an arithmetic expression string
        and ran it through ``eval``; this computes the same signed sum
        directly, with no dynamic evaluation.
        """
        total = cls.rom_val[roman[-1]]
        for i in range(len(roman) - 2, -1, -1):
            value = cls.rom_val[roman[i]]
            # A smaller symbol before a larger one is subtracted (IV == 4).
            if value < cls.rom_val[roman[i + 1]]:
                total -= value
            else:
                total += value
        return total

    @classmethod
    def roman_to_int(cls, s):
        """Left-to-right conversion of *s* (debug prints removed)."""
        rom_val = cls.rom_val
        int_val = 0
        for i in range(len(s)):
            if i > 0 and rom_val[s[i]] > rom_val[s[i - 1]]:
                # The previous symbol was already added once, so subtract it
                # twice to turn e.g. I then X (1 + 10) into IX (9).
                int_val += rom_val[s[i]] - 2 * rom_val[s[i - 1]]
            else:
                int_val += rom_val[s[i]]
        return int_val
# Quick sanity checks (two of the three are left disabled).
# assert Roman.roman_to_int('C') == 100
assert Roman.roman_to_int('XLIX') == 49
# assert Roman.roman_to_int('MMMCMLXXXVI') == 3986
# opgave 5
class Stack:
    """LIFO stack backed by a list whose slots are reused.

    ``__index`` tracks the logical top; popped slots stay in ``__elements``
    and are overwritten by later pushes instead of being deleted, matching
    the original storage-reuse behaviour.

    Bug fix: the original named its self-parameter ``_`` but peek() and
    pop() referenced the undefined names ``self`` and ``none``, so calling
    either on an empty stack raised NameError instead of returning None.
    """

    def __init__(self):
        self.__elements = []
        self.__index = 0

    def is_empty(self):
        """Return True if the stack holds no elements."""
        return self.__index == 0

    def peek(self):
        """Return the top element without removing it, or None if empty."""
        if self.is_empty():
            return None
        return self.__elements[self.__index - 1]

    def push(self, value):
        """Store *value* at the top of the stack."""
        if len(self.__elements) > self.__index:
            # Reuse a slot left over from an earlier pop.
            self.__elements[self.__index] = value
        else:
            self.__elements.append(value)
        self.__index += 1

    def pop(self):
        """Remove and return the top element, or None if empty."""
        if self.is_empty():
            return None
        self.__index -= 1
        return self.__elements[self.__index]

    def get_size(self):
        """Return the number of elements currently on the stack."""
        return self.__index
# Exercise the stack: push 0..9, then pop until empty.
stack = Stack()
for i in range(10):
    stack.push(i)
while not stack.is_empty():
    # prints 9 8 7 6 5 4 3 2 1 0
    print(stack.pop(), end = " ")
# opgave 6
import time
class StopWatch():
    """Simple wall-clock stopwatch; starts running as soon as it is created."""

    def __init__(self):
        self.__start_time = 0
        self.__stop_time = 0
        # A new stopwatch starts immediately.
        self.start()

    def start(self):
        """Record the current time as the start mark."""
        self.__start_time = time.time()

    def stop(self):
        """Record the current time as the stop mark."""
        self.__stop_time = time.time()

    def get_elapsed_time(self):
        """Seconds between the last start() and the last stop()."""
        return self.__stop_time - self.__start_time

    def getStartTime(self):
        return self.__start_time

    def getStopTime(self):
        return self.__stop_time
# Measure how long it takes to sum the integers 1..size.
size = 1000000
stopWatch = StopWatch()
total = 0  # renamed from ``sum``, which shadowed the builtin
for i in range(1, size + 1):
    total += i
stopWatch.stop()
# Fix: time.time() returns seconds, so the elapsed value is in seconds,
# not milliseconds as the original message claimed.
print("The loop time is", stopWatch.get_elapsed_time(), "seconds")
# opgave 7
# try:
# statement1
# statement2
#     statement3 # if the line above raises an exception, this line is not executed
# except Exception1:
# # Handle exception
# except Exception2:
# # Handle exception
# except Exception3:
# # Handle exception
# finally:
# statement4
# statement5
# opgave 8
import json, requests, sys
from pprint import pprint
# get command line arguments
# if len(sys.argv) < 2:
#     sys.exit()
# argument 0 is program name
location = ' '.join(sys.argv[1:])
# download JSON data
# api key = 842af58c3d0f07bb8fb62b5199a09350
# NOTE(review): the API key is hard-coded in the URL below; for anything
# beyond an exercise it should come from the environment or a config file.
url='http://api.openweathermap.org/data/2.5/forecast?id={}&APPID=842af58c3d0f07bb8fb62b5199a09350'.format(location)
response = requests.get(url)
response.raise_for_status()
# load JSON data into Python variable
weatherData = json.loads(response.text)
w = weatherData['list']
print(weatherData)
print(w)
# opgave 9
# Original notes (translated from Dutch): the only PEP 8 issues found were a
# missing double blank line before the class and trailing spaces in two
# places; surprisingly, replacing ``self`` with an underscore raised no
# PEP 8 warnings.
import time
class StopWatch():
    # Same stopwatch as opgave 6, kept here for the PEP 8 exercise.
    def __init__(_):
        _.__start_time = 0
        _.__stop_time = 0
        _.start()
    def start(_):
        _.__start_time = time.time()
    def stop(_):
        _.__stop_time = time.time()
    def get_elapsed_time(_):
        # Difference of two time.time() values, i.e. seconds.
        return _.__stop_time - _.__start_time
    def getStartTime(_):
        return _.__start_time
    def getStopTime(_):
        return _.__stop_time
size = 1000000
stopWatch = StopWatch()
sum = 0
for i in range(1, size + 1):
    sum += i
stopWatch.stop()
print("The loop time is", stopWatch.get_elapsed_time(), "milliseconds") | [
"timostrating@home.nl"
] | timostrating@home.nl |
af04c17e64634697c76c907107a82c65fdf844af | 52467ee4b50eabada0af517b5d30c4f059b3b64b | /app.py | 7702d3727318cd1c108c475ec5ae4a67c3dfb621 | [] | no_license | robotBaby/clinics | e85b63741694b90505fedfc6cfda22417937006f | 841dba66c728c477cb32e4dd4260041f685a2ead | refs/heads/master | 2021-01-20T14:02:12.026151 | 2017-05-07T16:54:59 | 2017-05-07T16:54:59 | 90,547,725 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,206 | py | from flask import Flask, render_template, jsonify, request
from flask.ext.sqlalchemy import SQLAlchemy
from config import SQLALCHEMY_DATABASE_URI
from datetime import datetime
import json
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
lastReadTime = None
@app.route('/logs', methods=['GET'])
def metrics():
try:
global lastReadTime
this_log = []
with open('clinics.log', "r") as logFile:
lines = logFile.readlines()
# iterate over lines in reversed order (most recent first)
# of entries in the monitored time frame
i = 1
while i < len(lines) :
log = lines[i].split(' - ')
time = datetime.strptime(log[0], "%d/%b/%Y:%H:%M:%S")
if lastReadTime == None or time >= lastReadTime:
this_log.append(log)
i += 1
now = datetime.now()
lastReadTime = datetime(now.year, now.month, now.day, now.hour, now.minute, now.second)
print(len(this_log))
return json.dumps(this_log)
except Exception as e:
print(e)
return ""
@app.route('/', methods=['GET'])
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run()
| [
"arindrimadatta@gmail.com"
] | arindrimadatta@gmail.com |
f882f5c485a3e1e225e0601808e8510847b4d66d | 964b3baf11dd9d150b06eac439f11fb7c85c1a23 | /Classes-inClassLesson/Superhero/TestSuperman.py | 75f246ef34b5f7b7c2ea8b5636016a7e9cfd68ff | [] | no_license | markellisdev/bangazon-llc-orientation-exercises | 8c278ae54dcf2e0415706ed9e9344cf4808c04a9 | 6faabb33cb451e4a2eb65b5c7faca038865a08ac | refs/heads/master | 2021-01-12T03:37:32.793508 | 2017-01-10T22:53:47 | 2017-01-10T22:53:47 | 78,242,190 | 0 | 0 | null | 2017-01-10T22:53:47 | 2017-01-06T22:03:54 | Python | UTF-8 | Python | false | false | 934 | py | import unittest
from superheroPart3 import *
class TestSuperman(unittest.TestCase):
@classmethod
def setUpClass(self):
print('Set up class')
self.superman = Superman()
def test_SupermanMustBeBulletproof(self):
# superman = Superman() Now unecessary after @classmethod setUpClass
self.assertTrue(self.superman.is_bulletproof)
def test_SupermanIsFlyingFast(self):
# superman = Superman()
self.assertEqual(self.superman.air_speed, 1000000)
def test_SupermanIsSwimmingFast(self):
# superman = Superman()
self.assertEqual(self.superman.water_speed, 250)
def test_SupermanIsASuperhero(self):
#superman = Superman() #first need to create instance of Superman
self.assertIsInstance(self.superman, Superhero)
def test_SupermanIsAFlyingSuperhero(self):
self.assertIsInstance(self.superman, Flying)
#superman = Superman()
#first need to create instance of Superman
if __name__ == '__main__':
unittest.main()
| [
"markellisdev@gmail.com"
] | markellisdev@gmail.com |
f6403a7179164b65baf0defb76db1e75596bf584 | 63db67c4b5f8f427acd4210cba386f12f853a09b | /decoder/decoder_simple.py | e6958d943b1b69032ec032a8bccfe2578c5e5b71 | [
"MIT"
] | permissive | xclmj/VAE-GAN | 920a3176e5bc57ddac0baf36e26be4e40783f7ea | 37fe060cd993d89c983bc8cfcc59c6edc3c5920f | refs/heads/master | 2020-06-20T04:24:23.021385 | 2018-06-21T05:22:17 | 2018-11-21T06:49:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 800 | py | import os
import sys
import tensorflow as tf
import tensorflow.contrib.layers as tcl
sys.path.append('../')
from netutils.weightsinit import get_weightsinit
from netutils.activation import get_activation
from netutils.normalization import get_normalization
from network.devgg import DEVGG
from network.base_network import BaseNetwork
class DecoderSimple(BaseNetwork):
def __init__(self, config, is_training):
super(DecoderSimple, self).__init__(config, is_training)
self.name = config.get('name', 'DecoderSimple')
self.config = config
network_config = config.copy()
self.network = DEVGG(network_config, is_training)
def __call__(self, x, condition=None):
if condition is not None:
x = tf.concatenate([x, condition], axis=-1)
x, end_points = self.network(x)
return x
| [
"1069163331@qq.com"
] | 1069163331@qq.com |
a120684b2c1a40c3ec177781c4159a744f83ff71 | 6763fa3a0fbc42f5199ed806c736588be2bb66fa | /TP2/TestTP2.py | 25b8dc7a2aa97b41dd903852f4dbdc89c59ac95a | [] | no_license | etouss/Python | 3c0b75991cc72d33c24b1beb018edbf4abf4fbe5 | 0bcc7ff845bfef8863a2e61a8bc5e30541e081e6 | refs/heads/master | 2020-12-24T16:43:03.987578 | 2015-01-21T16:17:12 | 2015-01-21T16:17:12 | 19,542,867 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 779 | py | '''
TestTP2.py
Ce fichier contient quelques réponses erronées du tp2.
Vous devez le compléter pour les tester et les corriger.
'''
# Ici, on importe les modules:
from my_test import # Compléter par les fonctions a importer
# Ici, on definit les fonctions:
def suffixe(p):
(s,n) = p
if len(s) >= n:
return s[-n:]
else:
return False
def double(l):
for i in l:
t.append(i)
return l
# Ici, on test les fonctions lorsque le fichier n'est pas importé
if __name__ == "__main__":
S = [( ('', 0) , ''), ( ('suffixe', 3) , 'ixe' ) , ( ('a', 0) , '') , (('a', 3) , 'False' ) ]
#IOtest(suffixe, S)
T = [[], [1,2,3], [0,0]]
#Itest(double, T)
U = [([], []), ([1,2,3], [1,2,3]), ([0,0], [0,0,0,0])]
#IOtest(double , U)
| [
"tsst.etienne@gmail.com"
] | tsst.etienne@gmail.com |
e7e0aae2e70c383f7b5f090b4e1e35349fa8464b | 9e688b5bf5951a6a5a229c64a96672ec1895f900 | /Python/head/head.py | bb0dfb1e6140a751c15f68b76e1f5f5c136620fc | [] | no_license | andrew-morris/coding-homework | baf8c55898f5ef67e50c0c5b73b71b9b24a06513 | aad5a35277b4931da903a246280cf25f032ff483 | refs/heads/master | 2021-01-24T18:59:01.365843 | 2017-09-02T20:10:34 | 2017-09-02T20:10:34 | 86,171,852 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 349 | py | #!/usr/bin/env python
import sys
#TODO: Add arg for amount of lines
#TODO: Add arg for amount of bytes
#TODO: Support reading from a file
#TODO: Support reading from stdin
def main():
data = sys.stdin.read()
lines = data.split('\n')
for line in lines[:10]:
sys.stdout.write(line + '\n')
if __name__ == '__main__':
main()
| [
"morr.drew@gmail.com"
] | morr.drew@gmail.com |
55ba8ba43e70ad0c81ae5a13092713c51815930f | 93c05672c065e1ae091c1ad06ff5c66f07875ac7 | /semanticModel/residual.py | d3c4fcefd3b5ba49aa97c40e4196026ed5bf1a21 | [] | no_license | ncu-psl/STFlow | eeb7ab689dd97cc6712cc2e53adfae49d539fe55 | cf5024e1944d03f9defdad4e28ca1b85f9efab4d | refs/heads/master | 2023-07-10T01:26:54.197445 | 2021-08-09T10:32:40 | 2021-08-09T10:32:40 | 271,042,993 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 450 | py | import abc
from cffi import FFI
import _FDtomoC
class ResidualVector(object):
def __init__(self, residual_vector = None):
self.residualField = None
self.residual_vector = residual_vector
def getClass(self):
self.residual_vector = _FDtomoC.ffi.unpack(self.residualField, 50000000)
def getField(self):
residualFieldPtr = _FDtomoC.ffi.new("float[]", self.residual_vector)
return residualFieldPtr[0] | [
"q1qip123@gmail.com"
] | q1qip123@gmail.com |
c285f313dbea3d3eb1634c811e4e1e7ec485035a | fcde34b7f162cddee7f848122196b6c39523d9b1 | /django_movies/django_movies/asgi.py | 213752a3cf45f784192b7bd4ea08acbb4e0f47b2 | [] | no_license | ViktorRaboshchuk/movies_api | 9fa3552867aeae55536fecb8a38e05a1b4d5c4c2 | 9b510d858ca56729ebfad41ce4257a83c812eb2e | refs/heads/master | 2023-03-24T03:04:53.816223 | 2021-03-25T14:25:25 | 2021-03-25T14:25:25 | 346,648,350 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | """
ASGI config for django_movies project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_movies.settings')
application = get_asgi_application()
| [
"viktor.raboshchuk@gmail.com"
] | viktor.raboshchuk@gmail.com |
5467fae7e4778c44980c1df105301dce1b6eade3 | 6bd94f1a333fd7145996408d17ecba750d58f15f | /Code/wifi/RSSI/main.py | f03192499461c4d7e57c2c93e21d4d5bc20c5e36 | [] | no_license | jo-taye/Iot-Workshop-AASTU | 96228e92ef9d8a607319fc61745615b6edd299b3 | d5433e1b2b88d59b4c54fa0a9fef592f2b583361 | refs/heads/master | 2020-06-23T21:43:32.146187 | 2019-07-26T13:01:44 | 2019-07-26T13:01:44 | 198,761,633 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 471 | py | import machine
from network import WLAN
import pycom
wlan = WLAN(mode=WLAN.STA)
nets = wlan.scan()
print(nets)
while True:
for net in nets:
if net.ssid == "Ethiopia":
rssid = net.rssi
if rssid < -80:
pycom.rgbled(0x7f0000)
elif rssid >= -80 and rssid <= -70:
pycom.rgbled(0x7f7f00)
elif rssid > -70:
pycom.rgbled(0x007f00)
print(net.ssid, net.rssi)
| [
"yohannes.taye@aau.edu.et"
] | yohannes.taye@aau.edu.et |
eed711eb7afe83895d8a6bef4923273b65aa7ac0 | a3852496b239fdcb9789ab6f762c83eb634078ad | /events/migrations/0004_auto_20180725_1359.py | 9a0ccc648abe57061536edd6177211d5f865e559 | [] | no_license | MohamedRadwan180/test | 5159ee992d56abedf1fc6cd5928bef1168b5ac07 | 52759db05ae5991b356d3fd4016733a5002a20df | refs/heads/master | 2020-03-25T23:59:29.438648 | 2018-08-10T15:23:12 | 2018-08-10T15:23:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,573 | py | # Generated by Django 2.0.7 on 2018-07-25 10:59
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0003_auto_20180725_1302'),
]
operations = [
migrations.AddField(
model_name='offer',
name='status',
field=models.IntegerField(choices=[(0, 'Pending'), (1, 'Accepted'), (2, 'Rejected')], default=0),
),
migrations.AddField(
model_name='planner',
name='type',
field=models.CharField(default='U\\A', max_length=100),
),
migrations.AlterField(
model_name='event',
name='duration',
field=models.PositiveSmallIntegerField(),
),
migrations.AlterField(
model_name='event',
name='expcted_cpacity',
field=models.PositiveSmallIntegerField(),
),
migrations.AlterField(
model_name='event',
name='max_price',
field=models.DecimalField(decimal_places=2, max_digits=9, validators=[django.core.validators.MinValueValidator(0.0), django.core.validators.MaxValueValidator(999999999)]),
),
migrations.AlterField(
model_name='event',
name='pud_date',
field=models.DateTimeField(verbose_name='Publish Date'),
),
migrations.AlterField(
model_name='planner',
name='mobile',
field=models.CharField(max_length=13),
),
]
| [
"m.medhat_180@yahoo.com"
] | m.medhat_180@yahoo.com |
651930fd736184cb7f793d23885d3a0c3a2be442 | 67c3c2a310a4d129a45739ca6351052f36f6d5f4 | /venv/lib/python3.7/tarfile.py | 1ac8bdb103bbf0d353b2ffa45630fbbea77736ed | [] | no_license | cyobero/django-blog | a743203bdaf1d8ae9e6bd47c6e7b33a213a7abfd | 307335c84a0fa9eba6d3f69172a47580144cc066 | refs/heads/master | 2022-12-09T20:25:51.396813 | 2020-03-10T14:52:26 | 2020-03-10T14:52:26 | 245,950,344 | 0 | 0 | null | 2022-11-22T05:22:50 | 2020-03-09T05:20:31 | Python | UTF-8 | Python | false | false | 48 | py | /home/cyobero/anaconda3/lib/python3.7/tarfile.py | [
"cyobero@gmail.com"
] | cyobero@gmail.com |
f3d1fe716956a41dcaccd88cddd806332ba54e33 | 1b5c3039c05427ad5e731a18e06e0e0accb5ce98 | /scripts/creatematches.py | 2c4bb9fd8b280c5439cdaa0f3eddc508cad483bc | [] | no_license | matthew-brett/beatbased | 1df43cb7f16b4d6cde18acecd7d2b7209887ed89 | f6c7c6bd0fb62efcb3397d512f70717b49f5cccd | refs/heads/master | 2021-01-23T21:42:29.063883 | 2014-05-30T19:05:06 | 2014-05-30T19:05:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 728 | py | #!/bin/env python
'''Creates as many matches as possible for metric sequences, with 5-7 intervals'''
import beatsequence as BS
#First, create a list of all combinations of intervals, taking those which add up to 12
print "calculating possible combinations"
S=[]
for length in range(5,10):
L=[4 for n in range(length)]
for i in BS.valueperm(L):
#work out total, gp to next if not 12
total=0
for n in i:
total+=n
if total!=12:continue
i.sort()
if i not in S:
print "added",i
S.append(i)
#now run the match creator on S:
for i in S:
BS.getmatches(i,debug=True)
print i,"completed"
i=raw_input("Finished. Press enter to close")
| [
"matthew.brett@gmail.com"
] | matthew.brett@gmail.com |
709391620a55b233d0d5e264d633f5b22a369552 | 50740127489647171d701e7b26513fbb48bc7144 | /python-docs-samples-master/speech/cloud-client/transcribe.py | 259242e797a0e876925fd25b6e8cd329b3fbb888 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | nick-merlino/multimediaToolkit | e11ab5d0a596d86733aeb4833987d9ce53f7adf9 | d471857bca122fe0e97713c0c0148dc3646036af | refs/heads/master | 2021-01-01T17:15:26.991782 | 2017-07-22T15:15:23 | 2017-07-22T15:15:23 | 98,036,831 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,267 | py | #!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API sample application using the REST API for batch
processing.
Example usage:
python transcribe.py resources/audio.raw
python transcribe.py gs://cloud-samples-tests/speech/brooklyn.flac
"""
# [START import_libraries]
import argparse
import io
# [END import_libraries]
# [START def_transcribe_file]
def transcribe_file(speech_file):
"""Transcribe the given audio file."""
print("Transcribing file")
from google.cloud import speech
from google.cloud.speech import enums
from google.cloud.speech import types
client = speech.SpeechClient()
# [START migration_sync_request]
# [START migration_audio_config_file]
with io.open(speech_file, 'rb') as audio_file:
content = audio_file.read()
audio = types.RecognitionAudio(content=content)
config = types.RecognitionConfig(
encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,
# sample_rate_hertz=16000,
language_code='en-US')
# [END migration_audio_config_file]
# [START migration_sync_response]
response = client.recognize(config, audio)
# [END migration_sync_request]
if response.results:
alternatives = response.results[0].alternatives
for alternative in alternatives:
print('Transcript: {}'.format(alternative.transcript))
else:
print("No results found")
# [END migration_sync_response]
# [END def_transcribe_file]
# [START def_transcribe_gcs]
def transcribe_gcs(gcs_uri):
"""Transcribes the audio file specified by the gcs_uri."""
from google.cloud import speech
from google.cloud.speech import enums
from google.cloud.speech import types
client = speech.SpeechClient()
# [START migration_audio_config_gcs]
audio = types.RecognitionAudio(uri=gcs_uri)
config = types.RecognitionConfig(
encoding=enums.RecognitionConfig.AudioEncoding.FLAC,
sample_rate_hertz=16000,
language_code='en-US')
# [END migration_audio_config_gcs]
response = client.recognize(config, audio)
alternatives = response.results[0].alternatives
for alternative in alternatives:
print('Transcript: {}'.format(alternative.transcript))
# [END def_transcribe_gcs]
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
'path', help='File or GCS path for audio file to be recognized')
args = parser.parse_args()
if args.path.startswith('gs://'):
transcribe_gcs(args.path)
else:
transcribe_file(args.path)
| [
"NMERLINO@MITRE.ORG"
] | NMERLINO@MITRE.ORG |
348cf09a6a4d5dfc28378c1f633c35951329607f | 5e0abacbf058cbe730fb44b8121e1a6c8e6db0fc | /statusCheck/status/views.py | 2785a9f9b43abd0c39941b626647e06beb3e734c | [] | no_license | sprakashsingh/pythonDjangoDocker | 3c2f05144e1af418c200655d422659a84bed823b | 70ed0871b1390b15b9a76ccb2a3886a63312ba66 | refs/heads/master | 2020-04-07T19:36:04.148625 | 2018-11-22T08:49:35 | 2018-11-22T08:49:35 | 158,654,885 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 685 | py | from django.shortcuts import render
from rest_framework import generics, mixins, status, viewsets, serializers
from datetime import datetime
# Create your views here.
from rest_framework.response import Response
class TagListAPIView(generics.ListAPIView):
def list(self, request):
serializer_data = datetime.now().second % 10
print(serializer_data)
serializer = serializers.Serializer({"value":serializer_data})
stat = status.HTTP_200_OK
print(serializer.data)
if serializer_data%10 ==0:
stat = status.HTTP_503_SERVICE_UNAVAILABLE
return Response({
'tags': serializer_data,
}, status=stat)
| [
"satyap@synopsys.com"
] | satyap@synopsys.com |
b2a97343f96ca9246962933acc173b23375b9a5c | 3474b315da3cc5cb3f7823f19a18b63a8da6a526 | /scratch/KRAMS/src/apps/scratch/rch/mlab/yarn_cs.py | 2d8a38ca1f2761ea2b42d42e5d831bb3cf157889 | [] | no_license | h4ck3rm1k3/scratch | 8df97462f696bc2be00f1e58232e1cd915f0fafd | 0a114a41b0d1e9b2d68dbe7af7cf34db11512539 | refs/heads/master | 2021-01-21T15:31:38.718039 | 2013-09-19T10:48:24 | 2013-09-19T10:48:24 | 29,173,525 | 0 | 0 | null | 2015-01-13T04:58:57 | 2015-01-13T04:58:56 | null | UTF-8 | Python | false | false | 3,840 | py | #-------------------------------------------------------------------------------
#
# Copyright (c) 2009, IMB, RWTH Aachen.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in simvisage/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.simvisage.com/licenses/BSD.txt
#
# Thanks for using Simvisage open source!
#
# Created on Jul 22, 2010 by: rch
from numpy import \
loadtxt, ones_like, vstack, c_, hstack, array, cumsum, \
zeros_like, zeros
import wxversion
wxversion.select( '2.8' )
from os.path import join
from promod.simdb import SimDB
simdb = SimDB()
data_dir = join( simdb.exdata_dir, 'trc', 'bond_structure' )
from enthought.tvtk.api import tvtk
from enthought.mayavi.scripts import mayavi2
from enthought.mayavi import mlab
n_slices = 15
start_slice = 4
slice_range = range( start_slice, start_slice + n_slices )
slice_distance = 500 # micrometers
def read_yarn_structure():
slice_point_list = []
slice_radius_list = []
slice_len_list = []
cut_off_start = zeros( ( n_slices, ), dtype = 'int' )
cut_off_start[ 1: ] += 0
for slice_idx, cut_off_idx in zip( slice_range, cut_off_start ):
data_file = join( data_dir, '1cOrientiertSchnitt%d.txt' % slice_idx )
print 'reading data_file'
points = loadtxt( data_file ,
skiprows = 1,
usecols = ( 1, 2, 3 ) )
y = points[ cut_off_idx:, 0]
z = points[ cut_off_idx:, 1]
x = ones_like( y ) * slice_idx * slice_distance
r = points[ cut_off_idx:, 2]
slice_point_list.append( c_[ x, y, z ] )
slice_radius_list.append( r )
slice_len_list.append( points.shape[0] )
lens_arr = array( slice_len_list )
print 'slice lens', lens_arr
offset_arr = cumsum( lens_arr )
slice_offset_arr = zeros_like( offset_arr )
slice_offset_arr[1:] = offset_arr[:-1]
print 'slice offsets', slice_offset_arr
data_file = join( data_dir, 'connectivity.txt' )
filam_connect_arr = loadtxt( data_file )
print filam_connect_arr.shape
print filam_connect_arr.shape
print slice_offset_arr.shape
fil_map = array( filam_connect_arr + slice_offset_arr, dtype = 'int' )
points = vstack( slice_point_list )
radius = hstack( slice_radius_list )
print points.shape
print max( fil_map.flatten() )
p = points[ fil_map.flatten() ]
r = radius[ fil_map.flatten() ]
mlab.plot3d( p[:, 0], p[:, 1], p[:, 2], r,
tube_radius = 20, colormap = 'Spectral' )
offset = array( [0, 3, 6] )
cells = array( [10, 4000, 20, 5005, 20, 4080, 4000, 20, 404 ] )
# line_type = tvtk.Line().cell_type # VTKLine == 10
# cell_types = array( [line_type] )
# # Create the array of cells unambiguously.
# cell_array = tvtk.CellArray()
# cell_array.set_cells( 3, cells )
# Now create the UG.
ug = tvtk.UnstructuredGrid( points = points )
# Now just set the cell types and reuse the ug locations and cells.
# ug.set_cells( cell_types, offset, cell_array )
ug.point_data.scalars = radius
ug.point_data.scalars.name = 'radius'
return ug
# Now view the data.
@mayavi2.standalone
def view( ug ):
from enthought.mayavi.sources.vtk_data_source import VTKDataSource
from enthought.mayavi.modules.outline import Outline
from enthought.mayavi.modules.surface import Surface
from enthought.mayavi.modules.vectors import Vectors
mayavi.new_scene()
src = VTKDataSource( data = ug )
mayavi.add_source( src )
s = Surface()
mayavi.add_module( s )
if __name__ == '__main__':
ug = read_yarn_structure()
mlab.show() # view( ug )
| [
"Axel@Axel-Pc"
] | Axel@Axel-Pc |
be3316f334e724c618b42189f507354ad4952699 | d450ba14e9b0b970a3817d11d510553fd10696b5 | /python/169.MajorityElement.py | 3f5f549cb8dcddf8afb68b8e23d69ca990cbe5d2 | [] | no_license | xy-cai/leetcode | bf43f710332f20e7c4de96a9ddbd135ed16d25f4 | 3ee5af84a9b591d1a3814bcfb9bae5d8183261c1 | refs/heads/master | 2021-01-23T03:48:24.273020 | 2015-09-06T15:27:54 | 2015-09-06T15:27:54 | 40,019,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | class Solution(object):
def majorityElement(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
# delete two different num
# then the majority element will be the rest
# candidate = nums[0]
cnt = 0
for ele in nums:
if cnt:
if candidate == ele:
cnt += 1
else:
cnt -= 1
else:
candidate = ele
cnt = 1
return candidate | [
"tifosi.cai@gmail.com"
] | tifosi.cai@gmail.com |
fddbcb160ab6e4a91ee203118dc66d2e57485552 | 6a704951972e1debf3f41e4ae08867932930c816 | /exo1.py | a39137f5165ccd051d8a20f5fe85a79be8a69d7e | [] | no_license | Ngangolo/python-liste | db6868c114d354436ee2af52bafd866b31be2337 | 337291182feaa10f6835b974f79d5e909920aaa0 | refs/heads/main | 2023-03-17T12:22:18.583844 | 2021-03-16T00:23:56 | 2021-03-16T00:23:56 | 340,368,564 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 514 | py | jours=["lundi","mardi","mercredi","jeudi","vendredi"]
chiffres=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
composite=[0, "chocolat", 145, "Python", "Landais", 12.1, True]
print(chiffres) ; print(composite) ; print(jours[0]) ; print(jours[1]) ; print(jours[4]) ;
print(composite[2]) ; print(chiffres[3]+composite[2]) ; print(chiffres[9]*composite[0]) ;
print(jours[0] + jours[2])
if composite[6] :
print(composite[3] + " " + composite[4])
if not composite[6] :
print("Pas la peine de réfléchir plus longtemps !")
| [
"konengangolo54@gmail.com"
] | konengangolo54@gmail.com |
91f597ec42d56a81ab6b52570b993350a022eb20 | 93b5603de5cbc7f2bcd8348172e3f059105870b9 | /app/__init__.py | 35d84664de18bbbb3bdce674ddcd820a2d7a56ba | [] | no_license | kimpadkjaer/sample | 97804767898ca686a6dc3e359a9766855fc71bf8 | 042bae2e48180837dde146ef8dac33bf272b488e | refs/heads/master | 2020-05-07T10:56:53.309832 | 2019-04-23T20:10:26 | 2019-04-23T20:10:26 | 180,440,000 | 0 | 0 | null | 2019-04-09T19:58:28 | 2019-04-09T19:58:28 | null | UTF-8 | Python | false | false | 214 | py | from flask import Flask
from config import Config
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object(Config)
db = SQLAlchemy(app)
from app import routes, models, errors
| [
"noreply@github.com"
] | kimpadkjaer.noreply@github.com |
c2325688216669fc6c3d6a76dd35b96ec2905b34 | 6f59c30eb8801c7ab96fc6a0c67aeacff98ba333 | /football/urls.py | e6f5d5b5b0f041f5ab0e57efc364ea2cd7e09886 | [] | no_license | PyaeZaw97/FootballInfoProject | 7b9b250e69e807289e6b62f236909c7ca28ee29d | 2140bf7f7de5e0e952fe63a1eddb6a03b1f9aa3a | refs/heads/master | 2022-12-13T00:45:36.241848 | 2021-10-16T05:20:55 | 2021-10-16T05:20:55 | 196,852,309 | 0 | 0 | null | 2022-12-08T05:18:58 | 2019-07-14T15:23:06 | HTML | UTF-8 | Python | false | false | 824 | py | """football URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('footballapp.urls')),
]
| [
"noreply@github.com"
] | PyaeZaw97.noreply@github.com |
403301171b3118fa6b8b8f4c0438c7feedec9f3b | 05a598acd2128ac8fe5954fb7237329075625655 | /playlist-app/models.py | 52b1cce72ad990df108081e85c3860f61cb1211e | [] | no_license | ktkinsey37/flask-database-dj | d9c441c918645ff27e1b6be6c089e2f896adbc3b | 6d553eea21db596669aa6ae36937a0f036d9037f | refs/heads/master | 2023-04-15T16:39:28.118028 | 2021-04-21T21:33:36 | 2021-04-21T21:33:36 | 360,319,077 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,378 | py | """Models for Playlist app."""
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.schema import PrimaryKeyConstraint
db = SQLAlchemy()
class Playlist(db.Model):
"""Playlist."""
def __repr__(self):
p = self
return f'<Playlist {p.id} {p.name} {p.description}'
__tablename__ = 'playlists'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(100))
description = db.Column(db.String(100))
songs = db.relationship('Song', secondary='playlist_songs', backref="playlist")
class Song(db.Model):
"""Song."""
def __repr__(self):
s = self
return f'<Song {s.id} {s.title} {s.artist}'
__tablename__ = 'songs'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(100))
artist = db.Column(db.String(100))
class PlaylistSong(db.Model):
"""Mapping of a playlist to a song."""
__tablename__ = 'playlist_songs'
__table_args__ = (db.PrimaryKeyConstraint('playlist_id', 'song_id'), )
playlist_id = db.Column(db.Integer,
db.ForeignKey('playlists.id'))
song_id = db.Column(db.Integer,
db.ForeignKey('songs.id'))
# DO NOT MODIFY THIS FUNCTION
def connect_db(app):
"""Connect to database."""
db.app = app
db.init_app(app)
| [
"ktkinsey37@gmail.com"
] | ktkinsey37@gmail.com |
8178707676f800fb50f1590ef87fccd174c3c21f | e9d55ae524563c2de237a426d848df9bf15c938d | /watson_apis/Base/Base/urls.py | c5a697a323ddd4b5204ca4b331d31ef5994fff8e | [] | no_license | arycloud/google_vision_nlp_api_implementation | c87af9e615a7415f6c775884ead64412a870dab3 | b75c2181a888ac1d7965f5ee0744f017decc9464 | refs/heads/master | 2020-03-18T00:16:49.852534 | 2018-05-19T18:16:45 | 2018-05-19T18:16:45 | 132,249,489 | 0 | 1 | null | 2018-05-19T06:49:00 | 2018-05-05T13:16:29 | JavaScript | UTF-8 | Python | false | false | 837 | py | """Base URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from app import urls as app_urls
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^', include(app_urls)),
]
| [
"abdul12391@gmail.com"
] | abdul12391@gmail.com |
df4de3c89e3e0456ec62e028fb88040009f9c36e | 23611933f0faba84fc82a1bc0a85d97cf45aba99 | /google-cloud-sdk/lib/googlecloudsdk/third_party/appengine/api/taskqueue/taskqueue_service_pb.py | bb940144f8a18ef4f4b1e6b825080cea0ed6df38 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | KaranToor/MA450 | 1f112d1caccebdc04702a77d5a6cee867c15f75c | c98b58aeb0994e011df960163541e9379ae7ea06 | refs/heads/master | 2021-06-21T06:17:42.585908 | 2020-12-24T00:36:28 | 2020-12-24T00:36:28 | 79,285,433 | 1 | 1 | Apache-2.0 | 2020-12-24T00:38:09 | 2017-01-18T00:05:44 | Python | UTF-8 | Python | false | false | 262,899 | py | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: apphosting/api/taskqueue/taskqueue_service.proto
from googlecloudsdk.third_party.appengine.proto import ProtocolBuffer
import array
import thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
# Compatibility shim: newer ProtocolBuffer runtimes provide
# ExtendableProtocolMessage (proto2 extension support).  Record whether it is
# available and pick the appropriate base class for extendable messages so the
# rest of this generated module works on both old and new runtimes.
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
  _extension_runtime = True
  _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
  _extension_runtime = False
  _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
from googlecloudsdk.third_party.appengine.datastore.datastore_v3_pb import *
import googlecloudsdk.third_party.appengine.datastore.datastore_v3_pb
from googlecloudsdk.third_party.appengine.proto.message_set import MessageSet
class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
  """Generated message holding the taskqueue service's ErrorCode enum.

  The message itself has no fields (serializes to zero bytes); it exists so
  callers can interpret application error codes returned by taskqueue RPCs
  via the ErrorCode constants below and ErrorCode_Name().
  """

  # ErrorCode values
  OK = 0
  UNKNOWN_QUEUE = 1
  TRANSIENT_ERROR = 2
  INTERNAL_ERROR = 3
  TASK_TOO_LARGE = 4
  INVALID_TASK_NAME = 5
  INVALID_QUEUE_NAME = 6
  INVALID_URL = 7
  INVALID_QUEUE_RATE = 8
  PERMISSION_DENIED = 9
  TASK_ALREADY_EXISTS = 10
  TOMBSTONED_TASK = 11
  INVALID_ETA = 12
  INVALID_REQUEST = 13
  UNKNOWN_TASK = 14
  TOMBSTONED_QUEUE = 15
  DUPLICATE_TASK_NAME = 16
  SKIPPED = 17
  TOO_MANY_TASKS = 18
  INVALID_PAYLOAD = 19
  INVALID_RETRY_PARAMETERS = 20
  INVALID_QUEUE_MODE = 21
  ACL_LOOKUP_ERROR = 22
  TRANSACTIONAL_REQUEST_TOO_LARGE = 23
  INCORRECT_CREATOR_NAME = 24
  TASK_LEASE_EXPIRED = 25
  QUEUE_PAUSED = 26
  INVALID_TAG = 27
  DATASTORE_ERROR = 10000

  # Reverse map from numeric error code to its symbolic name, used by
  # ErrorCode_Name below.
  _ErrorCode_NAMES = {
    0: "OK",
    1: "UNKNOWN_QUEUE",
    2: "TRANSIENT_ERROR",
    3: "INTERNAL_ERROR",
    4: "TASK_TOO_LARGE",
    5: "INVALID_TASK_NAME",
    6: "INVALID_QUEUE_NAME",
    7: "INVALID_URL",
    8: "INVALID_QUEUE_RATE",
    9: "PERMISSION_DENIED",
    10: "TASK_ALREADY_EXISTS",
    11: "TOMBSTONED_TASK",
    12: "INVALID_ETA",
    13: "INVALID_REQUEST",
    14: "UNKNOWN_TASK",
    15: "TOMBSTONED_QUEUE",
    16: "DUPLICATE_TASK_NAME",
    17: "SKIPPED",
    18: "TOO_MANY_TASKS",
    19: "INVALID_PAYLOAD",
    20: "INVALID_RETRY_PARAMETERS",
    21: "INVALID_QUEUE_MODE",
    22: "ACL_LOOKUP_ERROR",
    23: "TRANSACTIONAL_REQUEST_TOO_LARGE",
    24: "INCORRECT_CREATOR_NAME",
    25: "TASK_LEASE_EXPIRED",
    26: "QUEUE_PAUSED",
    27: "INVALID_TAG",
    10000: "DATASTORE_ERROR",
  }

  # Returns the symbolic name for error code x, or "" if x is unknown.
  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)

  def __init__(self, contents=None):
    """Optionally initializes the (fieldless) message from serialized bytes."""
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    # No fields to merge; only the self-merge guard applies.
    assert x is not self

  def Equals(self, x):
    # All instances of this fieldless message compare equal.
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields, so the message is always initialized.
    initialized = 1
    return initialized

  def ByteSize(self):
    # Serialized size is always zero: there are no fields.
    n = 0
    return n

  def ByteSizePartial(self):
    # Partial (required-fields-ignored) size is likewise zero.
    n = 0
    return n

  def Clear(self):
    # Nothing to clear.
    pass

  def OutputUnchecked(self, out):
    # Nothing to write.
    pass

  def OutputPartial(self, out):
    # Nothing to write.
    pass

  def TryMerge(self, d):
    """Consumes (and discards) any fields found in decoder d."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    # No fields, so the text representation is empty.
    res=""
    return res

  # Builds a dense tuple indexed 0..maxtag from a sparse {tag: value} dict;
  # invoked at class-definition time to build _TEXT and _TYPES below.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Tag-number -> field-name table used for text formatting.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)

  # Tag-number -> wire-type table used by the decoder.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueServiceError'
class TaskQueueRetryParameters(ProtocolBuffer.ProtocolMessage):
  """Retry policy for a task: attempt/age limits and exponential backoff shape.

  All five fields are optional, with class-level defaults:
  retry_limit=0, age_limit_sec=0, min_backoff_sec=0.1,
  max_backoff_sec=3600.0, max_doublings=16.
  """
  # has_*_ flags track field presence; *_ attributes hold the values.
  has_retry_limit_ = 0
  retry_limit_ = 0
  has_age_limit_sec_ = 0
  age_limit_sec_ = 0
  has_min_backoff_sec_ = 0
  min_backoff_sec_ = 0.1
  has_max_backoff_sec_ = 0
  max_backoff_sec_ = 3600.0
  has_max_doublings_ = 0
  max_doublings_ = 16
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def retry_limit(self): return self.retry_limit_
  def set_retry_limit(self, x):
    self.has_retry_limit_ = 1
    self.retry_limit_ = x
  def clear_retry_limit(self):
    if self.has_retry_limit_:
      self.has_retry_limit_ = 0
      self.retry_limit_ = 0
  def has_retry_limit(self): return self.has_retry_limit_
  def age_limit_sec(self): return self.age_limit_sec_
  def set_age_limit_sec(self, x):
    self.has_age_limit_sec_ = 1
    self.age_limit_sec_ = x
  def clear_age_limit_sec(self):
    if self.has_age_limit_sec_:
      self.has_age_limit_sec_ = 0
      self.age_limit_sec_ = 0
  def has_age_limit_sec(self): return self.has_age_limit_sec_
  def min_backoff_sec(self): return self.min_backoff_sec_
  def set_min_backoff_sec(self, x):
    self.has_min_backoff_sec_ = 1
    self.min_backoff_sec_ = x
  def clear_min_backoff_sec(self):
    if self.has_min_backoff_sec_:
      self.has_min_backoff_sec_ = 0
      self.min_backoff_sec_ = 0.1
  def has_min_backoff_sec(self): return self.has_min_backoff_sec_
  def max_backoff_sec(self): return self.max_backoff_sec_
  def set_max_backoff_sec(self, x):
    self.has_max_backoff_sec_ = 1
    self.max_backoff_sec_ = x
  def clear_max_backoff_sec(self):
    if self.has_max_backoff_sec_:
      self.has_max_backoff_sec_ = 0
      self.max_backoff_sec_ = 3600.0
  def has_max_backoff_sec(self): return self.has_max_backoff_sec_
  def max_doublings(self): return self.max_doublings_
  def set_max_doublings(self, x):
    self.has_max_doublings_ = 1
    self.max_doublings_ = x
  def clear_max_doublings(self):
    if self.has_max_doublings_:
      self.has_max_doublings_ = 0
      self.max_doublings_ = 16
  def has_max_doublings(self): return self.has_max_doublings_
  def MergeFrom(self, x):
    # Copies only the fields that are set on x.
    assert x is not self
    if (x.has_retry_limit()): self.set_retry_limit(x.retry_limit())
    if (x.has_age_limit_sec()): self.set_age_limit_sec(x.age_limit_sec())
    if (x.has_min_backoff_sec()): self.set_min_backoff_sec(x.min_backoff_sec())
    if (x.has_max_backoff_sec()): self.set_max_backoff_sec(x.max_backoff_sec())
    if (x.has_max_doublings()): self.set_max_doublings(x.max_doublings())
  def Equals(self, x):
    # Presence flags must match as well as values.
    if x is self: return 1
    if self.has_retry_limit_ != x.has_retry_limit_: return 0
    if self.has_retry_limit_ and self.retry_limit_ != x.retry_limit_: return 0
    if self.has_age_limit_sec_ != x.has_age_limit_sec_: return 0
    if self.has_age_limit_sec_ and self.age_limit_sec_ != x.age_limit_sec_: return 0
    if self.has_min_backoff_sec_ != x.has_min_backoff_sec_: return 0
    if self.has_min_backoff_sec_ and self.min_backoff_sec_ != x.min_backoff_sec_: return 0
    if self.has_max_backoff_sec_ != x.has_max_backoff_sec_: return 0
    if self.has_max_backoff_sec_ and self.max_backoff_sec_ != x.max_backoff_sec_: return 0
    if self.has_max_doublings_ != x.has_max_doublings_: return 0
    if self.has_max_doublings_ and self.max_doublings_ != x.max_doublings_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # All fields are optional, so the message is always initialized.
    initialized = 1
    return initialized
  def ByteSize(self):
    # 1 byte per tag; doubles are fixed 8-byte payloads (hence += 9).
    n = 0
    if (self.has_retry_limit_): n += 1 + self.lengthVarInt64(self.retry_limit_)
    if (self.has_age_limit_sec_): n += 1 + self.lengthVarInt64(self.age_limit_sec_)
    if (self.has_min_backoff_sec_): n += 9
    if (self.has_max_backoff_sec_): n += 9
    if (self.has_max_doublings_): n += 1 + self.lengthVarInt64(self.max_doublings_)
    return n
  def ByteSizePartial(self):
    n = 0
    if (self.has_retry_limit_): n += 1 + self.lengthVarInt64(self.retry_limit_)
    if (self.has_age_limit_sec_): n += 1 + self.lengthVarInt64(self.age_limit_sec_)
    if (self.has_min_backoff_sec_): n += 9
    if (self.has_max_backoff_sec_): n += 9
    if (self.has_max_doublings_): n += 1 + self.lengthVarInt64(self.max_doublings_)
    return n
  def Clear(self):
    self.clear_retry_limit()
    self.clear_age_limit_sec()
    self.clear_min_backoff_sec()
    self.clear_max_backoff_sec()
    self.clear_max_doublings()
  def OutputUnchecked(self, out):
    # Tag bytes encode (field_number << 3) | wire_type:
    # 8/16/40 = fields 1/2/5 varint; 25/33 = fields 3/4 64-bit double.
    if (self.has_retry_limit_):
      out.putVarInt32(8)
      out.putVarInt32(self.retry_limit_)
    if (self.has_age_limit_sec_):
      out.putVarInt32(16)
      out.putVarInt64(self.age_limit_sec_)
    if (self.has_min_backoff_sec_):
      out.putVarInt32(25)
      out.putDouble(self.min_backoff_sec_)
    if (self.has_max_backoff_sec_):
      out.putVarInt32(33)
      out.putDouble(self.max_backoff_sec_)
    if (self.has_max_doublings_):
      out.putVarInt32(40)
      out.putVarInt32(self.max_doublings_)
  def OutputPartial(self, out):
    if (self.has_retry_limit_):
      out.putVarInt32(8)
      out.putVarInt32(self.retry_limit_)
    if (self.has_age_limit_sec_):
      out.putVarInt32(16)
      out.putVarInt64(self.age_limit_sec_)
    if (self.has_min_backoff_sec_):
      out.putVarInt32(25)
      out.putDouble(self.min_backoff_sec_)
    if (self.has_max_backoff_sec_):
      out.putVarInt32(33)
      out.putDouble(self.max_backoff_sec_)
    if (self.has_max_doublings_):
      out.putVarInt32(40)
      out.putVarInt32(self.max_doublings_)
  def TryMerge(self, d):
    # Decodes known tags; unknown fields are skipped via skipData.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_retry_limit(d.getVarInt32())
        continue
      if tt == 16:
        self.set_age_limit_sec(d.getVarInt64())
        continue
      if tt == 25:
        self.set_min_backoff_sec(d.getDouble())
        continue
      if tt == 33:
        self.set_max_backoff_sec(d.getDouble())
        continue
      if tt == 40:
        self.set_max_doublings(d.getVarInt32())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_retry_limit_: res+=prefix+("retry_limit: %s\n" % self.DebugFormatInt32(self.retry_limit_))
    if self.has_age_limit_sec_: res+=prefix+("age_limit_sec: %s\n" % self.DebugFormatInt64(self.age_limit_sec_))
    if self.has_min_backoff_sec_: res+=prefix+("min_backoff_sec: %s\n" % self.DebugFormat(self.min_backoff_sec_))
    if self.has_max_backoff_sec_: res+=prefix+("max_backoff_sec: %s\n" % self.DebugFormat(self.max_backoff_sec_))
    if self.has_max_doublings_: res+=prefix+("max_doublings: %s\n" % self.DebugFormatInt32(self.max_doublings_))
    return res
  # Expands a sparse {tag: value} dict into a dense tuple indexed by tag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kretry_limit = 1
  kage_limit_sec = 2
  kmin_backoff_sec = 3
  kmax_backoff_sec = 4
  kmax_doublings = 5
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "retry_limit",
    2: "age_limit_sec",
    3: "min_backoff_sec",
    4: "max_backoff_sec",
    5: "max_doublings",
  }, 5)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.DOUBLE,
    4: ProtocolBuffer.Encoder.DOUBLE,
    5: ProtocolBuffer.Encoder.NUMERIC,
  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueRetryParameters'
class TaskQueueAcl(ProtocolBuffer.ProtocolMessage):
  """Access-control list: repeated user_email and writer_email strings.

  Both fields are repeated (lists of strings); an empty message is valid.
  """
  def __init__(self, contents=None):
    self.user_email_ = []
    self.writer_email_ = []
    if contents is not None: self.MergeFromString(contents)
  def user_email_size(self): return len(self.user_email_)
  def user_email_list(self): return self.user_email_
  def user_email(self, i):
    return self.user_email_[i]
  def set_user_email(self, i, x):
    self.user_email_[i] = x
  def add_user_email(self, x):
    self.user_email_.append(x)
  def clear_user_email(self):
    self.user_email_ = []
  def writer_email_size(self): return len(self.writer_email_)
  def writer_email_list(self): return self.writer_email_
  def writer_email(self, i):
    return self.writer_email_[i]
  def set_writer_email(self, i, x):
    self.writer_email_[i] = x
  def add_writer_email(self, x):
    self.writer_email_.append(x)
  def clear_writer_email(self):
    self.writer_email_ = []
  def MergeFrom(self, x):
    # Appends x's entries; does not deduplicate or replace existing ones.
    assert x is not self
    for i in xrange(x.user_email_size()): self.add_user_email(x.user_email(i))
    for i in xrange(x.writer_email_size()): self.add_writer_email(x.writer_email(i))
  def Equals(self, x):
    # Element order matters: lists are compared pairwise.
    if x is self: return 1
    if len(self.user_email_) != len(x.user_email_): return 0
    for e1, e2 in zip(self.user_email_, x.user_email_):
      if e1 != e2: return 0
    if len(self.writer_email_) != len(x.writer_email_): return 0
    for e1, e2 in zip(self.writer_email_, x.writer_email_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized
  def ByteSize(self):
    # 1 tag byte per element plus length-prefixed string payload.
    n = 0
    n += 1 * len(self.user_email_)
    for i in xrange(len(self.user_email_)): n += self.lengthString(len(self.user_email_[i]))
    n += 1 * len(self.writer_email_)
    for i in xrange(len(self.writer_email_)): n += self.lengthString(len(self.writer_email_[i]))
    return n
  def ByteSizePartial(self):
    n = 0
    n += 1 * len(self.user_email_)
    for i in xrange(len(self.user_email_)): n += self.lengthString(len(self.user_email_[i]))
    n += 1 * len(self.writer_email_)
    for i in xrange(len(self.writer_email_)): n += self.lengthString(len(self.writer_email_[i]))
    return n
  def Clear(self):
    self.clear_user_email()
    self.clear_writer_email()
  def OutputUnchecked(self, out):
    # Tags 10/18 = fields 1/2, length-delimited strings.
    for i in xrange(len(self.user_email_)):
      out.putVarInt32(10)
      out.putPrefixedString(self.user_email_[i])
    for i in xrange(len(self.writer_email_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.writer_email_[i])
  def OutputPartial(self, out):
    for i in xrange(len(self.user_email_)):
      out.putVarInt32(10)
      out.putPrefixedString(self.user_email_[i])
    for i in xrange(len(self.writer_email_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.writer_email_[i])
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.add_user_email(d.getPrefixedString())
        continue
      if tt == 18:
        self.add_writer_email(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.user_email_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("user_email%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    cnt=0
    for e in self.writer_email_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("writer_email%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    return res
  # Expands a sparse {tag: value} dict into a dense tuple indexed by tag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kuser_email = 1
  kwriter_email = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "user_email",
    2: "writer_email",
  }, 2)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAcl'
class TaskQueueHttpHeader(ProtocolBuffer.ProtocolMessage):
  """A single HTTP header: required `key` and `value` string fields.

  Both fields are required, so IsInitialized reports failure (and
  OutputUnchecked assumes presence) when either is unset.
  """
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""
  def has_value(self): return self.has_value_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Both fields are required; collect human-readable reasons if asked.
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized
  def ByteSize(self):
    # + 2 accounts for the two one-byte tags of the required fields.
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    return n + 2
  def ByteSizePartial(self):
    n = 0
    if (self.has_key_):
      n += 1
      n += self.lengthString(len(self.key_))
    if (self.has_value_):
      n += 1
      n += self.lengthString(len(self.value_))
    return n
  def Clear(self):
    self.clear_key()
    self.clear_value()
  def OutputUnchecked(self, out):
    # Tags 10/18 = fields 1/2, length-delimited strings.
    out.putVarInt32(10)
    out.putPrefixedString(self.key_)
    out.putVarInt32(18)
    out.putPrefixedString(self.value_)
  def OutputPartial(self, out):
    if (self.has_key_):
      out.putVarInt32(10)
      out.putPrefixedString(self.key_)
    if (self.has_value_):
      out.putVarInt32(18)
      out.putPrefixedString(self.value_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_value(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
    return res
  # Expands a sparse {tag: value} dict into a dense tuple indexed by tag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kkey = 1
  kvalue = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "key",
    2: "value",
  }, 2)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueHttpHeader'
class TaskQueueMode(ProtocolBuffer.ProtocolMessage):
  """Field-less message used as a namespace for queue Mode values.

  PUSH=0, PULL=1. With no fields, all wire-format methods are no-ops.
  """
  # Mode values
  PUSH = 0
  PULL = 1
  _Mode_NAMES = {
    0: "PUSH",
    1: "PULL",
  }
  # Returns "" (not an exception) for unknown modes.
  def Mode_Name(cls, x): return cls._Mode_NAMES.get(x, "")
  Mode_Name = classmethod(Mode_Name)
  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)
  def MergeFrom(self, x):
    assert x is not self
  def Equals(self, x):
    if x is self: return 1
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized
  def ByteSize(self):
    n = 0
    return n
  def ByteSizePartial(self):
    n = 0
    return n
  def Clear(self):
    pass
  def OutputUnchecked(self, out):
    pass
  def OutputPartial(self, out):
    pass
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res
  # Expands a sparse {tag: value} dict into a dense tuple indexed by tag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueMode'
class TaskQueueAddRequest_Header(ProtocolBuffer.ProtocolMessage):
  """One key/value HTTP header nested in TaskQueueAddRequest as a proto group.

  Group encoding: fields use tags 58/66 ((7<<3)|2 and (8<<3)|2), and
  TryMerge stops at tag 52 == (6 << 3) | 4 — the END_GROUP marker of the
  parent's field 6 — rather than at end of buffer.
  """
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""
  def has_value(self): return self.has_value_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Both fields are required.
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized
  def ByteSize(self):
    # + 2 accounts for the two one-byte tags of the required fields.
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    return n + 2
  def ByteSizePartial(self):
    n = 0
    if (self.has_key_):
      n += 1
      n += self.lengthString(len(self.key_))
    if (self.has_value_):
      n += 1
      n += self.lengthString(len(self.value_))
    return n
  def Clear(self):
    self.clear_key()
    self.clear_value()
  def OutputUnchecked(self, out):
    out.putVarInt32(58)
    out.putPrefixedString(self.key_)
    out.putVarInt32(66)
    out.putPrefixedString(self.value_)
  def OutputPartial(self, out):
    if (self.has_key_):
      out.putVarInt32(58)
      out.putPrefixedString(self.key_)
    if (self.has_value_):
      out.putVarInt32(66)
      out.putPrefixedString(self.value_)
  def TryMerge(self, d):
    # Loops until the parent group's END_GROUP tag (52), not end of buffer.
    while 1:
      tt = d.getVarInt32()
      if tt == 52: break
      if tt == 58:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 66:
        self.set_value(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
    return res
class TaskQueueAddRequest_CronTimetable(ProtocolBuffer.ProtocolMessage):
  """Cron schedule/timezone pair nested in TaskQueueAddRequest as a proto group.

  Group encoding: fields use tags 106/114 ((13<<3)|2 and (14<<3)|2), and
  TryMerge stops at tag 100 == (12 << 3) | 4 — the END_GROUP marker of the
  parent's field 12 — rather than at end of buffer.
  """
  has_schedule_ = 0
  schedule_ = ""
  has_timezone_ = 0
  timezone_ = ""
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def schedule(self): return self.schedule_
  def set_schedule(self, x):
    self.has_schedule_ = 1
    self.schedule_ = x
  def clear_schedule(self):
    if self.has_schedule_:
      self.has_schedule_ = 0
      self.schedule_ = ""
  def has_schedule(self): return self.has_schedule_
  def timezone(self): return self.timezone_
  def set_timezone(self, x):
    self.has_timezone_ = 1
    self.timezone_ = x
  def clear_timezone(self):
    if self.has_timezone_:
      self.has_timezone_ = 0
      self.timezone_ = ""
  def has_timezone(self): return self.has_timezone_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_schedule()): self.set_schedule(x.schedule())
    if (x.has_timezone()): self.set_timezone(x.timezone())
  def Equals(self, x):
    if x is self: return 1
    if self.has_schedule_ != x.has_schedule_: return 0
    if self.has_schedule_ and self.schedule_ != x.schedule_: return 0
    if self.has_timezone_ != x.has_timezone_: return 0
    if self.has_timezone_ and self.timezone_ != x.timezone_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Both fields are required.
    initialized = 1
    if (not self.has_schedule_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: schedule not set.')
    if (not self.has_timezone_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: timezone not set.')
    return initialized
  def ByteSize(self):
    # + 2 accounts for the two one-byte tags of the required fields.
    n = 0
    n += self.lengthString(len(self.schedule_))
    n += self.lengthString(len(self.timezone_))
    return n + 2
  def ByteSizePartial(self):
    n = 0
    if (self.has_schedule_):
      n += 1
      n += self.lengthString(len(self.schedule_))
    if (self.has_timezone_):
      n += 1
      n += self.lengthString(len(self.timezone_))
    return n
  def Clear(self):
    self.clear_schedule()
    self.clear_timezone()
  def OutputUnchecked(self, out):
    out.putVarInt32(106)
    out.putPrefixedString(self.schedule_)
    out.putVarInt32(114)
    out.putPrefixedString(self.timezone_)
  def OutputPartial(self, out):
    if (self.has_schedule_):
      out.putVarInt32(106)
      out.putPrefixedString(self.schedule_)
    if (self.has_timezone_):
      out.putVarInt32(114)
      out.putPrefixedString(self.timezone_)
  def TryMerge(self, d):
    # Loops until the parent group's END_GROUP tag (100), not end of buffer.
    while 1:
      tt = d.getVarInt32()
      if tt == 100: break
      if tt == 106:
        self.set_schedule(d.getPrefixedString())
        continue
      if tt == 114:
        self.set_timezone(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_schedule_: res+=prefix+("schedule: %s\n" % self.DebugFormatString(self.schedule_))
    if self.has_timezone_: res+=prefix+("timezone: %s\n" % self.DebugFormatString(self.timezone_))
    return res
class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
# RequestMethod values
GET = 1
POST = 2
HEAD = 3
PUT = 4
DELETE = 5
_RequestMethod_NAMES = {
1: "GET",
2: "POST",
3: "HEAD",
4: "PUT",
5: "DELETE",
}
def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
RequestMethod_Name = classmethod(RequestMethod_Name)
has_queue_name_ = 0
queue_name_ = ""
has_task_name_ = 0
task_name_ = ""
has_eta_usec_ = 0
eta_usec_ = 0
has_method_ = 0
method_ = 2
has_url_ = 0
url_ = ""
has_body_ = 0
body_ = ""
has_transaction_ = 0
transaction_ = None
has_datastore_transaction_ = 0
datastore_transaction_ = ""
has_app_id_ = 0
app_id_ = ""
has_crontimetable_ = 0
crontimetable_ = None
has_description_ = 0
description_ = ""
has_payload_ = 0
payload_ = None
has_retry_parameters_ = 0
retry_parameters_ = None
has_mode_ = 0
mode_ = 0
has_tag_ = 0
tag_ = ""
has_cron_retry_parameters_ = 0
cron_retry_parameters_ = None
def __init__(self, contents=None):
self.header_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def eta_usec(self): return self.eta_usec_
def set_eta_usec(self, x):
self.has_eta_usec_ = 1
self.eta_usec_ = x
def clear_eta_usec(self):
if self.has_eta_usec_:
self.has_eta_usec_ = 0
self.eta_usec_ = 0
def has_eta_usec(self): return self.has_eta_usec_
def method(self): return self.method_
def set_method(self, x):
self.has_method_ = 1
self.method_ = x
def clear_method(self):
if self.has_method_:
self.has_method_ = 0
self.method_ = 2
def has_method(self): return self.has_method_
def url(self): return self.url_
def set_url(self, x):
self.has_url_ = 1
self.url_ = x
def clear_url(self):
if self.has_url_:
self.has_url_ = 0
self.url_ = ""
def has_url(self): return self.has_url_
def header_size(self): return len(self.header_)
def header_list(self): return self.header_
def header(self, i):
return self.header_[i]
def mutable_header(self, i):
return self.header_[i]
def add_header(self):
x = TaskQueueAddRequest_Header()
self.header_.append(x)
return x
def clear_header(self):
self.header_ = []
def body(self): return self.body_
def set_body(self, x):
self.has_body_ = 1
self.body_ = x
def clear_body(self):
if self.has_body_:
self.has_body_ = 0
self.body_ = ""
def has_body(self): return self.has_body_
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
# Warning: this method does not acquire the lock.
if self.has_transaction_:
self.has_transaction_ = 0;
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def datastore_transaction(self): return self.datastore_transaction_
def set_datastore_transaction(self, x):
self.has_datastore_transaction_ = 1
self.datastore_transaction_ = x
def clear_datastore_transaction(self):
if self.has_datastore_transaction_:
self.has_datastore_transaction_ = 0
self.datastore_transaction_ = ""
def has_datastore_transaction(self): return self.has_datastore_transaction_
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def crontimetable(self):
if self.crontimetable_ is None:
self.lazy_init_lock_.acquire()
try:
if self.crontimetable_ is None: self.crontimetable_ = TaskQueueAddRequest_CronTimetable()
finally:
self.lazy_init_lock_.release()
return self.crontimetable_
def mutable_crontimetable(self): self.has_crontimetable_ = 1; return self.crontimetable()
def clear_crontimetable(self):
# Warning: this method does not acquire the lock.
if self.has_crontimetable_:
self.has_crontimetable_ = 0;
if self.crontimetable_ is not None: self.crontimetable_.Clear()
def has_crontimetable(self): return self.has_crontimetable_
def description(self): return self.description_
def set_description(self, x):
self.has_description_ = 1
self.description_ = x
def clear_description(self):
if self.has_description_:
self.has_description_ = 0
self.description_ = ""
def has_description(self): return self.has_description_
def payload(self):
if self.payload_ is None:
self.lazy_init_lock_.acquire()
try:
if self.payload_ is None: self.payload_ = MessageSet()
finally:
self.lazy_init_lock_.release()
return self.payload_
def mutable_payload(self): self.has_payload_ = 1; return self.payload()
def clear_payload(self):
# Warning: this method does not acquire the lock.
if self.has_payload_:
self.has_payload_ = 0;
if self.payload_ is not None: self.payload_.Clear()
def has_payload(self): return self.has_payload_
def retry_parameters(self):
if self.retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.retry_parameters_
def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
def clear_retry_parameters(self):
# Warning: this method does not acquire the lock.
if self.has_retry_parameters_:
self.has_retry_parameters_ = 0;
if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
def has_retry_parameters(self): return self.has_retry_parameters_
def mode(self): return self.mode_
def set_mode(self, x):
self.has_mode_ = 1
self.mode_ = x
def clear_mode(self):
if self.has_mode_:
self.has_mode_ = 0
self.mode_ = 0
def has_mode(self): return self.has_mode_
def tag(self): return self.tag_
def set_tag(self, x):
self.has_tag_ = 1
self.tag_ = x
def clear_tag(self):
if self.has_tag_:
self.has_tag_ = 0
self.tag_ = ""
def has_tag(self): return self.has_tag_
def cron_retry_parameters(self):
if self.cron_retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cron_retry_parameters_ is None: self.cron_retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.cron_retry_parameters_
def mutable_cron_retry_parameters(self): self.has_cron_retry_parameters_ = 1; return self.cron_retry_parameters()
def clear_cron_retry_parameters(self):
# Warning: this method does not acquire the lock.
if self.has_cron_retry_parameters_:
self.has_cron_retry_parameters_ = 0;
if self.cron_retry_parameters_ is not None: self.cron_retry_parameters_.Clear()
def has_cron_retry_parameters(self): return self.has_cron_retry_parameters_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_task_name()): self.set_task_name(x.task_name())
if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
if (x.has_method()): self.set_method(x.method())
if (x.has_url()): self.set_url(x.url())
for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
if (x.has_body()): self.set_body(x.body())
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
if (x.has_datastore_transaction()): self.set_datastore_transaction(x.datastore_transaction())
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_crontimetable()): self.mutable_crontimetable().MergeFrom(x.crontimetable())
if (x.has_description()): self.set_description(x.description())
if (x.has_payload()): self.mutable_payload().MergeFrom(x.payload())
if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
if (x.has_mode()): self.set_mode(x.mode())
if (x.has_tag()): self.set_tag(x.tag())
if (x.has_cron_retry_parameters()): self.mutable_cron_retry_parameters().MergeFrom(x.cron_retry_parameters())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
if self.has_eta_usec_ != x.has_eta_usec_: return 0
if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
if self.has_method_ != x.has_method_: return 0
if self.has_method_ and self.method_ != x.method_: return 0
if self.has_url_ != x.has_url_: return 0
if self.has_url_ and self.url_ != x.url_: return 0
if len(self.header_) != len(x.header_): return 0
for e1, e2 in zip(self.header_, x.header_):
if e1 != e2: return 0
if self.has_body_ != x.has_body_: return 0
if self.has_body_ and self.body_ != x.body_: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if self.has_datastore_transaction_ != x.has_datastore_transaction_: return 0
if self.has_datastore_transaction_ and self.datastore_transaction_ != x.datastore_transaction_: return 0
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_crontimetable_ != x.has_crontimetable_: return 0
if self.has_crontimetable_ and self.crontimetable_ != x.crontimetable_: return 0
if self.has_description_ != x.has_description_: return 0
if self.has_description_ and self.description_ != x.description_: return 0
if self.has_payload_ != x.has_payload_: return 0
if self.has_payload_ and self.payload_ != x.payload_: return 0
if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
if self.has_mode_ != x.has_mode_: return 0
if self.has_mode_ and self.mode_ != x.mode_: return 0
if self.has_tag_ != x.has_tag_: return 0
if self.has_tag_ and self.tag_ != x.tag_: return 0
if self.has_cron_retry_parameters_ != x.has_cron_retry_parameters_: return 0
if self.has_cron_retry_parameters_ and self.cron_retry_parameters_ != x.cron_retry_parameters_: return 0
return 1
def IsInitialized(self, debug_strs=None):
  """Return 1 iff all required fields are set and nested messages validate.

  Required fields for TaskQueueAddRequest: queue_name, task_name and
  eta_usec (ByteSize/OutputUnchecked assume these are present).  Optional
  embedded messages and every repeated Header entry are validated
  recursively when present.

  Args:
    debug_strs: optional list; a human-readable reason is appended for
      each missing required field.
  """
  initialized = 1
  if (not self.has_queue_name_):
    initialized = 0
    if debug_strs is not None:
      debug_strs.append('Required field: queue_name not set.')
  if (not self.has_task_name_):
    initialized = 0
    if debug_strs is not None:
      debug_strs.append('Required field: task_name not set.')
  if (not self.has_eta_usec_):
    initialized = 0
    if debug_strs is not None:
      debug_strs.append('Required field: eta_usec not set.')
  for p in self.header_:
    if not p.IsInitialized(debug_strs): initialized=0
  if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
  if (self.has_crontimetable_ and not self.crontimetable_.IsInitialized(debug_strs)): initialized = 0
  if (self.has_payload_ and not self.payload_.IsInitialized(debug_strs)): initialized = 0
  if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
  if (self.has_cron_retry_parameters_ and not self.cron_retry_parameters_.IsInitialized(debug_strs)): initialized = 0
  return initialized
def ByteSize(self):
  """Return the encoded size in bytes, assuming required fields are set.

  Tag-byte accounting follows protobuf wire encoding: fields numbered
  1-15 take a 1-byte tag, fields 16+ take 2 bytes, and a group costs a
  start tag AND an end tag.  The trailing `+ 3` is the tags of the three
  required fields (queue_name, task_name, eta_usec), whose payload sizes
  are added unconditionally above.
  """
  n = 0
  n += self.lengthString(len(self.queue_name_))
  n += self.lengthString(len(self.task_name_))
  n += self.lengthVarInt64(self.eta_usec_)
  if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
  if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
  n += 2 * len(self.header_)  # start/end group tags for each Header (field 6)
  for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
  if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
  if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
  if (self.has_datastore_transaction_): n += 2 + self.lengthString(len(self.datastore_transaction_))
  if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
  if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSize()
  if (self.has_description_): n += 1 + self.lengthString(len(self.description_))
  if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSize())
  if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSize())
  if (self.has_mode_): n += 2 + self.lengthVarInt64(self.mode_)
  if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
  if (self.has_cron_retry_parameters_): n += 2 + self.lengthString(self.cron_retry_parameters_.ByteSize())
  return n + 3
def ByteSizePartial(self):
  """Return the encoded size counting only fields that are actually set.

  Unlike ByteSize, required fields (queue_name, task_name, eta_usec) are
  guarded by their has_ flags, and nested sizes use ByteSizePartial, so
  this is safe on an uninitialized message.  Pairs with OutputPartial.
  """
  n = 0
  if (self.has_queue_name_):
    n += 1
    n += self.lengthString(len(self.queue_name_))
  if (self.has_task_name_):
    n += 1
    n += self.lengthString(len(self.task_name_))
  if (self.has_eta_usec_):
    n += 1
    n += self.lengthVarInt64(self.eta_usec_)
  if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
  if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
  n += 2 * len(self.header_)  # start/end group tags per Header
  for i in xrange(len(self.header_)): n += self.header_[i].ByteSizePartial()
  if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
  if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
  if (self.has_datastore_transaction_): n += 2 + self.lengthString(len(self.datastore_transaction_))
  if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
  if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSizePartial()
  if (self.has_description_): n += 1 + self.lengthString(len(self.description_))
  if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSizePartial())
  if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSizePartial())
  if (self.has_mode_): n += 2 + self.lengthVarInt64(self.mode_)
  if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
  if (self.has_cron_retry_parameters_): n += 2 + self.lengthString(self.cron_retry_parameters_.ByteSizePartial())
  return n
def Clear(self):
  """Reset every field of this message to its unset/default state."""
  self.clear_queue_name()
  self.clear_task_name()
  self.clear_eta_usec()
  self.clear_method()
  self.clear_url()
  self.clear_header()
  self.clear_body()
  self.clear_transaction()
  self.clear_datastore_transaction()
  self.clear_app_id()
  self.clear_crontimetable()
  self.clear_description()
  self.clear_payload()
  self.clear_retry_parameters()
  self.clear_mode()
  self.clear_tag()
  self.clear_cron_retry_parameters()
def OutputUnchecked(self, out):
  """Serialize to encoder `out` without checking required fields.

  Caller is expected to have verified IsInitialized(): queue_name,
  task_name and eta_usec are written unconditionally.  Tag constants are
  pre-computed varints, tag = (field_number << 3) | wire_type; e.g.
  10 = field 1 length-delimited, 24 = field 3 varint, 51/52 bracket the
  Header group (field 6) and 99/100 the CronTimetable group (field 12).
  Note datastore_transaction (field 21) is emitted last; protobuf does
  not require ascending field order.
  """
  out.putVarInt32(10)
  out.putPrefixedString(self.queue_name_)
  out.putVarInt32(18)
  out.putPrefixedString(self.task_name_)
  out.putVarInt32(24)
  out.putVarInt64(self.eta_usec_)
  if (self.has_url_):
    out.putVarInt32(34)
    out.putPrefixedString(self.url_)
  if (self.has_method_):
    out.putVarInt32(40)
    out.putVarInt32(self.method_)
  for i in xrange(len(self.header_)):
    out.putVarInt32(51)   # start-group tag for Header
    self.header_[i].OutputUnchecked(out)
    out.putVarInt32(52)   # end-group tag for Header
  if (self.has_body_):
    out.putVarInt32(74)
    out.putPrefixedString(self.body_)
  if (self.has_transaction_):
    # Embedded message: length prefix then the message's own bytes.
    out.putVarInt32(82)
    out.putVarInt32(self.transaction_.ByteSize())
    self.transaction_.OutputUnchecked(out)
  if (self.has_app_id_):
    out.putVarInt32(90)
    out.putPrefixedString(self.app_id_)
  if (self.has_crontimetable_):
    out.putVarInt32(99)   # start-group tag for CronTimetable
    self.crontimetable_.OutputUnchecked(out)
    out.putVarInt32(100)  # end-group tag for CronTimetable
  if (self.has_description_):
    out.putVarInt32(122)
    out.putPrefixedString(self.description_)
  if (self.has_payload_):
    out.putVarInt32(130)
    out.putVarInt32(self.payload_.ByteSize())
    self.payload_.OutputUnchecked(out)
  if (self.has_retry_parameters_):
    out.putVarInt32(138)
    out.putVarInt32(self.retry_parameters_.ByteSize())
    self.retry_parameters_.OutputUnchecked(out)
  if (self.has_mode_):
    out.putVarInt32(144)
    out.putVarInt32(self.mode_)
  if (self.has_tag_):
    out.putVarInt32(154)
    out.putPrefixedString(self.tag_)
  if (self.has_cron_retry_parameters_):
    out.putVarInt32(162)
    out.putVarInt32(self.cron_retry_parameters_.ByteSize())
    self.cron_retry_parameters_.OutputUnchecked(out)
  if (self.has_datastore_transaction_):
    out.putVarInt32(170)
    out.putPrefixedString(self.datastore_transaction_)
def OutputPartial(self, out):
  """Serialize to encoder `out`, emitting only the fields that are set.

  Same wire layout as OutputUnchecked, but every field (including the
  required ones) is guarded by its has_ flag, and nested messages are
  written with their ByteSizePartial length.  Safe on an uninitialized
  message.
  """
  if (self.has_queue_name_):
    out.putVarInt32(10)
    out.putPrefixedString(self.queue_name_)
  if (self.has_task_name_):
    out.putVarInt32(18)
    out.putPrefixedString(self.task_name_)
  if (self.has_eta_usec_):
    out.putVarInt32(24)
    out.putVarInt64(self.eta_usec_)
  if (self.has_url_):
    out.putVarInt32(34)
    out.putPrefixedString(self.url_)
  if (self.has_method_):
    out.putVarInt32(40)
    out.putVarInt32(self.method_)
  for i in xrange(len(self.header_)):
    out.putVarInt32(51)   # start-group tag for Header
    self.header_[i].OutputPartial(out)
    out.putVarInt32(52)   # end-group tag for Header
  if (self.has_body_):
    out.putVarInt32(74)
    out.putPrefixedString(self.body_)
  if (self.has_transaction_):
    out.putVarInt32(82)
    out.putVarInt32(self.transaction_.ByteSizePartial())
    self.transaction_.OutputPartial(out)
  if (self.has_app_id_):
    out.putVarInt32(90)
    out.putPrefixedString(self.app_id_)
  if (self.has_crontimetable_):
    out.putVarInt32(99)   # start-group tag for CronTimetable
    self.crontimetable_.OutputPartial(out)
    out.putVarInt32(100)  # end-group tag for CronTimetable
  if (self.has_description_):
    out.putVarInt32(122)
    out.putPrefixedString(self.description_)
  if (self.has_payload_):
    out.putVarInt32(130)
    out.putVarInt32(self.payload_.ByteSizePartial())
    self.payload_.OutputPartial(out)
  if (self.has_retry_parameters_):
    out.putVarInt32(138)
    out.putVarInt32(self.retry_parameters_.ByteSizePartial())
    self.retry_parameters_.OutputPartial(out)
  if (self.has_mode_):
    out.putVarInt32(144)
    out.putVarInt32(self.mode_)
  if (self.has_tag_):
    out.putVarInt32(154)
    out.putPrefixedString(self.tag_)
  if (self.has_cron_retry_parameters_):
    out.putVarInt32(162)
    out.putVarInt32(self.cron_retry_parameters_.ByteSizePartial())
    self.cron_retry_parameters_.OutputPartial(out)
  if (self.has_datastore_transaction_):
    out.putVarInt32(170)
    out.putPrefixedString(self.datastore_transaction_)
def TryMerge(self, d):
  """Merge wire-format bytes from decoder `d` into this message.

  Reads (tag, value) pairs until the buffer is exhausted.  Tags are
  (field_number << 3) | wire_type.  Length-delimited embedded messages
  (transaction, payload, retry_parameters, cron_retry_parameters) are
  decoded through a bounded sub-decoder; groups (Header at tag 51,
  CronTimetable at tag 99) are decoded in-stream, the sub-message's own
  TryMerge consuming input up to its end-group tag.  Unknown fields are
  skipped, preserving forward compatibility.
  """
  while d.avail() > 0:
    tt = d.getVarInt32()
    if tt == 10:                                   # queue_name (field 1)
      self.set_queue_name(d.getPrefixedString())
      continue
    if tt == 18:                                   # task_name (field 2)
      self.set_task_name(d.getPrefixedString())
      continue
    if tt == 24:                                   # eta_usec (field 3)
      self.set_eta_usec(d.getVarInt64())
      continue
    if tt == 34:                                   # url (field 4)
      self.set_url(d.getPrefixedString())
      continue
    if tt == 40:                                   # method (field 5)
      self.set_method(d.getVarInt32())
      continue
    if tt == 51:                                   # Header group (field 6)
      self.add_header().TryMerge(d)
      continue
    if tt == 74:                                   # body (field 9)
      self.set_body(d.getPrefixedString())
      continue
    if tt == 82:                                   # transaction (field 10)
      length = d.getVarInt32()
      tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
      d.skip(length)
      self.mutable_transaction().TryMerge(tmp)
      continue
    if tt == 90:                                   # app_id (field 11)
      self.set_app_id(d.getPrefixedString())
      continue
    if tt == 99:                                   # CronTimetable group (field 12)
      self.mutable_crontimetable().TryMerge(d)
      continue
    if tt == 122:                                  # description (field 15)
      self.set_description(d.getPrefixedString())
      continue
    if tt == 130:                                  # payload (field 16)
      length = d.getVarInt32()
      tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
      d.skip(length)
      self.mutable_payload().TryMerge(tmp)
      continue
    if tt == 138:                                  # retry_parameters (field 17)
      length = d.getVarInt32()
      tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
      d.skip(length)
      self.mutable_retry_parameters().TryMerge(tmp)
      continue
    if tt == 144:                                  # mode (field 18)
      self.set_mode(d.getVarInt32())
      continue
    if tt == 154:                                  # tag (field 19)
      self.set_tag(d.getPrefixedString())
      continue
    if tt == 162:                                  # cron_retry_parameters (field 20)
      length = d.getVarInt32()
      tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
      d.skip(length)
      self.mutable_cron_retry_parameters().TryMerge(tmp)
      continue
    if tt == 170:                                  # datastore_transaction (field 21)
      self.set_datastore_transaction(d.getPrefixedString())
      continue
    # tag 0 is special: it's used to indicate an error.
    # so if we see it we raise an exception.
    if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
    d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
  """Render the message in debug text format.

  Args:
    prefix: indentation string prepended to every line.
    printElemNumber: if truthy, repeated elements get a "(n)" index.
  Groups print with {...}, embedded messages with <...>.
  """
  res=""
  if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
  if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
  if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
  if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatInt32(self.method_))
  if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
  cnt=0
  for e in self.header_:
    elm=""
    if printElemNumber: elm="(%d)" % cnt
    res+=prefix+("Header%s {\n" % elm)
    res+=e.__str__(prefix + " ", printElemNumber)
    res+=prefix+"}\n"
    cnt+=1
  if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
  if self.has_transaction_:
    res+=prefix+"transaction <\n"
    res+=self.transaction_.__str__(prefix + " ", printElemNumber)
    res+=prefix+">\n"
  if self.has_datastore_transaction_: res+=prefix+("datastore_transaction: %s\n" % self.DebugFormatString(self.datastore_transaction_))
  if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
  if self.has_crontimetable_:
    res+=prefix+"CronTimetable {\n"
    res+=self.crontimetable_.__str__(prefix + " ", printElemNumber)
    res+=prefix+"}\n"
  if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_))
  if self.has_payload_:
    res+=prefix+"payload <\n"
    res+=self.payload_.__str__(prefix + " ", printElemNumber)
    res+=prefix+">\n"
  if self.has_retry_parameters_:
    res+=prefix+"retry_parameters <\n"
    res+=self.retry_parameters_.__str__(prefix + " ", printElemNumber)
    res+=prefix+">\n"
  if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
  if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
  if self.has_cron_retry_parameters_:
    res+=prefix+"cron_retry_parameters <\n"
    res+=self.cron_retry_parameters_.__str__(prefix + " ", printElemNumber)
    res+=prefix+">\n"
  return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
  """Expand a sparse {tag: value} dict into a dense tuple indexed 0..maxtag.

  Plain function evaluated during class-body execution (note: no self);
  used below to build the _TEXT and _TYPES lookup tables.
  """
  return tuple(sparse.get(tag, default) for tag in range(maxtag + 1))
# Proto field-number constants for apphosting.TaskQueueAddRequest.
# Group member fields (Header*, CronTimetable*) share the flat namespace.
kqueue_name = 1
ktask_name = 2
keta_usec = 3
kmethod = 5
kurl = 4
kHeaderGroup = 6
kHeaderkey = 7
kHeadervalue = 8
kbody = 9
ktransaction = 10
kdatastore_transaction = 21
kapp_id = 11
kCronTimetableGroup = 12
kCronTimetableschedule = 13
kCronTimetabletimezone = 14
kdescription = 15
kpayload = 16
kretry_parameters = 17
kmode = 18
ktag = 19
kcron_retry_parameters = 20

# Dense tag -> field-name table (index 0 reserved for "ErrorCode").
_TEXT = _BuildTagLookupTable({
  0: "ErrorCode",
  1: "queue_name",
  2: "task_name",
  3: "eta_usec",
  4: "url",
  5: "method",
  6: "Header",
  7: "key",
  8: "value",
  9: "body",
  10: "transaction",
  11: "app_id",
  12: "CronTimetable",
  13: "schedule",
  14: "timezone",
  15: "description",
  16: "payload",
  17: "retry_parameters",
  18: "mode",
  19: "tag",
  20: "cron_retry_parameters",
  21: "datastore_transaction",
}, 21)

# Dense tag -> wire-type table used by the generic encoder/decoder.
_TYPES = _BuildTagLookupTable({
  0: ProtocolBuffer.Encoder.NUMERIC,
  1: ProtocolBuffer.Encoder.STRING,
  2: ProtocolBuffer.Encoder.STRING,
  3: ProtocolBuffer.Encoder.NUMERIC,
  4: ProtocolBuffer.Encoder.STRING,
  5: ProtocolBuffer.Encoder.NUMERIC,
  6: ProtocolBuffer.Encoder.STARTGROUP,
  7: ProtocolBuffer.Encoder.STRING,
  8: ProtocolBuffer.Encoder.STRING,
  9: ProtocolBuffer.Encoder.STRING,
  10: ProtocolBuffer.Encoder.STRING,
  11: ProtocolBuffer.Encoder.STRING,
  12: ProtocolBuffer.Encoder.STARTGROUP,
  13: ProtocolBuffer.Encoder.STRING,
  14: ProtocolBuffer.Encoder.STRING,
  15: ProtocolBuffer.Encoder.STRING,
  16: ProtocolBuffer.Encoder.STRING,
  17: ProtocolBuffer.Encoder.STRING,
  18: ProtocolBuffer.Encoder.NUMERIC,
  19: ProtocolBuffer.Encoder.STRING,
  20: ProtocolBuffer.Encoder.STRING,
  21: ProtocolBuffer.Encoder.STRING,
}, 21, ProtocolBuffer.Encoder.MAX_TYPE)

# stylesheet for XML output
_STYLE = \
 """"""
_STYLE_CONTENT_TYPE = \
 """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAddRequest'
class TaskQueueAddResponse(ProtocolBuffer.ProtocolMessage):
  """Response to TaskQueueService.Add.

  Single optional string field: chosen_task_name (tag 1), the name the
  server picked when the request omitted an explicit task name.
  """

  has_chosen_task_name_ = 0
  chosen_task_name_ = ""

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def chosen_task_name(self):
    return self.chosen_task_name_

  def set_chosen_task_name(self, x):
    self.has_chosen_task_name_ = 1
    self.chosen_task_name_ = x

  def clear_chosen_task_name(self):
    # Only reset when set, mirroring the presence-bit convention.
    if not self.has_chosen_task_name_:
      return
    self.has_chosen_task_name_ = 0
    self.chosen_task_name_ = ""

  def has_chosen_task_name(self):
    return self.has_chosen_task_name_

  def MergeFrom(self, x):
    assert x is not self
    if x.has_chosen_task_name():
      self.set_chosen_task_name(x.chosen_task_name())

  def Equals(self, x):
    if x is self:
      return 1
    if self.has_chosen_task_name_ != x.has_chosen_task_name_:
      return 0
    if self.has_chosen_task_name_ and self.chosen_task_name_ != x.chosen_task_name_:
      return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields: always valid.
    return 1

  def ByteSize(self):
    # One tag byte plus the length-prefixed string, when set.
    if self.has_chosen_task_name_:
      return 1 + self.lengthString(len(self.chosen_task_name_))
    return 0

  def ByteSizePartial(self):
    # Identical to ByteSize: the only field is optional.
    return self.ByteSize()

  def Clear(self):
    self.clear_chosen_task_name()

  def OutputUnchecked(self, out):
    if self.has_chosen_task_name_:
      out.putVarInt32(10)  # (field 1 << 3) | length-delimited
      out.putPrefixedString(self.chosen_task_name_)

  def OutputPartial(self, out):
    # No required fields, so partial output equals unchecked output.
    self.OutputUnchecked(out)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 0:
        # tag 0 signals a corrupt stream.
        raise ProtocolBuffer.ProtocolBufferDecodeError
      if tt == 10:
        self.set_chosen_task_name(d.getPrefixedString())
      else:
        d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    pieces = []
    if self.has_chosen_task_name_:
      pieces.append(prefix + ("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_)))
    return "".join(pieces)

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Plain function run at class-definition time (no self).
    return tuple(sparse.get(tag, default) for tag in range(maxtag + 1))

  kchosen_task_name = 1

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "chosen_task_name",
  }, 1)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAddResponse'
class TaskQueueBulkAddRequest(ProtocolBuffer.ProtocolMessage):
  """Request for TaskQueueService.BulkAdd.

  Single repeated embedded-message field: add_request (tag 1), a batch
  of TaskQueueAddRequest entries to enqueue in one RPC.
  """

  def __init__(self, contents=None):
    self.add_request_ = []
    if contents is not None: self.MergeFromString(contents)

  def add_request_size(self): return len(self.add_request_)
  def add_request_list(self): return self.add_request_

  def add_request(self, i):
    return self.add_request_[i]

  def mutable_add_request(self, i):
    return self.add_request_[i]

  def add_add_request(self):
    # Appends a fresh element and returns it for the caller to populate.
    x = TaskQueueAddRequest()
    self.add_request_.append(x)
    return x

  def clear_add_request(self):
    self.add_request_ = []

  def MergeFrom(self, x):
    """Append deep copies of x's add_request elements onto this message."""
    assert x is not self
    for i in xrange(x.add_request_size()): self.add_add_request().CopyFrom(x.add_request(i))

  def Equals(self, x):
    if x is self: return 1
    if len(self.add_request_) != len(x.add_request_): return 0
    for e1, e2 in zip(self.add_request_, x.add_request_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Valid iff every embedded add_request is itself initialized."""
    initialized = 1
    for p in self.add_request_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # One tag byte per element plus each element's length-prefixed payload.
    n = 0
    n += 1 * len(self.add_request_)
    for i in xrange(len(self.add_request_)): n += self.lengthString(self.add_request_[i].ByteSize())
    return n

  def ByteSizePartial(self):
    n = 0
    n += 1 * len(self.add_request_)
    for i in xrange(len(self.add_request_)): n += self.lengthString(self.add_request_[i].ByteSizePartial())
    return n

  def Clear(self):
    self.clear_add_request()

  def OutputUnchecked(self, out):
    for i in xrange(len(self.add_request_)):
      out.putVarInt32(10)  # (field 1 << 3) | length-delimited
      out.putVarInt32(self.add_request_[i].ByteSize())
      self.add_request_[i].OutputUnchecked(out)

  def OutputPartial(self, out):
    for i in xrange(len(self.add_request_)):
      out.putVarInt32(10)
      out.putVarInt32(self.add_request_[i].ByteSizePartial())
      self.add_request_[i].OutputPartial(out)

  def TryMerge(self, d):
    """Merge wire-format bytes from decoder d, skipping unknown fields."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        # Length-delimited sub-message: decode via a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_add_request().TryMerge(tmp)
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Debug text format; printElemNumber adds a (n) index per element."""
    res=""
    cnt=0
    for e in self.add_request_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("add_request%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Plain function evaluated in the class body (no self).
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Proto field-number constant.
  kadd_request = 1

  # Dense tag -> name / wire-type tables for the generic codec.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "add_request",
  }, 1)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueBulkAddRequest'
class TaskQueueBulkAddResponse_TaskResult(ProtocolBuffer.ProtocolMessage):
  """Per-task outcome nested in TaskQueueBulkAddResponse (TaskResult group).

  Encoded as a *group*: the parent writes start/end-group tags around
  these fields, which is why TryMerge below terminates on the end-group
  tag (12) instead of buffer exhaustion.  Fields: result (required
  int32, tag 2), chosen_task_name (optional string, tag 3).  The
  _TEXT/_TYPES tables for these tags live on the parent message.
  """
  has_result_ = 0
  result_ = 0
  has_chosen_task_name_ = 0
  chosen_task_name_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def result(self): return self.result_

  def set_result(self, x):
    self.has_result_ = 1
    self.result_ = x

  def clear_result(self):
    if self.has_result_:
      self.has_result_ = 0
      self.result_ = 0

  def has_result(self): return self.has_result_

  def chosen_task_name(self): return self.chosen_task_name_

  def set_chosen_task_name(self, x):
    self.has_chosen_task_name_ = 1
    self.chosen_task_name_ = x

  def clear_chosen_task_name(self):
    if self.has_chosen_task_name_:
      self.has_chosen_task_name_ = 0
      self.chosen_task_name_ = ""

  def has_chosen_task_name(self): return self.has_chosen_task_name_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_result()): self.set_result(x.result())
    if (x.has_chosen_task_name()): self.set_chosen_task_name(x.chosen_task_name())

  def Equals(self, x):
    if x is self: return 1
    if self.has_result_ != x.has_result_: return 0
    if self.has_result_ and self.result_ != x.result_: return 0
    if self.has_chosen_task_name_ != x.has_chosen_task_name_: return 0
    if self.has_chosen_task_name_ and self.chosen_task_name_ != x.chosen_task_name_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Valid iff the required `result` field is set."""
    initialized = 1
    if (not self.has_result_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: result not set.')
    return initialized

  def ByteSize(self):
    # `+ 1` is the tag byte of the required `result` field.
    n = 0
    n += self.lengthVarInt64(self.result_)
    if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
    return n + 1

  def ByteSizePartial(self):
    # Like ByteSize but does not assume `result` is set.
    n = 0
    if (self.has_result_):
      n += 1
      n += self.lengthVarInt64(self.result_)
    if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
    return n

  def Clear(self):
    self.clear_result()
    self.clear_chosen_task_name()

  def OutputUnchecked(self, out):
    # 16 = (field 2 << 3) | varint; 26 = (field 3 << 3) | length-delimited.
    out.putVarInt32(16)
    out.putVarInt32(self.result_)
    if (self.has_chosen_task_name_):
      out.putVarInt32(26)
      out.putPrefixedString(self.chosen_task_name_)

  def OutputPartial(self, out):
    if (self.has_result_):
      out.putVarInt32(16)
      out.putVarInt32(self.result_)
    if (self.has_chosen_task_name_):
      out.putVarInt32(26)
      out.putPrefixedString(self.chosen_task_name_)

  def TryMerge(self, d):
    """Decode group contents; stops at the enclosing end-group tag (12)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break  # end-group tag written by the parent
      if tt == 16:
        self.set_result(d.getVarInt32())
        continue
      if tt == 26:
        self.set_chosen_task_name(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_result_: res+=prefix+("result: %s\n" % self.DebugFormatInt32(self.result_))
    if self.has_chosen_task_name_: res+=prefix+("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_))
    return res
class TaskQueueBulkAddResponse(ProtocolBuffer.ProtocolMessage):
  """Response for TaskQueueService.BulkAdd.

  Single repeated group field: TaskResult (tag 1), one
  TaskQueueBulkAddResponse_TaskResult per submitted task, in order.
  """

  def __init__(self, contents=None):
    self.taskresult_ = []
    if contents is not None: self.MergeFromString(contents)

  def taskresult_size(self): return len(self.taskresult_)
  def taskresult_list(self): return self.taskresult_

  def taskresult(self, i):
    return self.taskresult_[i]

  def mutable_taskresult(self, i):
    return self.taskresult_[i]

  def add_taskresult(self):
    # Appends a fresh element and returns it for the caller to populate.
    x = TaskQueueBulkAddResponse_TaskResult()
    self.taskresult_.append(x)
    return x

  def clear_taskresult(self):
    self.taskresult_ = []

  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.taskresult_size()): self.add_taskresult().CopyFrom(x.taskresult(i))

  def Equals(self, x):
    if x is self: return 1
    if len(self.taskresult_) != len(x.taskresult_): return 0
    for e1, e2 in zip(self.taskresult_, x.taskresult_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Valid iff every TaskResult element is initialized (result set)."""
    initialized = 1
    for p in self.taskresult_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # 2 tag bytes per element: group encoding uses start AND end tags.
    n = 0
    n += 2 * len(self.taskresult_)
    for i in xrange(len(self.taskresult_)): n += self.taskresult_[i].ByteSize()
    return n

  def ByteSizePartial(self):
    n = 0
    n += 2 * len(self.taskresult_)
    for i in xrange(len(self.taskresult_)): n += self.taskresult_[i].ByteSizePartial()
    return n

  def Clear(self):
    self.clear_taskresult()

  def OutputUnchecked(self, out):
    # 11/12 = start/end group tags for field 1.
    for i in xrange(len(self.taskresult_)):
      out.putVarInt32(11)
      self.taskresult_[i].OutputUnchecked(out)
      out.putVarInt32(12)

  def OutputPartial(self, out):
    for i in xrange(len(self.taskresult_)):
      out.putVarInt32(11)
      self.taskresult_[i].OutputPartial(out)
      out.putVarInt32(12)

  def TryMerge(self, d):
    """Merge wire bytes; each start-group tag (11) opens a TaskResult."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        # Element's TryMerge consumes input through its end-group tag.
        self.add_taskresult().TryMerge(d)
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.taskresult_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("TaskResult%s {\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Plain function evaluated in the class body (no self).
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Proto field-number constants (group members share the namespace).
  kTaskResultGroup = 1
  kTaskResultresult = 2
  kTaskResultchosen_task_name = 3

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "TaskResult",
    2: "result",
    3: "chosen_task_name",
  }, 3)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STARTGROUP,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.STRING,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueBulkAddResponse'
class TaskQueueDeleteRequest(ProtocolBuffer.ProtocolMessage):
  """Request for TaskQueueService.Delete.

  Fields: queue_name (required string, tag 1), task_name (repeated
  string, tag 2 -- the tasks to delete from that queue), app_id
  (optional string, tag 3).
  """
  has_queue_name_ = 0
  queue_name_ = ""
  has_app_id_ = 0
  app_id_ = ""

  def __init__(self, contents=None):
    self.task_name_ = []
    if contents is not None: self.MergeFromString(contents)

  def queue_name(self): return self.queue_name_

  def set_queue_name(self, x):
    self.has_queue_name_ = 1
    self.queue_name_ = x

  def clear_queue_name(self):
    if self.has_queue_name_:
      self.has_queue_name_ = 0
      self.queue_name_ = ""

  def has_queue_name(self): return self.has_queue_name_

  def task_name_size(self): return len(self.task_name_)
  def task_name_list(self): return self.task_name_

  def task_name(self, i):
    return self.task_name_[i]

  def set_task_name(self, i, x):
    self.task_name_[i] = x

  def add_task_name(self, x):
    self.task_name_.append(x)

  def clear_task_name(self):
    self.task_name_ = []

  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_queue_name()): self.set_queue_name(x.queue_name())
    for i in xrange(x.task_name_size()): self.add_task_name(x.task_name(i))
    if (x.has_app_id()): self.set_app_id(x.app_id())

  def Equals(self, x):
    if x is self: return 1
    if self.has_queue_name_ != x.has_queue_name_: return 0
    if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
    if len(self.task_name_) != len(x.task_name_): return 0
    for e1, e2 in zip(self.task_name_, x.task_name_):
      if e1 != e2: return 0
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Valid iff the required queue_name is set."""
    initialized = 1
    if (not self.has_queue_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: queue_name not set.')
    return initialized

  def ByteSize(self):
    # `+ 1` at the end is the tag byte of the required queue_name field.
    n = 0
    n += self.lengthString(len(self.queue_name_))
    n += 1 * len(self.task_name_)
    for i in xrange(len(self.task_name_)): n += self.lengthString(len(self.task_name_[i]))
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    return n + 1

  def ByteSizePartial(self):
    # Like ByteSize but does not assume queue_name is present.
    n = 0
    if (self.has_queue_name_):
      n += 1
      n += self.lengthString(len(self.queue_name_))
    n += 1 * len(self.task_name_)
    for i in xrange(len(self.task_name_)): n += self.lengthString(len(self.task_name_[i]))
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    return n

  def Clear(self):
    self.clear_queue_name()
    self.clear_task_name()
    self.clear_app_id()

  def OutputUnchecked(self, out):
    # Tags: 10/18/26 = fields 1/2/3, all length-delimited strings.
    out.putVarInt32(10)
    out.putPrefixedString(self.queue_name_)
    for i in xrange(len(self.task_name_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.task_name_[i])
    if (self.has_app_id_):
      out.putVarInt32(26)
      out.putPrefixedString(self.app_id_)

  def OutputPartial(self, out):
    if (self.has_queue_name_):
      out.putVarInt32(10)
      out.putPrefixedString(self.queue_name_)
    for i in xrange(len(self.task_name_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.task_name_[i])
    if (self.has_app_id_):
      out.putVarInt32(26)
      out.putPrefixedString(self.app_id_)

  def TryMerge(self, d):
    """Merge wire-format bytes from decoder d, skipping unknown fields."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_queue_name(d.getPrefixedString())
        continue
      if tt == 18:
        self.add_task_name(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_app_id(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
    cnt=0
    for e in self.task_name_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("task_name%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Plain function evaluated in the class body (no self).
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Proto field-number constants.
  kqueue_name = 1
  ktask_name = 2
  kapp_id = 3

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "queue_name",
    2: "task_name",
    3: "app_id",
  }, 3)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteRequest'
class TaskQueueDeleteResponse(ProtocolBuffer.ProtocolMessage):
  """Response for TaskQueueService.Delete.

  Single repeated int32 field: result (tag 3), one status code per
  task named in the corresponding TaskQueueDeleteRequest, in order.
  """

  def __init__(self, contents=None):
    self.result_ = []
    if contents is not None:
      self.MergeFromString(contents)

  def result_size(self):
    return len(self.result_)

  def result_list(self):
    return self.result_

  def result(self, i):
    return self.result_[i]

  def set_result(self, i, x):
    self.result_[i] = x

  def add_result(self, x):
    self.result_.append(x)

  def clear_result(self):
    self.result_ = []

  def MergeFrom(self, x):
    assert x is not self
    for value in x.result_list():
      self.add_result(value)

  def Equals(self, x):
    if x is self:
      return 1
    if len(self.result_) != len(x.result_):
      return 0
    for mine, theirs in zip(self.result_, x.result_):
      if mine != theirs:
        return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields: always valid.
    return 1

  def ByteSize(self):
    # One tag byte per element plus each value's varint size.
    n = len(self.result_)
    for value in self.result_:
      n += self.lengthVarInt64(value)
    return n

  def ByteSizePartial(self):
    # Identical to ByteSize: the only field is repeated/optional.
    return self.ByteSize()

  def Clear(self):
    self.clear_result()

  def OutputUnchecked(self, out):
    for value in self.result_:
      out.putVarInt32(24)  # (field 3 << 3) | varint
      out.putVarInt32(value)

  def OutputPartial(self, out):
    # No required fields, so partial output equals unchecked output.
    self.OutputUnchecked(out)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 0:
        # tag 0 signals a corrupt stream.
        raise ProtocolBuffer.ProtocolBufferDecodeError
      if tt == 24:
        self.add_result(d.getVarInt32())
      else:
        d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    pieces = []
    cnt = 0
    for value in self.result_:
      elm = "(%d)" % cnt if printElemNumber else ""
      pieces.append(prefix + ("result%s: %s\n" % (elm, self.DebugFormatInt32(value))))
      cnt += 1
    return "".join(pieces)

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Plain function run at class-definition time (no self).
    return tuple(sparse.get(tag, default) for tag in range(maxtag + 1))

  kresult = 3

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    3: "result",
  }, 3)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.NUMERIC,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteResponse'
class TaskQueueForceRunRequest(ProtocolBuffer.ProtocolMessage):
  """Request for TaskQueueService.ForceRun.

  Fields: app_id (optional string, tag 1), queue_name (required string,
  tag 2), task_name (required string, tag 3).
  """
  has_app_id_ = 0
  app_id_ = ""
  has_queue_name_ = 0
  queue_name_ = ""
  has_task_name_ = 0
  task_name_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  def queue_name(self): return self.queue_name_

  def set_queue_name(self, x):
    self.has_queue_name_ = 1
    self.queue_name_ = x

  def clear_queue_name(self):
    if self.has_queue_name_:
      self.has_queue_name_ = 0
      self.queue_name_ = ""

  def has_queue_name(self): return self.has_queue_name_

  def task_name(self): return self.task_name_

  def set_task_name(self, x):
    self.has_task_name_ = 1
    self.task_name_ = x

  def clear_task_name(self):
    if self.has_task_name_:
      self.has_task_name_ = 0
      self.task_name_ = ""

  def has_task_name(self): return self.has_task_name_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_queue_name()): self.set_queue_name(x.queue_name())
    if (x.has_task_name()): self.set_task_name(x.task_name())

  def Equals(self, x):
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_queue_name_ != x.has_queue_name_: return 0
    if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
    if self.has_task_name_ != x.has_task_name_: return 0
    if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Valid iff the required queue_name and task_name are set."""
    initialized = 1
    if (not self.has_queue_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: queue_name not set.')
    if (not self.has_task_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: task_name not set.')
    return initialized

  def ByteSize(self):
    # `+ 2` is the tag bytes of the two required fields.
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    n += self.lengthString(len(self.queue_name_))
    n += self.lengthString(len(self.task_name_))
    return n + 2

  def ByteSizePartial(self):
    # Like ByteSize but does not assume required fields are present.
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    if (self.has_queue_name_):
      n += 1
      n += self.lengthString(len(self.queue_name_))
    if (self.has_task_name_):
      n += 1
      n += self.lengthString(len(self.task_name_))
    return n

  def Clear(self):
    self.clear_app_id()
    self.clear_queue_name()
    self.clear_task_name()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(26)
out.putPrefixedString(self.task_name_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.task_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 26:
self.set_task_name(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
ktask_name = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "task_name",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueForceRunRequest'
class TaskQueueForceRunResponse(ProtocolBuffer.ProtocolMessage):
  """Generated response carrying a single required integer 'result'
  (field 3, varint wire tag 24)."""
  has_result_ = 0
  result_ = 0
  def __init__(self, contents=None):
    # Optionally initialize from an already-serialized byte string.
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors for the 'result' field.
  def result(self): return self.result_
  def set_result(self, x):
    self.has_result_ = 1
    self.result_ = x
  def clear_result(self):
    if self.has_result_:
      self.has_result_ = 0
      self.result_ = 0
  def has_result(self): return self.has_result_
  def MergeFrom(self, x):
    """Copy the 'result' field from `x` if it is present."""
    assert x is not self
    if (x.has_result()): self.set_result(x.result())
  def Equals(self, x):
    if x is self: return 1
    if self.has_result_ != x.has_result_: return 0
    if self.has_result_ and self.result_ != x.result_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the required 'result' field is set."""
    initialized = 1
    if (not self.has_result_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: result not set.')
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthVarInt64(self.result_)
    # + 1 = the one-byte tag of the required 'result' field.
    return n + 1
  def ByteSizePartial(self):
    n = 0
    if (self.has_result_):
      n += 1
      n += self.lengthVarInt64(self.result_)
    return n
  def Clear(self):
    self.clear_result()
  def OutputUnchecked(self, out):
    # Tag 24 = field 3, varint wire type.
    out.putVarInt32(24)
    out.putVarInt32(self.result_)
  def OutputPartial(self, out):
    if (self.has_result_):
      out.putVarInt32(24)
      out.putVarInt32(self.result_)
  def TryMerge(self, d):
    """Parse fields from decoder `d`; unknown tags are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 24:
        self.set_result(d.getVarInt32())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_result_: res+=prefix+("result: %s\n" % self.DebugFormatInt32(self.result_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple (class-body helper).
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kresult = 3
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    3: "result",
  }, 3)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.NUMERIC,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueForceRunResponse'
class TaskQueueUpdateQueueRequest(ProtocolBuffer.ProtocolMessage):
  """Generated request message describing a queue configuration update.

  Fields (numbers per the _TYPES table below): app_id (1, optional string),
  queue_name (2, required string), bucket_refill_per_second (3, required
  double), bucket_capacity (4, required int), user_specified_rate (5,
  optional string), retry_parameters (6, optional TaskQueueRetryParameters),
  max_concurrent_requests (7, optional int), mode (8, optional int),
  acl (9, optional TaskQueueAcl), header_override (10, repeated
  TaskQueueHttpHeader).
  """
  # Presence flags and scalar defaults; sub-messages are lazily created.
  has_app_id_ = 0
  app_id_ = ""
  has_queue_name_ = 0
  queue_name_ = ""
  has_bucket_refill_per_second_ = 0
  bucket_refill_per_second_ = 0.0
  has_bucket_capacity_ = 0
  bucket_capacity_ = 0
  has_user_specified_rate_ = 0
  user_specified_rate_ = ""
  has_retry_parameters_ = 0
  retry_parameters_ = None
  has_max_concurrent_requests_ = 0
  max_concurrent_requests_ = 0
  has_mode_ = 0
  mode_ = 0
  has_acl_ = 0
  acl_ = None
  def __init__(self, contents=None):
    # Per-instance repeated field storage and the lock guarding lazy
    # sub-message creation in retry_parameters()/acl().
    self.header_override_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors: getter, setter, clearer and presence test per field.
  def app_id(self): return self.app_id_
  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x
  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""
  def has_app_id(self): return self.has_app_id_
  def queue_name(self): return self.queue_name_
  def set_queue_name(self, x):
    self.has_queue_name_ = 1
    self.queue_name_ = x
  def clear_queue_name(self):
    if self.has_queue_name_:
      self.has_queue_name_ = 0
      self.queue_name_ = ""
  def has_queue_name(self): return self.has_queue_name_
  def bucket_refill_per_second(self): return self.bucket_refill_per_second_
  def set_bucket_refill_per_second(self, x):
    self.has_bucket_refill_per_second_ = 1
    self.bucket_refill_per_second_ = x
  def clear_bucket_refill_per_second(self):
    if self.has_bucket_refill_per_second_:
      self.has_bucket_refill_per_second_ = 0
      self.bucket_refill_per_second_ = 0.0
  def has_bucket_refill_per_second(self): return self.has_bucket_refill_per_second_
  def bucket_capacity(self): return self.bucket_capacity_
  def set_bucket_capacity(self, x):
    self.has_bucket_capacity_ = 1
    self.bucket_capacity_ = x
  def clear_bucket_capacity(self):
    if self.has_bucket_capacity_:
      self.has_bucket_capacity_ = 0
      self.bucket_capacity_ = 0
  def has_bucket_capacity(self): return self.has_bucket_capacity_
  def user_specified_rate(self): return self.user_specified_rate_
  def set_user_specified_rate(self, x):
    self.has_user_specified_rate_ = 1
    self.user_specified_rate_ = x
  def clear_user_specified_rate(self):
    if self.has_user_specified_rate_:
      self.has_user_specified_rate_ = 0
      self.user_specified_rate_ = ""
  def has_user_specified_rate(self): return self.has_user_specified_rate_
  def retry_parameters(self):
    # Lazily create the sub-message under the lock (double-checked).
    if self.retry_parameters_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
      finally:
        self.lazy_init_lock_.release()
    return self.retry_parameters_
  def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
  def clear_retry_parameters(self):
    # Warning: this method does not acquire the lock.
    if self.has_retry_parameters_:
      self.has_retry_parameters_ = 0;
      if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
  def has_retry_parameters(self): return self.has_retry_parameters_
  def max_concurrent_requests(self): return self.max_concurrent_requests_
  def set_max_concurrent_requests(self, x):
    self.has_max_concurrent_requests_ = 1
    self.max_concurrent_requests_ = x
  def clear_max_concurrent_requests(self):
    if self.has_max_concurrent_requests_:
      self.has_max_concurrent_requests_ = 0
      self.max_concurrent_requests_ = 0
  def has_max_concurrent_requests(self): return self.has_max_concurrent_requests_
  def mode(self): return self.mode_
  def set_mode(self, x):
    self.has_mode_ = 1
    self.mode_ = x
  def clear_mode(self):
    if self.has_mode_:
      self.has_mode_ = 0
      self.mode_ = 0
  def has_mode(self): return self.has_mode_
  def acl(self):
    # Lazily create the sub-message under the lock (double-checked).
    if self.acl_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.acl_ is None: self.acl_ = TaskQueueAcl()
      finally:
        self.lazy_init_lock_.release()
    return self.acl_
  def mutable_acl(self): self.has_acl_ = 1; return self.acl()
  def clear_acl(self):
    # Warning: this method does not acquire the lock.
    if self.has_acl_:
      self.has_acl_ = 0;
      if self.acl_ is not None: self.acl_.Clear()
  def has_acl(self): return self.has_acl_
  # Repeated-field accessors for header_override.
  def header_override_size(self): return len(self.header_override_)
  def header_override_list(self): return self.header_override_
  def header_override(self, i):
    return self.header_override_[i]
  def mutable_header_override(self, i):
    return self.header_override_[i]
  def add_header_override(self):
    # Append a fresh element and return it for the caller to populate.
    x = TaskQueueHttpHeader()
    self.header_override_.append(x)
    return x
  def clear_header_override(self):
    self.header_override_ = []
  def MergeFrom(self, x):
    """Copy every present field of message `x` into self."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_queue_name()): self.set_queue_name(x.queue_name())
    if (x.has_bucket_refill_per_second()): self.set_bucket_refill_per_second(x.bucket_refill_per_second())
    if (x.has_bucket_capacity()): self.set_bucket_capacity(x.bucket_capacity())
    if (x.has_user_specified_rate()): self.set_user_specified_rate(x.user_specified_rate())
    if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
    if (x.has_max_concurrent_requests()): self.set_max_concurrent_requests(x.max_concurrent_requests())
    if (x.has_mode()): self.set_mode(x.mode())
    if (x.has_acl()): self.mutable_acl().MergeFrom(x.acl())
    for i in xrange(x.header_override_size()): self.add_header_override().CopyFrom(x.header_override(i))
  def Equals(self, x):
    """Field-by-field equality: presence flags and values must all match."""
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_queue_name_ != x.has_queue_name_: return 0
    if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
    if self.has_bucket_refill_per_second_ != x.has_bucket_refill_per_second_: return 0
    if self.has_bucket_refill_per_second_ and self.bucket_refill_per_second_ != x.bucket_refill_per_second_: return 0
    if self.has_bucket_capacity_ != x.has_bucket_capacity_: return 0
    if self.has_bucket_capacity_ and self.bucket_capacity_ != x.bucket_capacity_: return 0
    if self.has_user_specified_rate_ != x.has_user_specified_rate_: return 0
    if self.has_user_specified_rate_ and self.user_specified_rate_ != x.user_specified_rate_: return 0
    if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
    if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
    if self.has_max_concurrent_requests_ != x.has_max_concurrent_requests_: return 0
    if self.has_max_concurrent_requests_ and self.max_concurrent_requests_ != x.max_concurrent_requests_: return 0
    if self.has_mode_ != x.has_mode_: return 0
    if self.has_mode_ and self.mode_ != x.mode_: return 0
    if self.has_acl_ != x.has_acl_: return 0
    if self.has_acl_ and self.acl_ != x.acl_: return 0
    if len(self.header_override_) != len(x.header_override_): return 0
    for e1, e2 in zip(self.header_override_, x.header_override_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff all required fields (queue_name,
    bucket_refill_per_second, bucket_capacity) and all present
    sub-messages are initialized."""
    initialized = 1
    if (not self.has_queue_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: queue_name not set.')
    if (not self.has_bucket_refill_per_second_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: bucket_refill_per_second not set.')
    if (not self.has_bucket_capacity_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: bucket_capacity not set.')
    if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_acl_ and not self.acl_.IsInitialized(debug_strs)): initialized = 0
    for p in self.header_override_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    """Serialized size assuming required fields are set."""
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    n += self.lengthString(len(self.queue_name_))
    n += self.lengthVarInt64(self.bucket_capacity_)
    if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
    if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSize())
    if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
    if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSize())
    n += 1 * len(self.header_override_)
    for i in xrange(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSize())
    # + 11 = queue_name tag (1) + bucket_refill_per_second tag+fixed64 (9)
    # + bucket_capacity tag (1).
    return n + 11
  def ByteSizePartial(self):
    """Serialized size counting only fields actually present."""
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    if (self.has_queue_name_):
      n += 1
      n += self.lengthString(len(self.queue_name_))
    if (self.has_bucket_refill_per_second_):
      n += 9
    if (self.has_bucket_capacity_):
      n += 1
      n += self.lengthVarInt64(self.bucket_capacity_)
    if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
    if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSizePartial())
    if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
    if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSizePartial())
    n += 1 * len(self.header_override_)
    for i in xrange(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSizePartial())
    return n
  def Clear(self):
    self.clear_app_id()
    self.clear_queue_name()
    self.clear_bucket_refill_per_second()
    self.clear_bucket_capacity()
    self.clear_user_specified_rate()
    self.clear_retry_parameters()
    self.clear_max_concurrent_requests()
    self.clear_mode()
    self.clear_acl()
    self.clear_header_override()
  def OutputUnchecked(self, out):
    # Tags: 10/18/42 strings, 25 fixed64 double, 32/56/64 varints,
    # 50/74/82 embedded messages (length-prefixed).
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    out.putVarInt32(18)
    out.putPrefixedString(self.queue_name_)
    out.putVarInt32(25)
    out.putDouble(self.bucket_refill_per_second_)
    out.putVarInt32(32)
    out.putVarInt32(self.bucket_capacity_)
    if (self.has_user_specified_rate_):
      out.putVarInt32(42)
      out.putPrefixedString(self.user_specified_rate_)
    if (self.has_retry_parameters_):
      out.putVarInt32(50)
      out.putVarInt32(self.retry_parameters_.ByteSize())
      self.retry_parameters_.OutputUnchecked(out)
    if (self.has_max_concurrent_requests_):
      out.putVarInt32(56)
      out.putVarInt32(self.max_concurrent_requests_)
    if (self.has_mode_):
      out.putVarInt32(64)
      out.putVarInt32(self.mode_)
    if (self.has_acl_):
      out.putVarInt32(74)
      out.putVarInt32(self.acl_.ByteSize())
      self.acl_.OutputUnchecked(out)
    for i in xrange(len(self.header_override_)):
      out.putVarInt32(82)
      out.putVarInt32(self.header_override_[i].ByteSize())
      self.header_override_[i].OutputUnchecked(out)
  def OutputPartial(self, out):
    # Like OutputUnchecked, but required fields are emitted only if present.
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    if (self.has_queue_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.queue_name_)
    if (self.has_bucket_refill_per_second_):
      out.putVarInt32(25)
      out.putDouble(self.bucket_refill_per_second_)
    if (self.has_bucket_capacity_):
      out.putVarInt32(32)
      out.putVarInt32(self.bucket_capacity_)
    if (self.has_user_specified_rate_):
      out.putVarInt32(42)
      out.putPrefixedString(self.user_specified_rate_)
    if (self.has_retry_parameters_):
      out.putVarInt32(50)
      out.putVarInt32(self.retry_parameters_.ByteSizePartial())
      self.retry_parameters_.OutputPartial(out)
    if (self.has_max_concurrent_requests_):
      out.putVarInt32(56)
      out.putVarInt32(self.max_concurrent_requests_)
    if (self.has_mode_):
      out.putVarInt32(64)
      out.putVarInt32(self.mode_)
    if (self.has_acl_):
      out.putVarInt32(74)
      out.putVarInt32(self.acl_.ByteSizePartial())
      self.acl_.OutputPartial(out)
    for i in xrange(len(self.header_override_)):
      out.putVarInt32(82)
      out.putVarInt32(self.header_override_[i].ByteSizePartial())
      self.header_override_[i].OutputPartial(out)
  def TryMerge(self, d):
    """Parse fields from decoder `d`; embedded messages are parsed from a
    bounded sub-decoder, unknown tags are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_queue_name(d.getPrefixedString())
        continue
      if tt == 25:
        self.set_bucket_refill_per_second(d.getDouble())
        continue
      if tt == 32:
        self.set_bucket_capacity(d.getVarInt32())
        continue
      if tt == 42:
        self.set_user_specified_rate(d.getPrefixedString())
        continue
      if tt == 50:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_retry_parameters().TryMerge(tmp)
        continue
      if tt == 56:
        self.set_max_concurrent_requests(d.getVarInt32())
        continue
      if tt == 64:
        self.set_mode(d.getVarInt32())
        continue
      if tt == 74:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_acl().TryMerge(tmp)
        continue
      if tt == 82:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_header_override().TryMerge(tmp)
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Debug text dump of every present field (sub-messages in <...>)."""
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
    if self.has_bucket_refill_per_second_: res+=prefix+("bucket_refill_per_second: %s\n" % self.DebugFormat(self.bucket_refill_per_second_))
    if self.has_bucket_capacity_: res+=prefix+("bucket_capacity: %s\n" % self.DebugFormatInt32(self.bucket_capacity_))
    if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
    if self.has_retry_parameters_:
      res+=prefix+"retry_parameters <\n"
      res+=self.retry_parameters_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_max_concurrent_requests_: res+=prefix+("max_concurrent_requests: %s\n" % self.DebugFormatInt32(self.max_concurrent_requests_))
    if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
    if self.has_acl_:
      res+=prefix+"acl <\n"
      res+=self.acl_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.header_override_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("header_override%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple (class-body helper).
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field-number constants.
  kapp_id = 1
  kqueue_name = 2
  kbucket_refill_per_second = 3
  kbucket_capacity = 4
  kuser_specified_rate = 5
  kretry_parameters = 6
  kmax_concurrent_requests = 7
  kmode = 8
  kacl = 9
  kheader_override = 10
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app_id",
    2: "queue_name",
    3: "bucket_refill_per_second",
    4: "bucket_capacity",
    5: "user_specified_rate",
    6: "retry_parameters",
    7: "max_concurrent_requests",
    8: "mode",
    9: "acl",
    10: "header_override",
  }, 10)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.DOUBLE,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.STRING,
    7: ProtocolBuffer.Encoder.NUMERIC,
    8: ProtocolBuffer.Encoder.NUMERIC,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.STRING,
  }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateQueueRequest'
class TaskQueueUpdateQueueResponse(ProtocolBuffer.ProtocolMessage):
  """Empty response for UpdateQueue.

  The message declares no fields, so serialization is a no-op, equality is
  trivially true, and parsing just skips over anything on the wire.
  """
  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)
  def MergeFrom(self, x):
    # Nothing to copy, but merging a message into itself is still illegal.
    assert x is not self
  def Equals(self, x):
    # Two field-less messages always compare equal.
    return 1
  def IsInitialized(self, debug_strs=None):
    # No required fields, hence always initialized.
    return 1
  def ByteSize(self):
    return 0
  def ByteSizePartial(self):
    return 0
  def Clear(self):
    pass
  def OutputUnchecked(self, out):
    pass
  def OutputPartial(self, out):
    pass
  def TryMerge(self, d):
    """Consume the buffer, skipping unknown fields; tag 0 marks corruption."""
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)
  def __str__(self, prefix="", printElemNumber=0):
    return ""
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple (class-body helper).
    return tuple(sparse.get(i, default) for i in xrange(maxtag + 1))
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateQueueResponse'
class TaskQueueFetchQueuesRequest(ProtocolBuffer.ProtocolMessage):
  """Generated request to list queues: app_id (optional string, field 1)
  and max_rows (required int, field 2, varint tag 16)."""
  has_app_id_ = 0
  app_id_ = ""
  has_max_rows_ = 0
  max_rows_ = 0
  def __init__(self, contents=None):
    # Optionally initialize from an already-serialized byte string.
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors: getter, setter, clearer and presence test per field.
  def app_id(self): return self.app_id_
  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x
  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""
  def has_app_id(self): return self.has_app_id_
  def max_rows(self): return self.max_rows_
  def set_max_rows(self, x):
    self.has_max_rows_ = 1
    self.max_rows_ = x
  def clear_max_rows(self):
    if self.has_max_rows_:
      self.has_max_rows_ = 0
      self.max_rows_ = 0
  def has_max_rows(self): return self.has_max_rows_
  def MergeFrom(self, x):
    """Copy every present field of message `x` into self."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_max_rows()): self.set_max_rows(x.max_rows())
  def Equals(self, x):
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_max_rows_ != x.has_max_rows_: return 0
    if self.has_max_rows_ and self.max_rows_ != x.max_rows_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the required 'max_rows' field is set."""
    initialized = 1
    if (not self.has_max_rows_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: max_rows not set.')
    return initialized
  def ByteSize(self):
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    n += self.lengthVarInt64(self.max_rows_)
    # + 1 = the one-byte tag of the required 'max_rows' field.
    return n + 1
  def ByteSizePartial(self):
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    if (self.has_max_rows_):
      n += 1
      n += self.lengthVarInt64(self.max_rows_)
    return n
  def Clear(self):
    self.clear_app_id()
    self.clear_max_rows()
  def OutputUnchecked(self, out):
    # Tags: 10 = field 1 string, 16 = field 2 varint.
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    out.putVarInt32(16)
    out.putVarInt32(self.max_rows_)
  def OutputPartial(self, out):
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    if (self.has_max_rows_):
      out.putVarInt32(16)
      out.putVarInt32(self.max_rows_)
  def TryMerge(self, d):
    """Parse fields from decoder `d`; unknown tags are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_max_rows(d.getVarInt32())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    if self.has_max_rows_: res+=prefix+("max_rows: %s\n" % self.DebugFormatInt32(self.max_rows_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple (class-body helper).
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kapp_id = 1
  kmax_rows = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app_id",
    2: "max_rows",
  }, 2)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.NUMERIC,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueuesRequest'
class TaskQueueFetchQueuesResponse_Queue(ProtocolBuffer.ProtocolMessage):
  """Generated message describing one queue in a FetchQueues response.

  Fields: queue_name (required string), bucket_refill_per_second (required
  double), bucket_capacity (required double), user_specified_rate (optional
  string), paused (required per IsInitialized? -- set via flag like the
  others), retry_parameters / acl (lazily created sub-messages),
  max_concurrent_requests, mode, repeated header_override, and
  creator_name (defaults to "apphosting").
  """
  # Presence flags and defaults; sub-messages are created lazily.
  has_queue_name_ = 0
  queue_name_ = ""
  has_bucket_refill_per_second_ = 0
  bucket_refill_per_second_ = 0.0
  has_bucket_capacity_ = 0
  bucket_capacity_ = 0.0
  has_user_specified_rate_ = 0
  user_specified_rate_ = ""
  has_paused_ = 0
  paused_ = 0
  has_retry_parameters_ = 0
  retry_parameters_ = None
  has_max_concurrent_requests_ = 0
  max_concurrent_requests_ = 0
  has_mode_ = 0
  mode_ = 0
  has_acl_ = 0
  acl_ = None
  has_creator_name_ = 0
  creator_name_ = "apphosting"
  def __init__(self, contents=None):
    # Per-instance repeated field storage and the lock guarding lazy
    # sub-message creation in retry_parameters()/acl().
    self.header_override_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors: getter, setter, clearer and presence test per field.
  def queue_name(self): return self.queue_name_
  def set_queue_name(self, x):
    self.has_queue_name_ = 1
    self.queue_name_ = x
  def clear_queue_name(self):
    if self.has_queue_name_:
      self.has_queue_name_ = 0
      self.queue_name_ = ""
  def has_queue_name(self): return self.has_queue_name_
  def bucket_refill_per_second(self): return self.bucket_refill_per_second_
  def set_bucket_refill_per_second(self, x):
    self.has_bucket_refill_per_second_ = 1
    self.bucket_refill_per_second_ = x
  def clear_bucket_refill_per_second(self):
    if self.has_bucket_refill_per_second_:
      self.has_bucket_refill_per_second_ = 0
      self.bucket_refill_per_second_ = 0.0
  def has_bucket_refill_per_second(self): return self.has_bucket_refill_per_second_
  def bucket_capacity(self): return self.bucket_capacity_
  def set_bucket_capacity(self, x):
    self.has_bucket_capacity_ = 1
    self.bucket_capacity_ = x
  def clear_bucket_capacity(self):
    if self.has_bucket_capacity_:
      self.has_bucket_capacity_ = 0
      self.bucket_capacity_ = 0.0
  def has_bucket_capacity(self): return self.has_bucket_capacity_
  def user_specified_rate(self): return self.user_specified_rate_
  def set_user_specified_rate(self, x):
    self.has_user_specified_rate_ = 1
    self.user_specified_rate_ = x
  def clear_user_specified_rate(self):
    if self.has_user_specified_rate_:
      self.has_user_specified_rate_ = 0
      self.user_specified_rate_ = ""
  def has_user_specified_rate(self): return self.has_user_specified_rate_
  def paused(self): return self.paused_
  def set_paused(self, x):
    self.has_paused_ = 1
    self.paused_ = x
  def clear_paused(self):
    if self.has_paused_:
      self.has_paused_ = 0
      self.paused_ = 0
  def has_paused(self): return self.has_paused_
  def retry_parameters(self):
    # Lazily create the sub-message under the lock (double-checked).
    if self.retry_parameters_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
      finally:
        self.lazy_init_lock_.release()
    return self.retry_parameters_
  def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
  def clear_retry_parameters(self):
    # Warning: this method does not acquire the lock.
    if self.has_retry_parameters_:
      self.has_retry_parameters_ = 0;
      if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
  def has_retry_parameters(self): return self.has_retry_parameters_
  def max_concurrent_requests(self): return self.max_concurrent_requests_
  def set_max_concurrent_requests(self, x):
    self.has_max_concurrent_requests_ = 1
    self.max_concurrent_requests_ = x
  def clear_max_concurrent_requests(self):
    if self.has_max_concurrent_requests_:
      self.has_max_concurrent_requests_ = 0
      self.max_concurrent_requests_ = 0
  def has_max_concurrent_requests(self): return self.has_max_concurrent_requests_
  def mode(self): return self.mode_
  def set_mode(self, x):
    self.has_mode_ = 1
    self.mode_ = x
  def clear_mode(self):
    if self.has_mode_:
      self.has_mode_ = 0
      self.mode_ = 0
  def has_mode(self): return self.has_mode_
  def acl(self):
    # Lazily create the sub-message under the lock (double-checked).
    if self.acl_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.acl_ is None: self.acl_ = TaskQueueAcl()
      finally:
        self.lazy_init_lock_.release()
    return self.acl_
  def mutable_acl(self): self.has_acl_ = 1; return self.acl()
  def clear_acl(self):
    # Warning: this method does not acquire the lock.
    if self.has_acl_:
      self.has_acl_ = 0;
      if self.acl_ is not None: self.acl_.Clear()
  def has_acl(self): return self.has_acl_
  # Repeated-field accessors for header_override.
  def header_override_size(self): return len(self.header_override_)
  def header_override_list(self): return self.header_override_
  def header_override(self, i):
    return self.header_override_[i]
  def mutable_header_override(self, i):
    return self.header_override_[i]
  def add_header_override(self):
    # Append a fresh element and return it for the caller to populate.
    x = TaskQueueHttpHeader()
    self.header_override_.append(x)
    return x
  def clear_header_override(self):
    self.header_override_ = []
  def creator_name(self): return self.creator_name_
  def set_creator_name(self, x):
    self.has_creator_name_ = 1
    self.creator_name_ = x
  def clear_creator_name(self):
    if self.has_creator_name_:
      self.has_creator_name_ = 0
      self.creator_name_ = "apphosting"
  def has_creator_name(self): return self.has_creator_name_
  def MergeFrom(self, x):
    """Copy every present field of message `x` into self; repeated
    header_override elements are appended, not replaced."""
    assert x is not self
    if (x.has_queue_name()): self.set_queue_name(x.queue_name())
    if (x.has_bucket_refill_per_second()): self.set_bucket_refill_per_second(x.bucket_refill_per_second())
    if (x.has_bucket_capacity()): self.set_bucket_capacity(x.bucket_capacity())
    if (x.has_user_specified_rate()): self.set_user_specified_rate(x.user_specified_rate())
    if (x.has_paused()): self.set_paused(x.paused())
    if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
    if (x.has_max_concurrent_requests()): self.set_max_concurrent_requests(x.max_concurrent_requests())
    if (x.has_mode()): self.set_mode(x.mode())
    if (x.has_acl()): self.mutable_acl().MergeFrom(x.acl())
    for i in xrange(x.header_override_size()): self.add_header_override().CopyFrom(x.header_override(i))
    if (x.has_creator_name()): self.set_creator_name(x.creator_name())
  def Equals(self, x):
    """Field-by-field equality check; presence bits and values must match.

    Returns 1 when equal, 0 otherwise (protobuf-generated convention).
    """
    if x is self: return 1
    if self.has_queue_name_ != x.has_queue_name_: return 0
    if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
    if self.has_bucket_refill_per_second_ != x.has_bucket_refill_per_second_: return 0
    if self.has_bucket_refill_per_second_ and self.bucket_refill_per_second_ != x.bucket_refill_per_second_: return 0
    if self.has_bucket_capacity_ != x.has_bucket_capacity_: return 0
    if self.has_bucket_capacity_ and self.bucket_capacity_ != x.bucket_capacity_: return 0
    if self.has_user_specified_rate_ != x.has_user_specified_rate_: return 0
    if self.has_user_specified_rate_ and self.user_specified_rate_ != x.user_specified_rate_: return 0
    if self.has_paused_ != x.has_paused_: return 0
    if self.has_paused_ and self.paused_ != x.paused_: return 0
    if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
    if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
    if self.has_max_concurrent_requests_ != x.has_max_concurrent_requests_: return 0
    if self.has_max_concurrent_requests_ and self.max_concurrent_requests_ != x.max_concurrent_requests_: return 0
    if self.has_mode_ != x.has_mode_: return 0
    if self.has_mode_ and self.mode_ != x.mode_: return 0
    if self.has_acl_ != x.has_acl_: return 0
    if self.has_acl_ and self.acl_ != x.acl_: return 0
    # Repeated field: lengths must match and entries compare pairwise.
    if len(self.header_override_) != len(x.header_override_): return 0
    for e1, e2 in zip(self.header_override_, x.header_override_):
      if e1 != e2: return 0
    if self.has_creator_name_ != x.has_creator_name_: return 0
    if self.has_creator_name_ and self.creator_name_ != x.creator_name_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 if every required field is set, recursing into submessages.

    When *debug_strs* (a list) is supplied, a description of each missing
    required field is appended to it.
    """
    initialized = 1
    if (not self.has_queue_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: queue_name not set.')
    if (not self.has_bucket_refill_per_second_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: bucket_refill_per_second not set.')
    if (not self.has_bucket_capacity_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: bucket_capacity not set.')
    if (not self.has_paused_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: paused not set.')
    # Optional submessages only need to be initialized if present.
    if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_acl_ and not self.acl_.IsInitialized(debug_strs)): initialized = 0
    for p in self.header_override_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    """Serialized size in bytes, assuming all required fields are set."""
    n = 0
    n += self.lengthString(len(self.queue_name_))
    if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
    if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSize())
    if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
    if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSize())
    n += 1 * len(self.header_override_)
    for i in xrange(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSize())
    # 21 = fixed cost of the required fields: queue_name tag (1),
    # bucket_refill_per_second and bucket_capacity (tag + double = 9 each),
    # paused (tag + bool = 2); see OutputUnchecked.
    return n + 21
  def ByteSizePartial(self):
    """Like ByteSize, but tolerates (skips) unset required fields."""
    n = 0
    if (self.has_queue_name_):
      n += 1
      n += self.lengthString(len(self.queue_name_))
    if (self.has_bucket_refill_per_second_):
      n += 9
    if (self.has_bucket_capacity_):
      n += 9
    if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
    if (self.has_paused_):
      n += 2
    if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSizePartial())
    if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
    if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSizePartial())
    n += 1 * len(self.header_override_)
    for i in xrange(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSizePartial())
    if (self.has_creator_name_): n += 1 + self.lengthString(len(self.creator_name_))
    return n
def Clear(self):
self.clear_queue_name()
self.clear_bucket_refill_per_second()
self.clear_bucket_capacity()
self.clear_user_specified_rate()
self.clear_paused()
self.clear_retry_parameters()
self.clear_max_concurrent_requests()
self.clear_mode()
self.clear_acl()
self.clear_header_override()
self.clear_creator_name()
  def OutputUnchecked(self, out):
    """Serialize to encoder *out* without verifying required fields.

    The literal putVarInt32 arguments are the precomputed wire tags
    (field_number << 3 | wire_type); their order defines the wire format.
    """
    # queue_name: field 2, length-delimited (required).
    out.putVarInt32(18)
    out.putPrefixedString(self.queue_name_)
    # bucket_refill_per_second / bucket_capacity: fields 3-4, doubles (required).
    out.putVarInt32(25)
    out.putDouble(self.bucket_refill_per_second_)
    out.putVarInt32(33)
    out.putDouble(self.bucket_capacity_)
    if (self.has_user_specified_rate_):
      out.putVarInt32(42)
      out.putPrefixedString(self.user_specified_rate_)
    # paused: field 6, varint bool (required).
    out.putVarInt32(48)
    out.putBoolean(self.paused_)
    if (self.has_retry_parameters_):
      out.putVarInt32(58)
      out.putVarInt32(self.retry_parameters_.ByteSize())
      self.retry_parameters_.OutputUnchecked(out)
    if (self.has_max_concurrent_requests_):
      out.putVarInt32(64)
      out.putVarInt32(self.max_concurrent_requests_)
    if (self.has_mode_):
      out.putVarInt32(72)
      out.putVarInt32(self.mode_)
    if (self.has_acl_):
      out.putVarInt32(82)
      out.putVarInt32(self.acl_.ByteSize())
      self.acl_.OutputUnchecked(out)
    # header_override: field 11, repeated submessage.
    for i in xrange(len(self.header_override_)):
      out.putVarInt32(90)
      out.putVarInt32(self.header_override_[i].ByteSize())
      self.header_override_[i].OutputUnchecked(out)
    if (self.has_creator_name_):
      out.putVarInt32(98)
      out.putPrefixedString(self.creator_name_)
  def OutputPartial(self, out):
    """Serialize to *out*, emitting only fields that are actually set.

    Same wire tags as OutputUnchecked, but required fields are also
    guarded by their presence bits.
    """
    if (self.has_queue_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.queue_name_)
    if (self.has_bucket_refill_per_second_):
      out.putVarInt32(25)
      out.putDouble(self.bucket_refill_per_second_)
    if (self.has_bucket_capacity_):
      out.putVarInt32(33)
      out.putDouble(self.bucket_capacity_)
    if (self.has_user_specified_rate_):
      out.putVarInt32(42)
      out.putPrefixedString(self.user_specified_rate_)
    if (self.has_paused_):
      out.putVarInt32(48)
      out.putBoolean(self.paused_)
    if (self.has_retry_parameters_):
      out.putVarInt32(58)
      out.putVarInt32(self.retry_parameters_.ByteSizePartial())
      self.retry_parameters_.OutputPartial(out)
    if (self.has_max_concurrent_requests_):
      out.putVarInt32(64)
      out.putVarInt32(self.max_concurrent_requests_)
    if (self.has_mode_):
      out.putVarInt32(72)
      out.putVarInt32(self.mode_)
    if (self.has_acl_):
      out.putVarInt32(82)
      out.putVarInt32(self.acl_.ByteSizePartial())
      self.acl_.OutputPartial(out)
    for i in xrange(len(self.header_override_)):
      out.putVarInt32(90)
      out.putVarInt32(self.header_override_[i].ByteSizePartial())
      self.header_override_[i].OutputPartial(out)
    if (self.has_creator_name_):
      out.putVarInt32(98)
      out.putPrefixedString(self.creator_name_)
  def TryMerge(self, d):
    """Parse fields from decoder *d* into this message.

    Loops until the enclosing group's END tag (12) is seen; unknown
    fields are skipped, and tag 0 signals a decode error.
    """
    while 1:
      tt = d.getVarInt32()
      # Tag 12 is the ENDGROUP marker for this Queue group.
      if tt == 12: break
      if tt == 18:
        self.set_queue_name(d.getPrefixedString())
        continue
      if tt == 25:
        self.set_bucket_refill_per_second(d.getDouble())
        continue
      if tt == 33:
        self.set_bucket_capacity(d.getDouble())
        continue
      if tt == 42:
        self.set_user_specified_rate(d.getPrefixedString())
        continue
      if tt == 48:
        self.set_paused(d.getBoolean())
        continue
      if tt == 58:
        # Length-delimited submessage: decode from a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_retry_parameters().TryMerge(tmp)
        continue
      if tt == 64:
        self.set_max_concurrent_requests(d.getVarInt32())
        continue
      if tt == 72:
        self.set_mode(d.getVarInt32())
        continue
      if tt == 82:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_acl().TryMerge(tmp)
        continue
      if tt == 90:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_header_override().TryMerge(tmp)
        continue
      if tt == 98:
        self.set_creator_name(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Render a human-readable text dump, one set field per line.

    *prefix* is the indentation for this nesting level; when
    *printElemNumber* is true, repeated entries are numbered.
    """
    res=""
    if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
    if self.has_bucket_refill_per_second_: res+=prefix+("bucket_refill_per_second: %s\n" % self.DebugFormat(self.bucket_refill_per_second_))
    if self.has_bucket_capacity_: res+=prefix+("bucket_capacity: %s\n" % self.DebugFormat(self.bucket_capacity_))
    if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
    if self.has_paused_: res+=prefix+("paused: %s\n" % self.DebugFormatBool(self.paused_))
    if self.has_retry_parameters_:
      res+=prefix+"retry_parameters <\n"
      res+=self.retry_parameters_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_max_concurrent_requests_: res+=prefix+("max_concurrent_requests: %s\n" % self.DebugFormatInt32(self.max_concurrent_requests_))
    if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
    if self.has_acl_:
      res+=prefix+"acl <\n"
      res+=self.acl_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.header_override_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("header_override%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_creator_name_: res+=prefix+("creator_name: %s\n" % self.DebugFormatString(self.creator_name_))
    return res
class TaskQueueFetchQueuesResponse(ProtocolBuffer.ProtocolMessage):
  """FetchQueues response: a repeated group field `Queue`.

  Each entry is a TaskQueueFetchQueuesResponse_Queue describing one
  queue's configuration.  Generated-protobuf style message; edit with care.
  """
  def __init__(self, contents=None):
    self.queue_ = []
    if contents is not None: self.MergeFromString(contents)

  # Accessors for the repeated `queue` group.
  def queue_size(self): return len(self.queue_)
  def queue_list(self): return self.queue_

  def queue(self, i):
    return self.queue_[i]

  def mutable_queue(self, i):
    return self.queue_[i]

  def add_queue(self):
    # Appends a fresh Queue entry and returns it for in-place population.
    x = TaskQueueFetchQueuesResponse_Queue()
    self.queue_.append(x)
    return x

  def clear_queue(self):
    self.queue_ = []

  def MergeFrom(self, x):
    """Append copies of every queue entry from *x*."""
    assert x is not self
    for i in xrange(x.queue_size()): self.add_queue().CopyFrom(x.queue(i))

  def Equals(self, x):
    """Return 1 when both messages hold pairwise-equal queue entries."""
    if x is self: return 1
    if len(self.queue_) != len(x.queue_): return 0
    for e1, e2 in zip(self.queue_, x.queue_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 if every contained Queue entry is fully initialized."""
    initialized = 1
    for p in self.queue_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # 2 bytes per entry = START/END group tags (11/12) framing each Queue.
    n = 0
    n += 2 * len(self.queue_)
    for i in xrange(len(self.queue_)): n += self.queue_[i].ByteSize()
    return n

  def ByteSizePartial(self):
    n = 0
    n += 2 * len(self.queue_)
    for i in xrange(len(self.queue_)): n += self.queue_[i].ByteSizePartial()
    return n

  def Clear(self):
    self.clear_queue()

  def OutputUnchecked(self, out):
    """Serialize: each Queue wrapped in STARTGROUP(11)/ENDGROUP(12) tags."""
    for i in xrange(len(self.queue_)):
      out.putVarInt32(11)
      self.queue_[i].OutputUnchecked(out)
      out.putVarInt32(12)

  def OutputPartial(self, out):
    for i in xrange(len(self.queue_)):
      out.putVarInt32(11)
      self.queue_[i].OutputPartial(out)
      out.putVarInt32(12)

  def TryMerge(self, d):
    """Parse queue groups from decoder *d*; unknown fields are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_queue().TryMerge(d)
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text dump: one `Queue { ... }` section per entry."""
    res=""
    cnt=0
    for e in self.queue_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Queue%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Tag-number constants for the Queue group and its fields.
  kQueueGroup = 1
  kQueuequeue_name = 2
  kQueuebucket_refill_per_second = 3
  kQueuebucket_capacity = 4
  kQueueuser_specified_rate = 5
  kQueuepaused = 6
  kQueueretry_parameters = 7
  kQueuemax_concurrent_requests = 8
  kQueuemode = 9
  kQueueacl = 10
  kQueueheader_override = 11
  kQueuecreator_name = 12

  # Field-number -> name table used for text formatting.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "Queue",
    2: "queue_name",
    3: "bucket_refill_per_second",
    4: "bucket_capacity",
    5: "user_specified_rate",
    6: "paused",
    7: "retry_parameters",
    8: "max_concurrent_requests",
    9: "mode",
    10: "acl",
    11: "header_override",
    12: "creator_name",
  }, 12)

  # Field-number -> wire-type table used by the generic codec.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STARTGROUP,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.DOUBLE,
    4: ProtocolBuffer.Encoder.DOUBLE,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.NUMERIC,
    7: ProtocolBuffer.Encoder.STRING,
    8: ProtocolBuffer.Encoder.NUMERIC,
    9: ProtocolBuffer.Encoder.NUMERIC,
    10: ProtocolBuffer.Encoder.STRING,
    11: ProtocolBuffer.Encoder.STRING,
    12: ProtocolBuffer.Encoder.STRING,
  }, 12, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueuesResponse'
class TaskQueueFetchQueueStatsRequest(ProtocolBuffer.ProtocolMessage):
  """FetchQueueStats request.

  Fields: optional app_id (string), repeated queue_name (string), and
  optional max_num_tasks (int32).
  """
  has_app_id_ = 0
  app_id_ = ""
  has_max_num_tasks_ = 0
  max_num_tasks_ = 0

  def __init__(self, contents=None):
    self.queue_name_ = []
    if contents is not None: self.MergeFromString(contents)

  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  # Accessors for the repeated string field `queue_name`.
  def queue_name_size(self): return len(self.queue_name_)
  def queue_name_list(self): return self.queue_name_

  def queue_name(self, i):
    return self.queue_name_[i]

  def set_queue_name(self, i, x):
    self.queue_name_[i] = x

  def add_queue_name(self, x):
    self.queue_name_.append(x)

  def clear_queue_name(self):
    self.queue_name_ = []

  def max_num_tasks(self): return self.max_num_tasks_

  def set_max_num_tasks(self, x):
    self.has_max_num_tasks_ = 1
    self.max_num_tasks_ = x

  def clear_max_num_tasks(self):
    if self.has_max_num_tasks_:
      self.has_max_num_tasks_ = 0
      self.max_num_tasks_ = 0

  def has_max_num_tasks(self): return self.has_max_num_tasks_


  def MergeFrom(self, x):
    """Merge every set field of *x*; queue_name entries are appended."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    for i in xrange(x.queue_name_size()): self.add_queue_name(x.queue_name(i))
    if (x.has_max_num_tasks()): self.set_max_num_tasks(x.max_num_tasks())

  def Equals(self, x):
    """Field-by-field equality; returns 1 when equal, 0 otherwise."""
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if len(self.queue_name_) != len(x.queue_name_): return 0
    for e1, e2 in zip(self.queue_name_, x.queue_name_):
      if e1 != e2: return 0
    if self.has_max_num_tasks_ != x.has_max_num_tasks_: return 0
    if self.has_max_num_tasks_ and self.max_num_tasks_ != x.max_num_tasks_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields, so the message is always initialized.
    initialized = 1
    return initialized

  def ByteSize(self):
    """Serialized size in bytes (all fields optional/repeated)."""
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    n += 1 * len(self.queue_name_)
    for i in xrange(len(self.queue_name_)): n += self.lengthString(len(self.queue_name_[i]))
    if (self.has_max_num_tasks_): n += 1 + self.lengthVarInt64(self.max_num_tasks_)
    return n

  def ByteSizePartial(self):
    # Identical to ByteSize because there are no required fields.
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    n += 1 * len(self.queue_name_)
    for i in xrange(len(self.queue_name_)): n += self.lengthString(len(self.queue_name_[i]))
    if (self.has_max_num_tasks_): n += 1 + self.lengthVarInt64(self.max_num_tasks_)
    return n

  def Clear(self):
    self.clear_app_id()
    self.clear_queue_name()
    self.clear_max_num_tasks()

  def OutputUnchecked(self, out):
    """Serialize to *out*; tags are 10/18/24 for fields 1/2/3."""
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    for i in xrange(len(self.queue_name_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.queue_name_[i])
    if (self.has_max_num_tasks_):
      out.putVarInt32(24)
      out.putVarInt32(self.max_num_tasks_)

  def OutputPartial(self, out):
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    for i in xrange(len(self.queue_name_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.queue_name_[i])
    if (self.has_max_num_tasks_):
      out.putVarInt32(24)
      out.putVarInt32(self.max_num_tasks_)

  def TryMerge(self, d):
    """Parse fields from decoder *d*; unknown fields are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 18:
        self.add_queue_name(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_max_num_tasks(d.getVarInt32())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text dump of all set fields, one per line."""
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    cnt=0
    for e in self.queue_name_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("queue_name%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_max_num_tasks_: res+=prefix+("max_num_tasks: %s\n" % self.DebugFormatInt32(self.max_num_tasks_))
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants.
  kapp_id = 1
  kqueue_name = 2
  kmax_num_tasks = 3

  # Field-number -> name table used for text formatting.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app_id",
    2: "queue_name",
    3: "max_num_tasks",
  }, 3)

  # Field-number -> wire-type table used by the generic codec.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueueStatsRequest'
class TaskQueueScannerQueueInfo(ProtocolBuffer.ProtocolMessage):
  """Per-queue scanner statistics.

  Required: executed_last_minute, executed_last_hour (int64) and
  sampling_duration_seconds (double).  Optional: requests_in_flight
  (int32) and enforced_rate (double).
  """
  has_executed_last_minute_ = 0
  executed_last_minute_ = 0
  has_executed_last_hour_ = 0
  executed_last_hour_ = 0
  has_sampling_duration_seconds_ = 0
  sampling_duration_seconds_ = 0.0
  has_requests_in_flight_ = 0
  requests_in_flight_ = 0
  has_enforced_rate_ = 0
  enforced_rate_ = 0.0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def executed_last_minute(self): return self.executed_last_minute_

  def set_executed_last_minute(self, x):
    self.has_executed_last_minute_ = 1
    self.executed_last_minute_ = x

  def clear_executed_last_minute(self):
    if self.has_executed_last_minute_:
      self.has_executed_last_minute_ = 0
      self.executed_last_minute_ = 0

  def has_executed_last_minute(self): return self.has_executed_last_minute_

  def executed_last_hour(self): return self.executed_last_hour_

  def set_executed_last_hour(self, x):
    self.has_executed_last_hour_ = 1
    self.executed_last_hour_ = x

  def clear_executed_last_hour(self):
    if self.has_executed_last_hour_:
      self.has_executed_last_hour_ = 0
      self.executed_last_hour_ = 0

  def has_executed_last_hour(self): return self.has_executed_last_hour_

  def sampling_duration_seconds(self): return self.sampling_duration_seconds_

  def set_sampling_duration_seconds(self, x):
    self.has_sampling_duration_seconds_ = 1
    self.sampling_duration_seconds_ = x

  def clear_sampling_duration_seconds(self):
    if self.has_sampling_duration_seconds_:
      self.has_sampling_duration_seconds_ = 0
      self.sampling_duration_seconds_ = 0.0

  def has_sampling_duration_seconds(self): return self.has_sampling_duration_seconds_

  def requests_in_flight(self): return self.requests_in_flight_

  def set_requests_in_flight(self, x):
    self.has_requests_in_flight_ = 1
    self.requests_in_flight_ = x

  def clear_requests_in_flight(self):
    if self.has_requests_in_flight_:
      self.has_requests_in_flight_ = 0
      self.requests_in_flight_ = 0

  def has_requests_in_flight(self): return self.has_requests_in_flight_

  def enforced_rate(self): return self.enforced_rate_

  def set_enforced_rate(self, x):
    self.has_enforced_rate_ = 1
    self.enforced_rate_ = x

  def clear_enforced_rate(self):
    if self.has_enforced_rate_:
      self.has_enforced_rate_ = 0
      self.enforced_rate_ = 0.0

  def has_enforced_rate(self): return self.has_enforced_rate_


  def MergeFrom(self, x):
    """Overwrite this message's fields with every set field of *x*."""
    assert x is not self
    if (x.has_executed_last_minute()): self.set_executed_last_minute(x.executed_last_minute())
    if (x.has_executed_last_hour()): self.set_executed_last_hour(x.executed_last_hour())
    if (x.has_sampling_duration_seconds()): self.set_sampling_duration_seconds(x.sampling_duration_seconds())
    if (x.has_requests_in_flight()): self.set_requests_in_flight(x.requests_in_flight())
    if (x.has_enforced_rate()): self.set_enforced_rate(x.enforced_rate())

  def Equals(self, x):
    """Field-by-field equality; returns 1 when equal, 0 otherwise."""
    if x is self: return 1
    if self.has_executed_last_minute_ != x.has_executed_last_minute_: return 0
    if self.has_executed_last_minute_ and self.executed_last_minute_ != x.executed_last_minute_: return 0
    if self.has_executed_last_hour_ != x.has_executed_last_hour_: return 0
    if self.has_executed_last_hour_ and self.executed_last_hour_ != x.executed_last_hour_: return 0
    if self.has_sampling_duration_seconds_ != x.has_sampling_duration_seconds_: return 0
    if self.has_sampling_duration_seconds_ and self.sampling_duration_seconds_ != x.sampling_duration_seconds_: return 0
    if self.has_requests_in_flight_ != x.has_requests_in_flight_: return 0
    if self.has_requests_in_flight_ and self.requests_in_flight_ != x.requests_in_flight_: return 0
    if self.has_enforced_rate_ != x.has_enforced_rate_: return 0
    if self.has_enforced_rate_ and self.enforced_rate_ != x.enforced_rate_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 if the three required fields are all set."""
    initialized = 1
    if (not self.has_executed_last_minute_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: executed_last_minute not set.')
    if (not self.has_executed_last_hour_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: executed_last_hour not set.')
    if (not self.has_sampling_duration_seconds_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: sampling_duration_seconds not set.')
    return initialized

  def ByteSize(self):
    """Serialized size, assuming required fields are set."""
    n = 0
    n += self.lengthVarInt64(self.executed_last_minute_)
    n += self.lengthVarInt64(self.executed_last_hour_)
    if (self.has_requests_in_flight_): n += 1 + self.lengthVarInt64(self.requests_in_flight_)
    if (self.has_enforced_rate_): n += 9
    # 11 = two varint tags (1 each) + sampling_duration_seconds
    # (tag + double = 9); see OutputUnchecked.
    return n + 11

  def ByteSizePartial(self):
    """Like ByteSize, but tolerates unset required fields."""
    n = 0
    if (self.has_executed_last_minute_):
      n += 1
      n += self.lengthVarInt64(self.executed_last_minute_)
    if (self.has_executed_last_hour_):
      n += 1
      n += self.lengthVarInt64(self.executed_last_hour_)
    if (self.has_sampling_duration_seconds_):
      n += 9
    if (self.has_requests_in_flight_): n += 1 + self.lengthVarInt64(self.requests_in_flight_)
    if (self.has_enforced_rate_): n += 9
    return n

  def Clear(self):
    self.clear_executed_last_minute()
    self.clear_executed_last_hour()
    self.clear_sampling_duration_seconds()
    self.clear_requests_in_flight()
    self.clear_enforced_rate()

  def OutputUnchecked(self, out):
    """Serialize to *out* without verifying required fields."""
    out.putVarInt32(8)
    out.putVarInt64(self.executed_last_minute_)
    out.putVarInt32(16)
    out.putVarInt64(self.executed_last_hour_)
    out.putVarInt32(25)
    out.putDouble(self.sampling_duration_seconds_)
    if (self.has_requests_in_flight_):
      out.putVarInt32(32)
      out.putVarInt32(self.requests_in_flight_)
    if (self.has_enforced_rate_):
      out.putVarInt32(41)
      out.putDouble(self.enforced_rate_)

  def OutputPartial(self, out):
    """Serialize to *out*, emitting only fields that are set."""
    if (self.has_executed_last_minute_):
      out.putVarInt32(8)
      out.putVarInt64(self.executed_last_minute_)
    if (self.has_executed_last_hour_):
      out.putVarInt32(16)
      out.putVarInt64(self.executed_last_hour_)
    if (self.has_sampling_duration_seconds_):
      out.putVarInt32(25)
      out.putDouble(self.sampling_duration_seconds_)
    if (self.has_requests_in_flight_):
      out.putVarInt32(32)
      out.putVarInt32(self.requests_in_flight_)
    if (self.has_enforced_rate_):
      out.putVarInt32(41)
      out.putDouble(self.enforced_rate_)

  def TryMerge(self, d):
    """Parse fields from decoder *d*; unknown fields are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_executed_last_minute(d.getVarInt64())
        continue
      if tt == 16:
        self.set_executed_last_hour(d.getVarInt64())
        continue
      if tt == 25:
        self.set_sampling_duration_seconds(d.getDouble())
        continue
      if tt == 32:
        self.set_requests_in_flight(d.getVarInt32())
        continue
      if tt == 41:
        self.set_enforced_rate(d.getDouble())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text dump of all set fields, one per line."""
    res=""
    if self.has_executed_last_minute_: res+=prefix+("executed_last_minute: %s\n" % self.DebugFormatInt64(self.executed_last_minute_))
    if self.has_executed_last_hour_: res+=prefix+("executed_last_hour: %s\n" % self.DebugFormatInt64(self.executed_last_hour_))
    if self.has_sampling_duration_seconds_: res+=prefix+("sampling_duration_seconds: %s\n" % self.DebugFormat(self.sampling_duration_seconds_))
    if self.has_requests_in_flight_: res+=prefix+("requests_in_flight: %s\n" % self.DebugFormatInt32(self.requests_in_flight_))
    if self.has_enforced_rate_: res+=prefix+("enforced_rate: %s\n" % self.DebugFormat(self.enforced_rate_))
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants.
  kexecuted_last_minute = 1
  kexecuted_last_hour = 2
  ksampling_duration_seconds = 3
  krequests_in_flight = 4
  kenforced_rate = 5

  # Field-number -> name table used for text formatting.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "executed_last_minute",
    2: "executed_last_hour",
    3: "sampling_duration_seconds",
    4: "requests_in_flight",
    5: "enforced_rate",
  }, 5)

  # Field-number -> wire-type table used by the generic codec.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.DOUBLE,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.DOUBLE,
  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueScannerQueueInfo'
class TaskQueueFetchQueueStatsResponse_QueueStats(ProtocolBuffer.ProtocolMessage):
  """One QueueStats group entry of TaskQueueFetchQueueStatsResponse.

  Required: num_tasks (int32) and oldest_eta_usec (int64).  Optional:
  scanner_info (TaskQueueScannerQueueInfo), allocated lazily under
  lazy_init_lock_.
  """
  has_num_tasks_ = 0
  num_tasks_ = 0
  has_oldest_eta_usec_ = 0
  oldest_eta_usec_ = 0
  has_scanner_info_ = 0
  scanner_info_ = None

  def __init__(self, contents=None):
    # Lock guarding the lazy construction of scanner_info_.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def num_tasks(self): return self.num_tasks_

  def set_num_tasks(self, x):
    self.has_num_tasks_ = 1
    self.num_tasks_ = x

  def clear_num_tasks(self):
    if self.has_num_tasks_:
      self.has_num_tasks_ = 0
      self.num_tasks_ = 0

  def has_num_tasks(self): return self.has_num_tasks_

  def oldest_eta_usec(self): return self.oldest_eta_usec_

  def set_oldest_eta_usec(self, x):
    self.has_oldest_eta_usec_ = 1
    self.oldest_eta_usec_ = x

  def clear_oldest_eta_usec(self):
    if self.has_oldest_eta_usec_:
      self.has_oldest_eta_usec_ = 0
      self.oldest_eta_usec_ = 0

  def has_oldest_eta_usec(self): return self.has_oldest_eta_usec_

  def scanner_info(self):
    # Lazily create the submessage; double-checked under the lock.
    if self.scanner_info_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.scanner_info_ is None: self.scanner_info_ = TaskQueueScannerQueueInfo()
      finally:
        self.lazy_init_lock_.release()
    return self.scanner_info_

  def mutable_scanner_info(self): self.has_scanner_info_ = 1; return self.scanner_info()

  def clear_scanner_info(self):
    # Warning: this method does not acquire the lock.
    if self.has_scanner_info_:
      self.has_scanner_info_ = 0;
      if self.scanner_info_ is not None: self.scanner_info_.Clear()

  def has_scanner_info(self): return self.has_scanner_info_


  def MergeFrom(self, x):
    """Merge every set field of *x*; scanner_info is merged recursively."""
    assert x is not self
    if (x.has_num_tasks()): self.set_num_tasks(x.num_tasks())
    if (x.has_oldest_eta_usec()): self.set_oldest_eta_usec(x.oldest_eta_usec())
    if (x.has_scanner_info()): self.mutable_scanner_info().MergeFrom(x.scanner_info())

  def Equals(self, x):
    """Field-by-field equality; returns 1 when equal, 0 otherwise."""
    if x is self: return 1
    if self.has_num_tasks_ != x.has_num_tasks_: return 0
    if self.has_num_tasks_ and self.num_tasks_ != x.num_tasks_: return 0
    if self.has_oldest_eta_usec_ != x.has_oldest_eta_usec_: return 0
    if self.has_oldest_eta_usec_ and self.oldest_eta_usec_ != x.oldest_eta_usec_: return 0
    if self.has_scanner_info_ != x.has_scanner_info_: return 0
    if self.has_scanner_info_ and self.scanner_info_ != x.scanner_info_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 if both required fields are set and scanner_info (if any) is initialized."""
    initialized = 1
    if (not self.has_num_tasks_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: num_tasks not set.')
    if (not self.has_oldest_eta_usec_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: oldest_eta_usec not set.')
    if (self.has_scanner_info_ and not self.scanner_info_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    """Serialized size, assuming required fields are set."""
    n = 0
    n += self.lengthVarInt64(self.num_tasks_)
    n += self.lengthVarInt64(self.oldest_eta_usec_)
    if (self.has_scanner_info_): n += 1 + self.lengthString(self.scanner_info_.ByteSize())
    # 2 = the tag bytes of the two required varint fields.
    return n + 2

  def ByteSizePartial(self):
    """Like ByteSize, but tolerates unset required fields."""
    n = 0
    if (self.has_num_tasks_):
      n += 1
      n += self.lengthVarInt64(self.num_tasks_)
    if (self.has_oldest_eta_usec_):
      n += 1
      n += self.lengthVarInt64(self.oldest_eta_usec_)
    if (self.has_scanner_info_): n += 1 + self.lengthString(self.scanner_info_.ByteSizePartial())
    return n

  def Clear(self):
    self.clear_num_tasks()
    self.clear_oldest_eta_usec()
    self.clear_scanner_info()

  def OutputUnchecked(self, out):
    """Serialize to *out* without verifying required fields."""
    out.putVarInt32(16)
    out.putVarInt32(self.num_tasks_)
    out.putVarInt32(24)
    out.putVarInt64(self.oldest_eta_usec_)
    if (self.has_scanner_info_):
      out.putVarInt32(34)
      out.putVarInt32(self.scanner_info_.ByteSize())
      self.scanner_info_.OutputUnchecked(out)

  def OutputPartial(self, out):
    """Serialize to *out*, emitting only fields that are set."""
    if (self.has_num_tasks_):
      out.putVarInt32(16)
      out.putVarInt32(self.num_tasks_)
    if (self.has_oldest_eta_usec_):
      out.putVarInt32(24)
      out.putVarInt64(self.oldest_eta_usec_)
    if (self.has_scanner_info_):
      out.putVarInt32(34)
      out.putVarInt32(self.scanner_info_.ByteSizePartial())
      self.scanner_info_.OutputPartial(out)

  def TryMerge(self, d):
    """Parse fields from decoder *d* until the group's END tag (12)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 16:
        self.set_num_tasks(d.getVarInt32())
        continue
      if tt == 24:
        self.set_oldest_eta_usec(d.getVarInt64())
        continue
      if tt == 34:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_scanner_info().TryMerge(tmp)
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text dump of all set fields, one per line."""
    res=""
    if self.has_num_tasks_: res+=prefix+("num_tasks: %s\n" % self.DebugFormatInt32(self.num_tasks_))
    if self.has_oldest_eta_usec_: res+=prefix+("oldest_eta_usec: %s\n" % self.DebugFormatInt64(self.oldest_eta_usec_))
    if self.has_scanner_info_:
      res+=prefix+"scanner_info <\n"
      res+=self.scanner_info_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
class TaskQueueFetchQueueStatsResponse(ProtocolBuffer.ProtocolMessage):
  """FetchQueueStats response: a repeated group field `QueueStats`.

  Each entry is a TaskQueueFetchQueueStatsResponse_QueueStats.
  """
  def __init__(self, contents=None):
    self.queuestats_ = []
    if contents is not None: self.MergeFromString(contents)

  # Accessors for the repeated `queuestats` group.
  def queuestats_size(self): return len(self.queuestats_)
  def queuestats_list(self): return self.queuestats_

  def queuestats(self, i):
    return self.queuestats_[i]

  def mutable_queuestats(self, i):
    return self.queuestats_[i]

  def add_queuestats(self):
    # Appends a fresh QueueStats entry and returns it for population.
    x = TaskQueueFetchQueueStatsResponse_QueueStats()
    self.queuestats_.append(x)
    return x

  def clear_queuestats(self):
    self.queuestats_ = []

  def MergeFrom(self, x):
    """Append copies of every queuestats entry from *x*."""
    assert x is not self
    for i in xrange(x.queuestats_size()): self.add_queuestats().CopyFrom(x.queuestats(i))

  def Equals(self, x):
    """Return 1 when both messages hold pairwise-equal entries."""
    if x is self: return 1
    if len(self.queuestats_) != len(x.queuestats_): return 0
    for e1, e2 in zip(self.queuestats_, x.queuestats_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 if every contained QueueStats entry is initialized."""
    initialized = 1
    for p in self.queuestats_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # 2 bytes per entry = START/END group tags (11/12) framing each entry.
    n = 0
    n += 2 * len(self.queuestats_)
    for i in xrange(len(self.queuestats_)): n += self.queuestats_[i].ByteSize()
    return n

  def ByteSizePartial(self):
    n = 0
    n += 2 * len(self.queuestats_)
    for i in xrange(len(self.queuestats_)): n += self.queuestats_[i].ByteSizePartial()
    return n

  def Clear(self):
    self.clear_queuestats()

  def OutputUnchecked(self, out):
    """Serialize: each entry wrapped in STARTGROUP(11)/ENDGROUP(12) tags."""
    for i in xrange(len(self.queuestats_)):
      out.putVarInt32(11)
      self.queuestats_[i].OutputUnchecked(out)
      out.putVarInt32(12)

  def OutputPartial(self, out):
    for i in xrange(len(self.queuestats_)):
      out.putVarInt32(11)
      self.queuestats_[i].OutputPartial(out)
      out.putVarInt32(12)

  def TryMerge(self, d):
    """Parse QueueStats groups from decoder *d*; unknown fields skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_queuestats().TryMerge(d)
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text dump: one `QueueStats { ... }` section per entry."""
    res=""
    cnt=0
    for e in self.queuestats_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("QueueStats%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Tag-number constants for the QueueStats group and its fields.
  kQueueStatsGroup = 1
  kQueueStatsnum_tasks = 2
  kQueueStatsoldest_eta_usec = 3
  kQueueStatsscanner_info = 4

  # Field-number -> name table used for text formatting.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "QueueStats",
    2: "num_tasks",
    3: "oldest_eta_usec",
    4: "scanner_info",
  }, 4)

  # Field-number -> wire-type table used by the generic codec.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STARTGROUP,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.STRING,
  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueueStatsResponse'
class TaskQueuePauseQueueRequest(ProtocolBuffer.ProtocolMessage):
  """Request to pause (or un-pause) a task queue.

  All three fields are required: app_id (tag 1, string), queue_name
  (tag 2, string) and pause (tag 3, bool; presumably True pauses and
  False resumes — inferred from the field name only, TODO confirm).
  """

  # has_*_ flags track explicit field presence (proto1-style API);
  # the *_ attributes hold the per-field default values.
  has_app_id_ = 0
  app_id_ = ""
  has_queue_name_ = 0
  queue_name_ = ""
  has_pause_ = 0
  pause_ = 0

  def __init__(self, contents=None):
    # contents: optional serialized bytes to parse into this new instance.
    if contents is not None: self.MergeFromString(contents)

  # --- accessors ---

  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  def queue_name(self): return self.queue_name_

  def set_queue_name(self, x):
    self.has_queue_name_ = 1
    self.queue_name_ = x

  def clear_queue_name(self):
    if self.has_queue_name_:
      self.has_queue_name_ = 0
      self.queue_name_ = ""

  def has_queue_name(self): return self.has_queue_name_

  def pause(self): return self.pause_

  def set_pause(self, x):
    self.has_pause_ = 1
    self.pause_ = x

  def clear_pause(self):
    if self.has_pause_:
      self.has_pause_ = 0
      self.pause_ = 0

  def has_pause(self): return self.has_pause_

  def MergeFrom(self, x):
    """Copies every field that is set on x into this message."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_queue_name()): self.set_queue_name(x.queue_name())
    if (x.has_pause()): self.set_pause(x.pause())

  def Equals(self, x):
    # Field-by-field comparison including presence bits; returns 1/0.
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_queue_name_ != x.has_queue_name_: return 0
    if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
    if self.has_pause_ != x.has_pause_: return 0
    if self.has_pause_ and self.pause_ != x.pause_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff all required fields are set; appends reasons to debug_strs."""
    initialized = 1
    if (not self.has_app_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app_id not set.')
    if (not self.has_queue_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: queue_name not set.')
    if (not self.has_pause_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: pause not set.')
    return initialized

  def ByteSize(self):
    # + 4 = three 1-byte field tags plus the 1-byte boolean payload.
    n = 0
    n += self.lengthString(len(self.app_id_))
    n += self.lengthString(len(self.queue_name_))
    return n + 4

  def ByteSizePartial(self):
    n = 0
    if (self.has_app_id_):
      n += 1
      n += self.lengthString(len(self.app_id_))
    if (self.has_queue_name_):
      n += 1
      n += self.lengthString(len(self.queue_name_))
    if (self.has_pause_):
      n += 2
    return n

  def Clear(self):
    self.clear_app_id()
    self.clear_queue_name()
    self.clear_pause()

  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 (string), 18 = field 2 (string), 24 = field 3 (varint).
    out.putVarInt32(10)
    out.putPrefixedString(self.app_id_)
    out.putVarInt32(18)
    out.putPrefixedString(self.queue_name_)
    out.putVarInt32(24)
    out.putBoolean(self.pause_)

  def OutputPartial(self, out):
    # Emits only the fields that are actually set.
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    if (self.has_queue_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.queue_name_)
    if (self.has_pause_):
      out.putVarInt32(24)
      out.putBoolean(self.pause_)

  def TryMerge(self, d):
    """Parses tagged fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_queue_name(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_pause(d.getBoolean())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    # Human-readable text-format dump of the message.
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
    if self.has_pause_: res+=prefix+("pause: %s\n" % self.DebugFormatBool(self.pause_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple lookup table indexed by field number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants.
  kapp_id = 1
  kqueue_name = 2
  kpause = 3

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app_id",
    2: "queue_name",
    3: "pause",
  }, 3)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePauseQueueRequest'
class TaskQueuePauseQueueResponse(ProtocolBuffer.ProtocolMessage):
  """Empty response for the PauseQueue RPC; defines no fields."""

  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    # All instances are trivially equal (no fields).
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n

  def ByteSizePartial(self):
    n = 0
    return n

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def OutputPartial(self, out):
    pass

  def TryMerge(self, d):
    # No known fields: skip everything in the stream (error on tag 0).
    while d.avail() > 0:
      tt = d.getVarInt32()
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple lookup table indexed by field number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePauseQueueResponse'
class TaskQueuePurgeQueueRequest(ProtocolBuffer.ProtocolMessage):
  """Request to purge a queue.

  Fields: app_id (tag 1, string, optional) and queue_name (tag 2, string,
  required — see IsInitialized).
  """

  # Presence flags and per-field defaults (proto1-style API).
  has_app_id_ = 0
  app_id_ = ""
  has_queue_name_ = 0
  queue_name_ = ""

  def __init__(self, contents=None):
    # contents: optional serialized bytes to parse into this new instance.
    if contents is not None: self.MergeFromString(contents)

  # --- accessors ---

  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  def queue_name(self): return self.queue_name_

  def set_queue_name(self, x):
    self.has_queue_name_ = 1
    self.queue_name_ = x

  def clear_queue_name(self):
    if self.has_queue_name_:
      self.has_queue_name_ = 0
      self.queue_name_ = ""

  def has_queue_name(self): return self.has_queue_name_

  def MergeFrom(self, x):
    """Copies every field that is set on x into this message."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_queue_name()): self.set_queue_name(x.queue_name())

  def Equals(self, x):
    # Field-by-field comparison including presence bits; returns 1/0.
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_queue_name_ != x.has_queue_name_: return 0
    if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff queue_name (the only required field) is set."""
    initialized = 1
    if (not self.has_queue_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: queue_name not set.')
    return initialized

  def ByteSize(self):
    # + 1 = the 1-byte tag for the required queue_name field.
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    n += self.lengthString(len(self.queue_name_))
    return n + 1

  def ByteSizePartial(self):
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    if (self.has_queue_name_):
      n += 1
      n += self.lengthString(len(self.queue_name_))
    return n

  def Clear(self):
    self.clear_app_id()
    self.clear_queue_name()

  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 (string), 18 = field 2 (string).
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    out.putVarInt32(18)
    out.putPrefixedString(self.queue_name_)

  def OutputPartial(self, out):
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    if (self.has_queue_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.queue_name_)

  def TryMerge(self, d):
    """Parses tagged fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_queue_name(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple lookup table indexed by field number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants.
  kapp_id = 1
  kqueue_name = 2

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app_id",
    2: "queue_name",
  }, 2)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePurgeQueueRequest'
class TaskQueuePurgeQueueResponse(ProtocolBuffer.ProtocolMessage):
  """Empty response for the PurgeQueue RPC; defines no fields."""

  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    # All instances are trivially equal (no fields).
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n

  def ByteSizePartial(self):
    n = 0
    return n

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def OutputPartial(self, out):
    pass

  def TryMerge(self, d):
    # No known fields: skip everything in the stream (error on tag 0).
    while d.avail() > 0:
      tt = d.getVarInt32()
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple lookup table indexed by field number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePurgeQueueResponse'
class TaskQueueDeleteQueueRequest(ProtocolBuffer.ProtocolMessage):
  """Request to delete a queue.

  Both fields are required: app_id (tag 1, string) and queue_name
  (tag 2, string) — note app_id is required here, unlike PurgeQueueRequest.
  """

  # Presence flags and per-field defaults (proto1-style API).
  has_app_id_ = 0
  app_id_ = ""
  has_queue_name_ = 0
  queue_name_ = ""

  def __init__(self, contents=None):
    # contents: optional serialized bytes to parse into this new instance.
    if contents is not None: self.MergeFromString(contents)

  # --- accessors ---

  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  def queue_name(self): return self.queue_name_

  def set_queue_name(self, x):
    self.has_queue_name_ = 1
    self.queue_name_ = x

  def clear_queue_name(self):
    if self.has_queue_name_:
      self.has_queue_name_ = 0
      self.queue_name_ = ""

  def has_queue_name(self): return self.has_queue_name_

  def MergeFrom(self, x):
    """Copies every field that is set on x into this message."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_queue_name()): self.set_queue_name(x.queue_name())

  def Equals(self, x):
    # Field-by-field comparison including presence bits; returns 1/0.
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_queue_name_ != x.has_queue_name_: return 0
    if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff both required fields are set."""
    initialized = 1
    if (not self.has_app_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app_id not set.')
    if (not self.has_queue_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: queue_name not set.')
    return initialized

  def ByteSize(self):
    # + 2 = two 1-byte field tags.
    n = 0
    n += self.lengthString(len(self.app_id_))
    n += self.lengthString(len(self.queue_name_))
    return n + 2

  def ByteSizePartial(self):
    n = 0
    if (self.has_app_id_):
      n += 1
      n += self.lengthString(len(self.app_id_))
    if (self.has_queue_name_):
      n += 1
      n += self.lengthString(len(self.queue_name_))
    return n

  def Clear(self):
    self.clear_app_id()
    self.clear_queue_name()

  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 (string), 18 = field 2 (string).
    out.putVarInt32(10)
    out.putPrefixedString(self.app_id_)
    out.putVarInt32(18)
    out.putPrefixedString(self.queue_name_)

  def OutputPartial(self, out):
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    if (self.has_queue_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.queue_name_)

  def TryMerge(self, d):
    """Parses tagged fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_queue_name(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple lookup table indexed by field number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants.
  kapp_id = 1
  kqueue_name = 2

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app_id",
    2: "queue_name",
  }, 2)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteQueueRequest'
class TaskQueueDeleteQueueResponse(ProtocolBuffer.ProtocolMessage):
  """Empty response for the DeleteQueue RPC; defines no fields."""

  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    # All instances are trivially equal (no fields).
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n

  def ByteSizePartial(self):
    n = 0
    return n

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def OutputPartial(self, out):
    pass

  def TryMerge(self, d):
    # No known fields: skip everything in the stream (error on tag 0).
    while d.avail() > 0:
      tt = d.getVarInt32()
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple lookup table indexed by field number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteQueueResponse'
class TaskQueueDeleteGroupRequest(ProtocolBuffer.ProtocolMessage):
  """Request to delete a queue group.

  Single required field: app_id (tag 1, string) identifying the group —
  presumably every queue belonging to that app (TODO confirm).
  """

  # Presence flag and default (proto1-style API).
  has_app_id_ = 0
  app_id_ = ""

  def __init__(self, contents=None):
    # contents: optional serialized bytes to parse into this new instance.
    if contents is not None: self.MergeFromString(contents)

  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  def MergeFrom(self, x):
    """Copies app_id from x when it is set there."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())

  def Equals(self, x):
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff app_id (the only required field) is set."""
    initialized = 1
    if (not self.has_app_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app_id not set.')
    return initialized

  def ByteSize(self):
    # + 1 = the 1-byte tag for the required app_id field.
    n = 0
    n += self.lengthString(len(self.app_id_))
    return n + 1

  def ByteSizePartial(self):
    n = 0
    if (self.has_app_id_):
      n += 1
      n += self.lengthString(len(self.app_id_))
    return n

  def Clear(self):
    self.clear_app_id()

  def OutputUnchecked(self, out):
    # Wire tag 10 = field 1 (string).
    out.putVarInt32(10)
    out.putPrefixedString(self.app_id_)

  def OutputPartial(self, out):
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)

  def TryMerge(self, d):
    """Parses tagged fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple lookup table indexed by field number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constant.
  kapp_id = 1

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app_id",
  }, 1)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteGroupRequest'
class TaskQueueDeleteGroupResponse(ProtocolBuffer.ProtocolMessage):
  """Empty response for the DeleteGroup RPC; defines no fields."""

  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    # All instances are trivially equal (no fields).
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n

  def ByteSizePartial(self):
    n = 0
    return n

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def OutputPartial(self, out):
    pass

  def TryMerge(self, d):
    # No known fields: skip everything in the stream (error on tag 0).
    while d.avail() > 0:
      tt = d.getVarInt32()
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple lookup table indexed by field number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteGroupResponse'
class TaskQueueQueryTasksRequest(ProtocolBuffer.ProtocolMessage):
  """Request to page through tasks on a queue.

  Required: queue_name (tag 2). Optional: app_id (tag 1),
  start_task_name (tag 3) / start_eta_usec (tag 4) / start_tag (tag 6)
  as the paging cursor, and max_rows (tag 5, default 1) as the page size.
  """

  # Presence flags and per-field defaults (proto1-style API).
  # Note max_rows defaults to 1, not 0.
  has_app_id_ = 0
  app_id_ = ""
  has_queue_name_ = 0
  queue_name_ = ""
  has_start_task_name_ = 0
  start_task_name_ = ""
  has_start_eta_usec_ = 0
  start_eta_usec_ = 0
  has_start_tag_ = 0
  start_tag_ = ""
  has_max_rows_ = 0
  max_rows_ = 1

  def __init__(self, contents=None):
    # contents: optional serialized bytes to parse into this new instance.
    if contents is not None: self.MergeFromString(contents)

  # --- accessors ---

  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  def queue_name(self): return self.queue_name_

  def set_queue_name(self, x):
    self.has_queue_name_ = 1
    self.queue_name_ = x

  def clear_queue_name(self):
    if self.has_queue_name_:
      self.has_queue_name_ = 0
      self.queue_name_ = ""

  def has_queue_name(self): return self.has_queue_name_

  def start_task_name(self): return self.start_task_name_

  def set_start_task_name(self, x):
    self.has_start_task_name_ = 1
    self.start_task_name_ = x

  def clear_start_task_name(self):
    if self.has_start_task_name_:
      self.has_start_task_name_ = 0
      self.start_task_name_ = ""

  def has_start_task_name(self): return self.has_start_task_name_

  def start_eta_usec(self): return self.start_eta_usec_

  def set_start_eta_usec(self, x):
    self.has_start_eta_usec_ = 1
    self.start_eta_usec_ = x

  def clear_start_eta_usec(self):
    if self.has_start_eta_usec_:
      self.has_start_eta_usec_ = 0
      self.start_eta_usec_ = 0

  def has_start_eta_usec(self): return self.has_start_eta_usec_

  def start_tag(self): return self.start_tag_

  def set_start_tag(self, x):
    self.has_start_tag_ = 1
    self.start_tag_ = x

  def clear_start_tag(self):
    if self.has_start_tag_:
      self.has_start_tag_ = 0
      self.start_tag_ = ""

  def has_start_tag(self): return self.has_start_tag_

  def max_rows(self): return self.max_rows_

  def set_max_rows(self, x):
    self.has_max_rows_ = 1
    self.max_rows_ = x

  def clear_max_rows(self):
    if self.has_max_rows_:
      self.has_max_rows_ = 0
      self.max_rows_ = 1

  def has_max_rows(self): return self.has_max_rows_

  def MergeFrom(self, x):
    """Copies every field that is set on x into this message."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_queue_name()): self.set_queue_name(x.queue_name())
    if (x.has_start_task_name()): self.set_start_task_name(x.start_task_name())
    if (x.has_start_eta_usec()): self.set_start_eta_usec(x.start_eta_usec())
    if (x.has_start_tag()): self.set_start_tag(x.start_tag())
    if (x.has_max_rows()): self.set_max_rows(x.max_rows())

  def Equals(self, x):
    # Field-by-field comparison including presence bits; returns 1/0.
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_queue_name_ != x.has_queue_name_: return 0
    if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
    if self.has_start_task_name_ != x.has_start_task_name_: return 0
    if self.has_start_task_name_ and self.start_task_name_ != x.start_task_name_: return 0
    if self.has_start_eta_usec_ != x.has_start_eta_usec_: return 0
    if self.has_start_eta_usec_ and self.start_eta_usec_ != x.start_eta_usec_: return 0
    if self.has_start_tag_ != x.has_start_tag_: return 0
    if self.has_start_tag_ and self.start_tag_ != x.start_tag_: return 0
    if self.has_max_rows_ != x.has_max_rows_: return 0
    if self.has_max_rows_ and self.max_rows_ != x.max_rows_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff queue_name (the only required field) is set."""
    initialized = 1
    if (not self.has_queue_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: queue_name not set.')
    return initialized

  def ByteSize(self):
    # + 1 = the 1-byte tag for the required queue_name field.
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    n += self.lengthString(len(self.queue_name_))
    if (self.has_start_task_name_): n += 1 + self.lengthString(len(self.start_task_name_))
    if (self.has_start_eta_usec_): n += 1 + self.lengthVarInt64(self.start_eta_usec_)
    if (self.has_start_tag_): n += 1 + self.lengthString(len(self.start_tag_))
    if (self.has_max_rows_): n += 1 + self.lengthVarInt64(self.max_rows_)
    return n + 1

  def ByteSizePartial(self):
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    if (self.has_queue_name_):
      n += 1
      n += self.lengthString(len(self.queue_name_))
    if (self.has_start_task_name_): n += 1 + self.lengthString(len(self.start_task_name_))
    if (self.has_start_eta_usec_): n += 1 + self.lengthVarInt64(self.start_eta_usec_)
    if (self.has_start_tag_): n += 1 + self.lengthString(len(self.start_tag_))
    if (self.has_max_rows_): n += 1 + self.lengthVarInt64(self.max_rows_)
    return n

  def Clear(self):
    self.clear_app_id()
    self.clear_queue_name()
    self.clear_start_task_name()
    self.clear_start_eta_usec()
    self.clear_start_tag()
    self.clear_max_rows()

  def OutputUnchecked(self, out):
    # Wire tags: 10/18/26/50 = length-prefixed strings (fields 1,2,3,6);
    # 32/40 = varints (fields 4,5). max_rows (40) is written before
    # start_tag (50) because field 5 precedes field 6.
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    out.putVarInt32(18)
    out.putPrefixedString(self.queue_name_)
    if (self.has_start_task_name_):
      out.putVarInt32(26)
      out.putPrefixedString(self.start_task_name_)
    if (self.has_start_eta_usec_):
      out.putVarInt32(32)
      out.putVarInt64(self.start_eta_usec_)
    if (self.has_max_rows_):
      out.putVarInt32(40)
      out.putVarInt32(self.max_rows_)
    if (self.has_start_tag_):
      out.putVarInt32(50)
      out.putPrefixedString(self.start_tag_)

  def OutputPartial(self, out):
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    if (self.has_queue_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.queue_name_)
    if (self.has_start_task_name_):
      out.putVarInt32(26)
      out.putPrefixedString(self.start_task_name_)
    if (self.has_start_eta_usec_):
      out.putVarInt32(32)
      out.putVarInt64(self.start_eta_usec_)
    if (self.has_max_rows_):
      out.putVarInt32(40)
      out.putVarInt32(self.max_rows_)
    if (self.has_start_tag_):
      out.putVarInt32(50)
      out.putPrefixedString(self.start_tag_)

  def TryMerge(self, d):
    """Parses tagged fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_queue_name(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_start_task_name(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_start_eta_usec(d.getVarInt64())
        continue
      if tt == 40:
        self.set_max_rows(d.getVarInt32())
        continue
      if tt == 50:
        self.set_start_tag(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    # Human-readable text-format dump of the message.
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
    if self.has_start_task_name_: res+=prefix+("start_task_name: %s\n" % self.DebugFormatString(self.start_task_name_))
    if self.has_start_eta_usec_: res+=prefix+("start_eta_usec: %s\n" % self.DebugFormatInt64(self.start_eta_usec_))
    if self.has_start_tag_: res+=prefix+("start_tag: %s\n" % self.DebugFormatString(self.start_tag_))
    if self.has_max_rows_: res+=prefix+("max_rows: %s\n" % self.DebugFormatInt32(self.max_rows_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple lookup table indexed by field number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants (note start_tag = 6 was added after max_rows = 5).
  kapp_id = 1
  kqueue_name = 2
  kstart_task_name = 3
  kstart_eta_usec = 4
  kstart_tag = 6
  kmax_rows = 5

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app_id",
    2: "queue_name",
    3: "start_task_name",
    4: "start_eta_usec",
    5: "max_rows",
    6: "start_tag",
  }, 6)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.NUMERIC,
    6: ProtocolBuffer.Encoder.STRING,
  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryTasksRequest'
class TaskQueueQueryTasksResponse_TaskHeader(ProtocolBuffer.ProtocolMessage):
  """Nested group message — a single key/value header (presumably an HTTP
  request header, inferred from the class name; TODO confirm).

  Both fields are required: key (tag 8, wire tag 66) and value (tag 9,
  wire tag 74). As a group member, parsing stops at the enclosing
  ENDGROUP tag 60 rather than at end-of-stream.
  """

  # Presence flags and per-field defaults (proto1-style API).
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""

  def __init__(self, contents=None):
    # contents: optional serialized bytes to parse into this new instance.
    if contents is not None: self.MergeFromString(contents)

  def key(self): return self.key_

  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x

  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""

  def has_key(self): return self.has_key_

  def value(self): return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""

  def has_value(self): return self.has_value_

  def MergeFrom(self, x):
    """Copies every field that is set on x into this message."""
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    # Field-by-field comparison including presence bits; returns 1/0.
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff both required fields are set."""
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    # + 2 = two 1-byte field tags.
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    return n + 2

  def ByteSizePartial(self):
    n = 0
    if (self.has_key_):
      n += 1
      n += self.lengthString(len(self.key_))
    if (self.has_value_):
      n += 1
      n += self.lengthString(len(self.value_))
    return n

  def Clear(self):
    self.clear_key()
    self.clear_value()

  def OutputUnchecked(self, out):
    # Wire tags: 66 = field 8 (string), 74 = field 9 (string).
    out.putVarInt32(66)
    out.putPrefixedString(self.key_)
    out.putVarInt32(74)
    out.putPrefixedString(self.value_)

  def OutputPartial(self, out):
    if (self.has_key_):
      out.putVarInt32(66)
      out.putPrefixedString(self.key_)
    if (self.has_value_):
      out.putVarInt32(74)
      out.putPrefixedString(self.value_)

  def TryMerge(self, d):
    """Parses fields until the enclosing group's ENDGROUP tag (60)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 60: break
      if tt == 66:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 74:
        self.set_value(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
    return res
class TaskQueueQueryTasksResponse_TaskCronTimetable(ProtocolBuffer.ProtocolMessage):
  """Nested group message — a task's cron timetable.

  Both fields are required: schedule (tag 14, wire tag 114) and timezone
  (tag 15, wire tag 122). As a group member, parsing stops at the
  enclosing ENDGROUP tag 108 rather than at end-of-stream.
  """

  # Presence flags and per-field defaults (proto1-style API).
  has_schedule_ = 0
  schedule_ = ""
  has_timezone_ = 0
  timezone_ = ""

  def __init__(self, contents=None):
    # contents: optional serialized bytes to parse into this new instance.
    if contents is not None: self.MergeFromString(contents)

  def schedule(self): return self.schedule_

  def set_schedule(self, x):
    self.has_schedule_ = 1
    self.schedule_ = x

  def clear_schedule(self):
    if self.has_schedule_:
      self.has_schedule_ = 0
      self.schedule_ = ""

  def has_schedule(self): return self.has_schedule_

  def timezone(self): return self.timezone_

  def set_timezone(self, x):
    self.has_timezone_ = 1
    self.timezone_ = x

  def clear_timezone(self):
    if self.has_timezone_:
      self.has_timezone_ = 0
      self.timezone_ = ""

  def has_timezone(self): return self.has_timezone_

  def MergeFrom(self, x):
    """Copies every field that is set on x into this message."""
    assert x is not self
    if (x.has_schedule()): self.set_schedule(x.schedule())
    if (x.has_timezone()): self.set_timezone(x.timezone())

  def Equals(self, x):
    # Field-by-field comparison including presence bits; returns 1/0.
    if x is self: return 1
    if self.has_schedule_ != x.has_schedule_: return 0
    if self.has_schedule_ and self.schedule_ != x.schedule_: return 0
    if self.has_timezone_ != x.has_timezone_: return 0
    if self.has_timezone_ and self.timezone_ != x.timezone_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff both required fields are set."""
    initialized = 1
    if (not self.has_schedule_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: schedule not set.')
    if (not self.has_timezone_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: timezone not set.')
    return initialized

  def ByteSize(self):
    # + 2 = two 1-byte field tags.
    n = 0
    n += self.lengthString(len(self.schedule_))
    n += self.lengthString(len(self.timezone_))
    return n + 2

  def ByteSizePartial(self):
    n = 0
    if (self.has_schedule_):
      n += 1
      n += self.lengthString(len(self.schedule_))
    if (self.has_timezone_):
      n += 1
      n += self.lengthString(len(self.timezone_))
    return n

  def Clear(self):
    self.clear_schedule()
    self.clear_timezone()

  def OutputUnchecked(self, out):
    # Wire tags: 114 = field 14 (string), 122 = field 15 (string).
    out.putVarInt32(114)
    out.putPrefixedString(self.schedule_)
    out.putVarInt32(122)
    out.putPrefixedString(self.timezone_)

  def OutputPartial(self, out):
    if (self.has_schedule_):
      out.putVarInt32(114)
      out.putPrefixedString(self.schedule_)
    if (self.has_timezone_):
      out.putVarInt32(122)
      out.putPrefixedString(self.timezone_)

  def TryMerge(self, d):
    """Parses fields until the enclosing group's ENDGROUP tag (108)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 108: break
      if tt == 114:
        self.set_schedule(d.getPrefixedString())
        continue
      if tt == 122:
        self.set_timezone(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_schedule_: res+=prefix+("schedule: %s\n" % self.DebugFormatString(self.schedule_))
    if self.has_timezone_: res+=prefix+("timezone: %s\n" % self.DebugFormatString(self.timezone_))
    return res
class TaskQueueQueryTasksResponse_TaskRunLog(ProtocolBuffer.ProtocolMessage):
  """Generated message for the RunLog group of a queried task.

  Records timing/outcome data for a task run, encoded as a proto1-style
  group (end-group tag 132 == field 16, wire type 4):
    dispatched_usec (required int64, field 17, tag 136)
    lag_usec        (required int64, field 18, tag 144)
    elapsed_usec    (required int64, field 19, tag 152)
    response_code   (optional int64, field 20, tag 160)
    retry_reason    (optional string, field 27, tag 218)

  Codegen output; follows the standard generated accessor convention.
  """
  # Presence flags and defaults for each field.
  has_dispatched_usec_ = 0
  dispatched_usec_ = 0
  has_lag_usec_ = 0
  lag_usec_ = 0
  has_elapsed_usec_ = 0
  elapsed_usec_ = 0
  has_response_code_ = 0
  response_code_ = 0
  has_retry_reason_ = 0
  retry_reason_ = ""
  def __init__(self, contents=None):
    # Optionally initialize by decoding an already-encoded byte string.
    if contents is not None: self.MergeFromString(contents)
  # --- dispatched_usec accessors ---
  def dispatched_usec(self): return self.dispatched_usec_
  def set_dispatched_usec(self, x):
    self.has_dispatched_usec_ = 1
    self.dispatched_usec_ = x
  def clear_dispatched_usec(self):
    if self.has_dispatched_usec_:
      self.has_dispatched_usec_ = 0
      self.dispatched_usec_ = 0
  def has_dispatched_usec(self): return self.has_dispatched_usec_
  # --- lag_usec accessors ---
  def lag_usec(self): return self.lag_usec_
  def set_lag_usec(self, x):
    self.has_lag_usec_ = 1
    self.lag_usec_ = x
  def clear_lag_usec(self):
    if self.has_lag_usec_:
      self.has_lag_usec_ = 0
      self.lag_usec_ = 0
  def has_lag_usec(self): return self.has_lag_usec_
  # --- elapsed_usec accessors ---
  def elapsed_usec(self): return self.elapsed_usec_
  def set_elapsed_usec(self, x):
    self.has_elapsed_usec_ = 1
    self.elapsed_usec_ = x
  def clear_elapsed_usec(self):
    if self.has_elapsed_usec_:
      self.has_elapsed_usec_ = 0
      self.elapsed_usec_ = 0
  def has_elapsed_usec(self): return self.has_elapsed_usec_
  # --- response_code accessors ---
  def response_code(self): return self.response_code_
  def set_response_code(self, x):
    self.has_response_code_ = 1
    self.response_code_ = x
  def clear_response_code(self):
    if self.has_response_code_:
      self.has_response_code_ = 0
      self.response_code_ = 0
  def has_response_code(self): return self.has_response_code_
  # --- retry_reason accessors ---
  def retry_reason(self): return self.retry_reason_
  def set_retry_reason(self, x):
    self.has_retry_reason_ = 1
    self.retry_reason_ = x
  def clear_retry_reason(self):
    if self.has_retry_reason_:
      self.has_retry_reason_ = 0
      self.retry_reason_ = ""
  def has_retry_reason(self): return self.has_retry_reason_
  def MergeFrom(self, x):
    # Copy every field that is set on x into self.
    assert x is not self
    if (x.has_dispatched_usec()): self.set_dispatched_usec(x.dispatched_usec())
    if (x.has_lag_usec()): self.set_lag_usec(x.lag_usec())
    if (x.has_elapsed_usec()): self.set_elapsed_usec(x.elapsed_usec())
    if (x.has_response_code()): self.set_response_code(x.response_code())
    if (x.has_retry_reason()): self.set_retry_reason(x.retry_reason())
  def Equals(self, x):
    # Field-by-field equality, including presence bits. Returns 1/0.
    if x is self: return 1
    if self.has_dispatched_usec_ != x.has_dispatched_usec_: return 0
    if self.has_dispatched_usec_ and self.dispatched_usec_ != x.dispatched_usec_: return 0
    if self.has_lag_usec_ != x.has_lag_usec_: return 0
    if self.has_lag_usec_ and self.lag_usec_ != x.lag_usec_: return 0
    if self.has_elapsed_usec_ != x.has_elapsed_usec_: return 0
    if self.has_elapsed_usec_ and self.elapsed_usec_ != x.elapsed_usec_: return 0
    if self.has_response_code_ != x.has_response_code_: return 0
    if self.has_response_code_ and self.response_code_ != x.response_code_: return 0
    if self.has_retry_reason_ != x.has_retry_reason_: return 0
    if self.has_retry_reason_ and self.retry_reason_ != x.retry_reason_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # The three *_usec fields are required; report any that are missing.
    initialized = 1
    if (not self.has_dispatched_usec_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: dispatched_usec not set.')
    if (not self.has_lag_usec_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: lag_usec not set.')
    if (not self.has_elapsed_usec_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: elapsed_usec not set.')
    return initialized
  def ByteSize(self):
    # Encoded size assuming all required fields are set.
    n = 0
    n += self.lengthVarInt64(self.dispatched_usec_)
    n += self.lengthVarInt64(self.lag_usec_)
    n += self.lengthVarInt64(self.elapsed_usec_)
    if (self.has_response_code_): n += 2 + self.lengthVarInt64(self.response_code_)
    if (self.has_retry_reason_): n += 2 + self.lengthString(len(self.retry_reason_))
    return n + 6  # three two-byte field tags (136, 144, 152)
  def ByteSizePartial(self):
    # Like ByteSize but tolerates missing required fields.
    n = 0
    if (self.has_dispatched_usec_):
      n += 2
      n += self.lengthVarInt64(self.dispatched_usec_)
    if (self.has_lag_usec_):
      n += 2
      n += self.lengthVarInt64(self.lag_usec_)
    if (self.has_elapsed_usec_):
      n += 2
      n += self.lengthVarInt64(self.elapsed_usec_)
    if (self.has_response_code_): n += 2 + self.lengthVarInt64(self.response_code_)
    if (self.has_retry_reason_): n += 2 + self.lengthString(len(self.retry_reason_))
    return n
  def Clear(self):
    self.clear_dispatched_usec()
    self.clear_lag_usec()
    self.clear_elapsed_usec()
    self.clear_response_code()
    self.clear_retry_reason()
  def OutputUnchecked(self, out):
    # Serialize without checking required-field presence.
    out.putVarInt32(136)
    out.putVarInt64(self.dispatched_usec_)
    out.putVarInt32(144)
    out.putVarInt64(self.lag_usec_)
    out.putVarInt32(152)
    out.putVarInt64(self.elapsed_usec_)
    if (self.has_response_code_):
      out.putVarInt32(160)
      out.putVarInt64(self.response_code_)
    if (self.has_retry_reason_):
      out.putVarInt32(218)
      out.putPrefixedString(self.retry_reason_)
  def OutputPartial(self, out):
    # Serialize only the fields that are actually set.
    if (self.has_dispatched_usec_):
      out.putVarInt32(136)
      out.putVarInt64(self.dispatched_usec_)
    if (self.has_lag_usec_):
      out.putVarInt32(144)
      out.putVarInt64(self.lag_usec_)
    if (self.has_elapsed_usec_):
      out.putVarInt32(152)
      out.putVarInt64(self.elapsed_usec_)
    if (self.has_response_code_):
      out.putVarInt32(160)
      out.putVarInt64(self.response_code_)
    if (self.has_retry_reason_):
      out.putVarInt32(218)
      out.putPrefixedString(self.retry_reason_)
  def TryMerge(self, d):
    # Decode fields from d until the enclosing group's end tag (132).
    while 1:
      tt = d.getVarInt32()
      if tt == 132: break
      if tt == 136:
        self.set_dispatched_usec(d.getVarInt64())
        continue
      if tt == 144:
        self.set_lag_usec(d.getVarInt64())
        continue
      if tt == 152:
        self.set_elapsed_usec(d.getVarInt64())
        continue
      if tt == 160:
        self.set_response_code(d.getVarInt64())
        continue
      if tt == 218:
        self.set_retry_reason(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering of the set fields.
    res=""
    if self.has_dispatched_usec_: res+=prefix+("dispatched_usec: %s\n" % self.DebugFormatInt64(self.dispatched_usec_))
    if self.has_lag_usec_: res+=prefix+("lag_usec: %s\n" % self.DebugFormatInt64(self.lag_usec_))
    if self.has_elapsed_usec_: res+=prefix+("elapsed_usec: %s\n" % self.DebugFormatInt64(self.elapsed_usec_))
    if self.has_response_code_: res+=prefix+("response_code: %s\n" % self.DebugFormatInt64(self.response_code_))
    if self.has_retry_reason_: res+=prefix+("retry_reason: %s\n" % self.DebugFormatString(self.retry_reason_))
    return res
class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
  """Generated message for one Task group in a TaskQueueQueryTasksResponse.

  Encoded as a proto1-style group (field 1; start tag 11, end tag 12).
  Required fields: task_name (field 2), eta_usec (field 3),
  creation_time_usec (field 12). Optional scalar fields: url, method
  (RequestMethod enum), retry_count, body_size, body, description,
  first_try_usec, tag, execution_count. Repeated: header (group, field 7).
  Sub-messages, lazily allocated under lazy_init_lock_: crontimetable
  (group 13), runlog (group 16), payload (MessageSet, field 22),
  retry_parameters (TaskQueueRetryParameters, field 23).

  Codegen output; follows the standard generated accessor convention.
  """
  # RequestMethod values
  GET = 1
  POST = 2
  HEAD = 3
  PUT = 4
  DELETE = 5
  _RequestMethod_NAMES = {
    1: "GET",
    2: "POST",
    3: "HEAD",
    4: "PUT",
    5: "DELETE",
  }
  # Map a RequestMethod value to its name; "" for unknown values.
  def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
  RequestMethod_Name = classmethod(RequestMethod_Name)
  # Presence flags and defaults for each field.
  has_task_name_ = 0
  task_name_ = ""
  has_eta_usec_ = 0
  eta_usec_ = 0
  has_url_ = 0
  url_ = ""
  has_method_ = 0
  method_ = 0
  has_retry_count_ = 0
  retry_count_ = 0
  has_body_size_ = 0
  body_size_ = 0
  has_body_ = 0
  body_ = ""
  has_creation_time_usec_ = 0
  creation_time_usec_ = 0
  has_crontimetable_ = 0
  crontimetable_ = None
  has_runlog_ = 0
  runlog_ = None
  has_description_ = 0
  description_ = ""
  has_payload_ = 0
  payload_ = None
  has_retry_parameters_ = 0
  retry_parameters_ = None
  has_first_try_usec_ = 0
  first_try_usec_ = 0
  has_tag_ = 0
  tag_ = ""
  has_execution_count_ = 0
  execution_count_ = 0
  def __init__(self, contents=None):
    # header_ is per-instance (repeated field); the lock guards lazy
    # allocation of the sub-message fields.
    self.header_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  # --- task_name accessors ---
  def task_name(self): return self.task_name_
  def set_task_name(self, x):
    self.has_task_name_ = 1
    self.task_name_ = x
  def clear_task_name(self):
    if self.has_task_name_:
      self.has_task_name_ = 0
      self.task_name_ = ""
  def has_task_name(self): return self.has_task_name_
  # --- eta_usec accessors ---
  def eta_usec(self): return self.eta_usec_
  def set_eta_usec(self, x):
    self.has_eta_usec_ = 1
    self.eta_usec_ = x
  def clear_eta_usec(self):
    if self.has_eta_usec_:
      self.has_eta_usec_ = 0
      self.eta_usec_ = 0
  def has_eta_usec(self): return self.has_eta_usec_
  # --- url accessors ---
  def url(self): return self.url_
  def set_url(self, x):
    self.has_url_ = 1
    self.url_ = x
  def clear_url(self):
    if self.has_url_:
      self.has_url_ = 0
      self.url_ = ""
  def has_url(self): return self.has_url_
  # --- method accessors (RequestMethod enum) ---
  def method(self): return self.method_
  def set_method(self, x):
    self.has_method_ = 1
    self.method_ = x
  def clear_method(self):
    if self.has_method_:
      self.has_method_ = 0
      self.method_ = 0
  def has_method(self): return self.has_method_
  # --- retry_count accessors ---
  def retry_count(self): return self.retry_count_
  def set_retry_count(self, x):
    self.has_retry_count_ = 1
    self.retry_count_ = x
  def clear_retry_count(self):
    if self.has_retry_count_:
      self.has_retry_count_ = 0
      self.retry_count_ = 0
  def has_retry_count(self): return self.has_retry_count_
  # --- header accessors (repeated group) ---
  def header_size(self): return len(self.header_)
  def header_list(self): return self.header_
  def header(self, i):
    return self.header_[i]
  def mutable_header(self, i):
    return self.header_[i]
  def add_header(self):
    # Append and return a fresh, empty header element.
    x = TaskQueueQueryTasksResponse_TaskHeader()
    self.header_.append(x)
    return x
  def clear_header(self):
    self.header_ = []
  # --- body_size accessors ---
  def body_size(self): return self.body_size_
  def set_body_size(self, x):
    self.has_body_size_ = 1
    self.body_size_ = x
  def clear_body_size(self):
    if self.has_body_size_:
      self.has_body_size_ = 0
      self.body_size_ = 0
  def has_body_size(self): return self.has_body_size_
  # --- body accessors ---
  def body(self): return self.body_
  def set_body(self, x):
    self.has_body_ = 1
    self.body_ = x
  def clear_body(self):
    if self.has_body_:
      self.has_body_ = 0
      self.body_ = ""
  def has_body(self): return self.has_body_
  # --- creation_time_usec accessors ---
  def creation_time_usec(self): return self.creation_time_usec_
  def set_creation_time_usec(self, x):
    self.has_creation_time_usec_ = 1
    self.creation_time_usec_ = x
  def clear_creation_time_usec(self):
    if self.has_creation_time_usec_:
      self.has_creation_time_usec_ = 0
      self.creation_time_usec_ = 0
  def has_creation_time_usec(self): return self.has_creation_time_usec_
  # --- crontimetable accessors (lazily allocated sub-message) ---
  def crontimetable(self):
    # Double-checked lazy init under lazy_init_lock_.
    if self.crontimetable_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.crontimetable_ is None: self.crontimetable_ = TaskQueueQueryTasksResponse_TaskCronTimetable()
      finally:
        self.lazy_init_lock_.release()
    return self.crontimetable_
  def mutable_crontimetable(self): self.has_crontimetable_ = 1; return self.crontimetable()
  def clear_crontimetable(self):
    # Warning: this method does not acquire the lock.
    if self.has_crontimetable_:
      self.has_crontimetable_ = 0;
      if self.crontimetable_ is not None: self.crontimetable_.Clear()
  def has_crontimetable(self): return self.has_crontimetable_
  # --- runlog accessors (lazily allocated sub-message) ---
  def runlog(self):
    # Double-checked lazy init under lazy_init_lock_.
    if self.runlog_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.runlog_ is None: self.runlog_ = TaskQueueQueryTasksResponse_TaskRunLog()
      finally:
        self.lazy_init_lock_.release()
    return self.runlog_
  def mutable_runlog(self): self.has_runlog_ = 1; return self.runlog()
  def clear_runlog(self):
    # Warning: this method does not acquire the lock.
    if self.has_runlog_:
      self.has_runlog_ = 0;
      if self.runlog_ is not None: self.runlog_.Clear()
  def has_runlog(self): return self.has_runlog_
  # --- description accessors ---
  def description(self): return self.description_
  def set_description(self, x):
    self.has_description_ = 1
    self.description_ = x
  def clear_description(self):
    if self.has_description_:
      self.has_description_ = 0
      self.description_ = ""
  def has_description(self): return self.has_description_
  # --- payload accessors (lazily allocated MessageSet) ---
  def payload(self):
    # Double-checked lazy init under lazy_init_lock_.
    if self.payload_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.payload_ is None: self.payload_ = MessageSet()
      finally:
        self.lazy_init_lock_.release()
    return self.payload_
  def mutable_payload(self): self.has_payload_ = 1; return self.payload()
  def clear_payload(self):
    # Warning: this method does not acquire the lock.
    if self.has_payload_:
      self.has_payload_ = 0;
      if self.payload_ is not None: self.payload_.Clear()
  def has_payload(self): return self.has_payload_
  # --- retry_parameters accessors (lazily allocated sub-message) ---
  def retry_parameters(self):
    # Double-checked lazy init under lazy_init_lock_.
    if self.retry_parameters_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
      finally:
        self.lazy_init_lock_.release()
    return self.retry_parameters_
  def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
  def clear_retry_parameters(self):
    # Warning: this method does not acquire the lock.
    if self.has_retry_parameters_:
      self.has_retry_parameters_ = 0;
      if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
  def has_retry_parameters(self): return self.has_retry_parameters_
  # --- first_try_usec accessors ---
  def first_try_usec(self): return self.first_try_usec_
  def set_first_try_usec(self, x):
    self.has_first_try_usec_ = 1
    self.first_try_usec_ = x
  def clear_first_try_usec(self):
    if self.has_first_try_usec_:
      self.has_first_try_usec_ = 0
      self.first_try_usec_ = 0
  def has_first_try_usec(self): return self.has_first_try_usec_
  # --- tag accessors ---
  def tag(self): return self.tag_
  def set_tag(self, x):
    self.has_tag_ = 1
    self.tag_ = x
  def clear_tag(self):
    if self.has_tag_:
      self.has_tag_ = 0
      self.tag_ = ""
  def has_tag(self): return self.has_tag_
  # --- execution_count accessors ---
  def execution_count(self): return self.execution_count_
  def set_execution_count(self, x):
    self.has_execution_count_ = 1
    self.execution_count_ = x
  def clear_execution_count(self):
    if self.has_execution_count_:
      self.has_execution_count_ = 0
      self.execution_count_ = 0
  def has_execution_count(self): return self.has_execution_count_
  def MergeFrom(self, x):
    # Copy every field that is set on x into self; repeated/submessage
    # fields are merged element-wise.
    assert x is not self
    if (x.has_task_name()): self.set_task_name(x.task_name())
    if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
    if (x.has_url()): self.set_url(x.url())
    if (x.has_method()): self.set_method(x.method())
    if (x.has_retry_count()): self.set_retry_count(x.retry_count())
    for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
    if (x.has_body_size()): self.set_body_size(x.body_size())
    if (x.has_body()): self.set_body(x.body())
    if (x.has_creation_time_usec()): self.set_creation_time_usec(x.creation_time_usec())
    if (x.has_crontimetable()): self.mutable_crontimetable().MergeFrom(x.crontimetable())
    if (x.has_runlog()): self.mutable_runlog().MergeFrom(x.runlog())
    if (x.has_description()): self.set_description(x.description())
    if (x.has_payload()): self.mutable_payload().MergeFrom(x.payload())
    if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
    if (x.has_first_try_usec()): self.set_first_try_usec(x.first_try_usec())
    if (x.has_tag()): self.set_tag(x.tag())
    if (x.has_execution_count()): self.set_execution_count(x.execution_count())
  def Equals(self, x):
    # Field-by-field equality, including presence bits. Returns 1/0.
    if x is self: return 1
    if self.has_task_name_ != x.has_task_name_: return 0
    if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
    if self.has_eta_usec_ != x.has_eta_usec_: return 0
    if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
    if self.has_url_ != x.has_url_: return 0
    if self.has_url_ and self.url_ != x.url_: return 0
    if self.has_method_ != x.has_method_: return 0
    if self.has_method_ and self.method_ != x.method_: return 0
    if self.has_retry_count_ != x.has_retry_count_: return 0
    if self.has_retry_count_ and self.retry_count_ != x.retry_count_: return 0
    if len(self.header_) != len(x.header_): return 0
    for e1, e2 in zip(self.header_, x.header_):
      if e1 != e2: return 0
    if self.has_body_size_ != x.has_body_size_: return 0
    if self.has_body_size_ and self.body_size_ != x.body_size_: return 0
    if self.has_body_ != x.has_body_: return 0
    if self.has_body_ and self.body_ != x.body_: return 0
    if self.has_creation_time_usec_ != x.has_creation_time_usec_: return 0
    if self.has_creation_time_usec_ and self.creation_time_usec_ != x.creation_time_usec_: return 0
    if self.has_crontimetable_ != x.has_crontimetable_: return 0
    if self.has_crontimetable_ and self.crontimetable_ != x.crontimetable_: return 0
    if self.has_runlog_ != x.has_runlog_: return 0
    if self.has_runlog_ and self.runlog_ != x.runlog_: return 0
    if self.has_description_ != x.has_description_: return 0
    if self.has_description_ and self.description_ != x.description_: return 0
    if self.has_payload_ != x.has_payload_: return 0
    if self.has_payload_ and self.payload_ != x.payload_: return 0
    if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
    if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
    if self.has_first_try_usec_ != x.has_first_try_usec_: return 0
    if self.has_first_try_usec_ and self.first_try_usec_ != x.first_try_usec_: return 0
    if self.has_tag_ != x.has_tag_: return 0
    if self.has_tag_ and self.tag_ != x.tag_: return 0
    if self.has_execution_count_ != x.has_execution_count_: return 0
    if self.has_execution_count_ and self.execution_count_ != x.execution_count_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Check required fields on self and recursively on sub-messages.
    initialized = 1
    if (not self.has_task_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: task_name not set.')
    if (not self.has_eta_usec_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: eta_usec not set.')
    for p in self.header_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (not self.has_creation_time_usec_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: creation_time_usec not set.')
    if (self.has_crontimetable_ and not self.crontimetable_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_runlog_ and not self.runlog_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_payload_ and not self.payload_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    # Encoded size assuming all required fields are set.
    n = 0
    n += self.lengthString(len(self.task_name_))
    n += self.lengthVarInt64(self.eta_usec_)
    if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
    if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
    if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
    n += 2 * len(self.header_)
    for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
    if (self.has_body_size_): n += 1 + self.lengthVarInt64(self.body_size_)
    if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
    n += self.lengthVarInt64(self.creation_time_usec_)
    if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSize()
    if (self.has_runlog_): n += 4 + self.runlog_.ByteSize()
    if (self.has_description_): n += 2 + self.lengthString(len(self.description_))
    if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSize())
    if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSize())
    if (self.has_first_try_usec_): n += 2 + self.lengthVarInt64(self.first_try_usec_)
    if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
    if (self.has_execution_count_): n += 2 + self.lengthVarInt64(self.execution_count_)
    return n + 3  # one-byte tags for task_name, eta_usec, creation_time_usec
  def ByteSizePartial(self):
    # Like ByteSize but tolerates missing required fields.
    n = 0
    if (self.has_task_name_):
      n += 1
      n += self.lengthString(len(self.task_name_))
    if (self.has_eta_usec_):
      n += 1
      n += self.lengthVarInt64(self.eta_usec_)
    if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
    if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
    if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
    n += 2 * len(self.header_)
    for i in xrange(len(self.header_)): n += self.header_[i].ByteSizePartial()
    if (self.has_body_size_): n += 1 + self.lengthVarInt64(self.body_size_)
    if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
    if (self.has_creation_time_usec_):
      n += 1
      n += self.lengthVarInt64(self.creation_time_usec_)
    if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSizePartial()
    if (self.has_runlog_): n += 4 + self.runlog_.ByteSizePartial()
    if (self.has_description_): n += 2 + self.lengthString(len(self.description_))
    if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSizePartial())
    if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSizePartial())
    if (self.has_first_try_usec_): n += 2 + self.lengthVarInt64(self.first_try_usec_)
    if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
    if (self.has_execution_count_): n += 2 + self.lengthVarInt64(self.execution_count_)
    return n
  def Clear(self):
    self.clear_task_name()
    self.clear_eta_usec()
    self.clear_url()
    self.clear_method()
    self.clear_retry_count()
    self.clear_header()
    self.clear_body_size()
    self.clear_body()
    self.clear_creation_time_usec()
    self.clear_crontimetable()
    self.clear_runlog()
    self.clear_description()
    self.clear_payload()
    self.clear_retry_parameters()
    self.clear_first_try_usec()
    self.clear_tag()
    self.clear_execution_count()
  def OutputUnchecked(self, out):
    # Serialize in field-number order without presence checks on the
    # required fields; groups emit start/end tag pairs.
    out.putVarInt32(18)
    out.putPrefixedString(self.task_name_)
    out.putVarInt32(24)
    out.putVarInt64(self.eta_usec_)
    if (self.has_url_):
      out.putVarInt32(34)
      out.putPrefixedString(self.url_)
    if (self.has_method_):
      out.putVarInt32(40)
      out.putVarInt32(self.method_)
    if (self.has_retry_count_):
      out.putVarInt32(48)
      out.putVarInt32(self.retry_count_)
    for i in xrange(len(self.header_)):
      out.putVarInt32(59)
      self.header_[i].OutputUnchecked(out)
      out.putVarInt32(60)
    if (self.has_body_size_):
      out.putVarInt32(80)
      out.putVarInt32(self.body_size_)
    if (self.has_body_):
      out.putVarInt32(90)
      out.putPrefixedString(self.body_)
    out.putVarInt32(96)
    out.putVarInt64(self.creation_time_usec_)
    if (self.has_crontimetable_):
      out.putVarInt32(107)
      self.crontimetable_.OutputUnchecked(out)
      out.putVarInt32(108)
    if (self.has_runlog_):
      out.putVarInt32(131)
      self.runlog_.OutputUnchecked(out)
      out.putVarInt32(132)
    if (self.has_description_):
      out.putVarInt32(170)
      out.putPrefixedString(self.description_)
    if (self.has_payload_):
      out.putVarInt32(178)
      out.putVarInt32(self.payload_.ByteSize())
      self.payload_.OutputUnchecked(out)
    if (self.has_retry_parameters_):
      out.putVarInt32(186)
      out.putVarInt32(self.retry_parameters_.ByteSize())
      self.retry_parameters_.OutputUnchecked(out)
    if (self.has_first_try_usec_):
      out.putVarInt32(192)
      out.putVarInt64(self.first_try_usec_)
    if (self.has_tag_):
      out.putVarInt32(202)
      out.putPrefixedString(self.tag_)
    if (self.has_execution_count_):
      out.putVarInt32(208)
      out.putVarInt32(self.execution_count_)
  def OutputPartial(self, out):
    # Serialize only the fields that are actually set.
    if (self.has_task_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.task_name_)
    if (self.has_eta_usec_):
      out.putVarInt32(24)
      out.putVarInt64(self.eta_usec_)
    if (self.has_url_):
      out.putVarInt32(34)
      out.putPrefixedString(self.url_)
    if (self.has_method_):
      out.putVarInt32(40)
      out.putVarInt32(self.method_)
    if (self.has_retry_count_):
      out.putVarInt32(48)
      out.putVarInt32(self.retry_count_)
    for i in xrange(len(self.header_)):
      out.putVarInt32(59)
      self.header_[i].OutputPartial(out)
      out.putVarInt32(60)
    if (self.has_body_size_):
      out.putVarInt32(80)
      out.putVarInt32(self.body_size_)
    if (self.has_body_):
      out.putVarInt32(90)
      out.putPrefixedString(self.body_)
    if (self.has_creation_time_usec_):
      out.putVarInt32(96)
      out.putVarInt64(self.creation_time_usec_)
    if (self.has_crontimetable_):
      out.putVarInt32(107)
      self.crontimetable_.OutputPartial(out)
      out.putVarInt32(108)
    if (self.has_runlog_):
      out.putVarInt32(131)
      self.runlog_.OutputPartial(out)
      out.putVarInt32(132)
    if (self.has_description_):
      out.putVarInt32(170)
      out.putPrefixedString(self.description_)
    if (self.has_payload_):
      out.putVarInt32(178)
      out.putVarInt32(self.payload_.ByteSizePartial())
      self.payload_.OutputPartial(out)
    if (self.has_retry_parameters_):
      out.putVarInt32(186)
      out.putVarInt32(self.retry_parameters_.ByteSizePartial())
      self.retry_parameters_.OutputPartial(out)
    if (self.has_first_try_usec_):
      out.putVarInt32(192)
      out.putVarInt64(self.first_try_usec_)
    if (self.has_tag_):
      out.putVarInt32(202)
      out.putPrefixedString(self.tag_)
    if (self.has_execution_count_):
      out.putVarInt32(208)
      out.putVarInt32(self.execution_count_)
  def TryMerge(self, d):
    # Decode fields from d until this group's end tag (12).
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        self.set_task_name(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_eta_usec(d.getVarInt64())
        continue
      if tt == 34:
        self.set_url(d.getPrefixedString())
        continue
      if tt == 40:
        self.set_method(d.getVarInt32())
        continue
      if tt == 48:
        self.set_retry_count(d.getVarInt32())
        continue
      if tt == 59:
        self.add_header().TryMerge(d)
        continue
      if tt == 80:
        self.set_body_size(d.getVarInt32())
        continue
      if tt == 90:
        self.set_body(d.getPrefixedString())
        continue
      if tt == 96:
        self.set_creation_time_usec(d.getVarInt64())
        continue
      if tt == 107:
        self.mutable_crontimetable().TryMerge(d)
        continue
      if tt == 131:
        self.mutable_runlog().TryMerge(d)
        continue
      if tt == 170:
        self.set_description(d.getPrefixedString())
        continue
      if tt == 178:
        # Length-delimited sub-message: decode from a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_payload().TryMerge(tmp)
        continue
      if tt == 186:
        # Length-delimited sub-message: decode from a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_retry_parameters().TryMerge(tmp)
        continue
      if tt == 192:
        self.set_first_try_usec(d.getVarInt64())
        continue
      if tt == 202:
        self.set_tag(d.getPrefixedString())
        continue
      if tt == 208:
        self.set_execution_count(d.getVarInt32())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering of the set fields; nested messages are
    # indented one extra level.
    res=""
    if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
    if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
    if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
    if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatInt32(self.method_))
    if self.has_retry_count_: res+=prefix+("retry_count: %s\n" % self.DebugFormatInt32(self.retry_count_))
    cnt=0
    for e in self.header_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Header%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_body_size_: res+=prefix+("body_size: %s\n" % self.DebugFormatInt32(self.body_size_))
    if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
    if self.has_creation_time_usec_: res+=prefix+("creation_time_usec: %s\n" % self.DebugFormatInt64(self.creation_time_usec_))
    if self.has_crontimetable_:
      res+=prefix+"CronTimetable {\n"
      res+=self.crontimetable_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_runlog_:
      res+=prefix+"RunLog {\n"
      res+=self.runlog_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_))
    if self.has_payload_:
      res+=prefix+"payload <\n"
      res+=self.payload_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_retry_parameters_:
      res+=prefix+"retry_parameters <\n"
      res+=self.retry_parameters_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_first_try_usec_: res+=prefix+("first_try_usec: %s\n" % self.DebugFormatInt64(self.first_try_usec_))
    if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
    if self.has_execution_count_: res+=prefix+("execution_count: %s\n" % self.DebugFormatInt32(self.execution_count_))
    return res
class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage):
  """Generated top-level response message for a QueryTasks call.

  Contains a single repeated group field, task (field 1; start tag 11,
  end tag 12), whose elements are TaskQueueQueryTasksResponse_Task.
  The k* constants and _TEXT/_TYPES tables below describe every field of
  the nested groups for the generic encoder/decoder and debug printer.

  Codegen output; follows the standard generated accessor convention.
  """
  def __init__(self, contents=None):
    # task_ is per-instance (repeated field).
    self.task_ = []
    if contents is not None: self.MergeFromString(contents)
  # --- task accessors (repeated group) ---
  def task_size(self): return len(self.task_)
  def task_list(self): return self.task_
  def task(self, i):
    return self.task_[i]
  def mutable_task(self, i):
    return self.task_[i]
  def add_task(self):
    # Append and return a fresh, empty task element.
    x = TaskQueueQueryTasksResponse_Task()
    self.task_.append(x)
    return x
  def clear_task(self):
    self.task_ = []
  def MergeFrom(self, x):
    # Append copies of all of x's tasks to self.
    assert x is not self
    for i in xrange(x.task_size()): self.add_task().CopyFrom(x.task(i))
  def Equals(self, x):
    # Element-wise equality of the repeated task field. Returns 1/0.
    if x is self: return 1
    if len(self.task_) != len(x.task_): return 0
    for e1, e2 in zip(self.task_, x.task_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Delegate required-field checking to each task element.
    initialized = 1
    for p in self.task_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # 2 bytes per element for the group start/end tags, plus payloads.
    n = 0
    n += 2 * len(self.task_)
    for i in xrange(len(self.task_)): n += self.task_[i].ByteSize()
    return n
  def ByteSizePartial(self):
    # Like ByteSize but tolerates missing required fields in elements.
    n = 0
    n += 2 * len(self.task_)
    for i in xrange(len(self.task_)): n += self.task_[i].ByteSizePartial()
    return n
  def Clear(self):
    self.clear_task()
  def OutputUnchecked(self, out):
    # Serialize each task as a group: start tag 11, body, end tag 12.
    for i in xrange(len(self.task_)):
      out.putVarInt32(11)
      self.task_[i].OutputUnchecked(out)
      out.putVarInt32(12)
  def OutputPartial(self, out):
    # Serialize each task, skipping unset required fields inside.
    for i in xrange(len(self.task_)):
      out.putVarInt32(11)
      self.task_[i].OutputPartial(out)
      out.putVarInt32(12)
  def TryMerge(self, d):
    # Decode task groups until the decoder is exhausted.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_task().TryMerge(d)
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering of the repeated task field.
    res=""
    cnt=0
    for e in self.task_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Task%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  # Build a dense tuple lookup table (index 0..maxtag) from a sparse dict,
  # filling gaps with `default`.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field numbers for this message and its nested groups.
  kTaskGroup = 1
  kTasktask_name = 2
  kTasketa_usec = 3
  kTaskurl = 4
  kTaskmethod = 5
  kTaskretry_count = 6
  kTaskHeaderGroup = 7
  kTaskHeaderkey = 8
  kTaskHeadervalue = 9
  kTaskbody_size = 10
  kTaskbody = 11
  kTaskcreation_time_usec = 12
  kTaskCronTimetableGroup = 13
  kTaskCronTimetableschedule = 14
  kTaskCronTimetabletimezone = 15
  kTaskRunLogGroup = 16
  kTaskRunLogdispatched_usec = 17
  kTaskRunLoglag_usec = 18
  kTaskRunLogelapsed_usec = 19
  kTaskRunLogresponse_code = 20
  kTaskRunLogretry_reason = 27
  kTaskdescription = 21
  kTaskpayload = 22
  kTaskretry_parameters = 23
  kTaskfirst_try_usec = 24
  kTasktag = 25
  kTaskexecution_count = 26
  # Field number -> field name, for the debug printer.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "Task",
    2: "task_name",
    3: "eta_usec",
    4: "url",
    5: "method",
    6: "retry_count",
    7: "Header",
    8: "key",
    9: "value",
    10: "body_size",
    11: "body",
    12: "creation_time_usec",
    13: "CronTimetable",
    14: "schedule",
    15: "timezone",
    16: "RunLog",
    17: "dispatched_usec",
    18: "lag_usec",
    19: "elapsed_usec",
    20: "response_code",
    21: "description",
    22: "payload",
    23: "retry_parameters",
    24: "first_try_usec",
    25: "tag",
    26: "execution_count",
    27: "retry_reason",
  }, 27)
  # Field number -> wire encoder type.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STARTGROUP,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.STRING,
    5: ProtocolBuffer.Encoder.NUMERIC,
    6: ProtocolBuffer.Encoder.NUMERIC,
    7: ProtocolBuffer.Encoder.STARTGROUP,
    8: ProtocolBuffer.Encoder.STRING,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.NUMERIC,
    11: ProtocolBuffer.Encoder.STRING,
    12: ProtocolBuffer.Encoder.NUMERIC,
    13: ProtocolBuffer.Encoder.STARTGROUP,
    14: ProtocolBuffer.Encoder.STRING,
    15: ProtocolBuffer.Encoder.STRING,
    16: ProtocolBuffer.Encoder.STARTGROUP,
    17: ProtocolBuffer.Encoder.NUMERIC,
    18: ProtocolBuffer.Encoder.NUMERIC,
    19: ProtocolBuffer.Encoder.NUMERIC,
    20: ProtocolBuffer.Encoder.NUMERIC,
    21: ProtocolBuffer.Encoder.STRING,
    22: ProtocolBuffer.Encoder.STRING,
    23: ProtocolBuffer.Encoder.STRING,
    24: ProtocolBuffer.Encoder.NUMERIC,
    25: ProtocolBuffer.Encoder.STRING,
    26: ProtocolBuffer.Encoder.NUMERIC,
    27: ProtocolBuffer.Encoder.STRING,
  }, 27, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryTasksResponse'
class TaskQueueFetchTaskRequest(ProtocolBuffer.ProtocolMessage):
  """Request message for TaskQueue.FetchTask: look up one task by name.

  Generated proto1-style message code.  Field presence is tracked with
  explicit has_*_ flags.  Fields (proto tag numbers):
    app_id     (1, optional string)
    queue_name (2, required string)
    task_name  (3, required string)
  """
  # Presence flags and default values, shared as class attributes until
  # a setter shadows them on the instance.
  has_app_id_ = 0
  app_id_ = ""
  has_queue_name_ = 0
  queue_name_ = ""
  has_task_name_ = 0
  task_name_ = ""

  def __init__(self, contents=None):
    # Optionally deserialize from an encoded byte string.
    if contents is not None: self.MergeFromString(contents)

  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  def queue_name(self): return self.queue_name_

  def set_queue_name(self, x):
    self.has_queue_name_ = 1
    self.queue_name_ = x

  def clear_queue_name(self):
    if self.has_queue_name_:
      self.has_queue_name_ = 0
      self.queue_name_ = ""

  def has_queue_name(self): return self.has_queue_name_

  def task_name(self): return self.task_name_

  def set_task_name(self, x):
    self.has_task_name_ = 1
    self.task_name_ = x

  def clear_task_name(self):
    if self.has_task_name_:
      self.has_task_name_ = 0
      self.task_name_ = ""

  def has_task_name(self): return self.has_task_name_

  def MergeFrom(self, x):
    """Merge every set field of message x into self."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_queue_name()): self.set_queue_name(x.queue_name())
    if (x.has_task_name()): self.set_task_name(x.task_name())

  def Equals(self, x):
    """Field-by-field equality, including presence flags."""
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_queue_name_ != x.has_queue_name_: return 0
    if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
    if self.has_task_name_ != x.has_task_name_: return 0
    if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff all required fields are set; optionally collect errors."""
    initialized = 1
    if (not self.has_queue_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: queue_name not set.')
    if (not self.has_task_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: task_name not set.')
    return initialized

  def ByteSize(self):
    """Serialized size in bytes, assuming required fields are set."""
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    n += self.lengthString(len(self.queue_name_))
    n += self.lengthString(len(self.task_name_))
    return n + 2  # +2: one tag byte for each of the two required fields

  def ByteSizePartial(self):
    """Serialized size counting only fields that are actually set."""
    n = 0
    if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
    if (self.has_queue_name_):
      n += 1
      n += self.lengthString(len(self.queue_name_))
    if (self.has_task_name_):
      n += 1
      n += self.lengthString(len(self.task_name_))
    return n

  def Clear(self):
    self.clear_app_id()
    self.clear_queue_name()
    self.clear_task_name()

  def OutputUnchecked(self, out):
    """Serialize to out; tag constants are (field_number << 3) | wire_type."""
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    out.putVarInt32(18)
    out.putPrefixedString(self.queue_name_)
    out.putVarInt32(26)
    out.putPrefixedString(self.task_name_)

  def OutputPartial(self, out):
    """Like OutputUnchecked, but skips unset required fields."""
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    if (self.has_queue_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.queue_name_)
    if (self.has_task_name_):
      out.putVarInt32(26)
      out.putPrefixedString(self.task_name_)

  def TryMerge(self, d):
    """Parse fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_queue_name(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_task_name(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
    if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Executed at class-definition time to build the dense tables below.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field tag number constants.
  kapp_id = 1
  kqueue_name = 2
  ktask_name = 3

  # Dense tag -> field-name / wire-type lookup tables.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app_id",
    2: "queue_name",
    3: "task_name",
  }, 3)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchTaskRequest'
class TaskQueueFetchTaskResponse(ProtocolBuffer.ProtocolMessage):
  """Response for TaskQueue.FetchTask: carries the single fetched task.

  task (tag 1) is a required embedded TaskQueueQueryTasksResponse message.
  """
  has_task_ = 0

  def __init__(self, contents=None):
    # The sub-message object is always allocated; has_task_ tracks presence.
    self.task_ = TaskQueueQueryTasksResponse()
    if contents is not None: self.MergeFromString(contents)

  def task(self): return self.task_

  def mutable_task(self): self.has_task_ = 1; return self.task_

  def clear_task(self):self.has_task_ = 0; self.task_.Clear()

  def has_task(self): return self.has_task_

  def MergeFrom(self, x):
    """Merge every set field of message x into self."""
    assert x is not self
    if (x.has_task()): self.mutable_task().MergeFrom(x.task())

  def Equals(self, x):
    if x is self: return 1
    if self.has_task_ != x.has_task_: return 0
    if self.has_task_ and self.task_ != x.task_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """task must be present and itself fully initialized."""
    initialized = 1
    if (not self.has_task_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: task not set.')
    elif not self.task_.IsInitialized(debug_strs): initialized = 0
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthString(self.task_.ByteSize())
    return n + 1  # +1 for the task field's tag byte

  def ByteSizePartial(self):
    n = 0
    if (self.has_task_):
      n += 1
      n += self.lengthString(self.task_.ByteSizePartial())
    return n

  def Clear(self):
    self.clear_task()

  def OutputUnchecked(self, out):
    out.putVarInt32(10)  # tag: field 1, length-delimited
    out.putVarInt32(self.task_.ByteSize())
    self.task_.OutputUnchecked(out)

  def OutputPartial(self, out):
    if (self.has_task_):
      out.putVarInt32(10)
      out.putVarInt32(self.task_.ByteSizePartial())
      self.task_.OutputPartial(out)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        # Decode the length-delimited sub-message with a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_task().TryMerge(tmp)
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_task_:
      res+=prefix+"task <\n"
      res+=self.task_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Executed at class-definition time to build the dense tables below.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  ktask = 1

  # Dense tag -> field-name / wire-type lookup tables.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "task",
  }, 1)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchTaskResponse'
class TaskQueueUpdateStorageLimitRequest(ProtocolBuffer.ProtocolMessage):
  """Request to update an application's task queue storage limit.

  Fields: app_id (tag 1, required string), limit (tag 2, required int64,
  presumably in bytes -- not stated in this file; confirm against callers).
  """
  has_app_id_ = 0
  app_id_ = ""
  has_limit_ = 0
  limit_ = 0

  def __init__(self, contents=None):
    # Optionally deserialize from an encoded byte string.
    if contents is not None: self.MergeFromString(contents)

  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  def limit(self): return self.limit_

  def set_limit(self, x):
    self.has_limit_ = 1
    self.limit_ = x

  def clear_limit(self):
    if self.has_limit_:
      self.has_limit_ = 0
      self.limit_ = 0

  def has_limit(self): return self.has_limit_

  def MergeFrom(self, x):
    """Merge every set field of message x into self."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_limit()): self.set_limit(x.limit())

  def Equals(self, x):
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_limit_ != x.has_limit_: return 0
    if self.has_limit_ and self.limit_ != x.limit_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Both fields are required."""
    initialized = 1
    if (not self.has_app_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app_id not set.')
    if (not self.has_limit_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: limit not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.app_id_))
    n += self.lengthVarInt64(self.limit_)
    return n + 2  # +2: one tag byte per required field

  def ByteSizePartial(self):
    n = 0
    if (self.has_app_id_):
      n += 1
      n += self.lengthString(len(self.app_id_))
    if (self.has_limit_):
      n += 1
      n += self.lengthVarInt64(self.limit_)
    return n

  def Clear(self):
    self.clear_app_id()
    self.clear_limit()

  def OutputUnchecked(self, out):
    """Serialize to out; tag constants are (field_number << 3) | wire_type."""
    out.putVarInt32(10)
    out.putPrefixedString(self.app_id_)
    out.putVarInt32(16)
    out.putVarInt64(self.limit_)

  def OutputPartial(self, out):
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    if (self.has_limit_):
      out.putVarInt32(16)
      out.putVarInt64(self.limit_)

  def TryMerge(self, d):
    """Parse fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_limit(d.getVarInt64())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt64(self.limit_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Executed at class-definition time to build the dense tables below.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  kapp_id = 1
  klimit = 2

  # Dense tag -> field-name / wire-type lookup tables.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app_id",
    2: "limit",
  }, 2)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.NUMERIC,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateStorageLimitRequest'
class TaskQueueUpdateStorageLimitResponse(ProtocolBuffer.ProtocolMessage):
  """Response carrying the effective storage limit after an update.

  new_limit (tag 1, required int64).
  """
  has_new_limit_ = 0
  new_limit_ = 0

  def __init__(self, contents=None):
    # Optionally deserialize from an encoded byte string.
    if contents is not None: self.MergeFromString(contents)

  def new_limit(self): return self.new_limit_

  def set_new_limit(self, x):
    self.has_new_limit_ = 1
    self.new_limit_ = x

  def clear_new_limit(self):
    if self.has_new_limit_:
      self.has_new_limit_ = 0
      self.new_limit_ = 0

  def has_new_limit(self): return self.has_new_limit_

  def MergeFrom(self, x):
    """Merge every set field of message x into self."""
    assert x is not self
    if (x.has_new_limit()): self.set_new_limit(x.new_limit())

  def Equals(self, x):
    if x is self: return 1
    if self.has_new_limit_ != x.has_new_limit_: return 0
    if self.has_new_limit_ and self.new_limit_ != x.new_limit_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """new_limit is required."""
    initialized = 1
    if (not self.has_new_limit_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: new_limit not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthVarInt64(self.new_limit_)
    return n + 1  # +1 for the new_limit tag byte

  def ByteSizePartial(self):
    n = 0
    if (self.has_new_limit_):
      n += 1
      n += self.lengthVarInt64(self.new_limit_)
    return n

  def Clear(self):
    self.clear_new_limit()

  def OutputUnchecked(self, out):
    out.putVarInt32(8)  # tag: field 1, varint
    out.putVarInt64(self.new_limit_)

  def OutputPartial(self, out):
    if (self.has_new_limit_):
      out.putVarInt32(8)
      out.putVarInt64(self.new_limit_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_new_limit(d.getVarInt64())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_new_limit_: res+=prefix+("new_limit: %s\n" % self.DebugFormatInt64(self.new_limit_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Executed at class-definition time to build the dense tables below.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  knew_limit = 1

  # Dense tag -> field-name / wire-type lookup tables.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "new_limit",
  }, 1)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateStorageLimitResponse'
class TaskQueueQueryAndOwnTasksRequest(ProtocolBuffer.ProtocolMessage):
  """Request for leasing (query-and-own) tasks from a pull queue.

  Fields (proto tag numbers):
    queue_name    (1, required string)
    lease_seconds (2, required double) -- lease duration requested
    max_tasks     (3, required int64)  -- upper bound on tasks returned
    group_by_tag  (4, optional bool)
    tag           (5, optional string) -- only meaningful with group_by_tag
                                          per the field pairing; confirm
                                          exact semantics with the service.
  """
  has_queue_name_ = 0
  queue_name_ = ""
  has_lease_seconds_ = 0
  lease_seconds_ = 0.0
  has_max_tasks_ = 0
  max_tasks_ = 0
  has_group_by_tag_ = 0
  group_by_tag_ = 0
  has_tag_ = 0
  tag_ = ""

  def __init__(self, contents=None):
    # Optionally deserialize from an encoded byte string.
    if contents is not None: self.MergeFromString(contents)

  def queue_name(self): return self.queue_name_

  def set_queue_name(self, x):
    self.has_queue_name_ = 1
    self.queue_name_ = x

  def clear_queue_name(self):
    if self.has_queue_name_:
      self.has_queue_name_ = 0
      self.queue_name_ = ""

  def has_queue_name(self): return self.has_queue_name_

  def lease_seconds(self): return self.lease_seconds_

  def set_lease_seconds(self, x):
    self.has_lease_seconds_ = 1
    self.lease_seconds_ = x

  def clear_lease_seconds(self):
    if self.has_lease_seconds_:
      self.has_lease_seconds_ = 0
      self.lease_seconds_ = 0.0

  def has_lease_seconds(self): return self.has_lease_seconds_

  def max_tasks(self): return self.max_tasks_

  def set_max_tasks(self, x):
    self.has_max_tasks_ = 1
    self.max_tasks_ = x

  def clear_max_tasks(self):
    if self.has_max_tasks_:
      self.has_max_tasks_ = 0
      self.max_tasks_ = 0

  def has_max_tasks(self): return self.has_max_tasks_

  def group_by_tag(self): return self.group_by_tag_

  def set_group_by_tag(self, x):
    self.has_group_by_tag_ = 1
    self.group_by_tag_ = x

  def clear_group_by_tag(self):
    if self.has_group_by_tag_:
      self.has_group_by_tag_ = 0
      self.group_by_tag_ = 0

  def has_group_by_tag(self): return self.has_group_by_tag_

  def tag(self): return self.tag_

  def set_tag(self, x):
    self.has_tag_ = 1
    self.tag_ = x

  def clear_tag(self):
    if self.has_tag_:
      self.has_tag_ = 0
      self.tag_ = ""

  def has_tag(self): return self.has_tag_

  def MergeFrom(self, x):
    """Merge every set field of message x into self."""
    assert x is not self
    if (x.has_queue_name()): self.set_queue_name(x.queue_name())
    if (x.has_lease_seconds()): self.set_lease_seconds(x.lease_seconds())
    if (x.has_max_tasks()): self.set_max_tasks(x.max_tasks())
    if (x.has_group_by_tag()): self.set_group_by_tag(x.group_by_tag())
    if (x.has_tag()): self.set_tag(x.tag())

  def Equals(self, x):
    if x is self: return 1
    if self.has_queue_name_ != x.has_queue_name_: return 0
    if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
    if self.has_lease_seconds_ != x.has_lease_seconds_: return 0
    if self.has_lease_seconds_ and self.lease_seconds_ != x.lease_seconds_: return 0
    if self.has_max_tasks_ != x.has_max_tasks_: return 0
    if self.has_max_tasks_ and self.max_tasks_ != x.max_tasks_: return 0
    if self.has_group_by_tag_ != x.has_group_by_tag_: return 0
    if self.has_group_by_tag_ and self.group_by_tag_ != x.group_by_tag_: return 0
    if self.has_tag_ != x.has_tag_: return 0
    if self.has_tag_ and self.tag_ != x.tag_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """queue_name, lease_seconds and max_tasks are required."""
    initialized = 1
    if (not self.has_queue_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: queue_name not set.')
    if (not self.has_lease_seconds_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: lease_seconds not set.')
    if (not self.has_max_tasks_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: max_tasks not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.queue_name_))
    n += self.lengthVarInt64(self.max_tasks_)
    if (self.has_group_by_tag_): n += 2  # 1 tag byte + 1 boolean byte
    if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
    # +11 = 1 tag byte (queue_name) + 9 (lease_seconds tag + 8-byte double)
    #       + 1 tag byte (max_tasks)
    return n + 11

  def ByteSizePartial(self):
    n = 0
    if (self.has_queue_name_):
      n += 1
      n += self.lengthString(len(self.queue_name_))
    if (self.has_lease_seconds_):
      n += 9  # tag byte + fixed 8-byte double
    if (self.has_max_tasks_):
      n += 1
      n += self.lengthVarInt64(self.max_tasks_)
    if (self.has_group_by_tag_): n += 2
    if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
    return n

  def Clear(self):
    self.clear_queue_name()
    self.clear_lease_seconds()
    self.clear_max_tasks()
    self.clear_group_by_tag()
    self.clear_tag()

  def OutputUnchecked(self, out):
    """Serialize to out; tag constants are (field_number << 3) | wire_type."""
    out.putVarInt32(10)
    out.putPrefixedString(self.queue_name_)
    out.putVarInt32(17)
    out.putDouble(self.lease_seconds_)
    out.putVarInt32(24)
    out.putVarInt64(self.max_tasks_)
    if (self.has_group_by_tag_):
      out.putVarInt32(32)
      out.putBoolean(self.group_by_tag_)
    if (self.has_tag_):
      out.putVarInt32(42)
      out.putPrefixedString(self.tag_)

  def OutputPartial(self, out):
    if (self.has_queue_name_):
      out.putVarInt32(10)
      out.putPrefixedString(self.queue_name_)
    if (self.has_lease_seconds_):
      out.putVarInt32(17)
      out.putDouble(self.lease_seconds_)
    if (self.has_max_tasks_):
      out.putVarInt32(24)
      out.putVarInt64(self.max_tasks_)
    if (self.has_group_by_tag_):
      out.putVarInt32(32)
      out.putBoolean(self.group_by_tag_)
    if (self.has_tag_):
      out.putVarInt32(42)
      out.putPrefixedString(self.tag_)

  def TryMerge(self, d):
    """Parse fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_queue_name(d.getPrefixedString())
        continue
      if tt == 17:
        self.set_lease_seconds(d.getDouble())
        continue
      if tt == 24:
        self.set_max_tasks(d.getVarInt64())
        continue
      if tt == 32:
        self.set_group_by_tag(d.getBoolean())
        continue
      if tt == 42:
        self.set_tag(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
    if self.has_lease_seconds_: res+=prefix+("lease_seconds: %s\n" % self.DebugFormat(self.lease_seconds_))
    if self.has_max_tasks_: res+=prefix+("max_tasks: %s\n" % self.DebugFormatInt64(self.max_tasks_))
    if self.has_group_by_tag_: res+=prefix+("group_by_tag: %s\n" % self.DebugFormatBool(self.group_by_tag_))
    if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Executed at class-definition time to build the dense tables below.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field tag number constants.
  kqueue_name = 1
  klease_seconds = 2
  kmax_tasks = 3
  kgroup_by_tag = 4
  ktag = 5

  # Dense tag -> field-name / wire-type lookup tables.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "queue_name",
    2: "lease_seconds",
    3: "max_tasks",
    4: "group_by_tag",
    5: "tag",
  }, 5)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.DOUBLE,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryAndOwnTasksRequest'
class TaskQueueQueryAndOwnTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
  """One leased task inside a TaskQueueQueryAndOwnTasksResponse.

  This is a proto1 *group* member (the outer message's repeated Task group,
  group tag 1); its own fields therefore start at tag 2:
    task_name   (2, required string)
    eta_usec    (3, required int64)
    retry_count (4, optional int32)
    body        (5, optional bytes)
    tag         (6, optional string)
  It has no _TEXT/_TYPES tables of its own -- it shares the outer message's.
  """
  has_task_name_ = 0
  task_name_ = ""
  has_eta_usec_ = 0
  eta_usec_ = 0
  has_retry_count_ = 0
  retry_count_ = 0
  has_body_ = 0
  body_ = ""
  has_tag_ = 0
  tag_ = ""

  def __init__(self, contents=None):
    # Optionally deserialize from an encoded byte string.
    if contents is not None: self.MergeFromString(contents)

  def task_name(self): return self.task_name_

  def set_task_name(self, x):
    self.has_task_name_ = 1
    self.task_name_ = x

  def clear_task_name(self):
    if self.has_task_name_:
      self.has_task_name_ = 0
      self.task_name_ = ""

  def has_task_name(self): return self.has_task_name_

  def eta_usec(self): return self.eta_usec_

  def set_eta_usec(self, x):
    self.has_eta_usec_ = 1
    self.eta_usec_ = x

  def clear_eta_usec(self):
    if self.has_eta_usec_:
      self.has_eta_usec_ = 0
      self.eta_usec_ = 0

  def has_eta_usec(self): return self.has_eta_usec_

  def retry_count(self): return self.retry_count_

  def set_retry_count(self, x):
    self.has_retry_count_ = 1
    self.retry_count_ = x

  def clear_retry_count(self):
    if self.has_retry_count_:
      self.has_retry_count_ = 0
      self.retry_count_ = 0

  def has_retry_count(self): return self.has_retry_count_

  def body(self): return self.body_

  def set_body(self, x):
    self.has_body_ = 1
    self.body_ = x

  def clear_body(self):
    if self.has_body_:
      self.has_body_ = 0
      self.body_ = ""

  def has_body(self): return self.has_body_

  def tag(self): return self.tag_

  def set_tag(self, x):
    self.has_tag_ = 1
    self.tag_ = x

  def clear_tag(self):
    if self.has_tag_:
      self.has_tag_ = 0
      self.tag_ = ""

  def has_tag(self): return self.has_tag_

  def MergeFrom(self, x):
    """Merge every set field of message x into self."""
    assert x is not self
    if (x.has_task_name()): self.set_task_name(x.task_name())
    if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
    if (x.has_retry_count()): self.set_retry_count(x.retry_count())
    if (x.has_body()): self.set_body(x.body())
    if (x.has_tag()): self.set_tag(x.tag())

  def Equals(self, x):
    if x is self: return 1
    if self.has_task_name_ != x.has_task_name_: return 0
    if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
    if self.has_eta_usec_ != x.has_eta_usec_: return 0
    if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
    if self.has_retry_count_ != x.has_retry_count_: return 0
    if self.has_retry_count_ and self.retry_count_ != x.retry_count_: return 0
    if self.has_body_ != x.has_body_: return 0
    if self.has_body_ and self.body_ != x.body_: return 0
    if self.has_tag_ != x.has_tag_: return 0
    if self.has_tag_ and self.tag_ != x.tag_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """task_name and eta_usec are required."""
    initialized = 1
    if (not self.has_task_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: task_name not set.')
    if (not self.has_eta_usec_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: eta_usec not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.task_name_))
    n += self.lengthVarInt64(self.eta_usec_)
    if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
    if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
    if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
    return n + 2  # +2: one tag byte for each of the two required fields

  def ByteSizePartial(self):
    n = 0
    if (self.has_task_name_):
      n += 1
      n += self.lengthString(len(self.task_name_))
    if (self.has_eta_usec_):
      n += 1
      n += self.lengthVarInt64(self.eta_usec_)
    if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
    if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
    if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
    return n

  def Clear(self):
    self.clear_task_name()
    self.clear_eta_usec()
    self.clear_retry_count()
    self.clear_body()
    self.clear_tag()

  def OutputUnchecked(self, out):
    """Serialize group fields; the caller emits the START/ENDGROUP tags."""
    out.putVarInt32(18)
    out.putPrefixedString(self.task_name_)
    out.putVarInt32(24)
    out.putVarInt64(self.eta_usec_)
    if (self.has_retry_count_):
      out.putVarInt32(32)
      out.putVarInt32(self.retry_count_)
    if (self.has_body_):
      out.putVarInt32(42)
      out.putPrefixedString(self.body_)
    if (self.has_tag_):
      out.putVarInt32(50)
      out.putPrefixedString(self.tag_)

  def OutputPartial(self, out):
    if (self.has_task_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.task_name_)
    if (self.has_eta_usec_):
      out.putVarInt32(24)
      out.putVarInt64(self.eta_usec_)
    if (self.has_retry_count_):
      out.putVarInt32(32)
      out.putVarInt32(self.retry_count_)
    if (self.has_body_):
      out.putVarInt32(42)
      out.putPrefixedString(self.body_)
    if (self.has_tag_):
      out.putVarInt32(50)
      out.putPrefixedString(self.tag_)

  def TryMerge(self, d):
    """Parse until the enclosing group's ENDGROUP tag (12) is read."""
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break  # ENDGROUP for group field 1 of the outer message
      if tt == 18:
        self.set_task_name(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_eta_usec(d.getVarInt64())
        continue
      if tt == 32:
        self.set_retry_count(d.getVarInt32())
        continue
      if tt == 42:
        self.set_body(d.getPrefixedString())
        continue
      if tt == 50:
        self.set_tag(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
    if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
    if self.has_retry_count_: res+=prefix+("retry_count: %s\n" % self.DebugFormatInt32(self.retry_count_))
    if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
    if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
    return res
class TaskQueueQueryAndOwnTasksResponse(ProtocolBuffer.ProtocolMessage):
  """Response for QueryAndOwnTasks: a repeated group of leased tasks.

  task (group tag 1, repeated TaskQueueQueryAndOwnTasksResponse_Task);
  encoded with STARTGROUP (11) / ENDGROUP (12) wire tags.
  """

  def __init__(self, contents=None):
    self.task_ = []  # repeated group elements, in arrival order
    if contents is not None: self.MergeFromString(contents)

  def task_size(self): return len(self.task_)

  def task_list(self): return self.task_

  def task(self, i):
    return self.task_[i]

  def mutable_task(self, i):
    return self.task_[i]

  def add_task(self):
    # Append and return a fresh element for the caller to populate.
    x = TaskQueueQueryAndOwnTasksResponse_Task()
    self.task_.append(x)
    return x

  def clear_task(self):
    self.task_ = []

  def MergeFrom(self, x):
    """Append copies of every task element of message x."""
    assert x is not self
    for i in xrange(x.task_size()): self.add_task().CopyFrom(x.task(i))

  def Equals(self, x):
    if x is self: return 1
    if len(self.task_) != len(x.task_): return 0
    for e1, e2 in zip(self.task_, x.task_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """All contained task elements must be initialized."""
    initialized = 1
    for p in self.task_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    n = 0
    n += 2 * len(self.task_)  # STARTGROUP + ENDGROUP tag bytes per element
    for i in xrange(len(self.task_)): n += self.task_[i].ByteSize()
    return n

  def ByteSizePartial(self):
    n = 0
    n += 2 * len(self.task_)
    for i in xrange(len(self.task_)): n += self.task_[i].ByteSizePartial()
    return n

  def Clear(self):
    self.clear_task()

  def OutputUnchecked(self, out):
    for i in xrange(len(self.task_)):
      out.putVarInt32(11)  # STARTGROUP, field 1
      self.task_[i].OutputUnchecked(out)
      out.putVarInt32(12)  # ENDGROUP, field 1

  def OutputPartial(self, out):
    for i in xrange(len(self.task_)):
      out.putVarInt32(11)
      self.task_[i].OutputPartial(out)
      out.putVarInt32(12)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        # Element parses its own fields and consumes the ENDGROUP tag.
        self.add_task().TryMerge(d)
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.task_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Task%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Executed at class-definition time to build the dense tables below.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field tag number constants (group member fields included).
  kTaskGroup = 1
  kTasktask_name = 2
  kTasketa_usec = 3
  kTaskretry_count = 4
  kTaskbody = 5
  kTasktag = 6

  # Dense tag -> field-name / wire-type lookup tables.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "Task",
    2: "task_name",
    3: "eta_usec",
    4: "retry_count",
    5: "body",
    6: "tag",
  }, 6)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STARTGROUP,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.STRING,
  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryAndOwnTasksResponse'
class TaskQueueModifyTaskLeaseRequest(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
has_task_name_ = 0
task_name_ = ""
has_eta_usec_ = 0
eta_usec_ = 0
has_lease_seconds_ = 0
lease_seconds_ = 0.0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def eta_usec(self): return self.eta_usec_
def set_eta_usec(self, x):
self.has_eta_usec_ = 1
self.eta_usec_ = x
def clear_eta_usec(self):
if self.has_eta_usec_:
self.has_eta_usec_ = 0
self.eta_usec_ = 0
def has_eta_usec(self): return self.has_eta_usec_
def lease_seconds(self): return self.lease_seconds_
def set_lease_seconds(self, x):
self.has_lease_seconds_ = 1
self.lease_seconds_ = x
def clear_lease_seconds(self):
if self.has_lease_seconds_:
self.has_lease_seconds_ = 0
self.lease_seconds_ = 0.0
def has_lease_seconds(self): return self.has_lease_seconds_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_task_name()): self.set_task_name(x.task_name())
if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
if (x.has_lease_seconds()): self.set_lease_seconds(x.lease_seconds())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
if self.has_eta_usec_ != x.has_eta_usec_: return 0
if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
if self.has_lease_seconds_ != x.has_lease_seconds_: return 0
if self.has_lease_seconds_ and self.lease_seconds_ != x.lease_seconds_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
if (not self.has_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: eta_usec not set.')
if (not self.has_lease_seconds_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: lease_seconds not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.queue_name_))
n += self.lengthString(len(self.task_name_))
n += self.lengthVarInt64(self.eta_usec_)
return n + 12
def ByteSizePartial(self):
n = 0
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
if (self.has_eta_usec_):
n += 1
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_lease_seconds_):
n += 9
return n
def Clear(self):
self.clear_queue_name()
self.clear_task_name()
self.clear_eta_usec()
self.clear_lease_seconds()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
out.putVarInt32(33)
out.putDouble(self.lease_seconds_)
def OutputPartial(self, out):
if (self.has_queue_name_):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
if (self.has_task_name_):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
if (self.has_eta_usec_):
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_lease_seconds_):
out.putVarInt32(33)
out.putDouble(self.lease_seconds_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 18:
self.set_task_name(d.getPrefixedString())
continue
if tt == 24:
self.set_eta_usec(d.getVarInt64())
continue
if tt == 33:
self.set_lease_seconds(d.getDouble())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text dump of the set fields (debug format)."""
    res=""
    if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
    if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
    if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
    if self.has_lease_seconds_: res+=prefix+("lease_seconds: %s\n" % self.DebugFormat(self.lease_seconds_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    """Densify a sparse {tag: value} dict into a tuple indexed 0..maxtag."""
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants for this message.
  kqueue_name = 1
  ktask_name = 2
  keta_usec = 3
  klease_seconds = 4

  # tag -> field-name and tag -> wire-type tables used by the pb runtime.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "queue_name",
    2: "task_name",
    3: "eta_usec",
    4: "lease_seconds",
  }, 4)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.DOUBLE,
  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueModifyTaskLeaseRequest'
class TaskQueueModifyTaskLeaseResponse(ProtocolBuffer.ProtocolMessage):
  """Response message for ModifyTaskLease.

  Appears to be machine-generated protocol-buffer code (old-style App
  Engine pb runtime); single required int64 field `updated_eta_usec`
  (field number 1, varint wire format).
  """
  has_updated_eta_usec_ = 0
  updated_eta_usec_ = 0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  # Standard generated accessors for updated_eta_usec.
  def updated_eta_usec(self): return self.updated_eta_usec_

  def set_updated_eta_usec(self, x):
    self.has_updated_eta_usec_ = 1
    self.updated_eta_usec_ = x

  def clear_updated_eta_usec(self):
    # Only reset when set, so clearing is idempotent.
    if self.has_updated_eta_usec_:
      self.has_updated_eta_usec_ = 0
      self.updated_eta_usec_ = 0

  def has_updated_eta_usec(self): return self.has_updated_eta_usec_

  def MergeFrom(self, x):
    """Copy set fields of another instance of this message into self."""
    assert x is not self
    if (x.has_updated_eta_usec()): self.set_updated_eta_usec(x.updated_eta_usec())

  def Equals(self, x):
    """Field-wise equality (1/0, protobuf-runtime convention)."""
    if x is self: return 1
    if self.has_updated_eta_usec_ != x.has_updated_eta_usec_: return 0
    if self.has_updated_eta_usec_ and self.updated_eta_usec_ != x.updated_eta_usec_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff all required fields are set; optionally collect reasons."""
    initialized = 1
    if (not self.has_updated_eta_usec_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: updated_eta_usec not set.')
    return initialized

  def ByteSize(self):
    """Serialized size assuming required fields are set (+1 tag byte)."""
    n = 0
    n += self.lengthVarInt64(self.updated_eta_usec_)
    return n + 1

  def ByteSizePartial(self):
    """Serialized size counting only fields that are set."""
    n = 0
    if (self.has_updated_eta_usec_):
      n += 1
      n += self.lengthVarInt64(self.updated_eta_usec_)
    return n

  def Clear(self):
    self.clear_updated_eta_usec()

  def OutputUnchecked(self, out):
    # Tag 8 = (field 1 << 3) | varint.
    out.putVarInt32(8)
    out.putVarInt64(self.updated_eta_usec_)

  def OutputPartial(self, out):
    if (self.has_updated_eta_usec_):
      out.putVarInt32(8)
      out.putVarInt64(self.updated_eta_usec_)

  def TryMerge(self, d):
    """Decode from decoder `d`; unknown tags are skipped, tag 0 is an error."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_updated_eta_usec(d.getVarInt64())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text dump of the set fields."""
    res=""
    if self.has_updated_eta_usec_: res+=prefix+("updated_eta_usec: %s\n" % self.DebugFormatInt64(self.updated_eta_usec_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    """Densify a sparse {tag: value} dict into a tuple indexed 0..maxtag."""
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  kupdated_eta_usec = 1

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "updated_eta_usec",
  }, 1)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueModifyTaskLeaseResponse'
if _extension_runtime:
pass
__all__ = ['TaskQueueServiceError','TaskQueueRetryParameters','TaskQueueAcl','TaskQueueHttpHeader','TaskQueueMode','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddRequest_CronTimetable','TaskQueueAddResponse','TaskQueueBulkAddRequest','TaskQueueBulkAddResponse','TaskQueueBulkAddResponse_TaskResult','TaskQueueDeleteRequest','TaskQueueDeleteResponse','TaskQueueForceRunRequest','TaskQueueForceRunResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueScannerQueueInfo','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats','TaskQueuePauseQueueRequest','TaskQueuePauseQueueResponse','TaskQueuePurgeQueueRequest','TaskQueuePurgeQueueResponse','TaskQueueDeleteQueueRequest','TaskQueueDeleteQueueResponse','TaskQueueDeleteGroupRequest','TaskQueueDeleteGroupResponse','TaskQueueQueryTasksRequest','TaskQueueQueryTasksResponse','TaskQueueQueryTasksResponse_TaskHeader','TaskQueueQueryTasksResponse_TaskCronTimetable','TaskQueueQueryTasksResponse_TaskRunLog','TaskQueueQueryTasksResponse_Task','TaskQueueFetchTaskRequest','TaskQueueFetchTaskResponse','TaskQueueUpdateStorageLimitRequest','TaskQueueUpdateStorageLimitResponse','TaskQueueQueryAndOwnTasksRequest','TaskQueueQueryAndOwnTasksResponse','TaskQueueQueryAndOwnTasksResponse_Task','TaskQueueModifyTaskLeaseRequest','TaskQueueModifyTaskLeaseResponse']
| [
"toork@uw.edu"
] | toork@uw.edu |
dc75500ae42c946d704a13d2ae7eeeb42dddde45 | 77741a07e162b9578f2ff3f37761856cd2ec3122 | /node_modules/connect-mongostore/node_modules/bson/build/config.gypi | c6b3c26214493c4a691d1a776d81493a900a75fd | [
"Apache-2.0",
"MIT"
] | permissive | Alisher4444/shopping-cart | 3327cc4fb376458819ff4845cb18705968126a91 | fb369f63bfc3ced742fd5818af276d06591f1e8d | refs/heads/master | 2021-01-23T07:44:45.749064 | 2017-03-28T11:20:28 | 2017-03-28T11:20:28 | 86,442,278 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,389 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"coverage": "false",
"debug_devtools": "node",
"force_dynamic_crt": 0,
"gas_version": "2.23",
"host_arch": "x64",
"icu_data_file": "icudt58l.dat",
"icu_data_in": "../../deps/icu-small/source/data/in/icudt58l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_locales": "en,root",
"icu_path": "deps/icu-small",
"icu_small": "true",
"icu_ver_major": "58",
"node_byteorder": "little",
"node_enable_d8": "false",
"node_enable_v8_vtunejit": "false",
"node_install_npm": "true",
"node_module_version": 51,
"node_no_browser_globals": "false",
"node_prefix": "/",
"node_release_urlbase": "https://nodejs.org/download/release/",
"node_shared": "false",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_use_bundled_v8": "true",
"node_use_dtrace": "false",
"node_use_etw": "false",
"node_use_lttng": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"node_use_v8_platform": "true",
"openssl_fips": "",
"openssl_no_asm": 0,
"shlib_suffix": "so.51",
"target_arch": "x64",
"uv_parent_path": "/deps/uv/",
"uv_use_dtrace": "false",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_inspector": "true",
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_random_seed": 0,
"v8_use_snapshot": "true",
"want_separate_host_toolset": 0,
"want_separate_host_toolset_mkpeephole": 0,
"nodedir": "/home/alisher/.node-gyp/7.6.0",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"cache_lock_stale": "60000",
"legacy_bundling": "",
"sign_git_tag": "",
"user_agent": "npm/4.1.2 node/v7.6.0 linux x64",
"always_auth": "",
"bin_links": "true",
"key": "",
"description": "true",
"fetch_retries": "2",
"heading": "npm",
"if_present": "",
"init_version": "1.0.0",
"user": "",
"force": "",
"only": "",
"cache_min": "10",
"init_license": "ISC",
"editor": "vi",
"rollback": "true",
"tag_version_prefix": "v",
"cache_max": "Infinity",
"userconfig": "/home/alisher/.npmrc",
"engine_strict": "",
"init_author_name": "",
"init_author_url": "",
"tmp": "/tmp",
"depth": "Infinity",
"save_dev": "",
"usage": "",
"metrics_registry": "https://registry.npmjs.org/",
"progress": "true",
"https_proxy": "",
"onload_script": "",
"rebuild_bundle": "true",
"save_bundle": "",
"shell": "/bin/bash",
"dry_run": "",
"prefix": "/usr/local",
"scope": "",
"browser": "",
"cache_lock_wait": "10000",
"registry": "https://registry.npmjs.org/",
"save_optional": "",
"searchopts": "",
"versions": "",
"cache": "/home/alisher/.npm",
"send_metrics": "",
"global_style": "",
"ignore_scripts": "",
"version": "",
"local_address": "",
"viewer": "man",
"color": "true",
"fetch_retry_mintimeout": "10000",
"maxsockets": "50",
"umask": "0002",
"fetch_retry_maxtimeout": "60000",
"message": "%s",
"ca": "",
"cert": "",
"global": "",
"link": "",
"save": "true",
"access": "",
"also": "",
"unicode": "true",
"long": "",
"production": "",
"unsafe_perm": "true",
"node_version": "7.6.0",
"tag": "latest",
"git_tag_version": "true",
"shrinkwrap": "true",
"fetch_retry_factor": "10",
"proprietary_attribs": "true",
"save_exact": "",
"strict_ssl": "true",
"dev": "",
"globalconfig": "/usr/local/etc/npmrc",
"init_module": "/home/alisher/.npm-init.js",
"parseable": "",
"globalignorefile": "/usr/local/etc/npmignore",
"cache_lock_retries": "10",
"searchstaleness": "900",
"save_prefix": "^",
"scripts_prepend_node_path": "warn-only",
"group": "1000",
"init_author_email": "",
"searchexclude": "",
"git": "git",
"optional": "true",
"json": ""
}
}
| [
"alisher.bazarkhanov@gmail.com"
] | alisher.bazarkhanov@gmail.com |
bbdd38098ebc51601efb9b898bf5258e1baffde4 | e3c55bf263c7f920fa14988ea94e0be316fa993a | /Steganography.py | 8f0d450ff997f9613e599b4a71720ce19a2ff129 | [
"MIT"
] | permissive | Markus28/Steganography | a6f7a0df1973d06feb850bc733d97cf25c4bb976 | 5e1d35fd45901e93a1fff2b4be688178a488259e | refs/heads/master | 2020-05-29T11:44:32.537658 | 2019-09-14T22:09:36 | 2019-09-14T22:09:36 | 66,156,273 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 19,829 | py | from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import os
import sys
import base64
import ntpath
import StringIO
from Crypto.Cipher import AES
from Crypto import Random
import hashlib
import random
import cv2
import time
import numpy as np
from scipy.io import wavfile
import subprocess
from shutil import copyfile
import contextlib
import signal
class AESCipher(object):
    """AES-256-CBC helper: SHA-256-derived key, random IV, base64 transport.

    encrypt() returns base64(iv || ciphertext); decrypt() inverts it.
    """

    def __init__(self, key):
        # Derive a fixed 32-byte key from the pass-phrase via SHA-256.
        self.bs = 32
        self.key = hashlib.sha256(key.encode()).digest()

    def encrypt(self, raw):
        """Pad and encrypt `raw`, returning base64(iv + ciphertext)."""
        padded = self._pad(raw)
        iv = Random.new().read(AES.block_size)
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return base64.b64encode(iv + cipher.encrypt(padded))

    def decrypt(self, enc):
        """Invert encrypt(): base64-decode, split off the IV, decrypt, unpad."""
        blob = base64.b64decode(enc)
        iv, body = blob[:AES.block_size], blob[AES.block_size:]
        plain = AES.new(self.key, AES.MODE_CBC, iv).decrypt(body)
        return self._unpad(plain).decode('utf-8')

    def _pad(self, s):
        # PKCS#7-style padding up to a multiple of self.bs bytes.
        fill = self.bs - len(s) % self.bs
        return s + fill * chr(fill)

    @staticmethod
    def _unpad(s):
        # The last byte encodes how many padding bytes to strip.
        return s[:-ord(s[len(s) - 1:])]
class AudioWriter(object):
    """Hides data in the least-significant bits of a WAV file's samples."""

    def __init__(self, source, target):
        self.source = source  # cover WAV read from here
        self.target = target  # stego WAV written here

    def write_txt(self, msg):
        """Embed `msg`, framed as "<bit length>:<msg>", into the cover audio."""
        sample_rate, data = wavfile.read(self.source)
        framed = "%d:%s" % (8 * len(msg), msg)
        stego = apply_bytes(data, str_to_bytes(framed))
        wavfile.write(self.target, sample_rate, stego)

    def write_file(self, src_path, key=""):
        """Encrypt an arbitrary file and embed it, tagged with its name."""
        print('[Write file]')
        payload = '{{{&&filename %s&&}}}%s' % (filename(src_path), file_to_txt(src_path))
        self.write_encrypted_txt(payload, key)
        print('Done\n')

    def write_encrypted_txt(self, txt_msg, key=""):
        """AES-encrypt `txt_msg` with `key`, then embed the ciphertext."""
        cipher = AESCipher(key)
        print('Encrypting...')
        self.write_txt(cipher.encrypt(txt_msg))
class AudioReader(object):
    """Recovers data hidden in the least-significant bits of WAV samples.

    Counterpart of AudioWriter: the payload is stored one bit per sample
    (LSB first within each byte) and framed as "<bit length>:<payload>".
    """

    def __init__(self, source):
        self.source = source  # stego WAV to read

    def read_txt(self):
        """Extract and return the embedded text message.

        Assembles one byte from every 8 consecutive samples.  BUGFIX: the
        previous implementation decoded each byte from only 7 samples and
        skipped the 8th (it processed on ``(i+1) % 8 == 0`` *before*
        appending bit i), silently dropping bit 7 of every byte.  That
        only worked because embedded payloads are ASCII/base64, where
        bit 7 is always 0; this version reads all 8 bits so arbitrary
        byte values round-trip as well.

        Assumes mono audio (1-D sample array) -- TODO confirm for stereo.
        Raises ValueError if the length header is not an integer (e.g. a
        cover file with no embedded message).
        """
        sample_rate, data = wavfile.read(self.source)
        msg = ''
        bits = []
        receiving = False   # True once the "<length>:" header was parsed
        following = 0       # payload length in bits, from the header
        received = 0        # payload bits consumed so far
        for sample in data:
            bits.append(int(sample) & 1)
            if len(bits) < 8:
                continue
            byte = 0
            for pos, bit in enumerate(bits):
                byte |= bit << pos
            bits = []
            ch = chr(byte)
            if ch == ':' and not receiving:
                # End of the length header: switch to payload mode.
                following = int(msg)
                receiving = True
                msg = ''
            elif not receiving:
                msg += ch
            elif received < following:
                received += 8
                msg += ch
            else:
                return msg
        return msg

    def read_file_content(self, key=""):
        """Decrypt the payload and split it into (base64 content, file name)."""
        txt = self.read_encrypted_txt(key)
        if txt.find('{{{&&filename ') == -1:
            raise Exception('The message found is no file')
        end = txt.find('&&}}}')
        target_file_name = txt[len('{{{&&filename '):end]
        encoded_content = txt[end + len('&&}}}'):]
        return encoded_content, target_file_name

    def read_file(self, target_directory, key=""):
        """Extract an embedded file into `target_directory`."""
        print('[Read file]')
        encoded_content, target_file_name = self.read_file_content(key)
        txt_to_file(encoded_content, os.path.join(target_directory, target_file_name))
        print('Done\n')

    def read_encrypted_txt(self, key=""):
        """Extract the embedded message and decrypt it with `key`."""
        encrypted_msg = self.read_txt()
        print('Decrypting...')
        return AESCipher(key).decrypt(encrypted_msg)
class VideoReader(object):
    """Recovers data hidden in frame-pixel LSBs of a video.

    Counterpart of VideoWriter: each frame carries up to
    (width*height*3)/8 payload bytes in its RGB-channel LSBs; the whole
    message is framed as "<bit length>:<payload>".

    NOTE(review): all frame paths are hard-coded to one user's Windows
    temp directory -- should use tempfile.gettempdir().
    """
    def __init__(self, source):
        # Path of the stego video to read.
        self.source = source
    def read_txt(self):
        """Dump every frame to PNG, then scan frames in order for the message.

        NOTE(review): frames are only srm()-deleted after a frame is
        scanned to the end; the early `return message` paths leave the
        decoded frames on disk.
        """
        print 'Opening video...'
        vidcap = cv2.VideoCapture(self.source)
        fps = vidcap.get(cv2.CAP_PROP_FPS)
        count = 0
        success = True
        print 'Saving frames...'
        # Frames must go to lossless PNG: a lossy codec would destroy the LSBs.
        while success:
            success, image = vidcap.read()
            cv2.imwrite('C:\Users\Markus\AppData\Local\Temp\\frame%d.png'%count, image)
            count += 1
        receiving = False
        message = ''
        tot = 0
        for n in xrange(count-1):
            print 'Opening frame...'
            im = Image.open('C:\Users\Markus\AppData\Local\Temp\\frame%d.png'%n)
            im = im.convert('RGB')
            width, height = im.size
            # Capacity of one frame in whole payload bytes.
            max_info = (width*height*3)/8
            pix = im.load()
            pixels = []
            index_dict = {}
            tot = 0
            c = 0
            print 'Reading pixels...'
            # Flatten (R, G, B) tuples into one channel-value stream.
            pixels = [item for row in im.getdata() for item in row]
            chunked_pixels = chunks(pixels,8)
            print 'Looking for message...'
            for i, encoded_byte in enumerate(chunked_pixels):
                # NOTE(review): `c` is never incremented, so this guard is
                # dead; the trailing partial chunk of each frame is decoded
                # as a short byte -- confirm intended behavior.
                if c>max_info:
                    print 'Reached end, next frame...'
                    break
                # Reassemble one byte, LSB first, from 8 channel LSBs.
                for pos, byte in enumerate(encoded_byte):
                    tot += (2**pos)*last_bit(byte)
                if chr(tot) == ':' and not receiving:
                    try:
                        following = int(message)
                    except ValueError as e:
                        print 'No message'
                        return message
                    receiving = True
                    message = ''
                    received = 0
                elif not receiving:
                    message += chr(tot)
                elif received < following:
                    received += 8
                    message += chr(tot)
                else:
                    return message
                tot = 0
            srm('C:\Users\Markus\AppData\Local\Temp\\frame%d.png'%n)
        return message
    def read_file_content(self, key=""):
        """Decrypt the payload and split it into (base64 content, file name)."""
        txt = self.read_encrypted_txt(key)
        if txt.find('{{{&&filename ') == -1:
            raise Exception('The message found is no file')
        else:
            end = txt.find('&&}}}')
            target_file_name = txt[len('{{{&&filename '):end]
            encoded_content = txt[end+len('&&}}}'):]
            return encoded_content, target_file_name
    def read_file(self, target_directory, key=""):
        """Extract an embedded file into target_directory."""
        print '[Read file]'
        encoded_content, target_file_name = self.read_file_content(key)
        target_path = os.path.join(target_directory, target_file_name)
        txt_to_file(encoded_content, target_path)
        print 'Done\n'
    def read_encrypted_txt(self, key=""):
        """Extract the embedded message and decrypt it with key."""
        encrypted_msg = self.read_txt()
        c = AESCipher(key)
        print 'Decrypting...'
        return c.decrypt(encrypted_msg)
    #For debugging only
    def show_image(self, key=""):
        """Extract an embedded image payload and display it with matplotlib."""
        print '[Show image]'
        encoded_content, file_name = self.read_file_content(key)
        content = encoded_content.decode('base64')
        encoded_content = None
        txt_to_image(content, name=file_name)
        print 'Done\n'
class VideoWriter(object):
    """Embeds data in frame-pixel LSBs of a video, re-muxing the audio track.

    NOTE(review): all intermediate paths are hard-coded to one user's
    Windows temp directory -- should use tempfile.gettempdir().
    """
    def __init__(self, source, target):
        # Cover video path and stego output path.
        self.source = source
        self.target = target
    def write_txt(self, msg):
        """Split the length-framed message across frames and rebuild the video."""
        # Frame the whole message once; per-frame ImageWriter calls then use
        # header=False below.
        msg = "%d:%s"%(8*len(msg), msg)
        vidcap = cv2.VideoCapture(self.source)
        fps = vidcap.get(cv2.CAP_PROP_FPS)
        #success, image = vidcap.read()
        count = 0
        success = True
        # NOTE(review): the final iteration runs imwrite after read() failed,
        # i.e. with image=None -- confirm cv2 tolerates this.
        while success:
            success, image = vidcap.read()
            cv2.imwrite('C:\Users\Markus\AppData\Local\Temp\\frame%d.png'%count, image)
            count += 1
        vidcap.release()
        im = Image.open('C:\Users\Markus\AppData\Local\Temp\\frame0.png')
        im = im.convert('RGB')
        width, height = im.size
        res = width * height
        # Per-frame capacity in whole payload bytes.
        max_info = (res*3)/8
        if max_info*(count-1)<len(msg):
            raise IndexError('Target too small')
        msg = chunks(msg, max_info)
        # Embed one chunk per frame, in place, muting ImageWriter's chatter.
        for i, fragment in enumerate(msg):
            with silent_print():
                writer = ImageWriter('C:\Users\Markus\AppData\Local\Temp\\frame%d.png'%i, 'C:\Users\Markus\AppData\Local\Temp\\frame%d.png'%i)
                writer.write_txt(fragment, header=False)
        print 'Creating Video, %d frames'%count
        img1 = cv2.imread('C:\Users\Markus\AppData\Local\Temp\\frame0.png')
        height , width , layers = img1.shape
        # fourcc -1: let OpenCV pick/prompt a codec.  NOTE(review): a lossy
        # codec would destroy the embedded LSBs -- confirm a lossless one is
        # chosen.
        video = cv2.VideoWriter('C:\Users\Markus\AppData\Local\Temp\\steg_video.avi',-1,fps ,(width,height))
        print 'Collecting frames'
        for i in xrange(count-1):
            print 'C:\Users\Markus\AppData\Local\Temp\\frame%d.png'%i
            video.write( cv2.imread('C:\Users\Markus\AppData\Local\Temp\\frame%d.png'%i))
            srm('C:\Users\Markus\AppData\Local\Temp\\frame%d.png'%i)
        cv2.destroyAllWindows()
        video.release()
        # Pull the audio out of the original and mux it onto the stego video.
        # NOTE(review): shell=True with interpolated paths is shell-injection
        # prone if source/target names are untrusted.
        try:
            print subprocess.check_output('ffmpeg -i %s -ab 160k -ac 2 -ar 44100 -vn %s -y'%(self.source, 'C:\Users\Markus\AppData\Local\Temp\\steg_audio.wav'), stderr = subprocess.STDOUT, shell = True)
            print subprocess.check_output('ffmpeg -i %s -i %s -codec copy -shortest %s -y'%('C:\Users\Markus\AppData\Local\Temp\\steg_video.avi', 'C:\Users\Markus\AppData\Local\Temp\\steg_audio.wav', os.path.abspath(self.target)), stderr = subprocess.STDOUT, shell = True)
        except subprocess.CalledProcessError as e:
            # ffmpeg missing/failed: fall back to the silent stego video.
            sys.stderr.write('Warning: Could not transfer audiodata from original video. %s Did you install ffmpeg properly?\n' %e.output)
            copyfile('C:\Users\Markus\AppData\Local\Temp\\steg_video.avi', self.target)
        try:
            srm('C:\Users\Markus\AppData\Local\Temp\\steg_audio.wav')
            srm('C:\Users\Markus\AppData\Local\Temp\\steg_video.avi')
        except IOError:
            pass
    def write_file(self, src_path, key=""):
        """Encrypt an arbitrary file and embed it, tagged with its name."""
        print '[Write file]'
        txt = file_to_txt(src_path)
        txt = '{{{&&filename %s&&}}}%s'%(filename(src_path), txt)
        self.write_encrypted_txt(txt, key)
        print 'Done\n'
    def write_encrypted_txt(self, txt_msg, key=""):
        """AES-encrypt txt_msg with key, then embed the ciphertext."""
        c = AESCipher(key)
        print 'Encrypting...'
        encrypted_msg = c.encrypt(txt_msg)
        self.write_txt(encrypted_msg)
self.write_txt(encrypted_msg)
class ImageWriter(object):
    """Embeds data in the least-significant bits of an image's RGB channels."""

    def __init__(self, source, target):
        self.source = source  # cover image path
        self.target = target  # stego image path

    def write_txt(self, txt_msg, header=True):  # header only false for video
        """Embed txt_msg into the cover image's channel LSBs.

        With header=True the message is framed as "<bit length>:<message>"
        so a reader knows where it ends; VideoWriter frames the complete
        message itself and passes header=False for individual frames.
        """
        im = Image.open(self.source).convert('RGB')
        width, height = im.size
        pix = im.load()
        print('Reading pixels...')
        # Flatten (R, G, B) tuples into one channel-value stream.
        channels = [value for rgb in im.getdata() for value in rgb]
        print('Adding message...')
        payload = '%d:%s' % (len(txt_msg) * 8, txt_msg) if header else txt_msg
        channels = apply_bytes(channels, str_to_bytes(payload))
        print('Converting to RGB...')
        print('Writing image...')
        triples = [tuple(group) for group in chunks(channels, 3)]
        im.putdata(triples)
        print('Saving image...')
        im.save(self.target)
        print('Saved')

    def write_encrypted_txt(self, txt_msg, key=""):
        """AES-encrypt txt_msg with key and embed the ciphertext."""
        cipher = AESCipher(key)
        print('Encrypting...')
        self.write_txt(cipher.encrypt(txt_msg))

    def write_file(self, src_path, key=""):
        """Encrypt a file and embed it, tagged with its original name."""
        print('[Write file]')
        payload = '{{{&&filename %s&&}}}%s' % (filename(src_path), file_to_txt(src_path))
        self.write_encrypted_txt(payload, key)
        print('Done\n')

    def delete_msg(self):
        """Overwrite every channel LSB of the *target* image with random bits."""
        print('[Delete message]')
        im = Image.open(self.target).convert('RGB')
        width, height = im.size
        pix = im.load()
        print('Reading pixels...')
        channels = [value for rgb in im.getdata() for value in rgb]
        print('Adding random bits..')
        channels = apply_bytes(channels, [ord(c) for c in os.urandom(len(channels) / 8)])
        print('Writing image...')
        triples = [tuple(group) for group in chunks(channels, 3)]
        im.putdata(triples)
        print('Saving image...')
        im.save(self.target)
        print('Done\n')
class ImageReader(object):
    """Recovers data hidden in an image's RGB-channel LSBs (see ImageWriter)."""
    def __init__(self, source):
        # Path of the stego image to read.
        self.source = source
    def read_txt(self):
        """Scan the channel LSBs for a "<bit length>:<payload>" message.

        NOTE(review): if the length header is not an integer ("No message"),
        the loop breaks and the garbage characters accumulated so far are
        still returned -- callers should treat the result as best-effort.
        """
        print 'Opening image...'
        im = Image.open(self.source)
        im = im.convert('RGB')
        width, height = im.size
        pix = im.load()
        pixels = []
        index_dict = {}
        print 'Reading pixels...'
        # Flatten (R, G, B) tuples into one channel-value stream.
        pixels = [item for row in im.getdata() for item in row]
        chunked_pixels = chunks(pixels,8)
        pixels = None
        tot = 0
        message = ''
        receiving = False
        print 'Looking for message...'
        for i, encoded_byte in enumerate(chunked_pixels):
            # Reassemble one byte, LSB first, from 8 channel LSBs.  The
            # trailing chunk may hold fewer than 8 values and then decodes
            # to a short byte.
            for n, byte in enumerate(encoded_byte):
                tot += (2**n)*last_bit(byte)
            if chr(tot) == ':' and not receiving:
                try:
                    following = int(message)
                except ValueError as e:
                    print 'No message'
                    break
                receiving = True
                message = ''
                received = 0
            elif not receiving:
                message += chr(tot)
            elif received < following:
                received += 8
                message += chr(tot)
            else:
                break
            tot = 0
        return message
    def read_encrypted_txt(self, key=""):
        """Extract the embedded message and decrypt it with key."""
        encrypted_msg = self.read_txt()
        c = AESCipher(key)
        print 'Decrypting...'
        return c.decrypt(encrypted_msg)
    def read_file_content(self, key=""):
        """Decrypt the payload and split it into (base64 content, file name)."""
        txt = self.read_encrypted_txt(key)
        if txt.find('{{{&&filename ') == -1:
            raise Exception('The message found is no file')
        else:
            end = txt.find('&&}}}')
            target_file_name = txt[len('{{{&&filename '):end]
            encoded_content = txt[end+len('&&}}}'):]
            return encoded_content, target_file_name
    def read_file(self, target_directory, key=""):
        """Extract an embedded file into target_directory."""
        print '[Read file]'
        encoded_content, target_file_name = self.read_file_content(key)
        target_path = os.path.join(target_directory, target_file_name)
        txt_to_file(encoded_content, target_path)
        print 'Done\n'
    #For debugging only
    def show_image(self, key=""):
        """Extract an embedded image payload and display it with matplotlib."""
        print '[Show image]'
        encoded_content, file_name = self.read_file_content(key)
        content = encoded_content.decode('base64')
        encoded_content = None
        txt_to_image(content, name=file_name)
        print 'Done\n'
print 'Done\n'
@contextlib.contextmanager
def silent_print():
    """Temporarily redirect stdout to an in-memory buffer.

    BUGFIX: restoration now happens in a ``finally`` block, so an
    exception raised inside the ``with`` body can no longer leave
    ``sys.stdout`` pointing at the discarded buffer (the original code
    after ``yield`` was skipped when the body raised).
    """
    actual_stdout = sys.stdout
    sys.stdout = StringIO.StringIO()
    try:
        yield
    finally:
        sys.stdout = actual_stdout
def txt_to_image(content, name='figure'):
    """Display raw image bytes with matplotlib.

    `content` is the decoded file content (e.g. PNG/JPEG bytes); it is
    wrapped in an in-memory buffer so PIL can open it without touching disk.
    """
    buff = StringIO.StringIO()
    buff.write(content)
    buff.seek(0)  # rewind so Image.open reads from the start
    im = Image.open(buff)
    imgplot = plt.imshow(im)
    plt.title(name)
    plt.show()
def filename(path):
    """Return the final component of `path`, tolerating a trailing separator."""
    head, tail = ntpath.split(path)
    if tail:
        return tail
    # Path ended in a separator: take the last component of the head instead.
    return ntpath.basename(head)
def add_bit(byte, bit):
    """Return `byte` with its least-significant bit set to `bit` (truthiness).

    BUGFIX: the LSB is now cleared with ``& ~1`` instead of ``& 0b11111110``.
    The old 8-bit mask also zeroed bits 8 and up, which silently corrupted
    values wider than a byte -- e.g. the 16-bit WAV samples AudioWriter
    feeds through apply_bytes().  Behavior for 0..255 values (image
    channels) is unchanged.
    """
    if bit:
        return byte | 1
    return byte & ~1
def str_to_bytes(string):
    """Return the ordinal value of every character in `string` as a list."""
    return list(map(ord, string))
def last_bit(value):
    """Return the least-significant bit of `value` (0 or 1)."""
    return value & 1
def apply_bytes(target, msg):
    """Embed each byte of `msg` into the LSBs of 8 consecutive `target` items.

    Bit n of byte i (LSB first) lands in ``target[i*8 + n]``.  `target` is
    mutated in place and also returned.  Raises IndexError when `target`
    cannot hold the whole message.

    BUGFIXES: ``range`` replaces the Python-2-only ``xrange`` so the helper
    runs under both interpreters, and the LSB is cleared with ``& ~1``
    rather than an 8-bit mask, so items wider than a byte (16-bit WAV
    samples) keep their upper bits intact.
    """
    if len(target) < 8 * len(msg):
        raise IndexError('Target is too small')
    for i, byte in enumerate(msg):
        for n in range(8):
            k = i * 8 + n
            if (byte >> n) & 1:
                target[k] |= 1
            else:
                target[k] &= ~1
    return target
def chunks(seq, size):
    """Lazily yield successive `size`-length slices of `seq`.

    The final slice may be shorter when len(seq) is not a multiple of size.
    """
    for start in range(0, len(seq), size):
        yield seq[start:start + size]
def file_to_txt(path):
    """Return the file at `path` as base64-encoded text."""
    with open(path, 'rb') as handle:
        return base64.b64encode(handle.read())
def txt_to_file(txt, path):
    """Decode base64 text `txt` and write the raw bytes to `path`.

    BUGFIX: uses ``base64.b64decode`` instead of the Python-2-only
    ``str.decode('base64')`` codec; the output is identical for valid
    base64 input and the helper now also runs under Python 3.
    """
    with open(path, 'wb') as handle:
        handle.write(base64.b64decode(txt))
def srm(path, reps=3):
    """Best-effort secure delete: overwrite `path` with random bytes
    `reps` times, then remove it.

    BUGFIXES: the file length is taken from ``os.path.getsize`` instead of
    reading the whole file into memory, and ``range`` replaces the
    Python-2-only ``xrange``.  NOTE: without flush/fsync per pass, OS
    write buffering (and SSD wear-leveling) means this is not a hard
    forensic guarantee.
    """
    length = os.path.getsize(path)
    for _ in range(reps):
        with open(path, 'wb') as handle:
            handle.write(os.urandom(length))
    os.remove(path)
if __name__ == '__main__': #Do some testing
    # End-to-end smoke tests against sample media in the working directory
    # (not unit tests): embed a file, read it back, compare byte-for-byte.
    # NOTE(review): `count` is defined inside the first try-block; if that
    # block raises early, the later blocks' `count += 1` hits a NameError
    # (which their broad excepts then swallow).
    try:
        print 'Testing VideoWriter:'
        writer = VideoWriter("SampleVideo_1280x720_1mb.mp4", 'altered_media/video_altered.avi')
        writer.write_file('small_galaxy.jpg', 'pass')
        print 'Testing VideoReader:'
        reader = VideoReader('altered_media/video_altered.avi')
        reader.read_file('test_outputs', 'pass')
        count = 0
        # Poll up to ~10 s for the extracted file to appear.
        while not os.path.isfile('test_outputs/small_galaxy.jpg'):
            count += 1
            time.sleep(1)
            if count>10:
                count = 0
                raise Exception('No Output was created')
        with open('small_galaxy.jpg', 'rb') as f_original:
            with open('test_outputs/small_galaxy.jpg', 'rb') as f_altered:
                if not f_original.read() == f_altered.read():
                    raise Exception('Test Input not equal to output')
        print 'Test passed!\n'
    except Exception as e:
        sys.stderr.write('Test failed with exception: %s\n'%(e))
        sys.stderr.write('Did you install OpenCV properly?\n')
    try:
        print 'Testing ImageWriter'
        with open('HumanRights.txt', 'r') as f:
            rights = f.read()
        writer = ImageWriter('images.png', 'altered_media/images_altered.png')
        writer.write_file('HumanRights.txt', 'pass')
        print 'Testing ImageReader:'
        reader = ImageReader('altered_media/images_altered.png')
        reader.read_file('test_outputs', 'pass')
        while not os.path.isfile('test_outputs/HumanRights.txt'):
            count += 1
            time.sleep(1)
            if count>10:
                count = 0
                raise Exception('No Output was created')
        with open('test_outputs/HumanRights.txt') as f:
            if not f.read() == rights:
                raise Exception('Test Input not equal to output')
        print "Testing ImageWriter's delete function:"
        writer.delete_msg()
        try:
            reader.read_file('', 'pass')
            sys.stderr.write('Could not delete message\n')
        except: #It is supposed to fail
            print 'Test passed!\n'
    except Exception as e:
        sys.stderr.write('Test failed with exception: %s\n'%(e))
    try:
        print 'Testing AudioWriter'
        writer = AudioWriter('alcohol.wav', 'altered_media/alcohol_altered.wav')
        writer.write_file('quote.rar', 'pass')
        print 'Testing AudioReader'
        reader = AudioReader('altered_media/alcohol_altered.wav')
        reader.read_file('test_outputs', 'pass')
        # NOTE(review): a .rar archive is binary; should be opened 'rb'
        # (text mode corrupts the comparison on Windows).
        with open('quote.rar', 'r') as f:
            archive = f.read()
        while not os.path.isfile('test_outputs/quote.rar'):
            count += 1
            time.sleep(1)
            if count>10:
                count = 0
                raise Exception('No Output was created')
        with open('test_outputs/quote.rar') as f:
            if not f.read() == archive:
                raise Exception('Test Input not equal to output')
        print 'Test passed!\n'
    except Exception as e:
        sys.stderr.write('Test failed with exception: %s\n'%(e))
| [
"montcyril@gmail.com"
] | montcyril@gmail.com |
ba8f9e33ad748f4bc592da7f8827d698b4c68c2f | c446c6343a057ab91bc4544055244b897570f0e2 | /Snakefile | e4d060883b730fe0146376109dd5c456ab0ca1b2 | [] | no_license | WagnerGroup/pyqmc-pbc-workflow | f400d7974ca9c3cc9e2b9ec346a429006ebf37cc | de9d7d23bf20ca723558e17729d6a14df1677959 | refs/heads/main | 2023-03-10T21:17:28.982041 | 2021-02-27T14:56:01 | 2021-02-27T14:56:01 | 337,127,646 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,984 | import functions
import concurrent
import numpy as np
import pyqmc
import json
settings=json.load(open("settings.json"))
# Run the periodic mean-field (DFT/HF) calculation for one functional /
# k-mesh / basis / exp_to_discard combination and checkpoint it to mf.chk.
rule MEAN_FIELD:
    input: "{dir}/system.json"
    output: "{dir}/{functional}_{nx}_{ny}_{nz}/{basis}_{exp_to_discard}/mf.chk"
    resources:
        walltime=settings["mean-field"]["walltime"], partition=settings["partition"]
    run:
        # k-point mesh comes from the directory-name wildcards.
        kmesh = (int(wildcards.nx),int(wildcards.ny), int(wildcards.nz))
        functions.mean_field(output[0],kmesh=kmesh, exp_to_discard=float(wildcards.exp_to_discard), settings=json.load(open(input[0])), basis=wildcards.basis, functional=wildcards.functional)
def opt_dependency(wildcards):
    """Snakemake input-function for OPTIMIZE_MF: map a target's wildcards to
    the checkpoint files it depends on.

    Optimizations are chained along the nconfig ladder in
    settings["optimization"]["nconfigs"]; excited states (statenumber > 0)
    additionally anchor on every lower state's fully converged wavefunction.

    Cleanup: removed a leftover debug ``print(wildcards)`` and a dead
    ``startingwf`` assignment that was never read.

    NOTE(review): the paths built here lack the ``{twist}`` component that
    appears in OPTIMIZE_MF's output pattern -- verify the naming schemes
    actually match.
    """
    d = {}
    basedir = f"{wildcards.dir}/"
    nconfig = int(wildcards.nconfig)
    nconfigs = settings["optimization"]["nconfigs"]
    ind = nconfigs.index(nconfig)
    # HCI-seeded and mean-field-seeded runs use different file layouts.
    if hasattr(wildcards, 'hci_tol'):
        basefile = basedir+f"opt_hci{wildcards.hci_tol}_{wildcards.determinant_cutoff}_{wildcards.orbitals}_"
    else:
        basefile = basedir+f"{wildcards.superdir}/opt_mf_{wildcards.orbitals}_"
    if ind > 0:
        # Warm-start from the previous rung of the nconfig ladder.
        d['start_from'] = basefile+f"{wildcards.statenumber}_{nconfigs[ind-1]}.chk"
    elif int(wildcards.statenumber) > 0:
        # NOTE(review): with ind == 0, nconfigs[ind-1] wraps to nconfigs[-1];
        # presumably intentional (excited states start from the prepared
        # input at the final nconfig) -- confirm.
        d['start_from'] = basedir+f"{wildcards.superdir}/input_mf_{wildcards.orbitals}_{wildcards.statenumber}_{nconfigs[ind-1]}.chk"
    # Anchor on every lower state's final (largest-nconfig) wavefunction.
    for i in range(int(wildcards.statenumber)):
        d[f'anchor_wf{i}'] = basefile+f"{i}_{nconfigs[-1]}.chk"
    return d
def convert_superdir(superdir):
    """Parse an underscore-separated string of nine integers into a 3x3
    supercell matrix."""
    entries = [int(part) for part in superdir.split('_')]
    return np.array(entries).reshape(3, 3)
def convert_twist(twist):
    """Parse a '-'-separated string of floats into a twist vector.

    NOTE: since '-' is the separator, negative components cannot appear in
    the wildcard -- presumably twists lie in [0, 1); confirm.
    """
    return np.array([float(part) for part in twist.split('-')])
# Optimize the QMC trial wavefunction for one twist / supercell / state /
# nconfig, warm-starting and anchoring via opt_dependency().
rule OPTIMIZE_MF:
    input: unpack(opt_dependency), mf = "{dir}/mf.chk"
    output: "{dir}/{superdir}/opt_mf_{twist}_{orbitals}_{statenumber}_{nconfig}.chk"
    resources:
        walltime=settings['optimization']['walltime'], partition=settings["partition"]
    run:
        n = int(wildcards.statenumber)
        start_from = None
        # opt_dependency() only supplies start_from past the first rung.
        if hasattr(input, 'start_from'):
            start_from=input.start_from
        # Map the {orbitals} wildcard onto pyqmc Slater options.
        if wildcards.orbitals=='orbitals':
            slater_kws={'optimize_orbitals':True}
        elif wildcards.orbitals=='fixed':
            slater_kws={'optimize_orbitals':False}
        elif wildcards.orbitals=='large':
            slater_kws={'optimize_orbitals':True, 'optimize_zeros':False}
        else:
            raise Exception("Did not expect",wildcards.orbitals)
        slater_kws['twist']=convert_twist(wildcards.twist)
        S = convert_superdir(wildcards.superdir)
        # Excited states (n > 0) are anchored against all lower states.
        if n==0:
            anchor_wfs=None
        else:
            anchor_wfs = [input[f'anchor_wf{i}'] for i in range(n)]
        with concurrent.futures.ProcessPoolExecutor(max_workers=settings["qmc_threads"]) as client:
            pyqmc.OPTIMIZE(input.mf, output[0], anchors = anchor_wfs, start_from=start_from,
                    nconfig=int(wildcards.nconfig), slater_kws=slater_kws,
                    linemin_kws=settings['optimization']['linemin_kws'],
                    S=S, client=client, npartitions=settings["qmc_threads"])
# Build the starting guess for the first excited state from the converged
# ground-state wavefunction.  NOTE(review): these paths lack the {twist}
# wildcard that OPTIMIZE_MF's opt_mf_{twist}_... outputs carry -- verify the
# naming schemes line up.
rule PREPARE_EXCITED_STATE:
    input: mf = "{dir}/mf.chk", gs= "{dir}/{superdir}/opt_mf_{orbitals}_0_{nconfig}.chk"
    output: "{dir}/{superdir}/input_mf_{orbitals}_1_{nconfig}.chk"
    run:
        functions.construct_excited_state(input.mf, input.gs, output[0])
# Variational Monte Carlo evaluation of an optimized wavefunction.
rule VMC:
    input: mf = "{dir}/mf.chk", opt = "{dir}/{superdir}/opt_{variables}.chk"
    output: "{dir}/{superdir}/vmc_{variables}.chk"
    threads: settings["qmc_threads"]
    resources:
        walltime="24:00:00", partition=settings["partition"]
    run:
        S = convert_superdir(wildcards.superdir)
        with concurrent.futures.ProcessPoolExecutor(max_workers=settings["qmc_threads"]) as client:
            pyqmc.VMC(input.mf, output[0], start_from=input.opt, nconfig=8000, client=client, npartitions=settings["qmc_threads"], S=S, vmc_kws=dict(nblocks=80))
# Diffusion Monte Carlo at a given timestep; nsteps scales as 30/tstep so
# the total projection time is the same for every timestep.
rule DMC:
    input: mf = "{dir}/mf.chk", opt = "{dir}/{superdir}/opt_{variables}.chk"
    output: "{dir}/{superdir}/dmc_{variables}_{tstep}.chk"
    threads: settings["qmc_threads"]
    resources:
        walltime="24:00:00", partition=settings["partition"]
    run:
        # NOTE(review): `multideterminant` is computed from the opt-file name
        # but never passed to pyqmc.DMC -- dead code or a missing argument?
        multideterminant = None
        startingwf = input.opt.split('/')[-1].split('_')[1]
        if 'hci' in startingwf:
            multideterminant = wildcards.dir+"/"+startingwf+".chk"
        tstep = float(wildcards.tstep)
        nsteps = int(30/tstep)
        S = convert_superdir(wildcards.superdir)
        with concurrent.futures.ProcessPoolExecutor(max_workers=settings["qmc_threads"]) as client:
            pyqmc.DMC(input.mf, output[0], S=S, start_from=input.opt, dmc_kws=dict(tstep=tstep, nsteps=nsteps), nconfig=8000, client=client, npartitions=settings["qmc_threads"])
| [
"lucas.wagner@gmail.com"
] | lucas.wagner@gmail.com | |
66692bd24c758481fae08982ce8fdf0a3fb9a107 | c1cf8aab423f850805e7f0b9ac7e8ebed22dfc47 | /11_basic_operations.py | 13e54193924798532135382e8fba779af92ddfaf | [] | no_license | karim-aly/OpenCV-Python | ae1bdc070138296be083c875a03d2f338f0fd525 | 26437ddc75d689b26d4a3b44efc265f0733b6935 | refs/heads/master | 2021-06-14T20:37:48.390247 | 2017-03-12T17:22:18 | 2017-03-12T17:22:18 | 83,915,755 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,526 | py | import cv2 as cv
import numpy as np
from matplotlib import pyplot as plt
# Tutorial script: OpenCV-Python basic image operations (pixel access, ROI,
# channel split/merge, borders). Requires 'messi5.jpg' and 'opencv_logo.png'
# in the working directory.
# load a color image
# NOTE(review): cv.imread returns None when the file is missing; the indexing
# below would then raise — confirm the image files are present before running.
img = cv.imread('messi5.jpg')
####### Single Pixel Accessing #######
# You can access a pixel value by its row and column coordinates.
# For BGR image, it returns an array of Blue, Green, Red values.
# For grayscale image, just corresponding intensity is returned.
px = img[100,100]
print("pixel at (100, 100):", px)
# accessing only blue pixel
blue = img[100,100,0]
print("blue channel value at (100, 100):", blue)
# You can modify the pixel values the same way.
img[100,100] = [255,255,255]
print("pixel at (100, 100):", img[100,100])
# Warning
# Numpy is a optimized library for fast array calculations.
# So simply accessing each and every pixel values and modifying it will be very slow and it is discouraged.
####### Better pixel accessing and editing method: #######
# accessing RED value
red = img.item(10,10,2)
print("red channel value at (10, 10):", red)
# modifying RED value
img.itemset((10,10,2),100)
red = img.item(10,10,2)
print("red channel value at (10, 10):", red)
####### Accessing Image Properties #######
# 1) Shape of image is accessed by img.shape :
# It returns a tuple of number of rows, columns and channels (channels if image is colored)
print("image shape:", img.shape)
# 2) Total number of pixels is accessed by img.size :
print("image size (number of pixels * channels):", img.size)
# 3) Image datatype is obtained by img.dtype :
# img.dtype is very important while debugging because a large number of errors in OpenCV-Python code is caused by invalid datatype.
print("image data type:", img.dtype)
####### ROI (Region of Interest) #######
# ROI is obtained using Numpy indexing.
# Here I am selecting the ball and copying it to another region in the image:
ball = img[280:340, 330:390]
img[273:333, 100:160] = ball
# Display blocks until a key is pressed in the window.
cv.imshow("ball moved", img)
cv.waitKey(0)
cv.destroyAllWindows()
####### Splitting and Merging Image Channels #######
# split channels
b,g,r = cv.split(img)
# merge channels
img = cv.merge((b,g,r))
# Note
# cv2.split() is a costly operation (in terms of time),
# so only use it if necessary. Numpy indexing is much more efficient and should be used if possible.
# Numpy Indexing
b = img[:,:,0]
g = img[:,:,1]
r = img[:,:,2]
# Suppose, you want to make all the red pixels to zero,
# you need not split like this and put it equal to zero.
# You can simply use Numpy indexing which is faster.
img[:,:,2] = 0
####### Making Borders for Images (Padding) #######
"""
If you want to create a border around the image, something like a photo frame,
you can use cv2.copyMakeBorder() function.
But it has more applications for convolution operation, zero padding etc. This function takes following arguments:
- src : input image
- top, bottom, left, right : border width in number of pixels in corresponding directions
- borderType : Flag defining what kind of border to be added. It can be following types:
- cv2.BORDER_CONSTANT - Adds a constant colored border. The value should be given as next argument.
- cv2.BORDER_REFLECT - Border will be mirror reflection of the border elements, like this : fedcba|abcdefgh|hgfedcb
- cv2.BORDER_REFLECT_101 or cv2.BORDER_DEFAULT - Same as above, but with a slight change, like this : gfedcb|abcdefgh|gfedcba
- cv2.BORDER_REPLICATE - Last element is replicated throughout, like this: aaaaaa|abcdefgh|hhhhhhh
- cv2.BORDER_WRAP - Can’t explain, it will look like this : cdefgh|abcdefgh|abcdefg
- value - Color of border if border type is cv2.BORDER_CONSTANT """
# Below is a sample code demonstrating all these border types for better understanding:
BLUE = [255,0,0]
img1 = cv.imread('opencv_logo.png')
constant= cv.copyMakeBorder(img1, 10, 10, 10, 10, cv.BORDER_CONSTANT, value=BLUE)
reflect = cv.copyMakeBorder(img1, 10, 10, 10, 10, cv.BORDER_REFLECT)
reflect101 = cv.copyMakeBorder(img1, 10, 10, 10, 10, cv.BORDER_REFLECT_101)
replicate = cv.copyMakeBorder(img1, 10, 10, 10, 10, cv.BORDER_REPLICATE)
wrap = cv.copyMakeBorder(img1, 10, 10, 10, 10, cv.BORDER_WRAP)
# NOTE: OpenCV loads images as BGR while matplotlib expects RGB, so the colors
# in these subplots appear channel-swapped; the 'gray' cmap is ignored for
# 3-channel images.
plt.subplot(231),plt.imshow(img1,'gray'),plt.title('ORIGINAL')
plt.subplot(232),plt.imshow(constant,'gray'),plt.title('CONSTANT')
plt.subplot(233),plt.imshow(reflect,'gray'),plt.title('REFLECT')
plt.subplot(234),plt.imshow(reflect101,'gray'),plt.title('REFLECT_101')
plt.subplot(235),plt.imshow(replicate,'gray'),plt.title('REPLICATE')
plt.subplot(236),plt.imshow(wrap,'gray'),plt.title('WRAP')
plt.show() | [
"karimalyazzam@gmail.com"
] | karimalyazzam@gmail.com |
cfccdfad8b4f394fc126e00b86f458af855f98dd | d8d8d7d731c57617ef165f22b8f1a58395e71714 | /spec_pythonizer/sanity_check.py | a97b7e690a4fedf22c297a83adbc1423da3dd383 | [
"MIT"
] | permissive | hwwhww/research | e2568293282befb47567c6bd89b8b7738c1404d6 | 4bde287aae017cd93fa936f587f21cddcc7c4129 | refs/heads/master | 2020-04-27T01:18:01.106838 | 2019-03-05T02:37:34 | 2019-03-05T02:37:34 | 173,961,168 | 1 | 0 | MIT | 2019-03-05T14:25:04 | 2019-03-05T14:25:04 | null | UTF-8 | Python | false | false | 3,489 | py | from copy import deepcopy
from spec import (
FAR_FUTURE_EPOCH,
GENESIS_EPOCH,
MAX_DEPOSIT_AMOUNT,
SLOTS_PER_EPOCH,
ZERO_HASH,
BeaconBlock,
DepositData,
DepositInput,
Eth1Data,
Validator,
int_to_bytes48,
merkle_root,
get_genesis_beacon_state,
get_block_root,
get_state_root,
get_empty_block,
advance_slot,
process_block,
state_transition,
)
def get_sample_genesis_validator(index):
    """Build a minimal Validator active from genesis whose pubkey encodes *index*."""
    fields = {
        "pubkey": int_to_bytes48(index),
        "withdrawal_credentials": ZERO_HASH,
        "activation_epoch": GENESIS_EPOCH,
        "exit_epoch": FAR_FUTURE_EPOCH,
        "withdrawable_epoch": FAR_FUTURE_EPOCH,
        "initiated_exit": False,
        "slashed": False,
    }
    return Validator(**fields)
def add_validators_to_genesis(state, num_validators):
    """Install *num_validators* sample validators (each at MAX_DEPOSIT_AMOUNT) on *state*.

    Bypasses the normal deposit route for now.
    TODO: get merkle root working and use normal genesis deposits.
    """
    registry = []
    for idx in range(num_validators):
        registry.append(get_sample_genesis_validator(idx))
    state.validator_registry = registry
    state.validator_balances = [int(MAX_DEPOSIT_AMOUNT)] * num_validators
def construct_empty_block_for_next_slot(state):
    """Build an empty block for slot ``state.slot + 1`` whose parent root points at *state*."""
    block = get_empty_block()
    block.slot = state.slot + 1
    header = deepcopy(state.latest_block_header)
    if header.state_root == ZERO_HASH:
        # The latest header's state root is filled lazily with the current state root.
        header.state_root = state.hash_tree_root()
    block.previous_block_root = header.hash_tree_root()
    return block
def test_slot_transition(state):
    """Advance one slot on a copy of *state*; verify slot count and cached state root."""
    advanced = deepcopy(state)
    advance_slot(advanced)
    assert advanced.slot == state.slot + 1
    assert get_state_root(advanced, state.slot) == state.hash_tree_root()
    return advanced
def test_empty_block_transition(state):
    """Process one empty block on a copy of *state*; check eth1 votes and block-root cache."""
    post_state = deepcopy(state)
    # The block is deliberately built from the original (pre-advance) state.
    block = construct_empty_block_for_next_slot(state)
    advance_slot(post_state)
    process_block(post_state, block)
    assert len(post_state.eth1_data_votes) == len(state.eth1_data_votes) + 1
    assert get_block_root(post_state, state.slot) == block.previous_block_root
def test_skipped_slots(state):
    """Jump three slots ahead with a single block; skipped slots must backfill its parent root."""
    post_state = deepcopy(state)
    block = construct_empty_block_for_next_slot(post_state)
    block.slot += 3
    state_transition(post_state, block)
    assert post_state.slot == block.slot
    for skipped in range(state.slot, post_state.slot):
        assert get_block_root(post_state, skipped) == block.previous_block_root
def test_empty_epoch_transition(state):
    """Cross a full empty epoch with one block; all intermediate slots share its parent root."""
    post_state = deepcopy(state)
    block = construct_empty_block_for_next_slot(post_state)
    block.slot += SLOTS_PER_EPOCH
    state_transition(post_state, block)
    assert post_state.slot == block.slot
    for skipped in range(state.slot, post_state.slot):
        assert get_block_root(post_state, skipped) == block.previous_block_root
def sanity_tests():
    """Build a 100-validator genesis state and run all sanity-check transitions on it.

    Uses add_validators_to_genesis to bypass real deposit processing.
    """
    # Fixed typo in the progress message ("Buidling" -> "Building").
    print("Building state with 100 validators...")
    genesis_state = get_genesis_beacon_state(
        [],
        0,
        Eth1Data(
            deposit_root="\x00"*32,
            block_hash="\x00"*32
        ),
    )
    add_validators_to_genesis(genesis_state, 100)
    print("done!")
    print()
    print("Running some sanity check tests...")
    test_slot_transition(genesis_state)
    test_empty_block_transition(genesis_state)
    test_skipped_slots(genesis_state)
    test_empty_epoch_transition(genesis_state)
    print("done!")
# Allow running this module directly as a quick smoke test.
if __name__ == "__main__":
    sanity_tests()
"dannyjryan@gmail.com"
] | dannyjryan@gmail.com |
e4e62fa5b63d53d9d3a3ecd4d5d2cd972c8a7787 | a02a1f3213510f8822bebf38d8e1228d7f19e573 | /Converter.py | 655fb40a6749d214c7b768e72efb61de4c0ebb43 | [] | no_license | HDing-CN/rainRemoval | 672c07466bfeadc2e1658a7c25c283c0aba5708b | 3acd7218feb7db6919c44c6ba6c763f54542ee12 | refs/heads/master | 2022-12-19T21:50:31.241029 | 2020-10-17T23:32:59 | 2020-10-17T23:32:59 | 262,968,556 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,726 | py | import numpy as np
import cv2
from itertools import cycle
import os
from os.path import isfile, join
def convert_single_channel(video, view):
    """Re-slice a (N, H, W, C) video along time, one color channel at a time.

    view == 'tw': returns C*H frames of shape (N, W) (time-width slices per channel).
    view == 'th': returns C*W frames of shape (N, H) (time-height slices per channel).
    Any other view yields an empty list.
    """
    print('start converting to new viewing angle only on single color channel')
    video = np.asarray(video)
    frames, height, width, channels = video.shape
    if view == 'tw':
        new_video = [video[:, row, :, ch]
                     for ch in range(channels) for row in range(height)]
    elif view == 'th':
        new_video = [video[:, :, col, ch]
                     for ch in range(channels) for col in range(width)]
    else:
        new_video = []
    print('the shape of new video is: ' + str(np.array(new_video).shape))
    print('----------------------')
    return new_video
def convert_RGB_channel(video, view):
    """Re-slice a (N, H, W, C) video along time, keeping all color channels.

    view == 'tw': returns H frames of shape (N, W, C).
    view == 'th': returns W frames of shape (N, H, C).
    Any other view yields an empty list.
    """
    print('start converting to new viewing angle including all 3 RGB channels')
    video = np.asarray(video)
    frames, height, width, channels = video.shape
    if view == 'tw':
        new_video = [video[:, row] for row in range(height)]
    elif view == 'th':
        new_video = [video[:, :, col] for col in range(width)]
    else:
        new_video = []
    print('the shape of new video is: ' + str(np.array(new_video).shape))
    print('----------------------')
    return new_video
def play_video(img_list):
    """Loop the given frames forever in an OpenCV window until Esc (key 27) is pressed."""
    frames = cycle(img_list)
    while True:
        cv2.imshow('window title', next(frames))
        # waitKey(60) pauses ~60 ms per frame; a non-zero delay drives the animation.
        if cv2.waitKey(60) & 0xFF == 27:
            break
def convert_images_to_video(path_in, path_out, fps):
    """Assemble the .jpg files under *path_in* (sorted by name) into a DIVX video.

    path_in must end with a path separator (filenames are concatenated to it).
    Frames keep their name-sorted order; the video size is taken from the last
    readable frame. Does nothing when no readable .jpg is found.
    """
    files = sorted(f for f in os.listdir(path_in) if isfile(join(path_in, f)))
    print(files)
    frame_array = []
    size = None
    for name in files:
        if name.endswith('jpg'):
            img = cv2.imread(path_in + name)
            if img is None:
                # Skip unreadable files instead of crashing on img.shape.
                continue
            height, width, layers = img.shape
            size = (width, height)
            frame_array.append(img)
    if size is None:
        # Bug fix: previously `size` was unbound here and VideoWriter raised NameError.
        return
    out = cv2.VideoWriter(path_out, cv2.VideoWriter_fourcc(*'DIVX'), fps, size)
    for frame in frame_array:
        out.write(frame)
    out.release()
def numOfDigits(x):
    """Return the number of decimal digits of integer *x* (sign ignored).

    Bug fix: the old loop returned 0 for x == 0 (and for negatives), which
    would have produced a five-character name ('0000' + '0') in the
    zero-padding logic upstream; 0 is a one-digit number.
    """
    return len(str(abs(int(x))))
def convert_npy_to_images(npy_file, path_out):
    """Dump each frame of the array stored in *npy_file* as path_out + 'result_NNNN.jpg'.

    Frames are numbered from 1 and zero-padded to at least four digits
    (result_0001.jpg, ...); counts beyond 9999 are written unpadded, matching
    the old manual padding (a negative pad width multiplied into '').
    """
    frames = np.load(npy_file)
    for i, frame in enumerate(frames, start=1):
        print(i)
        # '{:04d}' reproduces the old ((4 - numOfDigits(i)) * '0') + str(i) padding.
        cv2.imwrite('{}result_{:04d}.jpg'.format(path_out, i), frame)
| [
"hding@amazon.com"
] | hding@amazon.com |
c01a756b86dab86351f0cf370efdb332e7680fcc | b6890b36a98f1fc8ac6193549c3684c2e06fb9e9 | /day-05-in-class/in-class-05-monte-carlo.py | 4256dacc5c12bfc5b05f703ad7b91bedbb26606d | [] | no_license | wiltand1/cmse_20 | 25399f21f7932b46079943522ee2bda078fd7460 | b69a3227ea1445a2f5c8bd6466ecff24b0913000 | refs/heads/master | 2022-04-17T07:56:48.053379 | 2020-04-16T19:43:35 | 2020-04-16T19:43:35 | 232,853,062 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 669 | py | import math
import random
# Monte-Carlo estimate of the area between y = pi*(x/2)^2 and y = pi*x^2,
# sampled over the unit square.
npoints = 16
points_in = 0
points_out = 0
x_in = []
x_out = []
y_in = []
y_out = []
for i in range(npoints):
    x = random.random()
    y = random.random()
    if y > math.pi*(0.5*x)**2 and y < math.pi*(1*x)**2:
        points_in += 1
        x_in.append(x)
        y_in.append(y)
    else:
        points_out += 1
        # Bug fix: misses were previously appended to x_in/y_in too, so the
        # "outside" lists stayed empty and the hit lists were contaminated.
        x_out.append(x)
        y_out.append(y)
area_computed = (points_in/npoints)*4
area_actual = math.pi*1**2-math.pi*0.5**2
# NOTE(review): samples are drawn from the unit square (area 1), so the *4
# scale factor and the comparison against the full annulus area look
# inconsistent — confirm the intended sampling region.
print("The computed area of the donut is", area_computed)
print("The actual area of the donut is", area_actual)
print("The calculated error is", abs(area_computed-area_actual)/area_actual)
| [
"youraddress@emailserverwiltand1@msu.edu"
] | youraddress@emailserverwiltand1@msu.edu |
7b459863ace7904da1f6e6affba8dd4247466e96 | fea6e9d6b20b0c5f2a05a6f2433aae4176b2a00a | /server/applibs/account/models/phoneuser.py | 2b9987502723a11b18689e14372406d54281c047 | [] | no_license | fanshuai/kubrick | fddf6c21bcd500223d9a05bd002e47eb1ecf8839 | b7ed6588e13d2916a4162d56509d2794742a1eb1 | refs/heads/main | 2023-03-24T12:21:44.562850 | 2021-03-19T15:11:40 | 2021-03-19T15:11:40 | 349,445,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,497 | py | import json
import logging
import phonenumbers
from django.db import models, transaction
from django.utils.functional import cached_property
from phonenumbers import PhoneNumberFormat as PNFormat
from phonenumbers import carrier, geocoder
from mirage import fields as mg_fields
from server.constant import mochoice as mc
from server.corelib.sequence import idshift
from server.corelib.hash_id import pk_hashid_decode
from server.djextend.basemodel import BasicModel, BIDModel
from server.third.aliyun.dayu import sms_action, sms_constant
from server.corelib.dealer.deal_time import get_now
from server.corelib.dealer import format_phonenumber
from server.constant.normal import COUNTRY_CODES
logger = logging.getLogger('kubrick.debug')
class PhoneManager(models.Manager):
    """Manager for Phone rows (lookup/creation by raw number string)."""

    def get_phone(self, number, **kwargs):
        """Get or create the Phone row for a raw number; raises ValueError if unparseable."""
        number, msg = format_phonenumber(number)
        if not number:
            logger.warning(f'get_phone__parse_error {number} {msg}')
            raise ValueError(f'Phone number parse error: {msg}.')
        # The SHA1 of the normalized number is the unique lookup key alongside
        # the encrypted number column.
        kwargs['shahash'] = idshift.hash_sha1(number)
        inst, is_created = self.get_or_create(number=number, defaults=kwargs)
        logger.info(f'get_phone__created {inst.pk} {is_created} {inst.usrid} {inst.show}')
        if is_created:
            # Fill carrier/region metadata derived from the parsed number.
            inst.check_context()
        return inst

    def user_phone_qs(self, usrid):
        """Verified phone numbers of a user, primary (order 0) first."""
        qs = self.filter(usrid=usrid, is_verified=True).order_by('order', 'pk')
        return qs

    def user_phone_main(self, usrid):
        """The user's primary phone number, or None."""
        phone = self.user_phone_qs(usrid=usrid).first()
        return phone

    def check_phone_exist(self, number):
        """Whether the number is already bound to some user; False on parse errors."""
        number, msg = format_phonenumber(number)
        if not number:
            logger.warning(f'check_phone_exist__parse_error {number} {msg}')
            return False
        is_exists = self.filter(number=number, usrid__gt=0).exists()
        return is_exists
class Phone(BasicModel, BIDModel):
    """Phone-number binding; a user may bind several numbers.

    NOTE(review): the original docstring said 1~5 numbers, but ``limit``
    below is 3 — confirm the intended per-user cap.
    """

    class Meta:
        verbose_name = 'Phone'
        verbose_name_plural = verbose_name
        index_together = ['carrier', 'nation', 'region', 'is_verified']
        db_table = 'k_ac_phone'
        ordering = ('-pk',)

    limit = 3  # per-user cap on bound phone numbers
    shahash = models.CharField('SHA1签名', max_length=50, unique=True)
    number = mg_fields.EncryptedCharField(verbose_name='手机号', max_length=50, unique=True)  # E.164, encrypted at rest
    national = mg_fields.EncryptedCharField(verbose_name='号码', max_length=50, db_index=True, default='')
    usrid = models.BigIntegerField('用户', db_index=True, default=0)
    carrier = models.CharField('运营商', max_length=50, default='')
    nation = models.CharField('国家', max_length=20, default='')
    region = models.CharField('归属地', max_length=50, default='')
    is_verified = models.BooleanField('已验证', default=False)
    verified_at = models.DateTimeField('验证时间', null=True, default=None)
    order = models.PositiveSmallIntegerField('顺序', default=0)  # 0 == primary number

    objects = PhoneManager()

    @cached_property
    def parse_info(self):
        # Parsed phonenumbers object; cached because parsing is pure on `number`.
        info = phonenumbers.parse(self.number, None)
        return info

    @property
    def user(self):
        # Bound user object, or None when unbound (usrid == 0).
        if not self.usrid:
            return None
        info = self.get_user(self.usrid)
        return info

    @property
    def is_main(self):
        # Primary number == verified, bound, and order 0.
        if not (self.usrid and self.is_verified):
            return False
        is_ok = self.order == 0
        return is_ok

    @property
    def sibling_qs(self):
        """Other verified phone numbers of the same user (empty when unbound)."""
        objects = self.__class__.objects
        if self.usrid and self.is_verified:
            qs = objects.filter(
                usrid=self.usrid, is_verified=True,
            ).exclude(pk=self.pk).order_by('order', 'pk')
        else:
            qs = objects.none()
        return qs

    @property
    def tail(self):
        """Masked tail form, e.g. '**1234'."""
        return f'**{self.number[-4:]}'

    @property
    def show(self):
        """Privacy-masked rendering of the national number."""
        n = len(self.national)
        if self.country in COUNTRY_CODES and n == 11:
            # Domestic 11-digit mobile layout: keep digits 1, 4 and the last four.
            s = f"{self.national[:1]}**{self.national[3:4]}***{self.national[-4:]}"
        elif n > 9:
            cut = n - 6
            s = f"{self.national[:2]}{'*' * cut}{self.national[-4:]}"
        else:
            cut = n - 3
            s = f"{self.national[:1]}{'*' * cut}{self.national[-2:]}"
        return s

    @property
    def summary(self):
        # Compact log line: pk, usrid, masked number.
        desc = f'{self.pk} {self.usrid} {self.show}'
        return desc

    @property
    def country(self):
        # Numeric country calling code from the parsed number.
        code = self.parse_info.country_code
        return code

    @property
    def fmt_natl(self):
        """National display format."""
        fmt = phonenumbers.format_number(self.parse_info, PNFormat.NATIONAL)
        return fmt

    @property
    def fmt_intl(self):
        """International display format."""
        fmt = phonenumbers.format_number(self.parse_info, PNFormat.INTERNATIONAL)
        return fmt

    def check_context(self):
        # Derive and persist metadata (national digits, carrier, country, region)
        # from the parsed number.
        self.national = str(self.parse_info.national_number)
        self.carrier = carrier.name_for_number(self.parse_info, 'en')
        self.nation = geocoder.country_name_for_number(self.parse_info, 'en')
        self.region = geocoder.description_for_number(self.parse_info, 'en')
        up_fields = ['national', 'carrier', 'nation', 'region', 'updated_at']
        self.save(update_fields=up_fields)

    @transaction.atomic
    def set_main(self):
        """Make this the user's primary number (order 0) and renumber siblings."""
        self.refresh_from_db()
        if not (self.usrid and self.is_verified):
            logger.info(f'set_main__not_verified {self.summary}')
            return False
        self.order = 0
        up_fields = ['order', 'updated_at']
        self.save(update_fields=up_fields)
        # Re-sequence the remaining numbers starting from 1.
        for index, phone in enumerate(self.sibling_qs):
            phone.order = index + 1
            phone.save(update_fields=up_fields)
        logger.info(f'set_main__done {self.pk} {self.show}')
        return True

    def user_phone_bind(self, usrid):
        """Attach this number to *usrid*; returns (ok, reason)."""
        assert usrid > 0, f'user_phone_bind__no_user {self.pk}'
        if self.usrid == usrid:
            logger.warning(f'user_phone_bind__done {self.pk} {usrid}')
            return True, '已绑定'
        if self.usrid:
            # Already bound to a different user — refuse and audit-log the attempt.
            self.extra_log('bind', usrid=self.usrid, new=usrid, type='repeat')
            logger.warning(f'user_phone_bind__repeat {self.pk} {self.usrid}')
            return False, '已被绑定'
        self.usrid = usrid
        self.is_verified = True
        self.verified_at = get_now()
        self.save(update_fields=['usrid', 'is_verified', 'verified_at', 'updated_at'])
        self.extra_log('usrid', usrid=usrid, type='create')
        return True, '成功'

    def captcha_send_for_sign(self):
        """Send an SMS captcha for sign-in (number must already be bound)."""
        assert self.usrid > 0, f'captcha_send_for_sign__no_user {self.pk}'
        ret = PNVerify.objects.pnvc_send(self.pk, mc.PNVScene.Sign)
        logger.info(f'captcha_send_for_sign__done {self.summary} {ret}')
        return ret

    def captcha_verify_for_sign(self, code):
        """Verify the sign-in captcha; returns the bound user on success, else None."""
        # NOTE(review): the assertion message reuses the 'captcha_send_for_sign'
        # tag — looks like a copy-paste; confirm before relying on log grep.
        assert self.usrid > 0, f'captcha_send_for_sign__no_user {self.pk}'
        is_ok = PNVerify.objects.pnvc_verify(self.pk, code, mc.PNVScene.Sign)
        if not is_ok:
            return None
        return self.user

    def captcha_send_for_bind(self):
        """Send an SMS captcha for binding this (currently unbound) number."""
        if self.usrid > 0:
            return False, '手机号已被绑定'
        ret = PNVerify.objects.pnvc_send(self.pk, mc.PNVScene.Bind)
        logger.info(f'captcha_send_for_bind__done {self.summary} {ret}')
        return True, ret

    def captcha_verify_for_bind(self, code, usrid):
        """Verify the bind captcha and attach the number to *usrid*; returns (ok, reason)."""
        if self.usrid > 0:
            return False, '手机号已被绑定'
        assert isinstance(usrid, int) and usrid > 0, usrid
        is_ok = PNVerify.objects.pnvc_verify(self.pk, code, mc.PNVScene.Bind)
        if not is_ok:
            return False, '验证码不正确'
        # Re-check after the verify round-trip in case another request bound it.
        if self.usrid > 0:
            return False, '手机号已被绑定'
        is_ok, reason = self.user_phone_bind(usrid)
        return is_ok, reason

    def captcha_send_for_unbind(self):
        """Send an SMS captcha for unbinding (primary numbers cannot be unbound)."""
        if not self.is_verified:
            return False, '手机号未绑定'
        if self.is_main:
            return False, '主手机号无法解除绑定'
        ret = PNVerify.objects.pnvc_send(self.pk, mc.PNVScene.Unbind)
        logger.info(f'captcha_send_for_unbind__done {self.summary} {ret}')
        return True, ret

    def captcha_verify_for_unbind(self, code):
        """Verify the unbind captcha and detach the number; returns (ok, user_or_reason)."""
        is_ok = PNVerify.objects.pnvc_verify(self.pk, code, mc.PNVScene.Unbind)
        if not is_ok:
            return False, '验证码不正确'
        if not self.is_verified:
            return False, '手机号未绑定'
        if self.is_main:
            return False, '主手机号无法解除绑定'
        self.usrid = 0
        self.order = 0
        self.verified_at = None
        self.is_verified = False
        up_fields = ['usrid', 'order', 'is_verified', 'verified_at', 'updated_at']
        self.save(update_fields=up_fields)
        self.extra_log('usrid', usrid=0, type='unbind')
        # NOTE: usrid is already 0 here, so self.user returns None.
        return True, self.user

    def captcha_send_for_symbol_strike(self):
        """Send an SMS captcha for deleting a scene code (symbol)."""
        if not (self.usrid and self.is_verified):
            return False, '手机号信息不正确'
        ret = PNVerify.objects.pnvc_send(self.pk, scene=mc.PNVScene.UNSymbol)
        # NOTE(review): log tag reuses 'captcha_send_for_sign__done' — likely copy-paste.
        logger.info(f'captcha_send_for_sign__done {self.summary} {ret}')
        return True, ret

    def captcha_verify_for_symbol_strike(self, code):
        """Verify the scene-code deletion captcha."""
        if not (self.usrid and self.is_verified):
            return False
        is_ok = PNVerify.objects.pnvc_verify(self.pk, code, mc.PNVScene.UNSymbol)
        return is_ok
class PNVerifyManager(models.Manager):
    """PNVerify.objects — captcha send/verify plus provider receipt handling."""

    def pnvc_send(self, phoneid, scene):
        """Send an SMS captcha; skipped when one was already sent in the last 50 s.

        Returns (sent, pnverify_pk_or_None).
        """
        now = get_now()
        seconds_ago = now.add(seconds=-50)
        pnv_qs = self.filter(
            phoneid=phoneid, scene=scene,
            created_at__gt=seconds_ago,
            is_verified=False,
        )
        if pnv_qs.exists():
            # Throttled: an unconsumed captcha is still fresh — log and refuse.
            send_dic = dict(
                phoneid=phoneid, scene=scene,
                seconds_ago=seconds_ago.isoformat(),
                now=now.isoformat(),
                pnv_count=pnv_qs.count(),
            )
            send_info = json.dumps(send_dic, sort_keys=True)
            logger.info(f'pnvc_just_sent {send_info}')
            return False, None
        template = sms_constant.SMS_CODE_SCENE_MAP[scene]
        inst = self.create(phoneid=phoneid, scene=scene, template=template)
        inst.sms_code_send()
        return True, inst.pk

    def pnvc_verify(self, phoneid, code, scene):
        """Check *code* against unconsumed captchas of the last 6 minutes (newest first)."""
        now = get_now()
        minutes_ago = now.add(minutes=-6)
        pnv_qs = self.filter(
            phoneid=phoneid, scene=scene,
            created_at__gt=minutes_ago,
            is_verified=False,
        ).order_by('-pk')
        for pnv in pnv_qs:
            is_ok, msg = pnv.sms_code_verify(code)
            if is_ok:
                return True
        return False

    def sms_code_report_receipt(self, dic):
        """MNS delivery-receipt callback for captcha SMS; True acknowledges the message."""
        try:
            assert isinstance(dic, dict)
            bid = pk_hashid_decode(dic['tid'])
            inst = self.get(pk=bid, bizid=dic['biz_id'])
            is_ok = inst.report_receipt(dic)
        # NOTE(review): dict lookups raise KeyError, which is NOT in this tuple
        # (IndexError is) — a malformed payload without 'tid' would propagate.
        except (IndexError, AssertionError, PNVerify.DoesNotExist) as exc:
            logger.warning(f'sms_code_report_receipt__error {dic} {str(exc)}')
            is_ok = True
        return is_ok
class PNVerify(BasicModel, BIDModel):
    """SMS verification record for one phone number (one row per captcha sent)."""

    class Meta:
        verbose_name = 'PNVerify'
        verbose_name_plural = verbose_name
        db_table = 'k_ac_pnverify'
        ordering = ('-pk',)

    phoneid = models.BigIntegerField('手机号ID', db_index=True)
    captcha_hmac = models.CharField('验证码签名', max_length=50, default='')  # only the HMAC is stored, never the code
    captcha_at = models.DateTimeField('发送时间', null=True, default=None)
    verified_at = models.DateTimeField('验证时间', null=True, default=None)
    is_verified = models.BooleanField('是否已验证', default=False)
    scene = models.PositiveSmallIntegerField(choices=mc.PNVScene.choices, default=0)
    status = models.SmallIntegerField('发送状态', choices=mc.SMSStatus.choices, default=0)
    bizid = models.CharField('回执', db_index=True, max_length=50, default='')  # provider receipt id
    template = models.CharField('模板', max_length=50, default='')
    sign = models.CharField('短信签名', max_length=25, default='')

    objects = PNVerifyManager()

    @property
    def sms_outid(self):
        """External id handed to the SMS provider."""
        return f'code-{self.hid}'

    @cached_property
    def phone_info(self):
        # The Phone row this captcha belongs to.
        info = Phone.objects.get(pk=self.phoneid)
        return info

    @property
    def number(self):
        number = self.phone_info.number
        return number

    @property
    def usrid(self):
        # NOTE(review): local is named `number` but holds the user id — cosmetic only.
        number = self.phone_info.usrid
        return number

    @property
    def is_status_final(self):
        """Whether the delivery status is terminal (no more provider updates expected)."""
        is_yes = self.status in [
            mc.SMSStatus.Success,
            mc.SMSStatus.Failure,
        ]
        return is_yes

    def sms_code_send(self):
        """Generate a captcha, persist its HMAC, and send it; returns the plain code."""
        if self.is_verified:
            return f'is_verified'
        self.captcha_at = get_now()
        code = idshift.generate_captcha()
        # Persist only the keyed hash so the plaintext code never hits the DB.
        self.captcha_hmac = idshift.hmac_hash(self.pk, code)
        self.save(update_fields=['captcha_hmac', 'captcha_at'])
        try:
            result = sms_action.sms_send__code(self, code)
            self.extra['resp_send'] = result
            self.bizid = result.get('BizId', '')
            self.status = mc.SMSStatus.Waiting if self.bizid else mc.SMSStatus.Init
            self.save(update_fields=['status', 'bizid', 'extra', 'updated_at'])
        except Exception as exc:
            # Best-effort: record the provider failure but still return the code.
            self.extra['send_error'] = str(exc)
            self.save(update_fields=['extra', 'updated_at'])
            logger.warning(f'sms_code_send__error {str(exc)}')
            logger.exception(exc)
        return code

    def sms_code_verify(self, code):
        """Check *code* against the stored HMAC; marks the row verified on success."""
        if self.is_verified:
            return None, 'is_verified'
        if not self.captcha_hmac == idshift.hmac_hash(self.pk, code):
            return False, 'failure'
        self.is_verified = True
        self.verified_at = get_now()
        self.save(update_fields=['is_verified', 'verified_at', 'updated_at'])
        return True, 'success'

    def sms_code_query(self):
        """Actively poll the provider for delivery status (no-op when already final)."""
        if self.is_status_final:
            logger.info(f'sms_code_query__final {self.pk}')
            return
        if not self.bizid:
            logger.warning(f'sms_code_query__no_bizid {self.pk}')
            return
        try:
            result = sms_action.sms_query__code(self)
            self.status = result['SendStatus']
            self.extra['resp_query'] = result
            self.save(update_fields=['status', 'extra', 'updated_at'])
        except Exception as exc:
            self.extra['query_error'] = str(exc)
            self.save(update_fields=['extra', 'updated_at'])
            logger.warning(f'sms_code_query__error {str(exc)}')
            logger.exception(exc)

    def report_receipt(self, result):
        """Apply an MNS delivery receipt; always returns True to acknowledge it."""
        if self.status in [mc.SMSStatus.Init, mc.SMSStatus.Waiting]:  # guard against out-of-order callbacks
            self.status = mc.SMSStatus.Success if result['success'] else mc.SMSStatus.Failure
            self.save(update_fields=['status', 'updated_at'])
        self.extra_log('report', result=result)
        return True
| [
"zfaner@gmail.com"
] | zfaner@gmail.com |
91e35d122c3244912dcba634ce0ab3662d198689 | 42ae4ecdd41ca9c77e152114a14b5fdc83654799 | /sinx.py | 034ca6db99fd4c53718d47fbad732263914d7f1c | [] | no_license | smaugx/matplotlib | 2e82747ecf5c5977b54b006e46c17d9a815b53b3 | 3ce937ec55d63acf33cbc2771f8857e1f0346b2a | refs/heads/master | 2021-09-05T15:25:30.819241 | 2018-01-29T08:05:58 | 2018-01-29T08:05:58 | 119,353,396 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.pyplot import savefig
# Sample one full period of sin(x) and write the curve to ./sinx.png.
theta = np.linspace(0, 2 * np.pi, 1000)
sine = np.sin(theta)
plt.plot(theta, sine, mec='b', mfc='w')
plt.savefig('./sinx.png')
| [
"linuxcode2niki@gmail.com"
] | linuxcode2niki@gmail.com |
8980e2c8b13abaaab2ebad57556f52347554b925 | a9919484603c010156c65a9e435efc478473221e | /dataset.py | aabb174f76921fedaa2cb69aafed4e0bd2b20c8f | [] | no_license | chease3640/tf_transformer | 9147b7840e5546eee7a9910611e37051d52a87c7 | a877bcb61b549299083d0e91ade3c8cf252e2e78 | refs/heads/main | 2023-04-15T15:02:11.744924 | 2021-04-19T11:55:06 | 2021-04-19T11:55:06 | 359,439,295 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,761 | py | #!/usr/bin/env python
# coding: utf-8
"""This module contains efficient data read and transform using tf.data API.
There are total 2 table schema for model training and predict as follows:
1. Input train data schema (3 fields):
id STRING
x1 STRING
x2 STRING
2. Input predict data schema (2 fields):
id STRING
x STRING
Notes: The order can not be changed, fields separator is \t.
"""
import random
from enum import Enum
import collections
import tensorflow as tf
class DataSchemaEnum(Enum):
    # Each value is the tuple of per-column string defaults handed to the
    # CSV/table decoder: train rows decode 3 columns, predict rows 2.
    # NOTE(review): Dataset._transform names the 3 train columns (x1, x2, label),
    # while the module docstring lists (id, x1, x2) — confirm the real schema.
    train = ("", "", "")
    predict = ("", "")
class BatchedInput(
        collections.namedtuple(
            "BatchedInput", ("initializer", "id", "src", "tgt", "label"))):  # 5 attributes (comment previously said 4)
    """One batched dataset handle: iterator initializer op plus the next-batch tensors.

    Unused slots are filled with None (id for training; tgt/label for prediction).
    """
    pass
class Dataset(object):
    """tf.data input pipeline for training (src/tgt/label) and prediction (id/src)."""

    def __init__(self, schema, data, params, slice_id=0, slice_count=1):
        """
        Args:
            schema: Instance of DataSchemaEnum class.
            data: Data file string.
            params: Parameters defined in config.py.
            slice_count: Used in distributed settings.
            slice_id: Used in distributed settings.
        """
        assert isinstance(schema, DataSchemaEnum)
        self.schema = schema
        self.source = params.source.lower()
        self.data = data
        self.num_epochs = params.num_epochs
        self.batch_size = params.batch_size
        self.seq_max_len = params.seq_max_len
        # Fall back to 1000 batches' worth of samples when num_samples is unset/0.
        self.shuffle_buffer_size = params.num_samples or params.batch_size * 1000
        # self.reshuffle_each_iteration = True  # Defaults to True
        self.random_seed = params.random_seed
        self.slice_count = slice_count
        self.slice_id = slice_id
        # Any out-of-vocabulary token will return a bucket ID based on its hash if num_oov_buckets is greater than zero.
        # Otherwise the default_value. The bucket ID range is [vocabulary size, vocabulary size + num_oov_buckets - 1].
        self.vocab_table = tf.contrib.lookup.index_table_from_file(
            vocabulary_file=params.vocab_file, num_oov_buckets=0, default_value=0)
        if self.schema == DataSchemaEnum.train:
            self._transform_func = self._transform
        else:
            self._transform_func = self._transform_predict

    def get_iterator(self):
        # Build the (possibly sharded) source dataset, map the schema-specific
        # transform, shuffle/repeat for training, batch with padding, and return
        # the initializable iterator wrapped in a BatchedInput.
        record_defaults = self.schema.value
        # NOTE(review): if params.source is neither "local" nor "xxx", `dataset`
        # is never assigned and the .map below raises NameError — confirm the
        # allowed source values are validated upstream.
        if self.source == "local":
            dataset = tf.data.TextLineDataset(self.data).map(
                lambda line: tf.decode_csv(
                    line, record_defaults=record_defaults, field_delim='\t', use_quote_delim=False))
        if self.source == "xxx":
            dataset = tf.data.TableRecordDataset(
                self.data, record_defaults=record_defaults, slice_count=self.slice_count, slice_id=self.slice_id)
        # Data transform
        dataset = dataset.map(self._transform_func, num_parallel_calls=4).prefetch(2*self.batch_size)
        if self.schema == DataSchemaEnum.train:
            dataset = dataset.shuffle(self.shuffle_buffer_size, self.random_seed).repeat(self.num_epochs)
        batched_dataset = self._batching_func(dataset)
        batched_iter = batched_dataset.make_initializable_iterator()  # sess.run(iterator.initializer)
        if self.schema == DataSchemaEnum.train:
            src, tgt, label = batched_iter.get_next()
            return BatchedInput(initializer=batched_iter.initializer, id=None, src=src, tgt=tgt, label=label)
        else:
            id, src = batched_iter.get_next()
            return BatchedInput(initializer=batched_iter.initializer, id=id, src=src, tgt=None, label=None)

    def _transform(self, x1, x2, label):
        # Tokenize on whitespace, truncate to seq_max_len, and map tokens to ids.
        src = tf.cast(self.vocab_table.lookup(tf.string_split([x1]).values[:self.seq_max_len]), tf.int32)
        tgt = tf.cast(self.vocab_table.lookup(tf.string_split([x2]).values[:self.seq_max_len]), tf.int32)
        label = tf.string_to_number(label, out_type=tf.float32)
        return src, tgt, label

    def _transform_predict(self, id, x):
        # Prediction rows keep the opaque id and tokenize only the text column.
        src = tf.cast(self.vocab_table.lookup(tf.string_split([x]).values[:self.seq_max_len]), tf.int32)
        return id, src

    def _batching_func(self, x):
        # Pad every sequence in a batch to seq_max_len (ids/labels are scalars).
        if self.schema == DataSchemaEnum.train:
            padded_shapes = (
                tf.TensorShape([self.seq_max_len]),
                tf.TensorShape([self.seq_max_len]),
                tf.TensorShape([]),  # label
            )
            padding_values = (0, 0, 0.0)
        else:
            # NOTE(review): `padded_shapes_all` is assigned but never used.
            padded_shapes = padded_shapes_all = (
                tf.TensorShape([]),  # id1
                tf.TensorShape([self.seq_max_len])
            )
            padding_values = ('', 0)
        return x.padded_batch(
            self.batch_size, padded_shapes=padded_shapes, padding_values=padding_values)  # tf >= 1.10 use drop_remainder=True
| [
"noreply@github.com"
] | chease3640.noreply@github.com |
2e2b8705b460a63f5c112ef28e86945c639ebe7a | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/CISCO-DNS-CLIENT-MIB.py | 01fce15acb4eef7b472a8ae3f75dca628704efbc | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 8,248 | py | #
# PySNMP MIB module CISCO-DNS-CLIENT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-DNS-CLIENT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:38:02 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "ValueRangeConstraint")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
ModuleIdentity, Bits, Unsigned32, Counter32, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, NotificationType, iso, Integer32, Counter64, Gauge32, TimeTicks, ObjectIdentity, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "Bits", "Unsigned32", "Counter32", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "NotificationType", "iso", "Integer32", "Counter64", "Gauge32", "TimeTicks", "ObjectIdentity", "MibIdentifier")
DisplayString, TextualConvention, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "RowStatus")
# --- Module identity -------------------------------------------------------
ciscoDNSClientMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 436))
ciscoDNSClientMIB.setRevisions(('2004-09-09 00:00',))
if mibBuilder.loadTexts: ciscoDNSClientMIB.setLastUpdated('200409090000Z')
if mibBuilder.loadTexts: ciscoDNSClientMIB.setOrganization('Cisco Systems Inc. ')
# --- Top-level OID subtrees under ciscoMgmt.436 ----------------------------
ciscoDNSClientMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 0))
ciscoDNSClientMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 1))
ciscoDNSClientMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 2))
# --- Scalar configuration objects (cdcConfigGroup) -------------------------
cdcConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1))
cdcDNSConfigEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdcDNSConfigEnable.setStatus('current')
cdcNoOfDNSServerConfig = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdcNoOfDNSServerConfig.setStatus('current')
cdcDNSServerNextAvailIndex = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdcDNSServerNextAvailIndex.setStatus('current')
# --- cdcDNSServerTable: one row per configured DNS server ------------------
cdcDNSServerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4), )
if mibBuilder.loadTexts: cdcDNSServerTable.setStatus('current')
cdcDNSServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4, 1), ).setIndexNames((0, "CISCO-DNS-CLIENT-MIB", "cdcDNSServerIndex"))
if mibBuilder.loadTexts: cdcDNSServerEntry.setStatus('current')
cdcDNSServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: cdcDNSServerIndex.setStatus('current')
cdcDNSServerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4, 1, 2), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdcDNSServerAddrType.setStatus('current')
cdcDNSServerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4, 1, 3), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdcDNSServerAddr.setStatus('current')
cdcDNSServerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdcDNSServerStatus.setStatus('current')
cdcDefaultDNSDomainName = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 5), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdcDefaultDNSDomainName.setStatus('current')
# --- cdcDNSDomainNameTable: one row per configured search domain -----------
cdcDNSDomainNameTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 6), )
if mibBuilder.loadTexts: cdcDNSDomainNameTable.setStatus('current')
cdcDNSDomainNameEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 6, 1), ).setIndexNames((0, "CISCO-DNS-CLIENT-MIB", "cdcDNSDomainNameIndex"))
if mibBuilder.loadTexts: cdcDNSDomainNameEntry.setStatus('current')
cdcDNSDomainNameIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 6, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 64)))
if mibBuilder.loadTexts: cdcDNSDomainNameIndex.setStatus('current')
cdcDNSDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 6, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdcDNSDomainName.setStatus('current')
cdcDNSDomainNameStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 6, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdcDNSDomainNameStatus.setStatus('current')
# --- Conformance / compliance statements -----------------------------------
ciscoDNSClientMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 2, 1))
ciscoDNSClientMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 2, 2))
ciscoDNSClientMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 436, 2, 1, 1)).setObjects(("CISCO-DNS-CLIENT-MIB", "ciscoDNSServerConfigGroup"), ("CISCO-DNS-CLIENT-MIB", "ciscoDNSDomainNameConfigGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDNSClientMIBCompliance = ciscoDNSClientMIBCompliance.setStatus('current')
ciscoDNSServerConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 436, 2, 2, 1)).setObjects(("CISCO-DNS-CLIENT-MIB", "cdcDNSConfigEnable"), ("CISCO-DNS-CLIENT-MIB", "cdcNoOfDNSServerConfig"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSServerNextAvailIndex"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSServerAddrType"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSServerAddr"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSServerStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDNSServerConfigGroup = ciscoDNSServerConfigGroup.setStatus('current')
ciscoDNSDomainNameConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 436, 2, 2, 2)).setObjects(("CISCO-DNS-CLIENT-MIB", "cdcDefaultDNSDomainName"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSDomainName"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSDomainNameStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoDNSDomainNameConfigGroup = ciscoDNSDomainNameConfigGroup.setStatus('current')
# Export all symbols so other MIB modules can import them by name.
mibBuilder.exportSymbols("CISCO-DNS-CLIENT-MIB", cdcDNSDomainName=cdcDNSDomainName, ciscoDNSClientMIBCompliances=ciscoDNSClientMIBCompliances, cdcDNSServerStatus=cdcDNSServerStatus, cdcConfigGroup=cdcConfigGroup, cdcDNSDomainNameIndex=cdcDNSDomainNameIndex, ciscoDNSClientMIBCompliance=ciscoDNSClientMIBCompliance, cdcDNSDomainNameStatus=cdcDNSDomainNameStatus, ciscoDNSClientMIBGroups=ciscoDNSClientMIBGroups, PYSNMP_MODULE_ID=ciscoDNSClientMIB, cdcDNSDomainNameEntry=cdcDNSDomainNameEntry, ciscoDNSClientMIBNotifs=ciscoDNSClientMIBNotifs, cdcDNSServerNextAvailIndex=cdcDNSServerNextAvailIndex, cdcDNSDomainNameTable=cdcDNSDomainNameTable, cdcDNSServerAddrType=cdcDNSServerAddrType, ciscoDNSDomainNameConfigGroup=ciscoDNSDomainNameConfigGroup, ciscoDNSServerConfigGroup=ciscoDNSServerConfigGroup, ciscoDNSClientMIB=ciscoDNSClientMIB, cdcDNSServerEntry=cdcDNSServerEntry, cdcDefaultDNSDomainName=cdcDefaultDNSDomainName, cdcDNSServerIndex=cdcDNSServerIndex, cdcNoOfDNSServerConfig=cdcNoOfDNSServerConfig, cdcDNSConfigEnable=cdcDNSConfigEnable, cdcDNSServerTable=cdcDNSServerTable, ciscoDNSClientMIBObjects=ciscoDNSClientMIBObjects, cdcDNSServerAddr=cdcDNSServerAddr, ciscoDNSClientMIBConformance=ciscoDNSClientMIBConformance)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
a567d591cdf24c0735bcb885e6877837227e144a | 857794546116eef6163a04c3f9939a792ea2163f | /xarrange_cef_mkt_nav_quotes_times.py | 5272061f83da328088db0609625dbe6915548f3b | [] | no_license | Beliavsky/CEF_and_ETF_data | 9ab05f67459d0ae27a38a9e921cf0d251da6527f | 4f40583c5f3aa6505aa1ce9a53b174014a11f0ea | refs/heads/main | 2023-08-21T18:16:22.493858 | 2021-10-14T15:51:00 | 2021-10-14T15:51:00 | 417,181,892 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,451 | py | # 03/18/2020 08:03 PM added market and NAV times
# 03/18/2020 07:23 PM branched from xarrange_cef_mkt_nav_quotes.py to xarrange_cef_mkt_nav_quotes_times.py
# 03/18/2020 08:20 AM computes and prints premium
# process a file with lines such as these, produced by rterm < xget_quotes_pimco_cefs.r
# Trade Time Last Change % Change Open High Low
# PCI 2020-03-12 16:02:11 19.41 -2.7399998 -12.370203 20.46 20.5000 18.10
# XPCIX 2020-03-12 20:00:29 20.53 -0.8700000 -4.065420 NA NA NA
# PDI 2020-03-12 16:02:00 23.91 -3.2800007 -12.063261 21.10 25.6800 21.00
# XPDIX 2020-03-12 20:01:27 24.53 -0.8700000 -3.425197 NA NA NA
from sys import argv
import datetime
# Input file comes from the command line (see sample layout above).
infile = argv[1]
# infile = "cef_mkt_nav_quotes.txt"
fp = open(infile,"r")
iline = 0
# Column indices into each whitespace-split quote line.
COL_PCT = 5
COL_CLOSE = 3
PRINT_NAV_SYM = False
PRINT_WHETHER_INTRADAY = False
# Discard the header line.
fp.readline()
fmt_sym = "%5s"
now = datetime.datetime.now()
# Intraday when the current hour is between 9 and 19 inclusive.
intraday = now.hour < 20 and now.hour > 8
if PRINT_WHETHER_INTRADAY:
    print("intraday =",intraday)
# Column headers: the end-of-day report carries an extra "diff" column.
if intraday:
    print(" sym","".join("%9s"%_ for _ in ["mkt_ret","NAV_ret","premium","mkt","NAV"]),end="")
else:
    print(" sym","".join("%9s"%_ for _ in ["mkt_ret","NAV_ret","diff","premium","mkt","NAV"]),end="")
print("".join("%12s"%_ for _ in ["mkt_time","nav_time"]))
fmt_r=" %8.2f"
# Quote lines alternate (see sample above): even iline = market quote,
# odd iline = the paired NAV quote for the same fund.
for line in fp:
    if ("Volume" in line):
        break
    text = line.strip()
    words = text.split()
    xret = float(words[COL_PCT])
    last = float(words[COL_CLOSE])
    num_str = "%8s"%'{0:.2f}'.format(xret)
    date_str = words[1][5:] # trim the year at the beginnning
    time_str = words[2][:-3]
    quote_time = date_str + "@" + time_str
    if (iline%2 == 0):
        # Market row: remember values until the paired NAV row arrives.
        print(fmt_sym%words[0],num_str,end=" ")
        close_last = last
        mkt_ret = xret
        mkt_time = quote_time
    else:
        nav_last = last
        nav_ret = xret
        nav_time = quote_time
        diff_ret = mkt_ret - nav_ret
        if PRINT_NAV_SYM:
            print(fmt_sym%words[0],num_str,end=" ")
        else:
            print(num_str,end=" ")
        # Premium of market price over NAV, in percent.
        prem = (100*(close_last-nav_last)/nav_last)
        if intraday:
            print("".join(fmt_r%_ for _ in [prem,close_last,nav_last]),end=" ")
        else:
            print("".join(fmt_r%_ for _ in [diff_ret,prem,close_last,nav_last]),end=" ")
        print(mkt_time,nav_time)
    iline = iline + 1
| [
"noreply@github.com"
] | Beliavsky.noreply@github.com |
6d018da31acfda0e7f95e98aaad0a1fc9db773ee | 10f7043ad519878fcf55a1b7f9d8050ab92230a1 | /app.py | 216628070718b1775d84d34dc27f92a4f620df58 | [
"Apache-2.0"
] | permissive | Rajatkhatri7/OCR-Api | 542a12f9b0ede951d246c00e2ad90e761747ab31 | 677da04b1aad44810384c548eee1c698c2e26c0f | refs/heads/main | 2023-05-29T02:04:46.035743 | 2021-06-16T08:25:18 | 2021-06-16T08:25:18 | 376,799,444 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,166 | py | import os
from flask import Flask, json, render_template, request , jsonify
from pathlib import Path
import json
from ocr_core import ocr_core # ocr function
#global variables
BASE_DIR = Path(__file__).resolve().parent
UPLOAD_FOLDER = str(BASE_DIR) + "/src/uploads/"
# allowed files
ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg'])
app = Flask(__name__)
def allowed_file(filename):
    """Return True when *filename* has one of the allowed image extensions."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
# @app.route('/')
# def home_page():
# return render_template('index.html')
@app.route('/', methods=['GET', 'POST'])
def upload_page():
    """Render the upload form (GET) or OCR a POSTed image.

    On POST: validates the upload, runs it through ocr_core, saves the
    image under UPLOAD_FOLDER, writes the extracted text to output.json
    and renders the result page.
    """
    if request.method == 'GET':
        return render_template('upload.html')
    # POST path below.
    # check if the post request has the file part
    if 'file' not in request.files:
        return render_template('upload.html', msg='No file selected')
    file = request.files['file']
    fname = file.filename
    # If the user does not select a file, the browser submits an
    # empty file without a filename.
    if fname == '':
        return render_template('upload.html', msg='No file selected')
    if not (file and allowed_file(fname)):
        # Bug fix: the original fell through with no return here, so a
        # disallowed extension made the view return None (HTTP 500).
        return render_template('upload.html', msg='File type not allowed')
    extracted_text = ocr_core(file)
    # postprocessing: strip newlines and form feeds from the OCR output
    extracted_text = extracted_text.replace("\n", "")
    extracted_text = extracted_text.replace("\f", "")
    # Rewind the upload stream: ocr_core has already read it, and
    # save() would otherwise write a truncated/empty image.
    file.stream.seek(0)
    # NOTE(review): fname comes straight from the client; consider
    # werkzeug.utils.secure_filename before joining it into a path.
    file.save(UPLOAD_FOLDER + fname)
    # saving output as json, keyed by the original filename
    with open('output.json', 'w') as f:
        json.dump({fname: extracted_text}, f)
    return render_template('upload.html',
                           msg='Successfully processed',
                           extracted_text=extracted_text,
                           img_src=UPLOAD_FOLDER + file.filename)
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    # Passing debug=True is equivalent to setting app.debug before run().
    app.run(debug=True)
| [
"rajatkhatri0002@gmail.com"
] | rajatkhatri0002@gmail.com |
3b6a980ffb87af3580820c10aa1428a173c1618d | 9399d687b2e41245968ba0e9d413a6789d773b1d | /CI/erlang/erlang/libs/fake_ne/interface/FakeNeKeyword.py | d00bfa40b778bc57c8f8d8b44ee00d7e54648cad | [] | no_license | jiangliu888/DemoForSpeed | be41bdb85a1d1f5ca9350a3a1f681ced5ec9b929 | 11319bc19c074327d863ac2813a04cef3487f8d6 | refs/heads/main | 2023-08-23T14:16:21.686155 | 2021-10-17T12:01:34 | 2021-10-17T12:01:34 | 388,452,435 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,959 | py | import os
from client.device import DeviceClient
from erlang.libs.uranus.interface.EsInterface import EsInterface
from erlang.libs.uranus.interface.UranusInterface import UranusInterface
from erlang.libs.variables import MeasureResultVariables
class FakeNeKeyword(object):
    """Test helpers for fake NE (network element) devices: query device
    config via DeviceClient, restart the measure-result generator
    process, and push measurement data to Elasticsearch.  (Python 2.)"""
    DB_PORT = 3000
    DB_REST_PORT = 3500
    OFP_REST_PORT = 4000
    # Registry of fake NEs; not referenced elsewhere in this class.
    fake_ne_list = {}
    def __init__(self):
        pass
    @staticmethod
    def get_fake_ne_measure_tunnels(neid):
        # TUNNEL section of the device configuration.
        return DeviceClient.get_device_config(int(neid), "TUNNEL")
    @staticmethod
    def get_fake_ne_measure_tasks(neid):
        # MEASURE section of the device configuration.
        return DeviceClient.get_device_config(int(neid), "MEASURE")
    @staticmethod
    def get_fake_ne_measure_task_with_address(ne_id, local_ip, remote_ip):
        # Filter the NE's measure tasks by both endpoint addresses.
        tasks = FakeNeKeyword.get_fake_ne_measure_tasks(ne_id)
        print tasks
        return filter(lambda x: (x["remote-ipv4-address"] == remote_ip) and (x["local-ipv4-address"] == local_ip), tasks)
    @staticmethod
    def get_fake_ne_tunnels_with_dstNeId(local_NeId, dstNeId):
        # Device id is derived by dropping the low 4 bits of the NE id.
        s_id = int(local_NeId) >> 4
        tunnels = FakeNeKeyword.get_fake_ne_measure_tunnels(s_id)
        print tunnels
        return filter(lambda x: x["dst"] == dstNeId, tunnels)
    @staticmethod
    def get_fake_ne_measure_tasks_with_dstNeId(local_NeId, dstNeId):
        # Same id derivation as above, filtering measure tasks instead.
        s_id = int(local_NeId) >> 4
        tasks = FakeNeKeyword.get_fake_ne_measure_tasks(s_id)
        print tasks
        return filter(lambda x: x["dstNeId"] == dstNeId, tasks)
    @staticmethod
    def get_fake_ne_flows_id(ne_id):
        # Route ids as ints; empty list when the device reports none.
        res = DeviceClient.get_routes(int(ne_id))
        return map(int, res) if res else []
    @staticmethod
    def change_ne_link_measure_result(ne_id, jitter, loss, delay=[0, 0, 0, 0], loss_target=[]):
        # NOTE(review): mutable default arguments are shared across
        # calls; harmless here since they are only read, but fragile.
        # Grab argument $10 of the running create_measure process for this NE.
        cmd = "ps -ef |grep create_measure|grep {} |awk {}".format(ne_id, r"'{print $10}'")
        r = os.popen(cmd)
        info = r.read().split('\n')[0]
        print 'info is {}'.format(info)
        # Kill the old generator process...
        cmd = "ps -ef |grep create_measure|grep {} |awk {}|xargs sudo kill -9".format(ne_id, r"'{print $2}'")
        ret = os.system(cmd)
        print 'cmd is {} and ret is {}'.format(cmd, ret)
        # ...and relaunch it in the background with the new parameters.
        cmd = "sh -c 'python erlang/libs/fake_ne/create_measure_result.py {} {} {} {} {} {} >> logs/{}measure.log &'".format(info, int(ne_id), ' '.join(jitter), ' '.join(loss), ' '.join(delay), ' '.join(loss_target), int(ne_id))
        print cmd
        ret = os.system(cmd)
        assert ret == 0
    @staticmethod
    def export_data_to_es(topo_name):
        # Bulk-insert every measurement record of the named topology.
        for es_data in MeasureResultVariables.topo(topo_name):
            EsInterface.bulk_insert_12_measure_results(es_data['netLink'], es_data['ttl'], es_data['jitter'], es_data['loss'])
    @staticmethod
    def get_fake_ne_type(ne_id):
        # "type" field of the NE's netcfg entry.
        rec, ne_info = UranusInterface.get_netcfg_ne_config_with_id(ne_id)
        ne_type = ne_info["type"]
        return ne_type
| [
"admin@example.com"
] | admin@example.com |
4d00ccd7e2aa83e59a80c5067dca230245fd07bc | 09f8a8bb1655cc76a29ac60896d1d42b0145f3c2 | /Utils.py | f617be1131d20d1c307cca7ba5b167e85ef6ea3f | [
"BSD-3-Clause"
] | permissive | FlatL1neAPT/PoshC2_Python | 4d1eb4d6a639395a32a2674ee49a17969a2b8a79 | 39f755f67bf4de15e93f56cd690e50924aa8bba0 | refs/heads/master | 2020-05-04T02:58:16.717780 | 2019-03-08T10:32:55 | 2019-03-08T10:32:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,015 | py | import os, base64, string, random, re
validate_sleep_regex = re.compile("^[0-9]*[smh]$")
def gen_key():
    """Generate a random 256-bit key and return it base64-encoded.

    Uses integer division: 256/8 is a float on Python 3, which
    os.urandom rejects; 256 // 8 works on both Python 2 and 3.
    """
    key = os.urandom(256 // 8)
    return base64.b64encode(key)
def formStrMacro(varstr, instr):
    """Render *instr* as a sequence of VBA string-building statements.

    The first line assigns the first 54 characters to *varstr*; each
    following line (CRLF-separated) appends a further 48-character chunk
    as ``var = var + "chunk"``.

    Fixes over the original: ``xrange`` (Python 2 only) replaced by
    ``range``, dead initial assignments removed, and inputs that fit on
    the first line no longer get a stray trailing CRLF + quote.
    """
    lines = [varstr + ' = "' + instr[:54] + '"']
    for i in range(54, len(instr), 48):
        lines.append(varstr + ' = ' + varstr + ' + "' + instr[i:i + 48] + '"')
    return "\r\n".join(lines)
def formStr(varstr, instr):
    """Render *instr* as a chunked string assignment ending in ';'.

    The first line assigns the first 56 characters to *varstr*; each
    following CRLF-separated line holds a quoted 48-character chunk, and
    the whole result is terminated with a semicolon.

    Fixes over the original: ``xrange`` (Python 2 only) replaced by
    ``range``, dead initial assignments removed, and short inputs no
    longer get a stray trailing CRLF + quote before the semicolon.
    """
    lines = [varstr + ' = "' + instr[:56] + '"']
    for i in range(56, len(instr), 48):
        lines.append('"' + instr[i:i + 48] + '"')
    return "\r\n".join(lines) + ";"
def randomuri(size = 15, chars=string.ascii_letters + string.digits):
    """Return a random identifier of *size* characters drawn from *chars*."""
    picked = [random.choice(chars) for _ in range(size)]
    return ''.join(picked)
def validate_sleep_time(sleeptime):
sleeptime = sleeptime.strip()
return validate_sleep_regex.match(sleeptime) | [
"email"
] | email |
fbf8daf4bdf2aad64abe58cfe148dc29d70aeed1 | d9bec654ec199b2d97337766576d96d95bf7f81a | /home/migrations/0003_auto_20210323_0637.py | b66714b2f97db66a9138bb46046789d04511e7bb | [] | no_license | falvo02421/Absensi-karyawan | ceccf4eaf3efb75861f546faadad9530669a44ae | 10ed70fdb875f1cfe89671073621969a88148d46 | refs/heads/main | 2023-06-03T04:33:53.180180 | 2021-06-19T11:23:01 | 2021-06-19T11:23:01 | 355,257,693 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 708 | py | # Generated by Django 3.1.5 on 2021-03-22 23:37
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds the Category model and a plain
    string 'category' field (default 'coding') to the post model."""
    dependencies = [
        ('home', '0002_auto_20210318_2209'),
    ]
    operations = [
        # New lookup table for post categories.
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
            ],
        ),
        # NOTE(review): post.category is a CharField, not a ForeignKey
        # to the new Category model -- confirm that is intentional.
        migrations.AddField(
            model_name='post',
            name='category',
            field=models.CharField(default='coding', max_length=255),
        ),
    ]
| [
"noreply@github.com"
] | falvo02421.noreply@github.com |
a9ae5097afbf68db22e045080f2b8d85de9f6360 | d95d25e8dd1ce5961762e5c8523172d9c58bdf3e | /www/metclass_test/aiotodotest.py | 34403419498d22ac4fd25fed03195eb6742425bf | [] | no_license | juntian2013/awesome-python-webapp | 8dddcb12b6a57761976def14a1aac7a73e87665e | 0ad54273a08c7ef46bd5c873486a64e4e93ef98c | refs/heads/master | 2021-05-08T22:09:30.127691 | 2018-01-31T09:28:45 | 2018-01-31T09:28:45 | 119,665,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | import requests,json
# Smoke-test the todo REST API by POSTing one new item.
body = json.dumps({"name":"feed the api"})
url = "http://localhost:8080/todos/"
# NOTE(review): the payload is JSON but is sent via data= without a
# Content-Type header; use json= (or set the header) if the server
# requires it.
r = requests.post(url=url, data=body)
# Fix: the original evaluated r.content as a bare no-op expression;
# print it so the response is actually visible when the script runs.
print(r.content)
| [
"cs10b2010@126.com"
] | cs10b2010@126.com |
d318c67bfe43dfcfe07f336f3f50f911165df8fe | 763d50bae84af9c7fdc5fa7a68b0b37c9f613f24 | /projects/migrations/0003_auto_20200620_2033.py | 5927d275b1093432d39f60d0ea9747228399b19d | [
"MIT"
] | permissive | ketzu/knet-django | 767f3c1e80b644d7a9907e8726dabcc336d7cc59 | cc2ebef54bf4bd1bac6cd79c3458a6e1f3eb28a5 | refs/heads/master | 2023-04-14T13:18:11.484234 | 2020-09-20T09:04:05 | 2020-09-20T09:04:05 | 272,266,394 | 0 | 0 | MIT | 2021-04-08T21:00:19 | 2020-06-14T19:06:35 | Python | UTF-8 | Python | false | false | 793 | py | # Generated by Django 3.0.7 on 2020-06-20 18:33
from django.db import migrations, models
import stdimage.models
class Migration(migrations.Migration):
    """Auto-generated migration: relaxes project.git/link to nullable,
    blank CharFields and switches project.image to a StdImageField."""
    dependencies = [
        ('projects', '0002_auto_20200615_2044'),
    ]
    operations = [
        migrations.AlterField(
            model_name='project',
            name='git',
            field=models.CharField(blank=True, max_length=500, null=True),
        ),
        # stdimage's StdImageField replaces the previous image field.
        migrations.AlterField(
            model_name='project',
            name='image',
            field=stdimage.models.StdImageField(null=True, upload_to='projects/'),
        ),
        migrations.AlterField(
            model_name='project',
            name='link',
            field=models.CharField(blank=True, max_length=500, null=True),
        ),
    ]
| [
"david.moedinger@ketzu.net"
] | david.moedinger@ketzu.net |
c81609759a584d86444b2de3a819ea5a348374f2 | 6a92231b6ae8af4cb6eff50440b0872f0fba6b69 | /polls/views.py | 71243eb6de631331ea2c254a438c6ff823bfa6dd | [] | no_license | qkrdbwls613/django_project | 209ecef378bb2660ebdbeb815e3d5a6e734fa5d9 | dc464e20172922df93a494f43815fb1f3f9b5bd7 | refs/heads/master | 2023-02-03T04:45:55.166021 | 2020-12-20T18:12:58 | 2020-12-20T18:12:58 | 318,987,882 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,328 | py | from django.shortcuts import render, get_object_or_404
from .models import Question, Choice
from django.http import HttpResponse, HttpResponseRedirect
from django.urls import reverse
from django.views import generic
# Create your views here.
### Generic View (class-based views)
class IndexView(generic.ListView):
    """Polls index page: lists the latest questions."""
    template_name = 'polls/index.html'
    context_object_name = 'latest_question_list'

    def get_queryset(self):
        """Return the last five published questions."""
        # Fix: this docstring was previously a stray string expression at
        # class level (a no-op statement), not attached to this method.
        return Question.objects.order_by('-pub_date')[:5]
class DetailView(generic.DetailView):
    """Detail page for a single Question (looked up by primary key)."""
    model = Question
    template_name = 'polls/detail.html'
class ResultsView(generic.DetailView):
    """Results page for a single Question (looked up by primary key)."""
    model = Question
    template_name = 'polls/results.html'
def vote(request, question_id):
    """Record a vote for the choice selected in the POSTed detail form.

    Redisplays the detail form with an error message when no choice was
    submitted (KeyError) or the id does not match a choice of this
    question (Choice.DoesNotExist); otherwise increments the vote count
    and redirects to the results page.
    """
    question = get_object_or_404(Question, pk=question_id)
    try:
        selected_choice = question.choice_set.get(pk=request.POST['choice'])
    except(KeyError, Choice.DoesNotExist):
        #Redisplay the question voting form
        return render(request, 'polls/detail.html', {
            'question':question,
            'error_message': "You didn't select a choice.",
        })
    else:
        # NOTE(review): read-modify-write is racy under concurrent votes;
        # consider models.F('votes') + 1 -- confirm before changing.
        selected_choice.votes += 1
        selected_choice.save()
        # Redirect after POST so a browser reload cannot double-submit.
        return HttpResponseRedirect(reverse('polls:results', args=(question.id,)))
| [
"qkrdbwls613"
] | qkrdbwls613 |
fbc34bce75ef0bcc33b60c5c56c4ee439012a1ba | 7e470dd54740ca6331d1341328e344a713329a77 | /src/DQD_counting_statistics/zero_freq_statistics.py | a710fb1843cd96bbb95ceec6215f5f71d5d12580 | [] | no_license | rstones/DQD_counting_statistics | 127eb2ad83c5c69bdfb168975077f541c09d4bbc | 3eb5ad9876b59c43c35150238c3af3396b3ad100 | refs/heads/master | 2020-04-07T03:10:59.294391 | 2017-10-22T10:58:06 | 2017-10-22T10:58:06 | 53,421,629 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,366 | py | '''
Created on 8 Mar 2016
@author: rstones
'''
import numpy as np
import quant_mech.utils as utils
from DQD_counting_statistics.DQD_model import DQDModel
import matplotlib.pyplot as plt
# Parameter grid: bias values and a log-spaced range of Gamma_R rates.
bias_values = np.array([0, 1.5, 3., 4.5, 6.])
Gamma_R_range = np.logspace(-4, 3, 1000)
model = DQDModel(remove_elements=True)
# Result arrays indexed as [bias, Gamma_R].
current = np.zeros((bias_values.size, Gamma_R_range.size))
F2 = np.zeros((bias_values.size, Gamma_R_range.size))
coherence = np.zeros((bias_values.size, Gamma_R_range.size), dtype='complex')
for i,v in enumerate(bias_values):
    model.bias = v
    for j,Gamma_R in enumerate(Gamma_R_range):
        model.Gamma_R = Gamma_R
        # Stationary state of the model's Liouvillian; element ss[2] is
        # stored as the coherence value below.
        ss = utils.stationary_state_svd(model.liouvillian(), model.density_vector_populations())
        current[i,j] = model.mean(ss)
        F2[i,j] = model.second_order_fano_factor(ss)
        coherence[i,j] = ss[2]
# Persist raw results, then plot current, second-order Fano factor and
# Re(coherence) against Gamma_R, one curve per bias value.
np.savez('../../data/DQD_zero_freq_counting_statistics_data.npz', Gamma_R_range=Gamma_R_range, bias_values=bias_values, current=current, F2=F2, coherence=coherence)
fig,(ax1,ax2,ax3) = plt.subplots(1,3)
for i,v in enumerate(bias_values):
    ax1.semilogx(Gamma_R_range, current[i], label=v)
    ax2.semilogx(Gamma_R_range, F2[i], label=v)
    ax3.semilogx(Gamma_R_range, np.real(coherence[i]), label=v)
ax1.legend().draggable()
ax2.legend().draggable()
ax3.legend().draggable()
plt.show()
| [
"r.stones@ucl.ac.uk"
] | r.stones@ucl.ac.uk |
20918f080964777e9fb5a2f25eac7aeeaec8b011 | 9a32c23a1579c55ae23270ad1d1f3a457bde9616 | /python/baseline_demo.py | bc5fc6bfe810f1b5f10524135bed533fd92c3b42 | [] | no_license | mathycee/bid-lands | 00debbe06851cc9224256a13bfaf789bc2345478 | cc6e9b570c47df94407a77450de8a82fc4649804 | refs/heads/master | 2020-04-10T19:12:30.083853 | 2016-04-04T14:51:43 | 2016-04-04T14:51:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,320 | py | from DecisionTree import *
from evaluation import getANLP
from matplotlib.pyplot import *
from math import ceil
def getTrainData_b(ifname_data,ifname_bid):
    """Read the training impression log and bid log.  (Python 2.)

    Returns (w, winAuctions, winbid, losebid): pay price per log row,
    per-auction win flag, and histograms of winning pay prices and of
    our losing bid prices.
    """
    fin = open(ifname_data,'r')
    i = 0
    w = []
    winAuctions = []
    winbid = {}
    losebid = {}
    for line in fin:
        i += 1
        if i==1:
            # skip the header row of the data log
            continue
        # NOTE(review): eval() on file fields is risky and slow; int()
        # would be safer if the field is always numeric -- confirm.
        w.append(eval(line.split()[PAY_PRICE_INDEX]))
    fin.close()
    i = -1
    fin = open(ifname_bid,'r')
    for line in fin:
        i += 1
        linelist = line.split()
        mybidprice = eval(linelist[0])
        winAuction = eval(linelist[1])
        winAuctions.append(winAuction)
        if winAuction==1:
            # won: histogram the actual paying price for this row
            if not winbid.has_key(w[i]):
                winbid[w[i]] = 0
            winbid[w[i]] += 1
        elif winAuction==0:
            # lost: histogram our own (losing) bid price
            if not losebid.has_key(mybidprice):
                losebid[mybidprice] = 0
            losebid[mybidprice] += 1
    fin.close()
    print len(w),i+1
    return w,winAuctions,winbid,losebid
def getTestData_b(ifname_data):
    """Read the pay price column from the test log, skipping the header."""
    fin = open(ifname_data,'r')
    wt = []
    i = -2
    for line in fin:
        i += 1
        if i == -1:
            # first line of the file is the header
            continue
        wt.append(eval(line.split()[PAY_PRICE_INDEX]))
    fin.close()
    return wt
def baseline_demo(info):
    """Run the baseline market-price model for one campaign/mode.  (Python 2.)

    Builds the price distribution q (and derived curve w) from the
    training logs, writes both to the files named in *info*, evaluates
    ANLP per bucket step and KL divergence against the empirical test
    distribution, and returns (q, w).
    """
    fout_baseline = open(info.fname_baseline,'w')
    fout_q = open(info.fname_baseline_q,'w')
    fout_w = open(info.fname_baseline_w,'w')
    w,winAuctions,winbid,losebid = getTrainData_b(info.fname_trainlog,info.fname_trainbid)
    print "trainData read success."
    wt = getTestData_b(info.fname_testlog)
    print "testData read success."
    # get priceSet
    # Pay-price histogram: FULL mode counts every auction, NORMAL and
    # SURVIVAL modes only the auctions that were won.
    wcount = [0]*UPPER
    if info.mode==NORMAL or info.mode==SURVIVAL:
        for i in range(0,len(winAuctions)):
            if winAuctions[i]==1:
                wcount[w[i]] += 1
    if info.mode==FULL :
        for i in range(0,len(winAuctions)):
            wcount[w[i]] += 1
    minPrice = 0
    maxPrice = UPPER
    q = calProbDistribution(wcount,winbid,losebid,minPrice,maxPrice,info)
    w = q2w(q)
    print "q calculation success."
    # Dump q and w as space-separated values.
    if len(q)!=0:
        fout_q.write(str(q[0]))
        for i in range(1,len(q)):
            fout_q.write(' '+str(q[i]))
    fout_q.close()
    if len(w)!=0:
        fout_w.write(str(w[0]))
        for i in range(1,len(w)):
            fout_w.write(' '+str(w[i]))
    fout_w.close()
    # n calculation
    # Empirical test-set histogram of pay prices.
    priceSet = wt
    minPrice = 0
    maxPrice = max(priceSet)
    n = [0.]*UPPER
    for i in range(0,len(priceSet)):
        pay_price = priceSet[i]
        n[pay_price] += 1
    print "n calculation success."
    # qt,wt calculation
    qt = calProbDistribution_n(n,minPrice,maxPrice,info)
    wt = q2w(qt)
    # evaluation
    # ANLP
    fout_baseline.write("baseline campaign "+str(info.campaign)+" mode "+MODE_NAME_LIST[info.mode]+" basebid "+info.basebid+'\n')
    fout_baseline.write("laplace "+str(info.laplace)+"\n")
    print "baseline campaign "+str(info.campaign)+" mode "+MODE_NAME_LIST[info.mode]+" basebid "+info.basebid
    print "laplace "+str(info.laplace)
    for step in STEP_LIST:
        qi = changeBucketUniform(q,step)
        ni = deepcopy(n)
        bucket = len(qi)
        anlp,N = getANLP(qi,ni,minPrice,maxPrice)
        fout_baseline.write("bucket "+str(bucket)+" step "+str(step)+"\n")
        fout_baseline.write("Average negative log probability = "+str(anlp)+" N = "+str(N)+"\n")
        print "bucket "+str(bucket)+" step "+str(step)
        print "Average negative log probability = "+str(anlp)+" N = "+str(N)
    # KLD & pearsonr
    bucket = len(q)
    step = STEP_LIST[0]
    KLD = KLDivergence(q,qt)
    N = sum(n)
    fout_baseline.write("bucket "+str(bucket)+" step "+str(step)+"\n")
    fout_baseline.write("KLD = "+str(KLD)+" N = "+str(N)+"\n")
    print "bucket "+str(bucket)+" step "+str(step)
    print "KLD = "+str(KLD)+" N = "+str(N)
    fout_baseline.close()
    # NOTE(review): fout_q/fout_w were already closed above; these
    # second close() calls are redundant (but harmless).
    fout_q.close()
    fout_w.close()
    return q,w
# Driver: run the baseline for every campaign/mode, then (when all three
# modes ran) dump and plot a comparison of the q and w curves.  (Python 2.)
if __name__ == '__main__':
    IFROOT = '..\\make-ipinyou-data\\'
    OFROOT = '..\\data\\SurvivalModel\\'
    BASE_BID = '0'
    # Output filename suffix per mode: normal / survival / full.
    suffix_list = ['n','s','f']
    q = {}
    w = {}
    for campaign in CAMPAIGN_LIST:
        print
        print campaign
        q[campaign] = {}
        w[campaign] = {}
        for mode in MODE_LIST:
            q[campaign][mode] = {}
            w[campaign][mode] = {}
            for laplace in [LAPLACE]:
                print MODE_NAME_LIST[mode],
                info = Info()
                info.basebid = BASE_BID
                info.laplace = laplace
                info.mode = mode
                modeName = MODE_NAME_LIST[mode]
                suffix = suffix_list[mode]
                q[campaign][mode][laplace] = []
                w[campaign][mode][laplace] = []
                # create os directory
                if not os.path.exists(OFROOT+campaign+'\\'+modeName):
                    os.makedirs(OFROOT+campaign+'\\'+modeName)
                # info assignment
                info.campaign = campaign
                info.fname_trainlog = IFROOT+campaign+'\\train.log.txt'
                info.fname_testlog = IFROOT+campaign+'\\test.log.txt'
                info.fname_nodeData = OFROOT+campaign+'\\'+modeName+'\\nodeData_'+campaign+suffix+'.txt'
                info.fname_nodeInfo = OFROOT+campaign+'\\'+modeName+'\\nodeInfos_'+campaign+suffix+'.txt'
                info.fname_trainbid = IFROOT+campaign+'\\train_bid.txt'
                info.fname_testbid = IFROOT+campaign+'\\test_bid.txt'
                info.fname_baseline = OFROOT+campaign+'\\'+modeName+'\\baseline_'+campaign+suffix+'.txt'
                info.fname_monitor = OFROOT+campaign+'\\'+modeName+'\\monitor_'+campaign+suffix+'.txt'
                info.fname_testKmeans = OFROOT+campaign+'\\'+modeName+'\\testKmeans_'+campaign+suffix+'.txt'
                info.fname_testSurvival = OFROOT+campaign+'\\'+modeName+'\\testSurvival_'+campaign+suffix+'.txt'
                info.fname_evaluation = OFROOT+campaign+'\\'+modeName+'\\evaluation_'+campaign+suffix+'.txt'
                info.fname_baseline_q = OFROOT+campaign+'\\'+modeName+'\\baseline_q_'+campaign+suffix+'.txt'
                info.fname_tree_q = OFROOT+campaign+'\\'+modeName+'\\tree_q_'+campaign+suffix+'.txt'
                info.fname_baseline_w = OFROOT+campaign+'\\'+modeName+'\\baseline_w_'+campaign+suffix+'.txt'
                info.fname_tree_w = OFROOT+campaign+'\\'+modeName+'\\tree_w_'+campaign+suffix+'.txt'
                info.fname_pruneNode = OFROOT+campaign+'\\'+modeName+'\\pruneNode_'+campaign+suffix+'.txt'
                info.fname_pruneEval = OFROOT+campaign+'\\'+modeName+'\\pruneEval_'+campaign+suffix+'.txt'
                info.fname_testwin = OFROOT+campaign+'\\'+modeName+'\\testwin_'+campaign+suffix+'.txt'
                q[campaign][mode][laplace],w[campaign][mode][laplace] = baseline_demo(info)
        if len(MODE_LIST)==3:
            # All three modes present: write comparison files and plots.
            fdir = OFROOT+campaign+'\\compare\\'
            if not os.path.exists(fdir):
                os.makedirs(fdir)
            laplace = LAPLACE_LIST[0]
            fout_compare = open(fdir+'compare_l'+str(LAPLACE)+'.txt','w')
            fout_q = open(fdir+'q_l'+str(LAPLACE)+'.txt','w')
            fout_w = open(fdir+'w_l'+str(LAPLACE)+'.txt','w')
            # make length equal
            print "len:",len(q[campaign][NORMAL][laplace]),len(q[campaign][SURVIVAL][laplace]),len(q[campaign][FULL][laplace])
            maxLen = max(len(q[campaign][NORMAL][laplace]),len(q[campaign][SURVIVAL][laplace]),len(q[campaign][FULL][laplace]))
            fillLen(q[campaign][NORMAL][laplace],maxLen)
            fillLen(q[campaign][SURVIVAL][laplace],maxLen)
            fillLen(q[campaign][FULL][laplace],maxLen)
            maxLen = max(len(w[campaign][NORMAL][laplace]),len(w[campaign][SURVIVAL][laplace]),len(w[campaign][FULL][laplace]))
            fillLen(w[campaign][NORMAL][laplace],maxLen)
            fillLen(w[campaign][SURVIVAL][laplace],maxLen)
            fillLen(w[campaign][FULL][laplace],maxLen)
            # fout q,w
            fout_q.write(str(campaign)+'\n')
            for mode in MODE_LIST:
                fout_q.write(MODE_NAME_LIST[mode]+'\n')
                for i in range(0,len(q[campaign][mode][laplace])):
                    fout_q.write(str(q[campaign][mode][laplace][i])+' ')
                fout_q.write('\n')
            fout_q.write('\n')
            fout_w.write(str(campaign)+'\n')
            for mode in MODE_LIST:
                fout_w.write(MODE_NAME_LIST[mode]+'\n')
                for i in range(0,len(w[campaign][mode][laplace])):
                    fout_w.write(str(w[campaign][mode][laplace][i])+' ')
                fout_w.write('\n')
            fout_w.write('\n')
            # fout evaluation with different bucket
            fout_compare.write(str(campaign)+'\n')
            # NOTE(review): only the last iteration's q_*/w_* values
            # survive this loop and feed the plots below; `bucket` is
            # never used afterwards -- confirm this is intended.
            for step in STEP_LIST:
                q_n = q[campaign][NORMAL][laplace]
                q_s = changeBucketUniform(q[campaign][SURVIVAL][laplace],step)
                q_f = q[campaign][FULL][laplace]
                w_n = w[campaign][NORMAL][laplace]
                w_s = w[campaign][SURVIVAL][laplace]
                w_f = w[campaign][FULL][laplace]
                bucket = len(q_n)
            # NOTE(review): fout_compare is never closed.
            fout_q.close()
            fout_w.close()
            # plot
            figure(1)
            plot(range(0,len(q_n)),q_n)
            plot(range(0,len(q_s)),q_s)
            plot(range(0,len(q_f)),q_f)
            title("market price probability compare")
            xlabel("market price")
            ylabel("market price probability")
            savefig(fdir+'\\q_'+campaign+'_l'+str(laplace)+'.png')
            close(1)
            figure(2)
            plot(range(0,len(w_n)),w_n)
            plot(range(0,len(w_s)),w_s)
            plot(range(0,len(w_f)),w_f)
            title("win probability compare")
            xlabel("market price")
            ylabel("win probability")
            savefig(fdir+'\\w_'+campaign+'_l'+str(laplace)+'.png')
            close(2)
| [
"785547425@qq.com"
] | 785547425@qq.com |
ef82a35376200f13baed1c3fb88eb212a5d84882 | 12474339bb0f1d8385f692c7210176ec53159d3f | /athene.py | 0bb209b1467677582846e018afeb4c59b168dd1d | [] | no_license | girvel/Athene | cc4c56eb08591b015348f9758c968c4f64828476 | e6b18a15de3b3254ac678da9489acf14c5f2c449 | refs/heads/master | 2022-02-27T10:38:14.889959 | 2019-10-05T17:07:45 | 2019-10-05T17:07:45 | 213,038,380 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,070 | py | class And:
def __init__(self, *arguments):
self.arguments = arguments
def calculate(self):
return all(a.calculate() for a in self.arguments)
class Or:
def __init__(self, *arguments):
self.arguments = arguments
def calculate(self):
return any(a.calculate() for a in self.arguments)
class Not:
def __init__(self, argument):
self.argument = argument
def calculate(self):
return not self.argument.calculate()
class Implication:
def __init__(self, argument1, argument2):
self.argument1 = argument1
self.argument2 = argument2
def calculate(self):
return not self.argument1.calculate() or self.argument2.calculate()
class Value:
def __init__(self, value):
self.value = value
def calculate(self):
return self.value
a = Value(True)
b = Value(True)
c = Value(True)
expr = Implication(
And(
Implication(a, b),
Implication(b, c),
),
Implication(a, c),
)
if __name__ == '__main__':
print(expr.calculate())
| [
"widauka@ya.ru"
] | widauka@ya.ru |
e6a48ce39aa713d23d60c4ea365bae32998199dd | 541a65de9fc79cdc5301de66a22a8f82e06d352b | /price_optimization/Accuracy_check/Build_sequence_change_product_test.py | a9acef17bfaca2c6c0e552658255f1abeaf47bf4 | [] | no_license | sajith1995s/Reasearh_Scripts | 0b99b232e424296b15b4f429f0758ba0dc76b8c9 | 7c5b4c6851307193ca2ae446d98b5a134952f415 | refs/heads/master | 2022-07-07T05:43:26.439148 | 2019-11-09T04:31:28 | 2019-11-09T04:31:28 | 194,444,917 | 0 | 0 | null | 2022-06-21T22:18:14 | 2019-06-29T20:14:11 | Python | UTF-8 | Python | false | false | 7,416 | py | import csv
result = []
predicted = []
# Change product Algorithm
def algorithm(want_product, motherboard_pro, cpu_pro, ram_pro, vga_pro, hard_disk_pro, min_price, max_price):
products = []
with open('csv/products.csv') as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
for row in readCSV:
if row[0] == want_product:
if row[0] == "motherboard":
list = {"name": row[1], "size": row[2], "price": row[3], "type": row[4], "warranty": row[5], "image": row[6], "owner": row[7], "cpu_brand": row[8],"model": row[9], "user_rating": row[10]}
if min_price <= list["price"] and max_price >= list["price"]:
products.append(list)
elif row[0] == "cpu":
list = {"name": row[1], "size": row[2], "price": row[3], "warranty": row[4], "image": row[5], "owner": row[6], "model": row[7], "socket": row[8], "speed": row[9], "proccessor_type": row[10], "user_rating": row[11]}
if min_price <= list["price"] and max_price >= list["price"]:
products.append(list)
elif row[0] == "ram":
list = {"name": row[1], "size": row[2], "price": row[3], "type": row[4], "warranty": row[5], "image": row[6], "owner": row[7], "model": row[8], "user_rating": row[9]}
if min_price <= list["price"] and max_price >= list["price"]:
products.append(list)
elif row[0] == "vga":
list = {"name": row[1], "size": row[2], "price": row[3], "type": row[4], "warranty": row[5], "image": row[6], "owner": row[7], "chipset": row[8], "capacity": row[9], "user_rating": row[10]}
if min_price <= list["price"] and max_price >= list["price"]:
products.append(list)
elif row[0] == "hard_disk":
list = {"name": row[1], "size": row[2], "price": row[3], "warranty": row[4], "image": row[5], "owner": row[6], "user_rating": row[7]}
if min_price <= list["price"] and max_price >= list["price"]:
products.append(list)
if want_product == "motherboard":
for pro in products:
if (pro["cpu_brand"] in cpu_pro["socket"]) or (cpu_pro["socket"] in pro["cpu_brand"]):
if pro["memory_type"] == ram_pro["type"]:
if pro["pci_slot"] == vga_pro["slot"]:
result.append(pro)
elif want_product == "cpu":
for pro in products:
if (pro["socket"] in motherboard_pro["cpu_brand"]) or (motherboard_pro["cpu_brand"] in pro["socket"]):
result.append(pro)
elif want_product == "ram":
for pro in products:
if (pro["type"] in motherboard_pro["memory_type"]) or (motherboard_pro["memory_type"] in pro["type"]):
result.append(pro)
elif want_product == "ram":
for pro in products:
if (pro["slot"] in motherboard_pro["pci_slot"]) or (motherboard_pro["pci_slot"] in pro["slot"]):
result.append(pro)
with open('csv/test_change_product.csv') as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
want_product = ''
for row in readCSV:
if row[0] == "motherboard":
col_motherboard = {"name": row[1], "size": row[2], "price": row[3], "type": row[4], "warranty": row[5], "image": row[6], "owner": row[7], "cpu_brand": row[8], "model": row[9], "user_rating": row[10]}
elif row[0] == "cpu":
col_cpu = {"name": row[1], "size": row[2], "price": row[3], "warranty": row[4], "image": row[5], "owner": row[6], "model": row[7], "socket": row[8], "speed": row[9], "proccessor_type": row[10], "user_rating": row[11]}
elif row[0] == "ram":
col_ram = {"name": row[1], "size": row[2], "price": row[3], "type": row[4], "warranty": row[5], "image": row[6], "owner": row[7], "model": row[8], "user_rating": row[9]}
elif row[0] == "vga":
col_vga = {"name": row[1], "size": row[2], "price": row[3], "type": row[4], "warranty": row[5], "image": row[6], "owner": row[7], "chipset": row[8], "capacity": row[9], "user_rating": row[10]}
elif row[0] == "hard_disk":
col_hard_disk = {"name": row[1], "size": row[2], "price": row[3], "warranty": row[4], "image": row[5], "owner": row[6], "user_rating": row[7]}
elif row[0] == "want_product":
want_product = row[1]
min_price = row[2]
max_price = row[3]
algorithm(want_product, col_motherboard, col_cpu, col_ram, col_vga, col_hard_disk, min_price, max_price)
with open('csv/real_change_product.csv') as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
for row in readCSV:
predicted.append(row)
i = 0
correct = 0
incorrect = 0
for res in result:
if res["category"] == "motherboard":
if res["name"] == predicted[i][0] and res["size"] == predicted[i][1] and res["price"] == predicted[i][2] and res["type"] == predicted[i][3] and res["warranty"] == predicted[i][4] and res["image"] == predicted[i][5] and res["owner"] == predicted[i][6] and res["cpu_brand"] == predicted[i][7] and res["model"] == predicted[i][8] and res["user_rating"] == predicted[i][9]:
correct = correct + 1
else:
incorrect = incorrect + 1
elif res["category"] == "cpu":
if res["name"] == predicted[i][0] and res["size"] == predicted[i][1] and res["price"] == predicted[i][2] and res["warranty"] == predicted[i][3] and res["image"] == predicted[i][4] and res["owner"] == predicted[i][5] and res["model"] == predicted[i][6] and res["socket"] == predicted[i][7] and res["speed"] == predicted[i][8] and res["proccessor_type"] == predicted[i][9] and res["user_rating"] == predicted[i][10]:
correct = correct + 1
else:
incorrect = incorrect + 1
elif res["category"] == "ram":
if res["name"] == predicted[i][0] and res["size"] == predicted[i][1] and res["price"] == predicted[i][2] and res["type"] == predicted[i][3] and res["warranty"] == predicted[i][4] and res["image"] == predicted[i][5] and res["owner"] == predicted[i][6] and res["model"] == predicted[i][7] and res["user_rating"] == predicted[i][8]:
correct = correct + 1
else:
incorrect = incorrect + 1
elif res["category"] == "vga":
if res["name"] == predicted[i][0] and res["size"] == predicted[i][1] and res["price"] == predicted[i][2] and res["type"] == predicted[i][3] and res["warranty"] == predicted[i][4] and res["image"] == predicted[i][5] and res["owner"] == predicted[i][6] and res["chipset"] == predicted[i][7] and res["capacity"] == predicted[i][8] and res["user_rating"] == predicted[i][9]:
correct = correct + 1
else:
incorrect = incorrect + 1
elif res["category"] == "hard_disk":
if res["name"] == predicted[i][0] and res["size"] == predicted[i][1] and res["price"] == predicted[i][2] and res["warranty"] == predicted[i][3] and res["image"] == predicted[i][4] and res["owner"] == predicted[i][5] and res["user_rating"] == predicted[i][6]:
correct = correct + 1
else:
incorrect = incorrect + 1
i = i + 1
accuracy = (correct / (correct + incorrect)) * 100
print("Accuracy Level = " + accuracy)
| [
"virajlakshitha39@gmail.com"
] | virajlakshitha39@gmail.com |
cd6c69845e5a73b3b9376f0c68a51864342f01d2 | e4ce8df3aeb0f8c8eb2195ef9dabe466e137023b | /3月/4.py | 7f728c025da50b95f61a9b95f7f36065989eacf0 | [] | no_license | eecopt/Study-notes | 82c995a10d16570e8d9c1124dfd8b581ec836336 | c3bdd6651702fcdf0baefd262cf4a7899fb4a951 | refs/heads/master | 2021-03-11T11:41:57.365102 | 2020-05-01T14:49:02 | 2020-05-01T14:49:02 | 246,526,305 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 419 | py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
for i in range(ord('x'),ord('z') + 1):
for j in range(ord('x'),ord('z') + 1):
if i != j:
for k in range(ord('x'),ord('z') + 1):
if (i != k) and (j != k):
if (i != ord('x')) and (k != ord('x')) and (k != ord('z')):
print ('order is a -- %s\t b -- %s\tc--%s' % (chr(i),chr(j),chr(k))) | [
"3503419092@qq.com"
] | 3503419092@qq.com |
28459452020b3f9d921767c1fd75d3f868741f99 | 26f23588e80acc2b28d4cc70a8fbcf78c5b33a20 | /PythonModels/learnBasic/file_options.py | 4173a88638e76c5058927e4ba42da592ecbd3ca6 | [] | no_license | Timehsw/PythonCouldbeEverything | aa31b3e32bf68b49fe8e96b971637353a8ef644f | 85d4f1a2c93c7b1edc34ceb9e8bb3c8d7beb30e9 | refs/heads/master | 2021-01-01T15:38:25.253094 | 2018-01-22T06:49:05 | 2018-01-22T06:49:05 | 97,661,530 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | # coding=utf8
__author__ = 'zenith'
#读文件
f=open("D:\data.txt","r")
#print(f.read())
#print(f.readline().strip())
#print(f.readline().strip())
for line in f.readlines():
print(line.strip())
f.close()
#文件追加内容
f=open("D:\data.txt","a")
f.write("\n超人学院")
f.close()
#文件覆盖内容
f=open("D:\data.txt","w")
f.write("\n超人学院")
f.close()
| [
"hsw.time@gmail.com"
] | hsw.time@gmail.com |
b9471a7ff4c1049c12d691354010582c295a8452 | d042f025bc8c34fd4425e5a382a342dfe61c49b6 | /core_app/migrations/0010_auto_20200910_1008.py | 85c3625271c0e2fc834e8649ea51f9b903e95e45 | [] | no_license | jannnnnnnnnn/job_tracker_app | adaff7397d4fc049b8587e78da58849c8acad255 | 15e29e824df03284b04621d863a251a5bf60bc4e | refs/heads/master | 2023-02-20T05:40:15.430645 | 2021-01-23T00:44:31 | 2021-01-23T00:44:31 | 292,585,184 | 1 | 5 | null | 2021-01-23T00:44:32 | 2020-09-03T13:56:18 | Python | UTF-8 | Python | false | false | 370 | py | # Generated by Django 3.0.8 on 2020-09-10 16:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core_app', '0009_auto_20200910_0814'),
]
operations = [
migrations.RenameField(
model_name='profile',
old_name='postalcode',
new_name='zipcode',
),
]
| [
"draz@ualberta.ca"
] | draz@ualberta.ca |
2ce65950f3a3c6a7276ff0eaa5fe5c7f9c82aea8 | 78632136f73b88bf807a7e8598710531931db790 | /src/이코테/다이나믹 프로그래밍/1로 만들기.py | f31aa5863a3ee86a8f09f6abb9ad352d5beebf5e | [] | no_license | ycs1m1yk/PS | b5962264324cc82ce7a256555442237da615de02 | a6e3619c119f4cb86d1ba160302597738bbb1f9f | refs/heads/master | 2022-05-09T10:26:49.545895 | 2022-04-17T15:30:06 | 2022-04-17T15:30:06 | 222,115,340 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 527 | py | import time
MAX = 30000
cache = [0]*MAX
def make_one(x):
ret = cache[x]
if x == 1:
return 0
if ret != 0:
return ret
ret = make_one(x-1) + 1
if(x % 5 == 0):
ret = min(ret, make_one(x//5)+1)
elif(x % 3 == 0):
ret = min(ret, make_one(x//3)+1)
elif(x % 2 == 0):
ret = min(ret, make_one(x//2)+1)
return ret
x = int(input())
start_time = time.time()
print(make_one(x))
elapsed_time = time.time() - start_time
print(f"elapsed time: {elapsed_time:.4f}s")
| [
"satoly4@gmail.com"
] | satoly4@gmail.com |
e627e38d127df4495b21bc9f5850539cd623030f | d5e26c8fbd1af417fbd1ba9e1069d0dba6bd3cf9 | /core/src/main/python/compare_models.py | f6e8cfa731daab2702f96e565cd30611c58ac6b3 | [
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-warranty-disclaimer",
"MIT",
"UPL-1.0"
] | permissive | mwooten/weblogic-deploy-tooling-ct | 47b946e191fe1d6312c77c0176f783655a29ded5 | a9a269981077fb99897a6cde9de3444b7c94e326 | refs/heads/master | 2020-03-19T08:57:21.179962 | 2018-06-06T17:22:39 | 2018-06-06T17:22:39 | 136,248,018 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,822 | py | """
Copyright (c) 2017, 2018, Oracle and/or its affiliates. All rights reserved.
The Universal Permissive License (UPL), Version 1.0
The entry point for WDT Tester component that compares domain models
"""
import os
import sys
from java.lang import IllegalArgumentException
from oracle.weblogic.deploy.util import FileUtils
from oracle.weblogic.deploy.testing import VerificationException
from oracle.weblogic.deploy.util import TranslateException
from oracle.weblogic.deploy.util import WebLogicDeployToolingVersion
# java classes from weblogic-deploy-tooling-ct
from oracle.weblogic.deploy.testing import CompareModelsException
from oracle.weblogic.deploy.testing import TestingConstants
sys.path.append(os.path.dirname(os.path.realpath(sys.argv[0])))
# python classes from weblogic-deploy-tooling
from wlsdeploy.util import wlst_helper
from wlsdeploy.util.model_context import ModelContext
from wlsdeploy.util.weblogic_helper import WebLogicHelper
from wlsdeploy.util.cla_utils import CommandLineArgUtil
from wlsdeploy.util.model_translator import FileToPython
# python classes from weblogic-deploy-tooling-ct
from wlsdeploy.testing.exception import exception_helper
from wlsdeploy.testing.logging.platform_logger import PlatformLogger
from wlsdeploy.testing.compare.model_comparer import ModelComparer
from wlsdeploy.testing.common import testing_common
_program_name = 'compareModels'
_class_name = 'compare_models'
__logger = PlatformLogger('wlsdeploy.compare_models', resource_bundle_name=TestingConstants.RESOURCE_BUNDLE_NAME)
__wls_helper = WebLogicHelper(__logger)
_EXPECTED_MODEL_FILE_SWITCH = '-expected_model_file'
_ACTUAL_MODEL_FILE_SWITCH = '-actual_model_file'
_EXPECTED_MODEL_OVERRIDES_FILE_SWITCH = '-expected_model_overrides_file'
_ACTUAL_MODEL_OVERRIDES_FILE_SWITCH = '-actual_model_overrides_file'
_COMPARE_RESULT_FILE_SWITCH = '-compare_results_file'
__required_arguments = [
CommandLineArgUtil.ORACLE_HOME_SWITCH,
]
__optional_arguments = [
CommandLineArgUtil.JAVA_HOME_SWITCH,
]
def __process_args(args):
"""
Process the command-line arguments and prompt the user for any missing information
:param (list) args:
the command-line arguments list
:raises CLAException:
if an error occurs while validating and processing the command-line arguments
"""
cla_util = CommandLineArgUtil(_program_name, __required_arguments, __optional_arguments)
required_arg_map, optional_arg_map = cla_util.process_args(args)
__verify_required_args_present(required_arg_map)
__process_java_home_arg(optional_arg_map)
combined_arg_map = optional_arg_map.copy()
combined_arg_map.update(required_arg_map)
return ModelContext(_program_name, combined_arg_map)
def __verify_required_args_present(required_arg_map):
"""
Verify that the required args are present.
:param (dict) required_arg_map:
the required arguments map
:raises CLAException: if one or more of the required arguments are missing
"""
_method_name = '__verify_required_args_present'
for req_arg in __required_arguments:
if req_arg not in required_arg_map:
ex = exception_helper.create_verification_exception('WLSDPLY-20005', _program_name, req_arg)
ex.setExitCode(CommandLineArgUtil.USAGE_ERROR_EXIT_CODE)
__logger.throwing(ex, class_name=_class_name, method_name=_method_name)
raise ex
return
def __process_java_home_arg(optional_arg_map):
"""
Verify that java_home is set. If not, set it.
:param optional_arg_map: the optional arguments map
:raises CLAException: if the java home argument is not valid
"""
_method_name = '__process_java_home_arg'
if CommandLineArgUtil.JAVA_HOME_SWITCH not in optional_arg_map:
java_home_name = os.environ.get('JAVA_HOME')
try:
java_home = FileUtils.validateExistingDirectory(java_home_name)
except IllegalArgumentException, iae:
ex = exception_helper.create_verification_exception('WLSDPLY-12400', _program_name, java_home_name,
iae.getLocalizedMessage(), error=iae)
ex.setExitCode(CommandLineArgUtil.ARG_VALIDATION_ERROR_EXIT_CODE)
__logger.throwing(ex, class_name=_class_name, method_name=_method_name)
raise ex
optional_arg_map[CommandLineArgUtil.JAVA_HOME_SWITCH] = java_home.getAbsolutePath()
return
def __verify_expected_model_file_arg(compare_models_args_map):
"""
:param compare_models_args_map:
:return:
"""
_method_name = '__verify_expected_model_file_arg'
expected_model_file_name = None
if _EXPECTED_MODEL_FILE_SWITCH in compare_models_args_map:
try:
expected_model_file_name = compare_models_args_map[_EXPECTED_MODEL_FILE_SWITCH]
compare_models_args_map[_EXPECTED_MODEL_FILE_SWITCH] = \
FileUtils.validateExistingFile(expected_model_file_name)
except IllegalArgumentException, iae:
ex = exception_helper.create_verification_exception('WLSDPLY-20014',
_EXPECTED_MODEL_FILE_SWITCH,
iae.getLocalizedMessage(), error=iae)
__logger.throwing(ex, class_name=_class_name, method_name=_method_name)
raise ex
if expected_model_file_name is None:
ex = exception_helper.create_verification_exception('WLSDPLY-20005',
_program_name,
_EXPECTED_MODEL_FILE_SWITCH)
ex.setExitCode(CommandLineArgUtil.ARG_VALIDATION_ERROR_EXIT_CODE)
__logger.throwing(ex, class_name=_class_name, method_name=_method_name)
raise ex
return
def __verify_actual_model_file_arg(compare_models_args_map):
"""
:param compare_models_args_map:
:return:
"""
_method_name = '__verify_actual_model_file_arg'
actual_model_file_name = None
if _ACTUAL_MODEL_FILE_SWITCH in compare_models_args_map:
try:
actual_model_file_name = compare_models_args_map[_ACTUAL_MODEL_FILE_SWITCH]
compare_models_args_map[_ACTUAL_MODEL_FILE_SWITCH] = \
FileUtils.validateExistingFile(actual_model_file_name)
except IllegalArgumentException, iae:
ex = exception_helper.create_verification_exception('WLSDPLY-20014',
_ACTUAL_MODEL_FILE_SWITCH,
iae.getLocalizedMessage(), error=iae)
__logger.throwing(ex, class_name=_class_name, method_name=_method_name)
raise ex
if actual_model_file_name is None:
ex = exception_helper.create_verification_exception('WLSDPLY-20005',
_program_name,
_ACTUAL_MODEL_FILE_SWITCH)
ex.setExitCode(CommandLineArgUtil.ARG_VALIDATION_ERROR_EXIT_CODE)
__logger.throwing(ex, class_name=_class_name, method_name=_method_name)
raise ex
return
def __verify_expected_model_overrides_file_arg(compare_models_args_map):
"""
:param compare_models_args_map:
:return:
"""
_method_name = '__verify_expected_models_overrides_file_arg'
expected_models_overrides_file = None
if compare_models_args_map[_EXPECTED_MODEL_OVERRIDES_FILE_SWITCH] is not None:
try:
expected_model_overrides_file = compare_models_args_map[_EXPECTED_MODEL_OVERRIDES_FILE_SWITCH]
compare_models_args_map[_EXPECTED_MODEL_OVERRIDES_FILE_SWITCH] = \
FileUtils.validateExistingFile(expected_model_overrides_file)
except IllegalArgumentException, iae:
ex = exception_helper.create_verification_exception('WLSDPLY-20014',
_EXPECTED_MODEL_OVERRIDES_FILE_SWITCH,
iae.getLocalizedMessage(), error=iae)
__logger.throwing(ex, class_name=_class_name, method_name=_method_name)
raise ex
return
def __verify_actual_model_overrides_file_arg(compare_models_args_map):
"""
:param compare_models_args_map:
:return:
"""
_method_name = '__verify_actual_models_overrides_file_arg'
actual_models_overrides_file = None
if compare_models_args_map[_ACTUAL_MODEL_OVERRIDES_FILE_SWITCH] is not None:
try:
actual_model_overrides_file = compare_models_args_map[_ACTUAL_MODEL_OVERRIDES_FILE_SWITCH]
compare_models_args_map[_ACTUAL_MODEL_OVERRIDES_FILE_SWITCH] = \
FileUtils.validateExistingFile(actual_model_overrides_file)
except IllegalArgumentException, iae:
ex = exception_helper.create_verification_exception('WLSDPLY-20014',
_ACTUAL_MODEL_OVERRIDES_FILE_SWITCH,
iae.getLocalizedMessage(), error=iae)
__logger.throwing(ex, class_name=_class_name, method_name=_method_name)
raise ex
return
def __process_compare_models_args(compare_models_args_map):
__verify_expected_model_file_arg(compare_models_args_map)
__verify_actual_model_file_arg(compare_models_args_map)
__verify_expected_model_overrides_file_arg(compare_models_args_map)
__verify_actual_model_overrides_file_arg(compare_models_args_map)
return
def __compare_models(compare_models_args_map):
"""
:param compare_models_args_map:
:return:
:raises CompareModelsException:
:raises VerificationException:
"""
_method_name = '__compare_models'
expected_model_file = None
try:
expected_model_file = compare_models_args_map[_EXPECTED_MODEL_FILE_SWITCH]
expected_model_dict = FileToPython(expected_model_file.getAbsolutePath(), True).parse()
except TranslateException, te:
__logger.severe('WLSDPLY-20009', _program_name, expected_model_file.getAbsolutePath(), te.getLocalizedMessage(),
error=te, class_name=_class_name, method_name=_method_name)
ex = exception_helper.create_verification_exception(te.getLocalizedMessage(), error=te)
__logger.throwing(ex, class_name=_class_name, method_name=_method_name)
raise ex
actual_model_file = None
try:
actual_model_file = compare_models_args_map[_ACTUAL_MODEL_FILE_SWITCH]
actual_model_dict = FileToPython(actual_model_file.getAbsolutePath(), True).parse()
except TranslateException, te:
__logger.severe('WLSDPLY-20009', _program_name, actual_model_file.getAbsolutePath(), te.getLocalizedMessage(),
error=te, class_name=_class_name, method_name=_method_name)
ex = exception_helper.create_verification_exception(te.getLocalizedMessage(), error=te)
__logger.throwing(ex, class_name=_class_name, method_name=_method_name)
raise ex
expected_model_overrides_file = compare_models_args_map[_EXPECTED_MODEL_OVERRIDES_FILE_SWITCH]
if expected_model_overrides_file is not None:
__logger.info('WLSDPLY-09924', expected_model_overrides_file, "expected",
class_name=_class_name, method_name=_method_name)
testing_common.apply_substitution_variables_file(expected_model_overrides_file, expected_model_dict, __logger)
actual_model_overrides_file = compare_models_args_map[_ACTUAL_MODEL_OVERRIDES_FILE_SWITCH]
if actual_model_overrides_file is not None:
__logger.info('WLSDPLY-09924', actual_model_overrides_file, "actual",
class_name=_class_name, method_name=_method_name)
testing_common.apply_substitution_variables_file(actual_model_overrides_file, actual_model_dict, __logger)
if expected_model_dict is not None and actual_model_dict is not None:
model_comparer = ModelComparer(logger=__logger)
comparison_results = model_comparer.compare_models(expected_model_dict,
actual_model_dict)
comparison_results.log_results(__logger)
compare_results_file = compare_models_args_map[_COMPARE_RESULT_FILE_SWITCH]
if compare_results_file is not None:
model_comparer.write_compare_results(compare_results_file)
return
def main(args):
"""
The entry point for run test program
:param args:
:return:
"""
_method_name = 'main'
wlst_helper.silence()
__logger.entering(args[0], class_name=_class_name, method_name=_method_name)
for index, arg in enumerate(args):
__logger.finer('sys.argv[{0}] = {1}', str(index), arg, class_name=_class_name, method_name=_method_name)
compare_models_args_map = {
_EXPECTED_MODEL_FILE_SWITCH: None,
_ACTUAL_MODEL_FILE_SWITCH: None,
_EXPECTED_MODEL_OVERRIDES_FILE_SWITCH: None,
_ACTUAL_MODEL_OVERRIDES_FILE_SWITCH: None,
_COMPARE_RESULT_FILE_SWITCH: None
}
if _EXPECTED_MODEL_FILE_SWITCH in args:
index = sys.argv.index(_EXPECTED_MODEL_FILE_SWITCH)
value = sys.argv[index+1]
compare_models_args_map[_EXPECTED_MODEL_FILE_SWITCH] = value
sys.argv.remove(_EXPECTED_MODEL_FILE_SWITCH)
sys.argv.remove(value)
if _ACTUAL_MODEL_FILE_SWITCH in args:
index = sys.argv.index(_ACTUAL_MODEL_FILE_SWITCH)
value = sys.argv[index+1]
compare_models_args_map[_ACTUAL_MODEL_FILE_SWITCH] = value
sys.argv.remove(_ACTUAL_MODEL_FILE_SWITCH)
sys.argv.remove(value)
if _EXPECTED_MODEL_OVERRIDES_FILE_SWITCH in args:
index = sys.argv.index(_EXPECTED_MODEL_OVERRIDES_FILE_SWITCH)
value = sys.argv[index+1]
compare_models_args_map[_EXPECTED_MODEL_OVERRIDES_FILE_SWITCH] = value
sys.argv.remove(_EXPECTED_MODEL_OVERRIDES_FILE_SWITCH)
sys.argv.remove(value)
if _ACTUAL_MODEL_OVERRIDES_FILE_SWITCH in args:
index = sys.argv.index(_ACTUAL_MODEL_OVERRIDES_FILE_SWITCH)
value = sys.argv[index+1]
compare_models_args_map[_ACTUAL_MODEL_OVERRIDES_FILE_SWITCH] = value
sys.argv.remove(_ACTUAL_MODEL_OVERRIDES_FILE_SWITCH)
sys.argv.remove(value)
if _COMPARE_RESULT_FILE_SWITCH in args:
index = sys.argv.index(_COMPARE_RESULT_FILE_SWITCH)
value = sys.argv[index+1]
compare_models_args_map[_COMPARE_RESULT_FILE_SWITCH] = value
sys.argv.remove(_COMPARE_RESULT_FILE_SWITCH)
sys.argv.remove(value)
try:
__process_args(args)
__process_compare_models_args(compare_models_args_map)
except VerificationException, ve:
exit_code = ve.getExitCode()
if exit_code != CommandLineArgUtil.HELP_EXIT_CODE:
__logger.severe('WLSDPLY-20008', _program_name, ve.getLocalizedMessage(), error=ve,
class_name=_class_name, method_name=_method_name)
sys.exit(exit_code)
try:
__compare_models(compare_models_args_map)
except (CompareModelsException, VerificationException), e:
__logger.severe('WLSDPLY-09812', _program_name,
e.getClass().getSimpleName(),
e.getLocalizedMessage(), error=e,
class_name=_class_name, method_name=_method_name)
sys.exit(CommandLineArgUtil.PROG_ERROR_EXIT_CODE)
return
if __name__ == "main":
WebLogicDeployToolingVersion.logVersionInfo(_program_name)
main(sys.argv)
| [
"mike.wooten@oracle.com"
] | mike.wooten@oracle.com |
7635e2b62ef503cb48a2e97a4726b3ce2b22a9fe | c7613949a341556021e6c97596d53e7e63cbee24 | /main.py | f9f32a148c2b535764ce33aeac075f0b123c340f | [] | no_license | EdwinAlmira/proyectoPython | 90724f145172e4fb51c92d8a4149aa72dad94c88 | 68f86fd580bfec50ba11623daf25c32a282ca4e7 | refs/heads/master | 2021-02-06T16:15:24.756571 | 2020-03-11T21:30:37 | 2020-03-11T21:30:37 | 243,930,660 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,560 | py | #imports
import sys
#Listado
clients = ['pablo','gerardo']
#Create function
def create_client(client_name):
global clients
if client_name not in clients:
clients.append(client_name)
else:
print('Client already in the client\'s list')
#Update function
def update_client(old_name, new_name):
global clients
if old_name in clients:
name_index = clients.index(old_name)
clients[name_index] = new_name
list_clients()
else:
_not_register()
pass
#Delete function
def delete_client(client_name):
global clients
if client_name in clients:
clients.remove(client_name)
list_clients()
else:
_not_register()
pass
#Search function
def search_client(client_name):
global clients
for client in clients:
if client != client_name:
continue
else:
return True
def list_clients():
global clients
for idx, client in enumerate(clients):
print('{}:{}'.format(idx, client))
#Command list
def print_welcome():
print('Welcome to Project Ventas')
print('*'*50)
print('What would you like to do today? ')
print('[C]reate client')
print('[D]elete client')
print('[U]pdate client')
print('[S]earch client')
#Privada
def _client_question():
name_client = None
while not name_client:
name_client = input('What is the client name? ')
if name_client == 'exit':
name_client = None
break
#Cierra el programa
if not name_client:
sys.exit()
return name_client
def _not_register():
print('Client its not in client\'s list')
#Start point of the code
if __name__ == '__main__':
print_welcome()
command = input()
command = command.upper();
if command == 'C':
client_name = _client_question();
create_client(client_name)
list_clients()
elif command == 'D':
client_name = _client_question()
delete_client(client_name)
elif command == 'U':
client_name = _client_question()
new_name = input('What its the new client name? ')
update_client(client_name,new_name)
elif command == 'S':
client_name = _client_question()
found = search_client(client_name)
if found:
print('The client is in the client\'s list')
else:
print('Client {} not found'.format(client_name))
else:
print('Invalid command')
| [
"ealopezalmira@gmail.com"
] | ealopezalmira@gmail.com |
4ec82c4d69562c103864beb83bc5eac587470077 | 1af49694004c6fbc31deada5618dae37255ce978 | /third_party/blink/renderer/bindings/scripts/bind_gen/__init__.py | 44c068af8ca05cd83d23acbbb3e0bc2dfd11be14 | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"BSD-3-Clause"
] | permissive | sadrulhc/chromium | 59682b173a00269ed036eee5ebfa317ba3a770cc | a4b950c23db47a0fdd63549cccf9ac8acd8e2c41 | refs/heads/master | 2023-02-02T07:59:20.295144 | 2020-12-01T21:32:32 | 2020-12-01T21:32:32 | 317,678,056 | 3 | 0 | BSD-3-Clause | 2020-12-01T21:56:26 | 2020-12-01T21:56:25 | null | UTF-8 | Python | false | false | 2,353 | py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os.path
import sys
# Set up |sys.path| so that this module works without user-side setup of
# PYTHONPATH assuming Chromium's directory tree structure.
def _setup_sys_path():
expected_path = 'third_party/blink/renderer/bindings/scripts/bind_gen/'
this_dir = os.path.dirname(__file__)
root_dir = os.path.abspath(
os.path.join(this_dir, *(['..'] * expected_path.count('/'))))
module_dirs = (
# //third_party/blink/renderer/bindings/scripts/web_idl
os.path.join(root_dir, 'third_party', 'blink', 'renderer', 'bindings',
'scripts'),
# //third_party/blink/renderer/build/scripts/blinkbuild
os.path.join(root_dir, 'third_party', 'blink', 'renderer', 'build',
'scripts'),
# //third_party/mako/mako
os.path.join(root_dir, 'third_party', 'mako'),
)
for module_dir in reversed(module_dirs):
# Preserve sys.path[0] as is.
# https://docs.python.org/3/library/sys.html?highlight=path[0]#sys.path
sys.path.insert(1, module_dir)
_setup_sys_path()
from .callback_function import generate_callback_functions
from .callback_interface import generate_callback_interfaces
from .dictionary import generate_dictionaries
from .enumeration import generate_enumerations
from .interface import generate_interfaces
from .namespace import generate_namespaces
from .task_queue import TaskQueue
from .union import generate_unions
def init(web_idl_database_path, root_src_dir, root_gen_dir, component_reldirs):
"""
Args:
web_idl_database_path: File path to the web_idl.Database.
root_src_dir: Project's root directory, which corresponds to "//" in GN.
root_gen_dir: Root directory of generated files, which corresponds to
"//out/Default/gen" in GN.
component_reldirs: Pairs of component and output directory.
"""
from . import package_initializer
package_initializer.init(web_idl_database_path=web_idl_database_path,
root_src_dir=root_src_dir,
root_gen_dir=root_gen_dir,
component_reldirs=component_reldirs)
| [
"commit-bot@chromium.org"
] | commit-bot@chromium.org |
b02702d79f0095758bb048b0ee93700d8d522848 | cb7519b77e3c772f5c8f388e2e19150c7dbcf830 | /product/views.py | 276a04ba34e6b55497c7454ac0ae4b98a25ee112 | [] | no_license | kyoyo/ecshop | 2022395d3dd0f2a5c6e62ff614f0b5456412d3e5 | 0dcebbe16e040a7316ac17e59c9c4473a1b2731c | refs/heads/master | 2021-05-08T01:29:41.358321 | 2017-10-30T09:38:19 | 2017-10-30T09:38:19 | 107,854,955 | 12 | 4 | null | null | null | null | UTF-8 | Python | false | false | 8,218 | py | from django.shortcuts import render
from django.views.generic import ListView,DetailView
from .models import Category,Goods
from django.conf import settings
# Create your views here.
class CategoryDetailView(ListView):
    """List the goods belonging to one category, with ordering and pagination.

    The category is selected by the URL kwarg ``category_id``; the GET
    parameter ``orderBy`` (``'SALES_COUNT'`` or ``'PRICE'``) controls the
    result ordering (see :meth:`get_queryset`).
    """
    # Template rendered for the goods-list page.
    template_name = 'product/goods_list.html'
    # Name under which the queryset is exposed to the template.
    context_object_name = 'goods_list'
    # NOTE(review): ``page_type`` is never read in the visible code --
    # presumably consumed by templates or subclasses; confirm before removal.
    page_type = ''
    # Page size is configured project-wide via settings.PAGINATE_BY.
    paginate_by = settings.PAGINATE_BY
    # Query-string parameter that carries the requested page number.
    page_kwarg = 'page'
def get_queryset(self):
category_id = self.kwargs['category_id']
orderBy = self.request.GET.get('orderBy')
category = Category.objects.get(pk=category_id)
if orderBy == 'SALES_COUNT':
goods_list = Goods.objects.filter(category=category).order_by('-sales_count')
elif orderBy == 'PRICE':
goods_list = Goods.objects.filter(category=category).order_by('price')
else:
goods_list = Goods.objects.filter(category=category)
return goods_list
# def get_context_data(self, **kwargs):
# kwargs['category_id'] = self.kwargs['category_id']
#
# return super(CategoryDetailView, self).get_context_data(**kwargs)
def get_context_data(self, **kwargs):
"""
在视图函数中将模板变量传递给模板是通过给 render 函数的 context 参数传递一个字典实现的,
例如 render(request, 'blog/index.html', context={'post_list': post_list}),
这里传递了一个 {'post_list': post_list} 字典给模板。
在类视图中,这个需要传递的模板变量字典是通过 get_context_data 获得的,
所以我们复写该方法,以便我们能够自己再插入一些我们自定义的模板变量进去。
"""
kwargs['category_id'] = self.kwargs['category_id']
# 首先获得父类生成的传递给模板的字典。
context = super().get_context_data(**kwargs)
# 父类生成的字典中已有 paginator、page_obj、is_paginated 这三个模板变量,
# paginator 是 Paginator 的一个实例,
# page_obj 是 Page 的一个实例,
# is_paginated 是一个布尔变量,用于指示是否已分页。
# 例如如果规定每页 10 个数据,而本身只有 5 个数据,其实就用不着分页,此时 is_paginated=False。
# 关于什么是 Paginator,Page 类在 Django Pagination 简单分页:http://zmrenwu.com/post/34/ 中已有详细说明。
# 由于 context 是一个字典,所以调用 get 方法从中取出某个键对应的值。
#category_id = context.get('category_id')
paginator = context.get('paginator')
page = context.get('page_obj')
is_paginated = context.get('is_paginated')
# 调用自己写的 pagination_data 方法获得显示分页导航条需要的数据,见下方。
pagination_data = self.pagination_data(paginator, page, is_paginated)
# 将分页导航条的模板变量更新到 context 中,注意 pagination_data 方法返回的也是一个字典。
context.update(pagination_data)
# 将更新后的 context 返回,以便 ListView 使用这个字典中的模板变量去渲染模板。
# 注意此时 context 字典中已有了显示分页导航条所需的数据。
return context
def pagination_data(self, paginator, page, is_paginated):
if not is_paginated:
# 如果没有分页,则无需显示分页导航条,不用任何分页导航条的数据,因此返回一个空的字典
return {}
# 当前页左边连续的页码号,初始值为空
left = []
# 当前页右边连续的页码号,初始值为空
right = []
# 标示第 1 页页码后是否需要显示省略号
left_has_more = False
# 标示最后一页页码前是否需要显示省略号
right_has_more = False
# 标示是否需要显示第 1 页的页码号。
# 因为如果当前页左边的连续页码号中已经含有第 1 页的页码号,此时就无需再显示第 1 页的页码号,
# 其它情况下第一页的页码是始终需要显示的。
# 初始值为 False
first = False
# 标示是否需要显示最后一页的页码号。
# 需要此指示变量的理由和上面相同。
last = False
# 获得用户当前请求的页码号
page_number = page.number
# 获得分页后的总页数
total_pages = paginator.num_pages
# 获得整个分页页码列表,比如分了四页,那么就是 [1, 2, 3, 4]
page_range = paginator.page_range
if page_number == 1:
# 如果用户请求的是第一页的数据,那么当前页左边的不需要数据,因此 left=[](已默认为空)。
# 此时只要获取当前页右边的连续页码号,
# 比如分页页码列表是 [1, 2, 3, 4],那么获取的就是 right = [2, 3]。
# 注意这里只获取了当前页码后连续两个页码,你可以更改这个数字以获取更多页码。
right = page_range[page_number:page_number + 5]
# 如果最右边的页码号比最后一页的页码号减去 1 还要小,
# 说明最右边的页码号和最后一页的页码号之间还有其它页码,因此需要显示省略号,通过 right_has_more 来指示。
if right[-1] < total_pages - 1:
right_has_more = True
# 如果最右边的页码号比最后一页的页码号小,说明当前页右边的连续页码号中不包含最后一页的页码
# 所以需要显示最后一页的页码号,通过 last 来指示
if right[-1] < total_pages:
last = True
elif page_number == total_pages:
# 如果用户请求的是最后一页的数据,那么当前页右边就不需要数据,因此 right=[](已默认为空),
# 此时只要获取当前页左边的连续页码号。
# 比如分页页码列表是 [1, 2, 3, 4],那么获取的就是 left = [2, 3]
# 这里只获取了当前页码后连续两个页码,你可以更改这个数字以获取更多页码。
left = page_range[(page_number - 5) if (page_number - 5) > 0 else 0:page_number - 1]
# 如果最左边的页码号比第 2 页页码号还大,
# 说明最左边的页码号和第 1 页的页码号之间还有其它页码,因此需要显示省略号,通过 left_has_more 来指示。
if left[0] > 2:
left_has_more = True
# 如果最左边的页码号比第 1 页的页码号大,说明当前页左边的连续页码号中不包含第一页的页码,
# 所以需要显示第一页的页码号,通过 first 来指示
if left[0] > 1:
first = True
else:
# 用户请求的既不是最后一页,也不是第 1 页,则需要获取当前页左右两边的连续页码号,
# 这里只获取了当前页码前后连续两个页码,你可以更改这个数字以获取更多页码。
left = page_range[(page_number - 5) if (page_number - 5) > 0 else 0:page_number - 1]
right = page_range[page_number:page_number + 4]
# 是否需要显示最后一页和最后一页前的省略号
if right[-1] < total_pages - 1:
right_has_more = True
if right[-1] < total_pages:
last = True
# 是否需要显示第 1 页和第 1 页后的省略号
if left[0] > 2:
left_has_more = True
if left[0] > 1:
first = True
data = {
'left': left,
'right': right,
'left_has_more': left_has_more,
'right_has_more': right_has_more,
'first': first,
'last': last,
}
return data
class GoodsDetailView(DetailView):
    """Detail page for a single product.

    The primary key is read from the ``goods_id`` URL keyword argument and
    the object is exposed to the template as ``goods``.
    """
    model = Goods
    template_name = 'product/goods_detail.html'
    pk_url_kwarg = 'goods_id'
    context_object_name = 'goods'
| [
"kyo.stone2015@gmail.com"
] | kyo.stone2015@gmail.com |
7978d48814a86434a335c4d6f6ad9ce32c0ddfc7 | 871f5cce3dc447662447843370dfdb7f9fa4df6c | /testing_scripts/test2.py | 056029773c3a8641f13d3949e6b0900f0dea4d00 | [] | no_license | hrishi32/Video-Colorization | a25da366a455c93027745404394c190eabc2be6d | 627d9274d972458bdf50d2c6e1a8b20c1e27a236 | refs/heads/master | 2020-05-24T19:33:20.164039 | 2019-05-19T13:45:44 | 2019-05-19T13:45:44 | 187,436,952 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,461 | py | #!/usr/bin/env python
# coding: utf-8
import keras
from keras.models import *
import numpy as np
import cv2
import glob
import sys
import os
def frameExtractor(path):
    """Yield successive frames of the video at *path*.

    Each frame is cropped to the left 480 columns and scaled to floats in
    [0, 1].  The original version yielded before checking the read flag, so
    it crashed (``image`` is None) once the stream was exhausted; the
    generator now simply stops at end-of-stream.
    """
    videoObject = cv2.VideoCapture(path)
    while True:
        success, image = videoObject.read()
        if not success:
            break
        yield image[:, :480, :] / 255.0
    videoObject.release()  # free the capture handle once exhausted
# Load the trained colorization model given as the first CLI argument.
print("Model loading")
model = load_model(sys.argv[1])
print("Model loaded")

# Collect the first 2000 ground-truth frames (floats in [0, 1]).
it = frameExtractor('./TomAndJerry9.mp4')
gt = list()
for i in range(2000):
    im = next(it)
    gt.append(im)
gt = np.array(gt)
# preds starts as a copy of the ground truth; frames 0 and 1 stay untouched
# and serve as the seed for the autoregressive prediction below.
preds = np.copy(gt)
abc = preds
for i in range(2,len(gt)):
    if i%20==0: print(i)  # progress indicator every 20 frames
    gr = gt[i-2:i+1]
    abc = gr[2]
    # Grayscale version of the current ground-truth frame, rescaled to [0, 1].
    gray = cv2.cvtColor((abc*255.0).astype(np.uint8), cv2.COLOR_BGR2GRAY)/255.0
    # 7-channel input: the 3 channels of each of the two previously predicted
    # frames plus the current grayscale frame.
    inpu = np.stack((preds[i-2][:,:,0],preds[i-2][:,:,1],preds[i-2][:,:,2],preds[i-1][:,:,0],preds[i-1][:,:,1],preds[i-1][:,:,2],gray), axis=-1)
    inpu_1 = gray.reshape((360, 480, 1))
    input_1_list = np.array([inpu_1],dtype=float)
    input_list = np.array([inpu], dtype=float)
    # Feed both inputs (batch size 1) and keep the predicted color frame.
    preds[i] = model.predict({"input_1":input_list,"input_2":input_1_list})[0]

# Output directory named after the model file (e.g. "model.h5d/").
dirname = sys.argv[1]+'d/'
if not os.path.exists(dirname):
    os.makedirs(dirname)

# Write side-by-side comparison images: prediction (left) | ground truth (right).
for i in range(2000):
    temp = np.zeros(shape=(360,480*2,3))
    temp[:,:480,:], temp[:,480:,:] = preds[i], gt[i]
    # temp[:,:,0], temp[:,:,1], temp[:,:,2] = temp[:,:,2], temp[:,:,1], temp[:,:,0]
    cv2.imwrite(dirname+str(i)+'.png',(temp*255).astype(np.uint8))
| [
"sarodehrishikesh18@gmail.com"
] | sarodehrishikesh18@gmail.com |
50d41bc04b35250d86a4adb67e67092dd7f34b51 | 34339da2c834d79c9d3142afb8c498c62fb8917d | /thenewboston_node/blockchain/tasks/debug_task.py | 5af50cb9f0a73b2cb20d0323ab22fd1023029219 | [
"MIT"
] | permissive | olegtropinin/thenewboston-node | 5abfcbe02404f7c5347af724fb06c7f6420226ba | 2de4e14ef6855646121840224a82fcfc505b213c | refs/heads/master | 2023-08-23T09:33:25.286098 | 2021-10-14T22:53:15 | 2021-10-14T22:53:15 | 417,582,617 | 0 | 0 | MIT | 2021-10-15T17:27:52 | 2021-10-15T17:27:51 | null | UTF-8 | Python | false | false | 190 | py | # TODO(dmu) HIGH: Remove this example task once real tasks are created
from celery import shared_task
@shared_task(bind=True)
def debug_task(self):
    """Print the Celery request context; example task kept for debugging."""
    # bind=True makes the task instance available as ``self`` so the
    # incoming request metadata can be inspected.
    print(f'Request: {self.request!r}')
| [
"dmugtasimov@gmail.com"
] | dmugtasimov@gmail.com |
6817f6dec4c9946e360e3675139c6c8fea5de0f2 | 4106e79ee431c3567304cfc8e429c5c667ba9fc4 | /util.py | 87d9679e4f6d8ee44baee62f75ac924bd8fa406f | [] | no_license | justinwilly/Sprint-Challenge--Graphs | dc313f39b040b6677fd78babf1783003d61b1f9c | 6d65af8a6efeeb9c747a00d1589ebc86e1e28560 | refs/heads/master | 2022-12-26T22:20:27.930909 | 2020-10-07T20:34:08 | 2020-10-07T20:34:08 | 263,991,022 | 0 | 0 | null | 2020-10-07T20:34:09 | 2020-05-14T18:14:07 | null | UTF-8 | Python | false | false | 3,393 | py | # Note: This Queue class is sub-optimal. Why?
class Queue():
def __init__(self):
self.queue = []
def enqueue(self, value):
self.queue.append(value)
def dequeue(self):
if self.size() > 0:
return self.queue.pop(0)
else:
return None
def size(self):
return len(self.queue)
class Stack():
    """Simple LIFO stack backed by a Python list."""
    def __init__(self):
        self.stack = []
    def push(self, value):
        """Place *value* on top of the stack."""
        self.stack.append(value)
    def pop(self):
        """Remove and return the top value, or None when the stack is empty."""
        if not self.stack:
            return None
        return self.stack.pop()
    def size(self):
        """Return the number of stacked items."""
        return len(self.stack)
class Graph:
    """Represent a directed graph as an adjacency list (vertex -> edge set)."""

    def __init__(self):
        # adjacency list
        self.vertices = {}

    def add_vertex(self, vertex_id):
        """Register *vertex_id* with an empty edge set."""
        self.vertices[vertex_id] = set()

    def add_edge(self, v1, v2):
        """Add a directed edge v1 -> v2; both endpoints must already exist."""
        if v1 not in self.vertices or v2 not in self.vertices:
            print("ERROR ADDING EDGE: vertex not found")
            return
        self.vertices[v1].add(v2)

    def get_neighbors(self, vertex_id):
        """Return the neighbour set of *vertex_id*, or None if unknown."""
        # (Raising for unknown vertices may be preferable; kept as None.)
        return self.vertices.get(vertex_id)

    def bft(self, starting_vertex):
        """Print each reachable vertex in breadth-first order."""
        # Queue vertices directly; mark as visited when dequeued so the
        # visit order matches a classic BFS frontier.
        to_visit = Queue()
        to_visit.enqueue(starting_vertex)
        seen = set()
        while to_visit.size() > 0:
            current = to_visit.dequeue()
            if current in seen:
                continue
            print(current)
            seen.add(current)
            for neighbour in self.get_neighbors(current):
                to_visit.enqueue(neighbour)

    def dft(self, starting_vertex):
        """Print each reachable vertex in depth-first order."""
        to_visit = Stack()
        to_visit.push(starting_vertex)
        seen = set()
        while to_visit.size() > 0:
            current = to_visit.pop()
            if current in seen:
                continue
            print(current)
            seen.add(current)
            for neighbour in self.get_neighbors(current):
                to_visit.push(neighbour)
"45780143+justinwilly@users.noreply.github.com"
] | 45780143+justinwilly@users.noreply.github.com |
9755b139b741d0c2700bb0413e958eed81d94419 | f2668e062d0c72c7e96a007f555459fecfd02ebe | /wagtail_review/__init__.py | 2a422415622321b2cc310a82fd0013f1e2c4c900 | [
"BSD-3-Clause"
] | permissive | BackBayRider/wagtail-review | 170e1f48d421ed46f56c8607756b25d495e35c6c | 45841611921d3cf67be94370e2ab6c332b0f838c | refs/heads/master | 2023-04-19T11:26:15.577124 | 2021-03-01T16:22:32 | 2021-03-01T16:23:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | default_app_config = 'wagtail_review.apps.WagtailReviewAppConfig' | [
"matthew@torchbox.com"
] | matthew@torchbox.com |
c8d1f9bd715d63c1a95aa03a193d4b226997120f | b1ed07f9a475e77d6295ecfc1e5564a42b1b6800 | /core/templatetags/my_custom_tags.py | ce3bab3735a9a905747cfb1ff78c996de02c146a | [
"MIT"
] | permissive | SubhanRzayev/E-commerce-Tmart | 4a764a8c84976455d5c9d7d3942efeff807f7536 | 239218397f4ee55ab6ae4ef1798fbc83bc7d1159 | refs/heads/main | 2023-07-08T19:15:07.807938 | 2021-08-13T12:24:40 | 2021-08-13T12:24:40 | 395,606,371 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py | from blog.models import Category
from django.template import Library
from core.models import *
register = Library()
@register.filter
def main_catagory(self):
    """Template filter returning the object's category.

    The original body returned ``self.category`` only when it was None,
    so the filter always evaluated to None in templates; return the
    category unconditionally (still None when unset).
    """
    return self.category
| [
"subhanrzayev97@gmail.com"
] | subhanrzayev97@gmail.com |
8a2ab4c6d3a5e094042ddf4c2df9dbb5ffce65ca | 29091a32fbcbfc5c5db0b1e2a8aa344835a82f68 | /ctrlengine/sensors/__init__.py | 9e8adab2a9274c69dc8209f319d7f4752f1ae404 | [
"MIT"
] | permissive | 0xJeremy/ctrl.engine | 52b0244f42e9a7a92486ba1fcfcf2fe2fedc5631 | 19abba70df149a05edc5722cc95ceacc538448e6 | refs/heads/master | 2022-11-17T23:48:40.547073 | 2020-07-06T22:31:37 | 2020-07-06T22:31:37 | 241,662,968 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67 | py | from .camera import camera
from .realsense import realsense_camera
| [
"jeremy.kanovsky@tufts.edu"
] | jeremy.kanovsky@tufts.edu |
088a093e36d31ff4a4fc4890cd0ea0a3f98a32e7 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/artificial/transf_BoxCox/trend_MovingMedian/cycle_30/ar_/test_artificial_32_BoxCox_MovingMedian_30__100.py | eaa3d461b45d44c18f2e1bbaffa799b7393f51fd | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 265 | py | import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 30, transform = "BoxCox", sigma = 0.0, exog_count = 100, ar_order = 0); | [
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
f1cee029a06eff14cdeffffac24ff6c8d018a802 | 53c3ec3f0e86de2cf91f5c17819532ebc94fb05d | /network/cl.py | 7d87db70e0f4cf7b9c5a4edbf153d4911dc481e0 | [] | no_license | gobackmysoul/test | d52d40f7bf01b71d909925b2a20b66fa415dab85 | 47a4ae6f29651e00cafd5e8a73074194db4d2f05 | refs/heads/master | 2020-04-14T16:44:39.102773 | 2019-01-03T10:55:51 | 2019-01-03T10:55:51 | 163,960,075 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 585 | py | #!/usr/bin/python3.5
# -*- coding:utf-8 -*-
# @Filename : cl.py
# @Author : 搏鲨
# @Create date : 18-9-22 上午10:27
# @Email : 1170120381@qq.com
# @QQ : 1170120381
# @Blog : http://www.cnblogs.com/bosha/
# @license : (C) Copyright 2018-2020, 搏鲨所有.
"""
"""
import sys
# from client import client_sockfd
# client_sockfd.sendto(b'456', ('127.0.0.1', 8888))
from socket import *
# Discover this machine's outbound IP address: "connecting" a UDP socket
# (which transmits no packets) makes the OS choose a local interface, whose
# address getsockname() then reports.
sockfd = socket(AF_INET, SOCK_DGRAM)
sockfd.connect(('8.8.8.8', 80))
ip = sockfd.getsockname()[0]
sockfd.close()  # release the descriptor once the address has been read
print(type(ip))  # NOTE(review): prints the type object, not the address — likely meant print(ip)
"1821950544@qq.com"
] | 1821950544@qq.com |
e7a4a6f5c28e38bae86f0c0f5778bea9e373777a | 1ba63f3fdbfb7e42e5182d78ec24d2865e3549eb | /Ingestion/CTD/aux/ba_gui.py | 8271a7024ba573121e1535b19b18188b160270dd | [] | no_license | Fiskaaling/FA_Ingestion_engine | 6494fd8f26572b08b907301520270980c2abf983 | 92533ed17c12aaec1a2f3516d8ddc2fbb345f3dd | refs/heads/master | 2022-11-26T18:08:27.050661 | 2020-06-25T07:42:32 | 2020-06-25T07:42:32 | 147,329,930 | 2 | 0 | null | 2020-06-25T07:42:25 | 2018-09-04T10:37:27 | Python | UTF-8 | Python | false | false | 7,088 | py | # Hesin fílurin er til at minka um gui skrambul í bin_average fílinum
import os
import numpy as np
from tkinter import Label, TOP, W
import pandas as pd
def refresh_qframe(Quality_frame, list_of_casts, parent_folder, filnavn, mappunavn_dict):
    """Rebuild the cast-quality sidebar and return the selected cast's metadata.

    Every cast gets a label suffixed with '✓' (metadata exists, quality >= 0),
    '-' (metadata exists, negative quality) and/or 'X' (a do-not-use marker
    file exists); the cast currently being edited — presumably
    ``filnavn[mappunavn_dict['filur']]`` selects it, TODO confirm — is
    underlined.

    Returns:
        (metadata, finished_processing): metadata of the selected cast as a
        dict (or the initial empty list when none was found), and a flag
        meant to indicate whether all casts have been processed.
    """
    textsize = 16  # TODO: move this into a settings file
    metadata = []
    for widget in Quality_frame.winfo_children():  # empty the quality frame
        widget.destroy()
    finished_processing = True
    for cast in list_of_casts:
        casttext = cast
        if os.path.exists(parent_folder + '/ASCII/ASCII_Downcast/metadata/' + cast.split('.')[0] + '_metadata.csv'):
            # Metadata CSV has 'key'/'value' columns; rebuild it as a dict.
            cast_metadata_df = pd.read_csv(parent_folder + '/ASCII/ASCII_Downcast/metadata/' + cast.split('.')[0] + '_metadata.csv', index_col=False)
            cast_metadata_keys = cast_metadata_df.key
            cast_metadata_values = cast_metadata_df.value
            cast_metadata = {}
            for i, key in enumerate(cast_metadata_keys):
                cast_metadata[key] = cast_metadata_values[i]

            if cast == filnavn[mappunavn_dict['filur']]:
                metadata = cast_metadata
            if float(cast_metadata['cast_quality']) < 0:
                casttext += ' -'
            else:
                casttext += ' ✓'
        else:
            finished_processing = False
        if os.path.exists(parent_folder + '/ASCII/ASCII_Downcast/metadata/' + cast.split('.')[0] + '_do_not_use_.csv'):
            casttext += ' X'
            # NOTE(review): this unconditionally resets the flag to True, which
            # can override a False set for a *different* unprocessed cast
            # earlier in the loop — confirm whether that is intended.
            finished_processing = True
        if cast == filnavn[mappunavn_dict['filur']]:
            # Underline the currently selected cast.
            Label(Quality_frame, text=casttext, font=("Courier", textsize, 'underline')).pack(side=TOP, anchor=W)
        else:
            Label(Quality_frame, text=casttext, font=("Courier", textsize)).pack(side=TOP, anchor=W)

    return metadata, finished_processing
def kanna_events(event_dict, log_w):
    """Fill in events the detector failed to find (value -1) with fallbacks.

    For each missing event a warning is sent to *log_w* and a default
    sample index is written into *event_dict* in place.
    """
    fallbacks = (
        ('soak_start', 'Ávaring! Soak Start er ikki funnið', 50),
        ('soak_stop', 'Ávaring! Soak Stop er ikki funnið', 100),
        ('downcast_start', 'Ávaring! Downcast Start er ikki funnið', 150),
        ('downcast_stop', 'Ávaring! Downcast Stop er ikki funnið', 200),
        ('upcast_stop', 'Ávaring! Upcast Stop er ikki funnið', 250),
    )
    for key, warning, default_index in fallbacks:
        if event_dict[key] == -1:
            log_w(warning)
            event_dict[key] = default_index
def zoom_in(selected_event, ax, event_dict, depth):
    """Zoom *ax* to a ten-second window centred on the selected event.

    *selected_event* indexes the five event markers in order; any other
    value leaves the axes untouched (matching the original behaviour).
    """
    if not 0 <= selected_event <= 4:
        return
    event_keys = ('soak_start', 'soak_stop', 'downcast_start',
                  'downcast_stop', 'upcast_stop')
    time_fulllength = event_dict['time_fulllength']
    sample = event_dict[event_keys[selected_event]]
    centre = time_fulllength[sample]
    # 5 seconds either side at 16 samples per second -> 80-sample half window.
    window = depth[sample - (5 * 16):sample + (5 * 16)]
    ax.set_xlim(centre - 5, centre + 5)
    ax.set_ylim(np.min(window) - 0.5, np.max(window) + 0.5)
def update_annotations(selected_event, ax, event_dict, maxd):
    """Draw an arrow annotation labelling the selected event above the plot.

    The arrow points at (event time, maxd + 1) with its text one unit
    higher; returns the matplotlib annotation object.
    """
    labels = ('Soak Start', 'Soak Stop', 'Downcast Start',
              'Downcast Stop', 'Upcast Stop')
    event_keys = ('soak_start', 'soak_stop', 'downcast_start',
                  'downcast_stop', 'upcast_stop')
    time_fulllength = event_dict['time_fulllength']
    event_time = time_fulllength[event_dict[event_keys[selected_event]]]
    annotation = ax.annotate(labels[selected_event],
                             xy=(event_time, maxd + 1),
                             xytext=(event_time, maxd + 2),
                             xycoords='data',
                             textcoords='data',
                             ha='center',
                             arrowprops=dict(arrowstyle="->"))
    return annotation
| [
"johannus@fiskaaling.fo"
] | johannus@fiskaaling.fo |
4b0b8daf7547c7e417e5cb7ee7e7ad92e7ccbb55 | 7a56929fb3bdfc824faf357f0b72e79d53c40363 | /assignment1/cs231n/classifiers/linear_svm.py | 9bb845695ebd7d6ffda1f7a358a7f05b517eb905 | [] | no_license | entgl-t/CS231n | 19d0a7a70755280405a582b08ac0e2269c080fc6 | 54ea6a152981f615650096a49f1d67dd79ed6277 | refs/heads/master | 2022-12-09T13:21:50.089435 | 2020-01-10T15:12:57 | 2020-01-10T15:12:57 | 233,016,115 | 0 | 0 | null | 2022-12-07T23:32:57 | 2020-01-10T09:47:44 | Jupyter Notebook | UTF-8 | Python | false | false | 4,625 | py | import numpy as np
from random import shuffle
from past.builtins import xrange
def svm_loss_naive(W, X, y, reg):
    """
    Structured SVM loss function, naive implementation (with loops).

    Inputs have dimension D, there are C classes, and we operate on minibatches
    of N examples.

    Inputs:
    - W: A numpy array of shape (D, C) containing weights.
    - X: A numpy array of shape (N, D) containing a minibatch of data.
    - y: A numpy array of shape (N,) containing training labels; y[i] = c means
      that X[i] has label c, where 0 <= c < C.
    - reg: (float) regularization strength

    Returns a tuple of:
    - loss as single float
    - gradient with respect to weights W; an array of same shape as W
    """
    dW = np.zeros(W.shape)  # gradient accumulator, same shape as W

    num_classes = W.shape[1]
    num_train = X.shape[0]
    loss = 0.0
    for i in range(num_train):
        scores = X[i].dot(W)
        correct_class_score = scores[y[i]]
        for j in range(num_classes):
            if j == y[i]:
                continue
            margin = scores[j] - correct_class_score + 1  # note delta = 1
            if margin > 0:
                # Hinge loss is active: accumulate the loss and its gradient
                # contribution (push class j up, pull the correct class down).
                loss += margin
                dW[:, y[i]] -= X[i]
                dW[:, j] += X[i]

    # Average over the minibatch.
    loss /= num_train
    dW /= num_train

    # L2 regularization; d/dW (reg * sum(W*W)) = 2 * reg * W, matching the
    # loss term above (which carries no 1/2 factor).
    loss += reg * np.sum(W * W)
    dW += 2 * reg * W

    return loss, dW
def svm_loss_vectorized(W, X, y, reg):
    """
    Structured SVM loss function, vectorized implementation.

    Inputs and outputs are the same as svm_loss_naive.
    """
    num_train = X.shape[0]

    # Scores for every sample/class pair: shape (N, C).
    scores = X.dot(W)
    y = y.reshape(y.shape[0],)  # ensure y is a flat (N,) index array

    # Score of the correct class for each sample, as an (N, 1) column so it
    # broadcasts against the full score matrix.
    correct_class_scores = scores[np.arange(num_train), y].reshape(num_train, 1)

    # Hinge margins with delta = 1; the correct class contributes nothing.
    margin = scores - correct_class_scores + 1
    margin[np.arange(num_train), y] = 0
    margin[margin < 0] = 0

    loss = np.sum(margin) / num_train
    loss += reg * np.sum(W * W)

    # Gradient: every positive margin contributes +X[i] to its class column
    # and -X[i] to the correct-class column; encode those counts in a
    # coefficient matrix and contract it with X.
    margin_binary = margin
    margin_binary[margin_binary > 0] = 1
    margin_binary[np.arange(num_train), y] = -np.sum(margin_binary, axis=1)
    dW = X.T.dot(margin_binary) / num_train
    # d/dW (reg * sum(W*W)) = 2 * reg * W, matching svm_loss_naive.
    dW += 2 * reg * W

    return loss, dW
| [
"doniyor.tropmann@gmail.com"
] | doniyor.tropmann@gmail.com |
c51fe6bf797c8e68f550ef920d793a474a3c2284 | 893d3bfadad7d233b42322a685081df0eb82e440 | /dataSetMerger.py | 9b8ae9ab0079ecd00b007a203d55af185e4e9e16 | [] | no_license | hantus/Internship | 9ad858ddca847725da790d15ebef91c9db575627 | 16d0b7d581715d4835055ef3e4b4331f8d80d302 | refs/heads/master | 2022-10-30T06:03:08.490194 | 2020-06-19T20:47:04 | 2020-06-19T20:47:04 | 265,277,704 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,975 | py | import numpy as np
# File stems of the per-recording arrays to merge, in the original order so
# samples and labels stay aligned.
recording_names = [
    '1person10', '1person_hat10', '1person_hood10', '2ppl10',
    '2ppl_1hat10', '1person_add10', '2ppl_add10',
]

# Row-stack every sample array into one dataset; np.concatenate along axis 0
# is equivalent to appending every row to a list and calling np.asarray.
data = np.concatenate(
    [np.load('data/{}_merged.npy'.format(name)) for name in recording_names])
print(data.shape)

# Same merge for the matching label arrays.
labels = np.concatenate(
    [np.load('data/{}_merged_Labels.npy'.format(name)) for name in recording_names])
print(labels.shape)

np.save('data/dataSet.npy', data)
np.save('data/labels.npy', labels)

# Class-balance summary; the counted values follow the variable names
# (3 -> IN, 2 -> OUT, 1 -> X).
total = labels.shape[0]
allIN = np.count_nonzero(labels == 3)
allOUT = np.count_nonzero(labels == 2)
allX = np.count_nonzero(labels == 1)
print("Total number of samples: {}".format(total))
print("Examples of IN {} which is {}% of the data".format(allIN, round(allIN/total*100, 2)))
print("Examples of OUT {} which is {}% of the data".format(allOUT, round(allOUT/total*100, 2)))
print("Examples of X {} which is {}% of the data".format(allX, round(allX/total*100, 2)))
| [
"piotr.handkowski@etu.univ-grenoble-alpes.fr"
] | piotr.handkowski@etu.univ-grenoble-alpes.fr |
353717fed1fffc1dd0f1e4b9724d49c1af9d4f19 | edcb05e9463a222e5b57564dd574632d74e0d7a7 | /pages/urls.py | ed1cb739fbd7fa43693c6cf3a79204053567cf95 | [] | no_license | hungnc6969/dj1_helloworld | ab4d0223d942a05960cd4b055655396e5bcbecb4 | 20e772b176e3d737c67824496680665f23bc5746 | refs/heads/master | 2023-05-28T03:41:26.774325 | 2020-04-28T00:49:20 | 2020-04-28T00:49:20 | 259,492,274 | 0 | 0 | null | 2021-06-10T22:52:48 | 2020-04-28T00:47:57 | Python | UTF-8 | Python | false | false | 200 | py | # pages/urls.py
# file nay mapping url den conten trong file views.py
from django.urls import path
from .views import homePageView
urlpatterns = [
path('', homePageView, name = 'home'),
]
| [
"hungnc6969@gmail.com"
] | hungnc6969@gmail.com |
f9a3518f256b925c3a31d214b721e8d53706123e | f0b741f24ccf8bfe9bd1950425d83b6291d21b10 | /backend/api/v2beta1/python_http_client/test/test_v2beta1_runtime_config.py | ea104015c08bff480428f747f9b1fe16d1dd0715 | [
"Apache-2.0"
] | permissive | kubeflow/pipelines | e678342b8a325559dec0a6e1e484c525fdcc8ce8 | 3fb199658f68e7debf4906d9ce32a9a307e39243 | refs/heads/master | 2023-09-04T11:54:56.449867 | 2023-09-01T19:07:33 | 2023-09-01T19:12:27 | 133,100,880 | 3,434 | 1,675 | Apache-2.0 | 2023-09-14T20:19:06 | 2018-05-12T00:31:47 | Python | UTF-8 | Python | false | false | 1,580 | py | # coding: utf-8
"""
Kubeflow Pipelines API
This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
Contact: kubeflow-pipelines@google.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kfp_server_api
from kfp_server_api.models.v2beta1_runtime_config import V2beta1RuntimeConfig # noqa: E501
from kfp_server_api.rest import ApiException
# NOTE: this module is autogenerated by openapi-generator; regenerate it from
# the swagger definition rather than editing by hand.
class TestV2beta1RuntimeConfig(unittest.TestCase):
    """V2beta1RuntimeConfig unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Build a V2beta1RuntimeConfig fixture.

        include_optional is a boolean: when False only required params are
        included, when True both required and optional params are included.
        """
        # model = kfp_server_api.models.v2beta1_runtime_config.V2beta1RuntimeConfig()  # noqa: E501
        if include_optional :
            return V2beta1RuntimeConfig(
                parameters = {
                    'key' : None
                    },
                pipeline_root = '0'
            )
        else :
            return V2beta1RuntimeConfig(
        )

    def testV2beta1RuntimeConfig(self):
        """Test V2beta1RuntimeConfig"""
        # Smoke test: constructing both variants must not raise.
        inst_req_only = self.make_instance(include_optional=False)
        inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
    # Allow running this generated test module directly with python.
    unittest.main()
| [
"noreply@github.com"
] | kubeflow.noreply@github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.