code stringlengths 281 23.7M |
|---|
class Storage(Runnable):
    """Async storage front-end that owns a URI-selected backend.

    The backend implementation is chosen from the URI scheme. `run()`
    connects the backend, publishes the connected state, idles until the
    task is cancelled, and disconnects on the way out.
    """

    def __init__(self, storage_uri: str, loop: asyncio.AbstractEventLoop = None, threaded: bool = False) -> None:
        super().__init__(loop=loop, threaded=threaded)
        self._storage_uri = storage_uri
        # Backend instance is resolved from the URI scheme (see
        # _get_backend_instance); raises ValueError for unknown schemes.
        self._backend: AbstractStorageBackend = self._get_backend_instance(storage_uri)
        self._is_connected = False
        self._connected_state = AsyncState(False)

    async def wait_connected(self) -> None:
        """Block until the backend reports a live connection."""
        await self._connected_state.wait(True)

    def is_connected(self) -> bool:
        """Return True while the backend connection is up."""
        return self._is_connected

    async def run(self) -> None:
        """Connect the backend and idle until cancelled, then disconnect."""
        await self._backend.connect()
        self._is_connected = True
        self._connected_state.set(True)
        try:
            # Keep the task alive; cancellation breaks out of the sleep
            # and the finally block tears the connection down.
            while True:
                await asyncio.sleep(1)
        finally:
            await self._backend.disconnect()
            self._is_connected = False

    @staticmethod
    def _get_backend_instance(uri: str) -> AbstractStorageBackend:
        """Instantiate the backend registered for `uri`'s scheme.

        NOTE: the original declared `cls` as the first parameter without
        `@classmethod`, so it silently received the instance; it never used
        it, hence the helper is now a staticmethod (call sites unchanged).

        Raises:
            ValueError: if the scheme is not registered in BACKENDS.
        """
        backend_name = urlparse(uri).scheme
        backend_class = BACKENDS.get(backend_name)
        if backend_class is None:
            raise ValueError(f"Backend `{backend_name}` is not supported. Supported are {', '.join(BACKENDS.keys())} ")
        return backend_class(uri)

    async def get_collection(self, collection_name: str) -> AsyncCollection:
        """Ensure `collection_name` exists on the backend and wrap it."""
        await self._backend.ensure_collection(collection_name)
        return AsyncCollection(collection_name=collection_name, storage_backend=self._backend)

    def get_sync_collection(self, collection_name: str) -> SyncCollection:
        """Synchronous wrapper; requires the storage loop to be running."""
        if not self._loop:
            raise ValueError('Storage not started!')
        return SyncCollection(self.get_collection(collection_name), self._loop)

    def __repr__(self) -> str:
        # BUG FIX: the original interpolated the bound method
        # `self.is_connected` (always truthy), so the repr always said
        # "Connected". Call the method instead.
        return f"[GenericStorage({self._storage_uri}){('Connected' if self.is_connected() else 'Not connected')}]"
def test_footballmatch_module_scanleagues(lfs_match, monkeypatch):
    """League scan fallback: with `_find_team_page` stubbed to fail, the
    fixture match must still resolve both team names."""
    monkeypatch.setattr(
        'qtile_extras.resources.footballscores.FootballMatch._find_team_page',
        lambda _: False,
    )
    match = lfs_match('Southampton')
    assert match.home_team == 'Norwich City'
    assert match.away_team == 'Southampton'
def test_cli_version(capsys):
    """`--version` prints the configured version and exits; a registered
    subcommand still runs normally afterwards."""
    version = '1.2.3'
    cli = Radicli(version=version)
    ran = False

    # BUG FIX: the decorator line was stripped to the bare (and invalid)
    # expression `('test', a=Arg('--a'))`; radicli registers subcommands
    # via `cli.command`, so restore it.
    @cli.command('test', a=Arg('--a'))
    def test(a: str):
        assert a == 'hello'
        nonlocal ran
        ran = True

    # --version must short-circuit with SystemExit before any command runs.
    with pytest.raises(SystemExit):
        cli.run(['', '--version'])
    captured = capsys.readouterr()
    assert captured.out.strip() == version

    # The registered command still dispatches and receives its argument.
    cli.run(['', 'test', '--a', 'hello'])
    assert ran
def quadrupole3d_23(ax, da, A, bx, db, B, R):
    """Cartesian quadrupole integrals for a (d, f)-type shell pair.

    NOTE(review): machine-generated common-subexpression-elimination
    output (symbolic code generator); do not edit the x* chain by hand —
    regenerate instead. Presumably integrals over primitive Gaussians;
    confirm against the generator.

    Assumed parameters (TODO confirm against caller):
        ax, bx : orbital exponents on centers A and B
        da, db : contraction coefficients (enter via the x55 prefactor)
        A, B   : 3-component Cartesian centers of the two Gaussians
        R      : 3-component operator reference point

    Returns:
        numpy array of shape (6, 6, 10); each entry is a numpy.sum over
        the primitive combination.
    """
    result = numpy.zeros((6, 6, 10), dtype=float)
    # Auto-generated intermediates. x0 = 1/(ax+bx); x1 is the Gaussian
    # product center along x; x55 is the da*db-scaled overlap prefactor
    # exp(-mu*|A-B|^2).
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x2 = (- x1)
    x3 = (x2 + B[0])
    x4 = (x2 + R[0])
    x5 = ((- 2.0) * x1)
    x6 = (x5 + R[0])
    x7 = (x6 + B[0])
    x8 = (x0 * x7)
    x9 = (x2 + A[0])
    x10 = (x3 * x4)
    x11 = (2.0 * x10)
    x12 = (x0 + x11)
    x13 = (x12 * x9)
    x14 = (x13 + x8)
    x15 = (4.0 * x14)
    x16 = (4.0 * x10)
    x17 = (x4 ** 2)
    x18 = (3.0 * x0)
    x19 = ((2.0 * x17) + x18)
    x20 = (x12 * x4)
    x21 = (x20 + x8)
    x22 = (2.0 * x3)
    x23 = ((x0 * (x16 + x19)) + (x21 * x22))
    x24 = (x3 * x9)
    x25 = (2.0 * x24)
    x26 = (x4 * x9)
    x27 = (2.0 * x26)
    x28 = (x0 * (((x11 + x18) + x25) + x27))
    x29 = ((x15 * x3) + (4.0 * x28))
    x30 = (x0 * (((x15 * x4) + x23) + x29))
    x31 = (x0 * (x6 + A[0]))
    x32 = (x0 + x27)
    x33 = (x31 + (x32 * x4))
    x34 = ((2.0 * x13) + (x18 * x7))
    x35 = (x0 * ((x20 + x33) + x34))
    x36 = (2.0 * x14)
    x37 = (x28 + (x36 * x4))
    x38 = (x3 * x37)
    x39 = (x35 + x38)
    x40 = (2.0 * x9)
    x41 = (x30 + (x39 * x40))
    x42 = (x37 * x9)
    x43 = (x12 * x3)
    x44 = (x0 * ((x5 + A[0]) + B[0]))
    x45 = (x0 + x25)
    x46 = (x3 * x45)
    x47 = (x44 + x46)
    x48 = (x0 * ((x34 + x43) + x47))
    x49 = ((x14 * x22) + x28)
    x50 = (x48 + (x49 * x9))
    x51 = (2.0 * x0)
    # NOTE(review): the bare float coefficients below (x52=2., x53=0.*x52,
    # x74=1., ...) look like generator-emitted normalization constants;
    # several are exact zeros, zeroing whole result entries — confirm
    # against the generator before "simplifying".
    x52 = 2.
    x53 = (0. * x52)
    x54 = ((ax * bx) * x0)
    x55 = (((5. * da) * db) * numpy.exp(((- x54) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x56 = ((x0 ** 1.5) * x55)
    x57 = (x53 * x56)
    x58 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x59 = (- x58)
    x60 = (x59 + B[1])
    x61 = (0. * x56)
    x62 = (x41 * x61)
    x63 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x64 = (- x63)
    x65 = (x64 + B[2])
    x66 = (x60 ** 2)
    x67 = (0.5 * x0)
    x68 = (x66 + x67)
    x69 = (x35 + x42)
    x70 = ((x0 ** 1.5) * x55)
    x71 = (0. * x70)
    x72 = (x69 * x71)
    x73 = (0. * x56)
    x74 = 1.
    x75 = (x65 * x74)
    x76 = (x73 * x75)
    x77 = (x65 ** 2)
    x78 = (x67 + x77)
    x79 = ((x0 * (x19 + (4.0 * x26))) + (x33 * x40))
    x80 = (0. * x52)
    x81 = (x79 * x80)
    x82 = (1.5 * x0)
    x83 = (x66 + x82)
    x84 = (x60 * x70)
    x85 = (x83 * x84)
    x86 = (x71 * x79)
    x87 = (x77 + x82)
    x88 = (x65 * x70)
    x89 = (x87 * x88)
    x90 = (x59 + A[1])
    x91 = (x56 * x90)
    x92 = 3.
    x93 = (0. * x92)
    x94 = (x93 * ((x22 * x39) + x30))
    x95 = (x60 * x90)
    x96 = (x67 + x95)
    x97 = (x71 * x74)
    x98 = (x39 * x97)
    x99 = (0. * x39)
    x100 = ((- 2.0) * x58)
    x101 = (x100 + B[1])
    x102 = (x0 * (x101 + A[1]))
    x103 = (2.0 * x95)
    x104 = (x0 + x103)
    x105 = (x104 * x60)
    x106 = (x102 + x105)
    x107 = (0. * x70)
    x108 = (x107 * x74)
    x109 = (x108 * x37)
    x110 = (x70 * x96)
    x111 = (0.25 * x37)
    x112 = (x37 * x97)
    x113 = (x18 + (2.0 * x66))
    x114 = (x0 * (x113 + (4.0 * x95)))
    x115 = (2.0 * x106)
    x116 = (x114 + (x115 * x60))
    x117 = (x70 * x92)
    x118 = (0. * x117)
    x119 = (x118 * x33)
    x120 = (x33 * x97)
    x121 = (0. * x78)
    x122 = (x33 * x74)
    x123 = (x65 * x87)
    x124 = (0. * x117)
    x125 = (x124 * x33)
    x126 = (x64 + A[2])
    x127 = (x126 * x56)
    x128 = (x127 * x74)
    x129 = (x126 * x65)
    x130 = (x129 + x67)
    x131 = (x130 * x70)
    x132 = ((- 2.0) * x63)
    x133 = (x132 + B[2])
    x134 = (x0 * (x133 + A[2]))
    x135 = (2.0 * x129)
    x136 = (x0 + x135)
    x137 = (x136 * x65)
    x138 = (x134 + x137)
    x139 = (x60 * x83)
    x140 = (0. * x68)
    x141 = (x18 + (2.0 * x77))
    x142 = (x0 * ((4.0 * x129) + x141))
    x143 = (2.0 * x138)
    x144 = (x142 + (x143 * x65))
    x145 = (((0. * x23) * x3) + ((0. * x51) * ((x20 + x43) + (2.0 * x8))))
    x146 = (x67 + (x90 ** 2))
    x147 = (x146 * x70)
    x148 = (x147 * x52)
    x149 = (x104 * x90)
    x150 = (x102 + x149)
    x151 = (x107 * x23)
    x152 = (x65 * x71)
    x153 = (x114 + (x115 * x90))
    x154 = (x107 * x21)
    x155 = (x21 * x97)
    x156 = (x17 + x67)
    x157 = (x156 * x70)
    x158 = (x80 * ((x153 * x60) + (x51 * (((2.0 * x102) + x105) + x149))))
    x159 = (0. * x123)
    x160 = (x91 * x92)
    x161 = (x23 * x97)
    x162 = (0.5 * x110)
    x163 = (0. * x117)
    x164 = (x156 * x163)
    x165 = (0. * x74)
    x166 = (x156 * x165)
    x167 = ((x126 ** 2) + x67)
    x168 = (x167 * x70)
    x169 = (x168 * x52)
    x170 = (x60 * x71)
    x171 = (x126 * x136)
    x172 = (x134 + x171)
    x173 = ((x126 * x143) + x142)
    x174 = (0. * x139)
    x175 = (x174 * x52)
    x176 = (x80 * ((x173 * x65) + (x51 * (((2.0 * x134) + x137) + x171))))
    x177 = (x59 + R[1])
    x178 = (x3 ** 2)
    x179 = ((2.0 * x178) + x18)
    x180 = (x0 * (x179 + (4.0 * x24)))
    x181 = (x180 + (x40 * x47))
    x182 = (x57 * ((x0 * (((x15 * x9) + x181) + x29)) + (x22 * x50)))
    x183 = (x177 * x60)
    x184 = (x183 + x67)
    x185 = (x50 * x71)
    x186 = (x177 * x65)
    x187 = (x50 * x73)
    x188 = (x101 + R[1])
    x189 = (x0 * x188)
    x190 = (2.0 * x183)
    x191 = (x0 + x190)
    x192 = (x191 * x60)
    x193 = (x189 + x192)
    x194 = (x28 + (x36 * x9))
    x195 = (x107 * x194)
    x196 = (x184 * x71)
    x197 = (x196 * x74)
    x198 = (x71 * x78)
    x199 = (4.0 * x183)
    x200 = (2.0 * x60)
    x201 = ((x0 * (x113 + x199)) + (x193 * x200))
    x202 = (x31 + (x32 * x9))
    x203 = (x202 * x70)
    x204 = (x203 * x53)
    x205 = (0. * x52)
    x206 = (x203 * x205)
    x207 = (x177 * x90)
    x208 = (x207 + x67)
    x209 = ((x3 * x49) + x48)
    x210 = (x163 * x209)
    x211 = (x191 * x90)
    x212 = (x189 + x211)
    x213 = (x108 * x49)
    x214 = (x65 * x97)
    x215 = (2.0 * x207)
    x216 = (x0 * (((x103 + x18) + x190) + x215))
    x217 = ((x200 * x212) + x216)
    x218 = (x108 * x14)
    x219 = (x212 * x70)
    x220 = (0.25 * x14)
    x221 = (x121 * x74)
    x222 = (x208 * x70)
    x223 = ((x18 * x188) + (2.0 * x211))
    x224 = (x0 * ((x106 + x192) + x223))
    x225 = ((x217 * x60) + x224)
    x226 = (x26 + x67)
    x227 = (x163 * x226)
    x228 = (x117 * x226)
    x229 = (0. * x209)
    x230 = (x127 * x92)
    x231 = (x177 * x97)
    x232 = (x126 * x97)
    x233 = (0.5 * x131)
    x234 = (x165 * x226)
    x235 = (x184 * x70)
    x236 = (x0 * ((x100 + A[1]) + R[1]))
    x237 = (x0 + x215)
    x238 = (x236 + (x237 * x90))
    x239 = (x238 * x70)
    x240 = (x43 + x8)
    x241 = ((x0 * (x16 + x179)) + (x22 * x240))
    x242 = (x241 * x53)
    x243 = (x212 * x90)
    x244 = (x216 + (2.0 * x243))
    x245 = (x107 * x240)
    x246 = ((x217 * x90) + x224)
    x247 = (x10 + x67)
    x248 = (x247 * x71)
    x249 = (x248 * x74)
    x250 = (4.0 * x212)
    x251 = ((4.0 * x216) + (x250 * x60))
    x252 = ((x0 * ((x153 + (4.0 * x243)) + x251)) + (x200 * x246))
    x253 = (x4 * x57)
    x254 = (x4 * x73)
    x255 = (x205 * x4)
    x256 = (x163 * x208)
    x257 = (x165 * x240)
    x258 = (x165 * x247)
    x259 = (0. * x4)
    x260 = (x4 * x97)
    x261 = (x168 * x80)
    x262 = (0. * x168)
    x263 = (x172 * x71)
    x264 = (x4 * x56)
    x265 = (x64 + R[2])
    x266 = (x265 * x60)
    x267 = (x265 * x65)
    x268 = (x267 + x67)
    x269 = (x265 * x71)
    x270 = (x268 * x71)
    x271 = (x270 * x74)
    x272 = (x133 + R[2])
    x273 = (x0 * x272)
    x274 = (2.0 * x267)
    x275 = (x0 + x274)
    x276 = (x275 * x65)
    x277 = (x273 + x276)
    x278 = (4.0 * x267)
    x279 = (2.0 * x65)
    x280 = ((x0 * (x141 + x278)) + (x277 * x279))
    x281 = (x265 * x97)
    x282 = (x90 * x97)
    x283 = (x268 * x70)
    x284 = (x126 * x265)
    x285 = (x284 + x67)
    x286 = (x60 * x97)
    x287 = (x126 * x275)
    x288 = (x273 + x287)
    x289 = (x140 * x74)
    x290 = (x285 * x70)
    x291 = (x288 * x70)
    x292 = (2.0 * x284)
    x293 = (x0 * (((x135 + x18) + x274) + x292))
    x294 = ((x279 * x288) + x293)
    x295 = ((x18 * x272) + (2.0 * x287))
    x296 = (x0 * ((x138 + x276) + x295))
    x297 = ((x294 * x65) + x296)
    x298 = (0. * x148)
    x299 = (0. * x147)
    x300 = (x4 * x71)
    x301 = (x163 * x285)
    x302 = (x0 * ((x132 + A[2]) + R[2]))
    x303 = (x0 + x292)
    x304 = ((x126 * x303) + x302)
    x305 = (x304 * x70)
    x306 = (x126 * x288)
    x307 = (x293 + (2.0 * x306))
    x308 = ((x126 * x294) + x296)
    x309 = (4.0 * x288)
    x310 = ((4.0 * x293) + (x309 * x65))
    x311 = ((x0 * ((x173 + (4.0 * x306)) + x310)) + (x279 * x308))
    x312 = (x177 ** 2)
    x313 = (x312 + x67)
    x314 = (x313 * x70)
    x315 = (x45 * x9)
    x316 = (x80 * ((x181 * x3) + (x51 * ((x315 + (2.0 * x44)) + x46))))
    x317 = (x177 * x191)
    x318 = (x189 + x317)
    x319 = (x107 * x318)
    x320 = (x315 + x44)
    x321 = (x18 + (2.0 * x312))
    x322 = ((x0 * (x199 + x321)) + (x200 * x318))
    x323 = (x107 * x322)
    x324 = (x67 + (x9 ** 2))
    x325 = (x324 * x70)
    x326 = ((x322 * x60) + (x51 * (((2.0 * x189) + x192) + x317)))
    x327 = (x326 * x80)
    x328 = (x180 + (x22 * x47))
    x329 = ((x177 * x237) + x236)
    x330 = (x118 * x329)
    x331 = (((2.0 * x177) * x212) + x216)
    x332 = (x108 * x331)
    x333 = (x24 + x67)
    x334 = (x0 * ((x223 + x317) + x329))
    x335 = (x331 * x60)
    x336 = (x334 + x335)
    x337 = (x336 * x97)
    x338 = (0.25 * x331)
    x339 = (x333 * x70)
    x340 = (x0 * (((x177 * x250) + x251) + x322))
    x341 = (x93 * ((x200 * x336) + x340))
    x342 = (x56 * x9)
    x343 = (x9 * x97)
    x344 = (x124 * x329)
    x345 = (x163 * x313)
    x346 = (x165 * x47)
    x347 = (x165 * x333)
    x348 = (0. * x9)
    x349 = (2.0 * x90)
    x350 = ((x0 * ((4.0 * x207) + x321)) + (x329 * x349))
    x351 = (x3 * x350)
    x352 = (x178 + x82)
    x353 = ((x352 * x70) * x80)
    x354 = (x178 + x67)
    x355 = (x331 * x90)
    x356 = (x334 + x355)
    x357 = (x356 * x71)
    x358 = ((x336 * x349) + x340)
    x359 = (x358 * x61)
    x360 = (x3 * x73)
    x361 = (x3 * x352)
    x362 = (0. * x354)
    x363 = (x362 * x74)
    x364 = (0. * x3)
    x365 = (x3 * x97)
    x366 = (0. * x361)
    x367 = (x3 * x71)
    x368 = (x177 * x71)
    x369 = (x325 * x80)
    x370 = (0. * x325)
    x371 = (0.5 * x333)
    x372 = ((0. * x342) * x92)
    x373 = (x205 * x361)
    x374 = (0.25 * x3)
    x375 = (x265 ** 2)
    x376 = (x375 + x67)
    x377 = (x376 * x70)
    x378 = (x265 * x275)
    x379 = (x273 + x378)
    x380 = (x107 * x379)
    x381 = (x18 + (2.0 * x375))
    x382 = ((x0 * (x278 + x381)) + (x279 * x379))
    x383 = (x107 * x382)
    x384 = ((x382 * x65) + (x51 * (((2.0 * x273) + x276) + x378)))
    x385 = (x163 * x376)
    x386 = ((x265 * x303) + x302)
    x387 = (x118 * x386)
    x388 = (((2.0 * x265) * x288) + x293)
    x389 = (x108 * x388)
    x390 = (x0 * ((x295 + x378) + x386))
    x391 = (x388 * x65)
    x392 = (x390 + x391)
    x393 = (x392 * x97)
    x394 = (x124 * x386)
    x395 = (x392 * x74)
    x396 = (x0 * (((x265 * x309) + x310) + x382))
    x397 = (x93 * ((x279 * x392) + x396))
    x398 = (2.0 * x126)
    x399 = ((x0 * ((4.0 * x284) + x381)) + (x386 * x398))
    x400 = (x126 * x388)
    x401 = (x390 + x400)
    x402 = (x401 * x71)
    x403 = ((x392 * x398) + x396)
    x404 = (x403 * x61)
    # Fill the (6, 6, 10) integral table; indices are
    # (A-component, B-component, operator component).
    result[(0, 0, 0)] = numpy.sum(((- x57) * ((x3 * x41) + (x51 * ((((2.0 * x35) + x38) + x42) + x50)))))
    result[(0, 0, 1)] = numpy.sum(((- x60) * x62))
    result[(0, 0, 2)] = numpy.sum(((- x62) * x65))
    result[(0, 0, 3)] = numpy.sum(((- x68) * x72))
    result[(0, 0, 4)] = numpy.sum((((- x60) * x69) * x76))
    result[(0, 0, 5)] = numpy.sum(((- x72) * x78))
    result[(0, 0, 6)] = numpy.sum(((- x81) * x85))
    result[(0, 0, 7)] = numpy.sum((((- x65) * x68) * x86))
    result[(0, 0, 8)] = numpy.sum((((- x60) * x78) * x86))
    result[(0, 0, 9)] = numpy.sum(((- x81) * x89))
    result[(0, 1, 0)] = numpy.sum(((- x91) * x94))
    result[(0, 1, 1)] = numpy.sum(((- x96) * x98))
    result[(0, 1, 2)] = numpy.sum((((- x75) * x91) * x99))
    result[(0, 1, 3)] = numpy.sum(((- x106) * x109))
    result[(0, 1, 4)] = numpy.sum((((- x110) * x111) * x65))
    result[(0, 1, 5)] = numpy.sum((((- x112) * x78) * x90))
    result[(0, 1, 6)] = numpy.sum(((- x116) * x119))
    result[(0, 1, 7)] = numpy.sum((((- x106) * x120) * x65))
    result[(0, 1, 8)] = numpy.sum((((- x110) * x121) * x122))
    result[(0, 1, 9)] = numpy.sum((((- x123) * x125) * x90))
    result[(0, 2, 0)] = numpy.sum(((- x127) * x94))
    result[(0, 2, 1)] = numpy.sum((((- x128) * x60) * x99))
    result[(0, 2, 2)] = numpy.sum(((- x130) * x98))
    result[(0, 2, 3)] = numpy.sum((((- x112) * x126) * x68))
    result[(0, 2, 4)] = numpy.sum((((- x111) * x131) * x60))
    result[(0, 2, 5)] = numpy.sum(((- x109) * x138))
    result[(0, 2, 6)] = numpy.sum((((- x125) * x126) * x139))
    result[(0, 2, 7)] = numpy.sum((((- x122) * x131) * x140))
    result[(0, 2, 8)] = numpy.sum((((- x120) * x138) * x60))
    result[(0, 2, 9)] = numpy.sum(((- x119) * x144))
    result[(0, 3, 0)] = numpy.sum(((- x145) * x148))
    result[(0, 3, 1)] = numpy.sum(((- x150) * x151))
    result[(0, 3, 2)] = numpy.sum((((- x146) * x152) * x23))
    result[(0, 3, 3)] = numpy.sum(((- x153) * x154))
    result[(0, 3, 4)] = numpy.sum((((- x150) * x155) * x65))
    result[(0, 3, 5)] = numpy.sum((((- x121) * x147) * x21))
    result[(0, 3, 6)] = numpy.sum(((- x157) * x158))
    result[(0, 3, 7)] = numpy.sum((((- x152) * x153) * x156))
    result[(0, 3, 8)] = numpy.sum((((- x121) * x150) * x157))
    result[(0, 3, 9)] = numpy.sum((((- x148) * x156) * x159))
    result[(0, 4, 0)] = numpy.sum((((- x126) * x145) * x160))
    result[(0, 4, 1)] = numpy.sum((((- x126) * x161) * x96))
    result[(0, 4, 2)] = numpy.sum((((- x130) * x161) * x90))
    result[(0, 4, 3)] = numpy.sum((((- x106) * x126) * x155))
    result[(0, 4, 4)] = numpy.sum((((- x130) * x162) * x21))
    result[(0, 4, 5)] = numpy.sum((((- x138) * x155) * x90))
    result[(0, 4, 6)] = numpy.sum((((- x116) * x126) * x164))
    result[(0, 4, 7)] = numpy.sum((((- x106) * x131) * x166))
    result[(0, 4, 8)] = numpy.sum((((- x110) * x138) * x166))
    result[(0, 4, 9)] = numpy.sum((((- x144) * x164) * x90))
    result[(0, 5, 0)] = numpy.sum(((- x145) * x169))
    result[(0, 5, 1)] = numpy.sum((((- x167) * x170) * x23))
    result[(0, 5, 2)] = numpy.sum(((- x151) * x172))
    result[(0, 5, 3)] = numpy.sum((((- x140) * x168) * x21))
    result[(0, 5, 4)] = numpy.sum((((- x155) * x172) * x60))
    result[(0, 5, 5)] = numpy.sum(((- x154) * x173))
    result[(0, 5, 6)] = numpy.sum((((- x157) * x167) * x175))
    result[(0, 5, 7)] = numpy.sum((((- x140) * x157) * x172))
    result[(0, 5, 8)] = numpy.sum((((- x156) * x170) * x173))
    result[(0, 5, 9)] = numpy.sum(((- x157) * x176))
    result[(1, 0, 0)] = numpy.sum(((- x177) * x182))
    result[(1, 0, 1)] = numpy.sum(((- x184) * x185))
    result[(1, 0, 2)] = numpy.sum(((- x186) * x187))
    result[(1, 0, 3)] = numpy.sum(((- x193) * x195))
    result[(1, 0, 4)] = numpy.sum((((- x194) * x197) * x65))
    result[(1, 0, 5)] = numpy.sum((((- x177) * x194) * x198))
    result[(1, 0, 6)] = numpy.sum(((- x201) * x204))
    result[(1, 0, 7)] = numpy.sum((((- x152) * x193) * x202))
    result[(1, 0, 8)] = numpy.sum((((- x121) * x184) * x203))
    result[(1, 0, 9)] = numpy.sum((((- x186) * x206) * x87))
    result[(1, 1, 0)] = numpy.sum(((- x208) * x210))
    result[(1, 1, 1)] = numpy.sum(((- x212) * x213))
    result[(1, 1, 2)] = numpy.sum((((- x208) * x214) * x49))
    result[(1, 1, 3)] = numpy.sum(((- x217) * x218))
    result[(1, 1, 4)] = numpy.sum((((- x219) * x220) * x65))
    result[(1, 1, 5)] = numpy.sum((((- x14) * x221) * x222))
    result[(1, 1, 6)] = numpy.sum(((- x225) * x227))
    result[(1, 1, 7)] = numpy.sum((((- x214) * x217) * x226))
    result[(1, 1, 8)] = numpy.sum((((- x219) * x221) * x226))
    result[(1, 1, 9)] = numpy.sum((((- x159) * x208) * x228))
    result[(1, 2, 0)] = numpy.sum((((- x177) * x229) * x230))
    result[(1, 2, 1)] = numpy.sum((((- x126) * x197) * x49))
    result[(1, 2, 2)] = numpy.sum((((- x130) * x231) * x49))
    result[(1, 2, 3)] = numpy.sum((((- x14) * x193) * x232))
    result[(1, 2, 4)] = numpy.sum((((- x14) * x184) * x233))
    result[(1, 2, 5)] = numpy.sum((((- x138) * x14) * x231))
    result[(1, 2, 6)] = numpy.sum((((- x126) * x201) * x227))
    result[(1, 2, 7)] = numpy.sum((((- x131) * x193) * x234))
    result[(1, 2, 8)] = numpy.sum((((- x138) * x234) * x235))
    result[(1, 2, 9)] = numpy.sum((((- x144) * x177) * x227))
    result[(1, 3, 0)] = numpy.sum(((- x239) * x242))
    result[(1, 3, 1)] = numpy.sum(((- x244) * x245))
    result[(1, 3, 2)] = numpy.sum((((- x152) * x238) * x240))
    result[(1, 3, 3)] = numpy.sum(((- x246) * x248))
    result[(1, 3, 4)] = numpy.sum((((- x244) * x249) * x65))
    result[(1, 3, 5)] = numpy.sum((((- x121) * x239) * x247))
    result[(1, 3, 6)] = numpy.sum(((- x252) * x253))
    result[(1, 3, 7)] = numpy.sum((((- x246) * x254) * x65))
    result[(1, 3, 8)] = numpy.sum((((- x198) * x244) * x4))
    result[(1, 3, 9)] = numpy.sum((((- x123) * x239) * x255))
    result[(1, 4, 0)] = numpy.sum((((- x126) * x241) * x256))
    result[(1, 4, 1)] = numpy.sum((((- x212) * x232) * x240))
    result[(1, 4, 2)] = numpy.sum((((- x131) * x208) * x257))
    result[(1, 4, 3)] = numpy.sum((((- x126) * x217) * x249))
    result[(1, 4, 4)] = numpy.sum((((- x212) * x233) * x247))
    result[(1, 4, 5)] = numpy.sum((((- x138) * x222) * x258))
    result[(1, 4, 6)] = numpy.sum((((- x225) * x230) * x259))
    result[(1, 4, 7)] = numpy.sum((((- x130) * x217) * x260))
    result[(1, 4, 8)] = numpy.sum((((- x138) * x212) * x260))
    result[(1, 4, 9)] = numpy.sum((((- x144) * x256) * x4))
    result[(1, 5, 0)] = numpy.sum((((- x177) * x241) * x261))
    result[(1, 5, 1)] = numpy.sum((((- x184) * x240) * x262))
    result[(1, 5, 2)] = numpy.sum((((- x177) * x240) * x263))
    result[(1, 5, 3)] = numpy.sum((((- x193) * x247) * x262))
    result[(1, 5, 4)] = numpy.sum((((- x172) * x235) * x258))
    result[(1, 5, 5)] = numpy.sum((((- x173) * x177) * x248))
    result[(1, 5, 6)] = numpy.sum((((- x201) * x261) * x4))
    result[(1, 5, 7)] = numpy.sum((((- x193) * x263) * x4))
    result[(1, 5, 8)] = numpy.sum((((- x173) * x196) * x4))
    result[(1, 5, 9)] = numpy.sum((((- x176) * x177) * x264))
    result[(2, 0, 0)] = numpy.sum(((- x182) * x265))
    result[(2, 0, 1)] = numpy.sum(((- x187) * x266))
    result[(2, 0, 2)] = numpy.sum(((- x185) * x268))
    result[(2, 0, 3)] = numpy.sum((((- x194) * x269) * x68))
    result[(2, 0, 4)] = numpy.sum((((- x194) * x271) * x60))
    result[(2, 0, 5)] = numpy.sum(((- x195) * x277))
    result[(2, 0, 6)] = numpy.sum((((- x206) * x266) * x83))
    result[(2, 0, 7)] = numpy.sum((((- x140) * x203) * x268))
    result[(2, 0, 8)] = numpy.sum((((- x170) * x202) * x277))
    result[(2, 0, 9)] = numpy.sum(((- x204) * x280))
    result[(2, 1, 0)] = numpy.sum((((- x160) * x229) * x265))
    result[(2, 1, 1)] = numpy.sum((((- x281) * x49) * x96))
    result[(2, 1, 2)] = numpy.sum((((- x271) * x49) * x90))
    result[(2, 1, 3)] = numpy.sum((((- x106) * x14) * x281))
    result[(2, 1, 4)] = numpy.sum((((- x14) * x162) * x268))
    result[(2, 1, 5)] = numpy.sum((((- x14) * x277) * x282))
    result[(2, 1, 6)] = numpy.sum((((- x116) * x227) * x265))
    result[(2, 1, 7)] = numpy.sum((((- x106) * x234) * x283))
    result[(2, 1, 8)] = numpy.sum((((- x110) * x234) * x277))
    result[(2, 1, 9)] = numpy.sum((((- x227) * x280) * x90))
    result[(2, 2, 0)] = numpy.sum(((- x210) * x285))
    result[(2, 2, 1)] = numpy.sum((((- x285) * x286) * x49))
    result[(2, 2, 2)] = numpy.sum(((- x213) * x288))
    result[(2, 2, 3)] = numpy.sum((((- x14) * x289) * x290))
    result[(2, 2, 4)] = numpy.sum((((- x220) * x291) * x60))
    result[(2, 2, 5)] = numpy.sum(((- x218) * x294))
    result[(2, 2, 6)] = numpy.sum((((- x174) * x228) * x285))
    result[(2, 2, 7)] = numpy.sum((((- x226) * x289) * x291))
    result[(2, 2, 8)] = numpy.sum((((- x226) * x286) * x294))
    result[(2, 2, 9)] = numpy.sum(((- x227) * x297))
    result[(2, 3, 0)] = numpy.sum((((- x241) * x265) * x298))
    result[(2, 3, 1)] = numpy.sum((((- x150) * x240) * x269))
    result[(2, 3, 2)] = numpy.sum((((- x240) * x268) * x299))
    result[(2, 3, 3)] = numpy.sum((((- x153) * x248) * x265))
    result[(2, 3, 4)] = numpy.sum((((- x150) * x258) * x283))
    result[(2, 3, 5)] = numpy.sum((((- x247) * x277) * x299))
    result[(2, 3, 6)] = numpy.sum((((- x158) * x264) * x265))
    result[(2, 3, 7)] = numpy.sum((((- x153) * x270) * x4))
    result[(2, 3, 8)] = numpy.sum((((- x150) * x277) * x300))
    result[(2, 3, 9)] = numpy.sum((((- x280) * x298) * x4))
    result[(2, 4, 0)] = numpy.sum((((- x241) * x301) * x90))
    result[(2, 4, 1)] = numpy.sum((((- x110) * x257) * x285))
    result[(2, 4, 2)] = numpy.sum((((- x240) * x282) * x288))
    result[(2, 4, 3)] = numpy.sum((((- x106) * x258) * x290))
    result[(2, 4, 4)] = numpy.sum((((- x162) * x247) * x288))
    result[(2, 4, 5)] = numpy.sum((((- x249) * x294) * x90))
    result[(2, 4, 6)] = numpy.sum((((- x116) * x301) * x4))
    result[(2, 4, 7)] = numpy.sum((((- x106) * x260) * x288))
    result[(2, 4, 8)] = numpy.sum((((- x260) * x294) * x96))
    result[(2, 4, 9)] = numpy.sum((((- x160) * x259) * x297))
    result[(2, 5, 0)] = numpy.sum(((- x242) * x305))
    result[(2, 5, 1)] = numpy.sum((((- x170) * x240) * x304))
    result[(2, 5, 2)] = numpy.sum(((- x245) * x307))
    result[(2, 5, 3)] = numpy.sum((((- x140) * x247) * x305))
    result[(2, 5, 4)] = numpy.sum((((- x249) * x307) * x60))
    result[(2, 5, 5)] = numpy.sum(((- x248) * x308))
    result[(2, 5, 6)] = numpy.sum((((- x139) * x255) * x305))
    result[(2, 5, 7)] = numpy.sum((((- x300) * x307) * x68))
    result[(2, 5, 8)] = numpy.sum((((- x254) * x308) * x60))
    result[(2, 5, 9)] = numpy.sum(((- x253) * x311))
    result[(3, 0, 0)] = numpy.sum(((- x314) * x316))
    result[(3, 0, 1)] = numpy.sum(((- x181) * x319))
    result[(3, 0, 2)] = numpy.sum((((- x152) * x181) * x313))
    result[(3, 0, 3)] = numpy.sum(((- x320) * x323))
    result[(3, 0, 4)] = numpy.sum((((- x214) * x318) * x320))
    result[(3, 0, 5)] = numpy.sum((((- x121) * x314) * x320))
    result[(3, 0, 6)] = numpy.sum(((- x325) * x327))
    result[(3, 0, 7)] = numpy.sum((((- x152) * x322) * x324))
    result[(3, 0, 8)] = numpy.sum((((- x121) * x318) * x325))
    result[(3, 0, 9)] = numpy.sum(((((- x159) * x314) * x324) * x52))
    result[(3, 1, 0)] = numpy.sum(((- x328) * x330))
    result[(3, 1, 1)] = numpy.sum(((- x332) * x47))
    result[(3, 1, 2)] = numpy.sum((((- x214) * x329) * x47))
    result[(3, 1, 3)] = numpy.sum(((- x333) * x337))
    result[(3, 1, 4)] = numpy.sum((((- x333) * x338) * x88))
    result[(3, 1, 5)] = numpy.sum((((- x221) * x329) * x339))
    result[(3, 1, 6)] = numpy.sum(((- x341) * x342))
    result[(3, 1, 7)] = numpy.sum((((- x336) * x76) * x9))
    result[(3, 1, 8)] = numpy.sum((((- x331) * x343) * x78))
    result[(3, 1, 9)] = numpy.sum((((- x123) * x344) * x9))
    result[(3, 2, 0)] = numpy.sum((((- x126) * x328) * x345))
    result[(3, 2, 1)] = numpy.sum((((- x232) * x318) * x47))
    result[(3, 2, 2)] = numpy.sum((((- x131) * x313) * x346))
    result[(3, 2, 3)] = numpy.sum((((- x232) * x322) * x333))
    result[(3, 2, 4)] = numpy.sum((((- x233) * x318) * x333))
    result[(3, 2, 5)] = numpy.sum((((- x138) * x314) * x347))
    result[(3, 2, 6)] = numpy.sum((((- x230) * x326) * x348))
    result[(3, 2, 7)] = numpy.sum((((- x130) * x322) * x343))
    result[(3, 2, 8)] = numpy.sum((((- x138) * x318) * x343))
    result[(3, 2, 9)] = numpy.sum((((- x144) * x345) * x9))
    result[(3, 3, 0)] = numpy.sum(((- x351) * x353))
    result[(3, 3, 1)] = numpy.sum(((- x354) * x357))
    result[(3, 3, 2)] = numpy.sum((((- x152) * x350) * x354))
    result[(3, 3, 3)] = numpy.sum(((- x3) * x359))
    result[(3, 3, 4)] = numpy.sum((((- x356) * x360) * x75))
    result[(3, 3, 5)] = numpy.sum(((- x198) * x351))
    result[(3, 3, 6)] = numpy.sum(((- x57) * ((x358 * x60) + (x51 * (((x246 + (2.0 * x334)) + x335) + x355)))))
    result[(3, 3, 7)] = numpy.sum(((- x359) * x65))
    result[(3, 3, 8)] = numpy.sum(((- x357) * x78))
    result[(3, 3, 9)] = numpy.sum((((- x350) * x80) * x89))
    result[(3, 4, 0)] = numpy.sum((((- x126) * x344) * x361))
    result[(3, 4, 1)] = numpy.sum((((- x232) * x331) * x354))
    result[(3, 4, 2)] = numpy.sum((((- x131) * x329) * x363))
    result[(3, 4, 3)] = numpy.sum((((- x128) * x336) * x364))
    result[(3, 4, 4)] = numpy.sum((((- x131) * x3) * x338))
    result[(3, 4, 5)] = numpy.sum((((- x138) * x329) * x365))
    result[(3, 4, 6)] = numpy.sum(((- x127) * x341))
    result[(3, 4, 7)] = numpy.sum(((- x130) * x337))
    result[(3, 4, 8)] = numpy.sum(((- x138) * x332))
    result[(3, 4, 9)] = numpy.sum(((- x144) * x330))
    result[(3, 5, 0)] = numpy.sum((((- x169) * x313) * x366))
    result[(3, 5, 1)] = numpy.sum((((- x262) * x318) * x354))
    result[(3, 5, 2)] = numpy.sum((((- x172) * x314) * x362))
    result[(3, 5, 3)] = numpy.sum((((- x167) * x322) * x367))
    result[(3, 5, 4)] = numpy.sum((((- x172) * x318) * x365))
    result[(3, 5, 5)] = numpy.sum((((- x173) * x313) * x367))
    result[(3, 5, 6)] = numpy.sum(((- x168) * x327))
    result[(3, 5, 7)] = numpy.sum(((- x172) * x323))
    result[(3, 5, 8)] = numpy.sum(((- x173) * x319))
    result[(3, 5, 9)] = numpy.sum(((- x176) * x314))
    result[(4, 0, 0)] = numpy.sum(((((- x177) * x265) * x316) * x56))
    result[(4, 0, 1)] = numpy.sum((((- x181) * x196) * x265))
    result[(4, 0, 2)] = numpy.sum((((- x177) * x181) * x270))
    result[(4, 0, 3)] = numpy.sum((((- x193) * x269) * x320))
    result[(4, 0, 4)] = numpy.sum(((((- x165) * x235) * x268) * x320))
    result[(4, 0, 5)] = numpy.sum((((- x277) * x320) * x368))
    result[(4, 0, 6)] = numpy.sum((((- x201) * x265) * x369))
    result[(4, 0, 7)] = numpy.sum((((- x193) * x268) * x370))
    result[(4, 0, 8)] = numpy.sum((((- x184) * x277) * x370))
    result[(4, 0, 9)] = numpy.sum((((- x177) * x280) * x369))
    result[(4, 1, 0)] = numpy.sum((((- x256) * x265) * x328))
    result[(4, 1, 1)] = numpy.sum((((- x212) * x281) * x47))
    result[(4, 1, 2)] = numpy.sum((((- x222) * x268) * x346))
    result[(4, 1, 3)] = numpy.sum((((- x217) * x281) * x333))
    result[(4, 1, 4)] = numpy.sum((((- x219) * x268) * x371))
    result[(4, 1, 5)] = numpy.sum((((- x222) * x277) * x347))
    result[(4, 1, 6)] = numpy.sum((((- x225) * x265) * x372))
    result[(4, 1, 7)] = numpy.sum((((- x217) * x271) * x9))
    result[(4, 1, 8)] = numpy.sum((((- x212) * x277) * x343))
    result[(4, 1, 9)] = numpy.sum((((- x256) * x280) * x9))
    result[(4, 2, 0)] = numpy.sum((((- x177) * x301) * x328))
    result[(4, 2, 1)] = numpy.sum((((- x235) * x285) * x346))
    result[(4, 2, 2)] = numpy.sum((((- x231) * x288) * x47))
    result[(4, 2, 3)] = numpy.sum((((- x193) * x290) * x347))
    result[(4, 2, 4)] = numpy.sum((((- x184) * x291) * x371))
    result[(4, 2, 5)] = numpy.sum((((- x231) * x294) * x333))
    result[(4, 2, 6)] = numpy.sum((((- x201) * x301) * x9))
    result[(4, 2, 7)] = numpy.sum((((- x193) * x288) * x343))
    result[(4, 2, 8)] = numpy.sum((((- x197) * x294) * x9))
    result[(4, 2, 9)] = numpy.sum((((- x177) * x297) * x372))
    result[(4, 3, 0)] = numpy.sum((((- x239) * x265) * x373))
    result[(4, 3, 1)] = numpy.sum((((- x244) * x269) * x354))
    result[(4, 3, 2)] = numpy.sum((((- x239) * x268) * x362))
    result[(4, 3, 3)] = numpy.sum((((- x246) * x265) * x360))
    result[(4, 3, 4)] = numpy.sum((((- x244) * x271) * x3))
    result[(4, 3, 5)] = numpy.sum((((- x238) * x277) * x367))
    result[(4, 3, 6)] = numpy.sum((((- x252) * x265) * x57))
    result[(4, 3, 7)] = numpy.sum(((- x246) * x270))
    result[(4, 3, 8)] = numpy.sum((((- x107) * x244) * x277))
    result[(4, 3, 9)] = numpy.sum((((- x239) * x280) * x53))
    result[(4, 4, 0)] = numpy.sum(((((- x117) * x208) * x285) * x366))
    result[(4, 4, 1)] = numpy.sum((((- x219) * x285) * x363))
    result[(4, 4, 2)] = numpy.sum((((- x208) * x291) * x363))
    result[(4, 4, 3)] = numpy.sum((((- x217) * x285) * x365))
    result[(4, 4, 4)] = numpy.sum((((- x219) * x288) * x374))
    result[(4, 4, 5)] = numpy.sum((((- x208) * x294) * x365))
    result[(4, 4, 6)] = numpy.sum(((- x225) * x301))
    result[(4, 4, 7)] = numpy.sum((((- x108) * x217) * x288))
    result[(4, 4, 8)] = numpy.sum((((- x108) * x212) * x294))
    result[(4, 4, 9)] = numpy.sum(((- x256) * x297))
    result[(4, 5, 0)] = numpy.sum((((- x177) * x305) * x373))
    result[(4, 5, 1)] = numpy.sum((((- x184) * x305) * x362))
    result[(4, 5, 2)] = numpy.sum((((- x307) * x354) * x368))
    result[(4, 5, 3)] = numpy.sum((((- x193) * x304) * x367))
    result[(4, 5, 4)] = numpy.sum((((- x197) * x3) * x307))
    result[(4, 5, 5)] = numpy.sum((((- x177) * x308) * x360))
    result[(4, 5, 6)] = numpy.sum((((- x201) * x305) * x53))
    result[(4, 5, 7)] = numpy.sum((((- x107) * x193) * x307))
    result[(4, 5, 8)] = numpy.sum(((- x196) * x308))
    result[(4, 5, 9)] = numpy.sum((((- x177) * x311) * x57))
    result[(5, 0, 0)] = numpy.sum(((- x316) * x377))
    result[(5, 0, 1)] = numpy.sum((((- x170) * x181) * x376))
    result[(5, 0, 2)] = numpy.sum(((- x181) * x380))
    result[(5, 0, 3)] = numpy.sum((((- x140) * x320) * x377))
    result[(5, 0, 4)] = numpy.sum((((- x286) * x320) * x379))
    result[(5, 0, 5)] = numpy.sum(((- x320) * x383))
    result[(5, 0, 6)] = numpy.sum((((- x175) * x325) * x376))
    result[(5, 0, 7)] = numpy.sum((((- x140) * x325) * x379))
    result[(5, 0, 8)] = numpy.sum((((- x170) * x324) * x382))
    result[(5, 0, 9)] = numpy.sum(((- x369) * x384))
    result[(5, 1, 0)] = numpy.sum((((- x328) * x385) * x90))
    result[(5, 1, 1)] = numpy.sum((((- x110) * x346) * x376))
    result[(5, 1, 2)] = numpy.sum((((- x282) * x379) * x47))
    result[(5, 1, 3)] = numpy.sum((((- x106) * x347) * x377))
    result[(5, 1, 4)] = numpy.sum((((- x162) * x333) * x379))
    result[(5, 1, 5)] = numpy.sum((((- x282) * x333) * x382))
    result[(5, 1, 6)] = numpy.sum((((- x116) * x385) * x9))
    result[(5, 1, 7)] = numpy.sum((((- x106) * x343) * x379))
    result[(5, 1, 8)] = numpy.sum((((- x343) * x382) * x96))
    result[(5, 1, 9)] = numpy.sum((((- x160) * x348) * x384))
    result[(5, 2, 0)] = numpy.sum(((- x328) * x387))
    result[(5, 2, 1)] = numpy.sum((((- x286) * x386) * x47))
    result[(5, 2, 2)] = numpy.sum(((- x389) * x47))
    result[(5, 2, 3)] = numpy.sum((((- x289) * x339) * x386))
    result[(5, 2, 4)] = numpy.sum(((((- 0.25) * x333) * x388) * x84))
    result[(5, 2, 5)] = numpy.sum(((- x333) * x393))
    result[(5, 2, 6)] = numpy.sum((((- x139) * x394) * x9))
    result[(5, 2, 7)] = numpy.sum((((- x343) * x388) * x68))
    result[(5, 2, 8)] = numpy.sum(((((- x395) * x60) * x73) * x9))
    result[(5, 2, 9)] = numpy.sum(((- x342) * x397))
    result[(5, 3, 0)] = numpy.sum((((- x148) * x366) * x376))
    result[(5, 3, 1)] = numpy.sum((((- x150) * x362) * x377))
    result[(5, 3, 2)] = numpy.sum((((- x299) * x354) * x379))
    result[(5, 3, 3)] = numpy.sum((((- x153) * x367) * x376))
    result[(5, 3, 4)] = numpy.sum((((- x150) * x365) * x379))
    result[(5, 3, 5)] = numpy.sum((((- x146) * x367) * x382))
    result[(5, 3, 6)] = numpy.sum(((- x158) * x377))
    result[(5, 3, 7)] = numpy.sum(((- x153) * x380))
    result[(5, 3, 8)] = numpy.sum(((- x150) * x383))
    result[(5, 3, 9)] = numpy.sum(((- x298) * x384))
    result[(5, 4, 0)] = numpy.sum((((- x361) * x394) * x90))
    result[(5, 4, 1)] = numpy.sum((((- x110) * x363) * x386))
    result[(5, 4, 2)] = numpy.sum((((- x282) * x354) * x388))
    result[(5, 4, 3)] = numpy.sum((((- x106) * x365) * x386))
    result[(5, 4, 4)] = numpy.sum((((- x110) * x374) * x388))
    result[(5, 4, 5)] = numpy.sum((((- x364) * x395) * x91))
    result[(5, 4, 6)] = numpy.sum(((- x116) * x387))
    result[(5, 4, 7)] = numpy.sum(((- x106) * x389))
    result[(5, 4, 8)] = numpy.sum(((- x393) * x96))
    result[(5, 4, 9)] = numpy.sum(((- x397) * x91))
    result[(5, 5, 0)] = numpy.sum((((- x3) * x353) * x399))
    result[(5, 5, 1)] = numpy.sum((((- x170) * x354) * x399))
    result[(5, 5, 2)] = numpy.sum(((- x354) * x402))
    result[(5, 5, 3)] = numpy.sum((((- x367) * x399) * x68))
    result[(5, 5, 4)] = numpy.sum(((((- x360) * x401) * x60) * x74))
    result[(5, 5, 5)] = numpy.sum(((- x3) * x404))
    result[(5, 5, 6)] = numpy.sum((((- x399) * x80) * x85))
    result[(5, 5, 7)] = numpy.sum(((- x402) * x68))
    result[(5, 5, 8)] = numpy.sum(((- x404) * x60))
    result[(5, 5, 9)] = numpy.sum(((- x57) * ((x403 * x65) + (x51 * (((x308 + (2.0 * x390)) + x391) + x400)))))
    return result
def extractMbenshlehWordpressCom(item):
    """Parse a feed item from mbenshleh.wordpress.com into a release message.

    Returns None for previews or items without chapter/volume info, a
    release message when a known tag matches, or False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # tag -> (series name, translation type)
    known_tags = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (series, tl_type) in known_tags.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the bare ('model') below looks like the argument list of a
# stripped decorator (decorators appear mangled throughout this file) --
# confirm against version control.
('model')
def check_default_uv_set(progress_controller=None):
    """Check that every mesh in the scene has the default 'map1' UV set.

    Skips the check for representation versions (take name contains the
    repr separator). On failure, selects the offending transform nodes
    and raises RuntimeError listing up to MAX_NODE_DISPLAY of them.
    """
    if (progress_controller is None):
        progress_controller = ProgressControllerBase()
    v = staging.get('version')
    # Representations are derived data; UV-set naming is not enforced there.
    if (v and (Representation.repr_separator in v.take_name)):
        progress_controller.complete()
        return
    all_meshes = pm.ls(type='mesh')
    progress_controller.maximum = len(all_meshes)
    nodes_with_non_default_uvset = []
    for node in all_meshes:
        if ('map1' not in node.getUVSetNames()):
            nodes_with_non_default_uvset.append(node)
        progress_controller.increment()
    progress_controller.complete()
    if (len(nodes_with_non_default_uvset) > 0):
        # Select the offending transforms so the artist can fix them directly.
        tra_nodes = list(map((lambda x: x.getParent()), nodes_with_non_default_uvset))
        pm.select(tra_nodes)
        raise RuntimeError(('There are nodes with <b>non default UVSet (map1)</b>:\n <br><br>%s' % '<br>'.join(map((lambda x: x.name()), tra_nodes[:MAX_NODE_DISPLAY]))))
def calculate_intrinsic_cost(tx: Transaction) -> Uint:
    """Compute the intrinsic gas cost of a transaction.

    The total is the base cost plus per-byte calldata cost (zero bytes
    are cheaper), contract-creation cost when `to` is empty, and access
    list costs for typed transactions that carry one.
    """
    # Calldata: zero bytes cost less than non-zero bytes.
    data_cost = sum(
        TX_DATA_COST_PER_ZERO if byte == 0 else TX_DATA_COST_PER_NON_ZERO
        for byte in tx.data
    )
    # Contract creation adds a flat cost plus init-code cost.
    create_cost = 0
    if tx.to == Bytes0(b''):
        create_cost = TX_CREATE_COST + int(init_code_cost(Uint(len(tx.data))))
    # Access lists: per-address and per-storage-key charges.
    access_list_cost = 0
    if isinstance(tx, (AccessListTransaction, FeeMarketTransaction)):
        for _address, keys in tx.access_list:
            access_list_cost += TX_ACCESS_LIST_ADDRESS_COST
            access_list_cost += len(keys) * TX_ACCESS_LIST_STORAGE_KEY_COST
    return Uint(TX_BASE_COST + data_cost + create_cost + access_list_cost)
class GrimBlock(Directive):
    """Docutils directive that evaluates pygrim formula expressions.

    Each blank-line-separated block of the directive content is shown
    verbatim (as a literal node) and rendered as display-math LaTeX.
    """
    option_spec = {'class': directives.class_option, 'name': directives.unchanged, 'nowrap': directives.flag}
    has_content = True
    nowrap = True

    def run(self):
        set_classes(self.options)
        self.assert_has_content()
        result = []
        for block in '\n'.join(self.content).split('\n\n'):
            result.append(Text('Input: '))
            result.append(literal(block, Text(block)))
            # NOTE: directive content is evaluated with full pygrim scope --
            # only safe for trusted documents.
            formula = eval(block, pygrim.__dict__)
            latex = '$$' + formula.latex() + '$$'
            node = math_block(latex, Text(latex), **self.options)
            node.attributes['nowrap'] = True
            result.append(node)
        return result
def test_difference_raises_exceptions_if_distance_is_used_with_incompatible_values():
    """Difference must reject incompatible frame/distance combinations."""
    Difference = scared.preprocesses.high_order.Difference
    # (expected exception, constructor kwargs) -- original order preserved.
    cases = [
        (scared.PreprocessError, dict(frame_1=range(60), frame_2=range(10), distance=12)),
        (ValueError, dict(frame_1=range(60), distance=0)),
        (ValueError, dict(frame_1=60, distance='foo')),
        (ValueError, dict(frame_1=range(60), distance=1.2)),
        (ValueError, dict(frame_1=range(60), distance=(-2))),
        (scared.PreprocessError, dict(frame_1=range(60), distance=2, mode='same')),
    ]
    for expected_error, kwargs in cases:
        with pytest.raises(expected_error):
            Difference(**kwargs)
class InlineResponse2004(ModelNormal):
    """Auto-generated OpenAPI model for an `InlineResponse2004` payload.

    NOTE(review): the bare `_property` / `_js_args_to_python_args`
    expressions below look like decorator names whose `@` prefix was
    stripped (openapi-generator normally emits `@cached_property` and
    `@convert_js_args_to_python_args` here) -- confirm against the
    generator template before relying on this file.
    """
    # No enum-restricted or range-validated attributes on this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not declared in attribute_map."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Attribute name -> tuple of accepted types (lazy to avoid import cycles)."""
        lazy_import()
        return {'data': ([str],), 'meta': (InlineResponse2004Meta,)}
    _property
    def discriminator():
        # This model is not polymorphic.
        return None
    # Python attribute name -> JSON key mapping.
    attribute_map = {'data': 'data', 'meta': 'meta'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialize server data into a new instance (keyword arguments only)."""
        # Framework bookkeeping options are popped off before attribute assignment.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop keys the spec does not know about.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct the model from keyword arguments; read-only attributes are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # read_only_vars is empty for this model, so this never fires here.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
# NOTE(review): the `.parametrize(...)` line below is missing its decorator
# prefix (presumably `@pytest.mark`) -- decorators appear mangled in this file.
.parametrize(('value', 'expected'), [pytest.param('foo=bar', {'foo': ('bar',)}, id='from str, single key, single value'), pytest.param('foo=bar|baz', {'foo': ('bar', 'baz')}, id='from str, single key, multi value'), pytest.param('foo=bar,fox=fuz', {'foo': ('bar',), 'fox': ('fuz',)}, id='from str, multi key, single value'), pytest.param('foo=bar,foo=fuz', {'foo': ('bar', 'fuz')}, id='from str, redefined key, single value'), pytest.param({'foo': 'bar'}, {'foo': ('bar',)}, id='from str-to-str map'), pytest.param('foo|bar=baz', {'foo|bar': ('baz',)}, id='from str, key with multiple-value separator token'), pytest.param('foo=bar=baz', {'foo': ('bar=baz',)}, id='from str, single value with key-value seperator token'), pytest.param('foo=', {'foo': ('',)}, id='from str, empty value'), pytest.param({'foo': ('bar',)}, {'foo': ('bar',)}, id='from str-to-tuple-of-str map'), pytest.param({'foo': ('bar',), 'bar': 'baz'}, {'foo': ('bar',), 'bar': ('baz',)}, id='from str-to-mixed map')])
def test_comma_separated_mapping_param_type_convert(value: (str | MutableMapping[(str, (Sequence[str] | str))]), expected: MutableMapping[(str, tuple[(str, ...)])]) -> None:
    """convert() normalizes strings and mappings to dict[str, tuple[str, ...]]."""
    # click passes a Parameter and Context; the converter should not need them.
    param = mock.Mock(spec=click.Parameter)
    ctx = mock.Mock(spec=click.Context)
    actual = CommaSeparatedMappingParamType().convert(value=value, param=param, ctx=ctx)
    assert (actual == expected)
class AaveLiquidationClassifier(LiquidationClassifier):
    """Builds Liquidation records from decoded Aave `liquidationCall` traces."""

    def parse_liquidation(liquidation_trace: DecodedCallTrace, child_transfers: List[Transfer], child_traces: List[ClassifiedTrace]) -> Optional[Liquidation]:
        """Return a Liquidation for the trace, or None when the matching
        debt/received transfers cannot both be located among the children."""
        liquidator = liquidation_trace.from_address
        liquidated = liquidation_trace.inputs['_user']
        # Read the declared reserve/collateral up front (raises KeyError on
        # malformed inputs, matching the original behaviour); actual token
        # addresses and amounts come from the observed transfers below.
        debt_token_address = liquidation_trace.inputs['_reserve']
        received_token_address = liquidation_trace.inputs['_collateral']
        debt_purchase_amount = None
        received_amount = None
        debt_transfer = get_debt_transfer(liquidator, child_transfers)
        received_transfer = get_received_transfer(liquidator, child_transfers)
        if debt_transfer is None or received_transfer is None:
            return None
        return Liquidation(
            liquidated_user=liquidated,
            debt_token_address=debt_transfer.token_address,
            liquidator_user=liquidator,
            debt_purchase_amount=debt_transfer.amount,
            protocol=Protocol.aave,
            received_amount=received_transfer.amount,
            received_token_address=received_transfer.token_address,
            transaction_hash=liquidation_trace.transaction_hash,
            trace_address=liquidation_trace.trace_address,
            block_number=liquidation_trace.block_number,
            error=liquidation_trace.error,
        )
# NOTE(review): missing decorator prefix below (presumably `@pytest.mark`).
.parametrize('target', _targets)
def test_when_target_class_not_0_1(df_enc, target):
    """WoE encoding works when the binary target uses labels other than 0/1."""
    encoder = WoEEncoder(variables=['var_A', 'var_B'])
    df_enc['target'] = target
    encoder.fit(df_enc, df_enc['target'])
    X = encoder.transform(df_enc)
    # Expected frame: categories replaced by the precomputed WoE values.
    transf_df = df_enc.copy()
    transf_df['var_A'] = VAR_A
    transf_df['var_B'] = VAR_B
    assert (encoder.encoder_dict_ == {'var_A': {'A': 0., 'B': (- 0.), 'C': 0.}, 'var_B': {'A': (- 0.), 'B': 0., 'C': 0.}})
    pd.testing.assert_frame_equal(X, transf_df)
class InstanceTemplate(object):
    """Lightweight wrapper around a GCE instance-template resource dict.

    NOTE(review): `from_dict`/`from_json`/`json`/`key` look like they were
    originally decorated (classmethod/staticmethod/property) -- decorators
    appear stripped in this file; signatures are preserved as-is.
    """

    def __init__(self, **kwargs):
        # Raw resource fields; absent keys default to None.
        self.creation_timestamp = kwargs.get('creation_timestamp')
        self.description = kwargs.get('description')
        self.name = kwargs.get('name')
        self.properties = kwargs.get('properties')
        self.id = kwargs.get('id')
        self.project_id = kwargs.get('project_id')
        # Cached JSON representation; built lazily by json() when not supplied.
        self._json = kwargs.get('raw_instance_template')

    def from_dict(cls, instance_template, project_id=None):
        """Build an instance from an API resource dict."""
        return cls(
            project_id=project_id,
            id=instance_template.get('id'),
            creation_timestamp=instance_template.get('creationTimestamp'),
            name=instance_template.get('name'),
            description=instance_template.get('description'),
            properties=instance_template.get('properties', {}),
            raw_instance_template=json.dumps(instance_template, sort_keys=True),
        )

    def from_json(json_string, project_id=None):
        """Build an instance from a JSON string (delegates to from_dict)."""
        parsed = json.loads(json_string)
        return InstanceTemplate.from_dict(parsed, project_id)

    def _create_json_str(self):
        """Serialize the populated (truthy) fields as canonical sorted JSON."""
        resource = {
            'id': self.id,
            'creationTimestamp': self.creation_timestamp,
            'name': self.name,
            'description': self.description,
            'properties': self.properties,
        }
        populated = dict((k, v) for k, v in resource.items() if v)
        return json.dumps(populated, sort_keys=True)

    def json(self):
        """Return (and lazily cache) the JSON representation."""
        if not self._json:
            self._json = self._create_json_str()
        return self._json

    def key(self):
        """Storage key derived from project id and template name."""
        return Key.from_args(self.project_id, self.name)
class TestCountyCode(unittest.TestCase):
    """Tests for zonekey.assert_zone_key validation.

    BUG FIX: the original expected-exception cases used bare try/except
    blocks, so if assert_zone_key() did NOT raise, the test passed
    silently. assertRaises makes the missing exception a failure.
    """

    def test_assert_zone_key(self):
        # Matching zone key must not raise.
        try:
            zonekey.assert_zone_key('ES', 'ES', 'ESIOS')
        except ParserException:
            self.fail('assert_zone_key() raised ParserException unexpectedly!')
        # Mismatched zone key must raise; parser name defaults to the zone.
        with self.assertRaises(ParserException) as cm:
            zonekey.assert_zone_key('ES', 'ES-IB')
        self.assertEqual(str(cm.exception), 'ES Parser (ES): zone_key expected ES-IB, is ES')
        # An explicit parser name must appear in the message.
        with self.assertRaises(ParserException) as cm:
            zonekey.assert_zone_key('ES', 'ES-IB', 'ESIOS')
        self.assertEqual(str(cm.exception), 'ESIOS Parser (ES): zone_key expected ES-IB, is ES')
def extractEugenewoodburyBlogspotCom(item):
    """Parse a feed item from eugenewoodbury.blogspot.com into a release.

    Returns None for previews or items lacking chapter/volume info, a
    release message when a known tag is present, or False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): bare `_command` below looks like a stripped decorator
# (e.g. `@app.command`) -- confirm against version control.
_command
def s1_mask(dm: DataManager, exp_name: str, step: str=None, threshold: float=0.5):
    """Binarize per-frame foreground alphas into stage-1 masks with previews.

    Reads `fg_alpha/*.png` under the experiment's eval/<step> folder,
    writes thresholded masks to `s1_mask/`, green-tinted overlays to
    `s1_visualization/`, then assembles the overlays into an mp4.
    """
    exp = dm.experiment_names[exp_name]
    root = dm.get_local_experiment_path(f'{exp.category}/{exp.video}', exp.method, exp.name)
    # RGB color blended over masked pixels in the visualization.
    mask_color = np.array([[0.5, 0.8, 0.5]], dtype=np.float32)
    if (step is None):
        # Default to the most recent evaluation step.
        step = exp.local.evals[(- 1)]
    folder = ((root / 'eval') / step)
    assert folder.exists(), str(folder)
    alpha_folder = (folder / 'fg_alpha')
    if (not alpha_folder.exists()):
        raise ValueError('alpha folder not found!')
    mask_folder = (folder / 's1_mask')
    mask_folder.mkdir(parents=True, exist_ok=True)
    vis_folder = (folder / 's1_visualization')
    vis_folder.mkdir(parents=True, exist_ok=True)
    for file in tqdm(multi_glob_sorted(alpha_folder, '*.png')):
        mask = read_image_np(file)
        # Hard-threshold the alpha into a binary mask.
        mask = (mask >= threshold).astype(np.float32)
        save_image_np((mask_folder / file.name), mask)
        img = read_image_np(((folder / 'input_rgb') / file.name))
        mask = (mask > 0.5)
        # Blend the highlight color into masked pixels of the input frame.
        img[mask] = ((img[mask] * (1 - mask_color)) + mask_color)
        save_image_np((vis_folder / file.name.replace('.png', '.jpg')), img)
    # Stitch the visualization frames into a video next to the folder.
    run(['python', 'tools/simple_video.py', str(vis_folder), '--output', str((vis_folder.parent / f'{vis_folder.name}.mp4'))], capture_output=True)
# NOTE(review): the three bare lines below look like stripped pytest
# decorators (e.g. `@pytest.mark.skip`, `@using('turbomole')`,
# `@using('qcengine')`) -- confirm against version control.
.skip
('turbomole')
('qcengine')
def test_turbomole_hessian_compare(this_dir):
    """Compare a QCEngine(Turbomole) Hessian against a native Turbomole reference."""
    geom = geom_loader('lib:h2o_bp86_def2svp_opt.xyz')
    # BP86/def2-SVP with RI and m5 grid, driven through QCEngine.
    qce_kwargs = {'program': 'turbomole', 'model': {'method': 'b-p', 'basis': 'def2-SVP'}, 'keywords': {'ri': True, 'grid': 'm5'}}
    qce_calc = QCEngine(**qce_kwargs)
    geom.set_calculator(qce_calc)
    H = geom.hessian
    # Reference: same geometry with a prepared Turbomole control file.
    ref_geom = geom.copy()
    control_path = (this_dir / 'h2o_bp86_def2svp_control')
    ref_calc = Turbomole(control_path)
    ref_geom.set_calculator(ref_calc)
    H_ref = ref_geom.hessian
    assert (geom.energy == pytest.approx(ref_geom.energy))
    np.testing.assert_allclose(H, H_ref, rtol=0.0002)
# NOTE(review): `_defaults()` below looks like a stripped decorator
# (e.g. `@use_defaults()`) -- confirm against version control.
_defaults()
class EventsRolePermissionSchema(Schema):
    """JSON:API schema for event role permissions (CRUD flags plus role/service links)."""
    class Meta():
        # JSON:API resource configuration for marshmallow-jsonapi.
        type_ = 'event-role-permissions'
        self_view = 'v1.events_role_detail'
        self_view_kwargs = {'id': '<id>'}
        self_view_many = 'v1.events_role_list'
        inflect = dasherize
    id = fields.Str(dump_only=True)
    # CRUD permission flags; all default to denied.
    can_create = fields.Boolean(default=False)
    can_read = fields.Boolean(default=False)
    can_update = fields.Boolean(default=False)
    can_delete = fields.Boolean(default=False)
    # Linked role and service resources.
    role = Relationship(self_view='v1.event_role_role', self_view_kwargs={'id': '<id>'}, related_view='v1.role_detail', related_view_kwargs={'id': '<role_id>'}, schema='RoleSchema', type_='role')
    service = Relationship(self_view='v1.event_role_service', self_view_kwargs={'id': '<id>'}, related_view='v1.service_detail', related_view_kwargs={'id': '<service_id>'}, schema='ServiceSchema', type_='service')
def createOffsetFile(dut, dst_file):
    """Persist the DUT's time offset to *dst_file*.

    In local mode the file is written directly; otherwise the value is
    written on the remote host through an ssh subprocess. Exits the
    process with status 1 when the local file cannot be written.

    BUG FIX: the original called close() in a finally block and printed
    `tmp_file` in the error path -- if open() itself raised IOError,
    `tmp_file` was unbound and both lines raised NameError instead of
    reporting the failure. A `with` block and printing the path fix both.
    """
    if dut.inLocalMode():
        try:
            with open(dst_file, 'w') as tmp_file:
                tmp_file.write(str(dut.offset))
        except IOError:
            # Report the destination path, not the file object.
            print(('cannot write to %s' % dst_file))
            sys.exit(1)
        return
    else:
        dutSsh = getSshProc(dut)
        dutSsh.communicate(('echo "%s" > %s' % (str(dut.offset), dst_file)))
        return
class ClusterSizeError(ClusterError):
    """Raised when the cluster cannot satisfy the configured minimum node count."""

    # Message used when the caller supplies no (or a falsy) message.
    _DEFAULT_MSG = 'The cluster does not provide the minimum amount of nodes specified in the configuration. Some nodes are running, but the cluster will not be set up yet. Please change the minimum amount of nodes in the configuration or try to start a new cluster after checking the cloud provider settings.'

    def __init__(self, msg=None):
        super(ClusterSizeError, self).__init__(msg or self._DEFAULT_MSG)
def main():
    """Entry point: rewrite version strings for the given stack version.

    Usage: utils/bump-version.py [stack version]. Derives a three-part
    client version from the stack version, then patches `_version.py`
    and the CI test matrix in place.
    """
    if (len(sys.argv) != 2):
        print('usage: utils/bump-version.py [stack version]')
        exit(1)
    stack_version = sys.argv[1]
    try:
        # Leading numeric portion, e.g. '8.1' from '8.1-SNAPSHOT'.
        python_version = re.search('^([0-9][0-9\\.]*[0-9]+)', stack_version).group(1)
    except AttributeError:
        # re.search returned None -> .group raised AttributeError.
        print(f"Couldn't match the given stack version {stack_version!r}")
        exit(1)
    # Pad with '.0' components until at least major.minor.patch.
    for _ in range(3):
        if (len(python_version.split('.')) >= 3):
            break
        python_version += '.0'
    find_and_replace(path=(SOURCE_DIR / 'elasticsearch/_version.py'), pattern='__versionstr__ = \\"[0-9]+[0-9\\.]*[0-9](?:\\+dev)?\\"', replace=f'__versionstr__ = "{python_version}"')
    major_minor_version = '.'.join(python_version.split('.')[:2])
    find_and_replace(path=(SOURCE_DIR / '.ci/test-matrix.yml'), pattern='STACK_VERSION:\\s+\\- "[0-9]+[0-9\\.]*[0-9](?:\\-SNAPSHOT)?"', replace=f'''STACK_VERSION:
- "{major_minor_version}.0-SNAPSHOT"''')
class UndefinedTestCase(unittest.TestCase):
    """Behaviour of traits that start out as `Undefined`."""

    def test_initial_value(self):
        bar = Bar()
        self.assertEqual(bar.name, Undefined)

    def test_name_change(self):
        bar = Bar()
        bar.name = 'first'
        self.assertEqual(bar.name, 'first')

    def test_read_only_write_once(self):
        foo = Foo()
        # Before any write: name is empty, original_name untouched.
        self.assertEqual(foo.name, '')
        self.assertIs(foo.original_name, Undefined)
        # The first write is captured by original_name...
        foo.name = 'first'
        self.assertEqual(foo.name, 'first')
        self.assertEqual(foo.original_name, 'first')
        # ...subsequent writes are not.
        foo.name = 'second'
        self.assertEqual(foo.name, 'second')
        self.assertEqual(foo.original_name, 'first')

    def test_read_only_write_once_from_constructor(self):
        # Setting the same value again after construction keeps original_name.
        foo = Foo(name='first')
        foo.name = 'first'
        self.assertEqual(foo.name, 'first')
        self.assertEqual(foo.original_name, 'first')
        foo.name = 'second'
        self.assertEqual(foo.name, 'second')
        self.assertEqual(foo.original_name, 'first')
def compare_internals(xyz_fn):
    """Cross-check redundant internal coordinates against direct evaluation.

    Each primitive internal of the loaded geometry is rebuilt from its
    atom indices and its value/gradient recomputed independently.
    """
    geom = geom_loader(xyz_fn, coord_type='redund')
    # Number of atom indices determines the primitive type.
    prim_classes = {2: Stretch, 3: Bend, 4: Torsion}
    for prim_int in geom.internal._prim_internals:
        prim = prim_classes[len(prim_int.inds)](prim_int.inds)
        print(prim)
        val, grad = prim.calculate(geom.coords3d, gradient=True)
        assert val == pytest.approx(prim_int.val)
        np.testing.assert_allclose(grad, prim_int.grad)
class Solution():
    """Binary-tree level-order traversal (BFS, one list per depth)."""

    def levelOrder(self, root: TreeNode) -> List[List[int]]:
        """Return node values grouped by depth, left to right.

        Iterative breadth-first version: each pass consumes one level
        and collects the next, avoiding recursion depth limits.
        """
        levels = []
        current = [root] if root is not None else []
        while current:
            levels.append([node.val for node in current])
            next_level = []
            for node in current:
                if node.left is not None:
                    next_level.append(node.left)
                if node.right is not None:
                    next_level.append(node.right)
            current = next_level
        return levels
def to_pattern(pattern: Pattern) -> PatternBase:
    """Coerce a raw value into its PatternBase wrapper.

    PatternBase instances pass through; None, scalars, lists and types
    are wrapped; anything else raises TypeError.
    """
    if isinstance(pattern, PatternBase):
        return pattern
    if pattern is None:
        return nonePattern
    # NOTE: bool must be tested before int (bool is an int subclass).
    scalar_wrappers = (
        (bool, BoolPattern),
        (int, IntPattern),
        (float, FloatPattern),
        (str, StringPattern),
    )
    for scalar_type, wrapper in scalar_wrappers:
        if isinstance(pattern, scalar_type):
            return wrapper(pattern)
    if isinstance(pattern, list):
        return ListPattern(pattern) if pattern else EmptyListPattern()
    if isinstance(pattern, type):
        return TypePattern(pattern)
    raise TypeError(f'Expected pattern, got {type(pattern).__name__}')
class TestScheduleE(ApiBaseTest):
    """API tests for Schedule E (independent expenditures) endpoints.

    Covers sorting, date/amount range filters, column filters, full-text
    search, the efile variants, `most_recent` filtering and
    form/line-number validation.
    """
    # All requests are scoped to the 2016 two-year transaction period.
    kwargs = {'two_year_transaction_period': 2016}
    # Ascending sort on support_oppose_indicator puts 'o' before 's'.
    def test_schedule_e_sort_args_ascending(self):
        [factories.ScheduleEFactory(expenditure_amount=100, expenditure_date=datetime.date(2016, 1, 1), committee_id='C', support_oppose_indicator='s'), factories.ScheduleEFactory(expenditure_amount=100, expenditure_date=datetime.date(2016, 1, 1), committee_id='C', support_oppose_indicator='o')]
        results = self._results(api.url_for(ScheduleEView, sort='support_oppose_indicator'))
        self.assertEqual(results[0]['support_oppose_indicator'], 'o')
    # min/max dissemination_date bounds.
    def test_schedule_e_filter_dissemination_date_range(self):
        factories.ScheduleEFactory(dissemination_date=datetime.datetime(2023, 12, 30))
        factories.ScheduleEFactory(dissemination_date=datetime.datetime(2021, 8, 29))
        factories.ScheduleEFactory(dissemination_date=datetime.datetime(2019, 10, 25))
        factories.ScheduleEFactory(dissemination_date=datetime.datetime(2017, 6, 22))
        factories.ScheduleEFactory(dissemination_date=datetime.datetime(2015, 10, 15))
        results = self._results(api.url_for(ScheduleEView, min_dissemination_date=datetime.date.fromisoformat('2015-01-01')))
        assert (len(results) == 5)
        results = self._results(api.url_for(ScheduleEView, max_dissemination_date=datetime.date.fromisoformat('2021-10-25')))
        assert (len(results) == 4)
    # min/max filing_date bounds.
    def test_schedule_e_filters_date_range(self):
        factories.ScheduleEFactory(filing_date=datetime.datetime(2023, 12, 30))
        factories.ScheduleEFactory(filing_date=datetime.datetime(2021, 8, 29))
        factories.ScheduleEFactory(filing_date=datetime.datetime(2019, 10, 25))
        factories.ScheduleEFactory(filing_date=datetime.datetime(2017, 6, 22))
        factories.ScheduleEFactory(filing_date=datetime.datetime(2015, 10, 15))
        results = self._results(api.url_for(ScheduleEView, min_filing_date=datetime.date.fromisoformat('2015-01-01')))
        assert (len(results) == 5)
        results = self._results(api.url_for(ScheduleEView, max_filing_date=datetime.date.fromisoformat('2021-10-25')))
        assert (len(results) == 4)
    # min/max expenditure amount bounds, individually and combined.
    def test_schedule_e_amount(self):
        [factories.ScheduleEFactory(expenditure_amount=50), factories.ScheduleEFactory(expenditure_amount=100), factories.ScheduleEFactory(expenditure_amount=150), factories.ScheduleEFactory(expenditure_amount=200)]
        results = self._results(api.url_for(ScheduleEView, min_amount=100))
        self.assertTrue(all(((each['expenditure_amount'] >= 100) for each in results)))
        results = self._results(api.url_for(ScheduleEView, max_amount=150))
        self.assertTrue(all(((each['expenditure_amount'] <= 150) for each in results)))
        results = self._results(api.url_for(ScheduleEView, min_amount=100, max_amount=150))
        self.assertTrue(all(((100 <= each['expenditure_amount'] <= 150) for each in results)))
    # Descending date sort with nulls last yields reverse insertion order.
    def test_schedule_e_sort(self):
        expenditures = [factories.ScheduleEFactory(expenditure_amount=50), factories.ScheduleEFactory(expenditure_amount=100, expenditure_date=datetime.date(2016, 1, 1)), factories.ScheduleEFactory(expenditure_amount=150, expenditure_date=datetime.date(2016, 2, 1)), factories.ScheduleEFactory(expenditure_amount=200, expenditure_date=datetime.date(2016, 3, 1))]
        sub_ids = [str(each.sub_id) for each in expenditures]
        results = self._results(api.url_for(ScheduleEView, sort='-expenditure_date', sort_nulls_last=True))
        self.assertEqual([each['sub_id'] for each in results], sub_ids[::(- 1)])
    # Each single-column filter returns only the matching row.
    def test_schedule_e_filters(self):
        filters = [('image_number', ScheduleE.image_number, ['123', '456']), ('committee_id', ScheduleE.committee_id, ['C', 'C']), ('support_oppose_indicator', ScheduleE.support_oppose_indicator, ['S', 'O']), ('is_notice', ScheduleE.is_notice, [True, False]), ('candidate_office_state', ScheduleE.candidate_office_state, ['AZ', 'AK']), ('candidate_office_district', ScheduleE.candidate_office_district, ['00', '01']), ('candidate_party', ScheduleE.candidate_party, ['DEM', 'REP']), ('candidate_office', ScheduleE.candidate_office, ['H', 'S', 'P'])]
        for (label, column, values) in filters:
            [factories.ScheduleEFactory(**{column.key: value}) for value in values]
            results = self._results(api.url_for(ScheduleEView, **{label: values[0]}))
            assert (len(results) == 1)
            assert (results[0][column.key] == values[0])
    # Full-text payee search matches all variants containing the token.
    def test_schedule_e_filter_fulltext_pass(self):
        payee_names = ['Test.com', 'Test com', 'Testerosa', 'Test#com', 'Test.com and Test.com']
        [factories.ScheduleEFactory(payee_name=payee) for payee in payee_names]
        results = self._results(api.url_for(ScheduleEView, payee_name='test'))
        self.assertEqual(len(results), len(payee_names))
    # Full-text spender search over the committee name tsvector.
    def test_filter_sched_e_spender_name_text(self):
        [factories.ScheduleEFactory(committee_id='C', spender_name_text=sa.func.to_tsvector('international abc action committee C')), factories.ScheduleEFactory(committee_id='C', spender_name_text=sa.func.to_tsvector('international xyz action committee C'))]
        results = self._results(api.url_for(ScheduleEView, q_spender='action'))
        self.assertEqual(len(results), 2)
        results = self._results(api.url_for(ScheduleEView, q_spender='abc'))
        self.assertEqual(len(results), 1)
        results = self._results(api.url_for(ScheduleEView, q_spender='C'))
        self.assertEqual(len(results), 1)
    # Unsearchable payee names yield a 422.
    def test_schedule_e_filter_fulltext_fail(self):
        payee_names = ['#', '##', '#$%^&*', '%', '', ' ']
        [factories.ScheduleEFactory(payee_name_text=payee) for payee in payee_names]
        response = self.app.get(api.url_for(ScheduleEView, payee_name=payee_names))
        self.assertEqual(response.status_code, 422)
    # Descending sort on support_oppose_indicator puts 's' first.
    def test_schedule_e_sort_args_descending(self):
        [factories.ScheduleEFactory(expenditure_amount=100, expenditure_date=datetime.date(2016, 1, 1), committee_id='C', support_oppose_indicator='s'), factories.ScheduleEFactory(expenditure_amount=100, expenditure_date=datetime.date(2016, 1, 1), committee_id='C', support_oppose_indicator='o')]
        results = self._results(api.url_for(ScheduleEView, sort='-support_oppose_indicator'))
        self.assertEqual(results[0]['support_oppose_indicator'], 's')
    # The expenditure_description column round-trips through the API.
    def test_schedule_e_expenditure_description_field(self):
        factories.ScheduleEFactory(committee_id='C', expenditure_description='Advertising Costs')
        results = self._results(api.url_for(ScheduleEView))
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['expenditure_description'], 'Advertising Costs')
    # Same single-column filter matrix against the efile view.
    def test_schedule_e_efile_filters(self):
        filters = [('image_number', ScheduleEEfile.image_number, ['456', '789']), ('committee_id', ScheduleEEfile.committee_id, ['C', 'C']), ('support_oppose_indicator', ScheduleEEfile.support_oppose_indicator, ['S', 'O']), ('candidate_office', ScheduleEEfile.candidate_office, ['H', 'S', 'P']), ('candidate_party', ScheduleEEfile.candidate_party, ['DEM', 'REP']), ('candidate_office_state', ScheduleEEfile.candidate_office_state, ['AZ', 'AK']), ('candidate_office_district', ScheduleEEfile.candidate_office_district, ['00', '01']), ('filing_form', ScheduleEEfile.filing_form, ['F3X', 'F5']), ('is_notice', ScheduleE.is_notice, [True, False])]
        factories.EFilingsFactory(file_number=123)
        for (label, column, values) in filters:
            [factories.ScheduleEEfileFactory(**{column.key: value}) for value in values]
            results = self._results(api.url_for(ScheduleEEfileView, **{label: values[0]}))
            assert (len(results) == 1)
            assert (results[0][column.key] == values[0])
    # candidate_id filter on the efile view.
    def test_schedule_e_efile_candidate_id_filter(self):
        filters = [('candidate_id', ScheduleEEfile.candidate_id, ['S', 'S'])]
        factories.EFilingsFactory(file_number=123)
        for (label, column, values) in filters:
            [factories.ScheduleEEfileFactory(**{column.key: value}) for value in values]
            results = self._results(api.url_for(ScheduleEEfileView, **{label: values[0]}))
            assert (len(results) == 1)
            assert (results[0][column.key] == values[0])
    # Dissemination date bounds on the efile view.
    def test_schedule_e_efile_dissemination_date_range(self):
        min_date = datetime.date(2018, 1, 1)
        max_date = datetime.date(2019, 12, 31)
        results = self._results(api.url_for(ScheduleEEfileView, min_dissemination_date=min_date))
        self.assertTrue(all((each for each in results if (each['receipt_date'] >= min_date.isoformat()))))
        results = self._results(api.url_for(ScheduleEEfileView, max_dissemination_date=max_date))
        self.assertTrue(all((each for each in results if (each['receipt_date'] <= max_date.isoformat()))))
        results = self._results(api.url_for(ScheduleEEfileView, min_dissemination_date=min_date, max_dissemination_date=max_date))
        self.assertTrue(all((each for each in results if (min_date.isoformat() <= each['dissemination_date'] <= max_date.isoformat()))))
    # Candidate full-text search on the efile view.
    def test_schedule_e_efile_filter_cand_search(self):
        [factories.ScheduleEEfileFactory(cand_fulltxt=sa.func.to_tsvector('C, Rob, Senior')), factories.ScheduleEEfileFactory(cand_fulltxt=sa.func.to_tsvector('C, Ted, Berry')), factories.ScheduleEEfileFactory(cand_fulltxt=sa.func.to_tsvector('C, Rob, Junior'))]
        factories.EFilingsFactory(file_number=123)
        db.session.flush()
        results = self._results(api.url_for(ScheduleEEfileView, candidate_search='Rob'))
        assert (len(results) == 2)
    # most_recent=True excludes only rows explicitly flagged False.
    def test_filter_sched_e_most_recent(self):
        [factories.ScheduleEFactory(committee_id='C', filing_form='F24', most_recent=True), factories.ScheduleEFactory(committee_id='C', filing_form='F5', most_recent=False), factories.ScheduleEFactory(committee_id='C', filing_form='F24', most_recent=True), factories.ScheduleEFactory(committee_id='C', filing_form='F3X', most_recent=True), factories.ScheduleEFactory(committee_id='C', filing_form='F3X'), factories.ScheduleEFactory(committee_id='C', filing_form='F3X')]
        results = self._results(api.url_for(ScheduleEView, most_recent=True))
        self.assertEqual(len(results), 5)
    # Same most_recent behaviour on the efile view.
    def test_filter_sched_e_efile_most_recent(self):
        [factories.ScheduleEEfileFactory(committee_id='C', filing_form='F24', most_recent=True), factories.ScheduleEEfileFactory(committee_id='C', filing_form='F5', most_recent=False), factories.ScheduleEEfileFactory(committee_id='C', filing_form='F24', most_recent=True), factories.ScheduleEEfileFactory(committee_id='C', filing_form='F3X', most_recent=True), factories.ScheduleEEfileFactory(committee_id='C', filing_form='F3X'), factories.ScheduleEEfileFactory(committee_id='C', filing_form='F3X')]
        factories.EFilingsFactory(file_number=123)
        db.session.flush()
        results = self._results(api.url_for(ScheduleEEfileView, most_recent=True))
        self.assertEqual(len(results), 5)
    # form-line filters: single, multiple, negated, and invalid input.
    def test_schedule_e_filter_form_line_number(self):
        [factories.ScheduleEFactory(line_number='24', filing_form='F3X'), factories.ScheduleEFactory(line_number='25', filing_form='F3X'), factories.ScheduleEFactory(line_number='24', filing_form='F3'), factories.ScheduleEFactory(line_number='25', filing_form='F3')]
        results = self._results(api.url_for(ScheduleEView, form_line_number='f3X-24'))
        self.assertEqual(len(results), 1)
        results = self._results(api.url_for(ScheduleEView, form_line_number=('f3x-24', 'f3X-25')))
        self.assertEqual(len(results), 2)
        results = self._results(api.url_for(ScheduleEView, form_line_number='-F3x-24'))
        self.assertEqual(len(results), 3)
        response = self.app.get(api.url_for(ScheduleEView, form_line_number='f3x10'))
        self.assertEqual(response.status_code, 400)
        self.assertIn(b'Invalid form_line_number', response.data)
class TestCreateChannel(TestCase):
    """Tests for create.create_channel.

    BUG FIX: the aliases assertion previously compared the return values
    of list.sort(), i.e. None == None, so it always passed regardless of
    the actual aliases. Compare sorted() copies instead.
    """

    def test_create_channel__simple(self):
        chan = create.create_channel('TestChannel1', desc='Testing channel')
        self.assertEqual(chan.key, 'TestChannel1')
        self.assertEqual(chan.db.desc, 'Testing channel')

    def test_create_channel__complex(self):
        locks = 'foo:false();bar:true()'
        tags = ['tag1', 'tag2', 'tag3']
        aliases = ['foo', 'bar', 'tst']
        chan = create.create_channel('TestChannel2', desc='Testing channel', aliases=aliases, locks=locks, tags=tags)
        self.assertTrue(all(((lock in chan.locks.all()) for lock in locks.split(';'))))
        self.assertEqual(chan.tags.all(), tags)
        # sorted() returns new lists, so this actually compares contents.
        self.assertEqual(sorted(chan.aliases.all()), sorted(aliases))
def Run(params):
    """Print the repositories tracked in the configuration.

    Returns a Status carrying either the sorted repository listing or a
    hint on how to register one when none are tracked yet.
    """
    config = params.config
    if not config.repos:
        msg = 'No repository registered. Use mu register repo_name to register repository.'
        Print(msg)
        return Status(msg, True, config)
    repo_str = '\n'.join(sorted(config.repos))
    Print('Tracked Repositories:\n')
    Print(repo_str)
    return Status(repo_str, True, config)
def test_subclass_default_values():
    """Optional field defaults survive Record subclassing."""
    class X(Record):
        x: int
        y: Optional[int] = None

    class Z(X):
        z: Optional[int] = None

    # Defaults and overrides on the base class.
    assert X(x=None).y is None
    assert X(x=None, y=303).y == 303
    # Inherited default and the subclass's own default.
    assert Z(x=None).y is None
    assert Z(x=None).z is None
    # Explicit values override both.
    assert Z(x=None, y=101, z=303).y == 101
    assert Z(x=None, y=101, z=303).z == 303
def create_wikigraph(output_path: Path, wiki='en', version='latest', dumps_path: Path=None, max_workers: int=None, silent: bool=None, force: bool=None):
    """Build a `<wiki>wiki_core` WikiGraph, dump it and write its meta.json.

    Fails early when the target graph directory already exists unless
    *force* is set.
    """
    if not output_path.exists():
        output_path.mkdir()
        msg.good(f'Created output directory: {output_path}')
    graph_name = f'{wiki}wiki_core'
    graph_path = output_path.joinpath(graph_name)
    if (not force) and graph_path.exists():
        msg.fail(f'Output path already contains {graph_name} directory', 'Use --force to overwrite it', exits=1)
    graph = WikiGraph.build(dumps_path=dumps_path, max_workers=max_workers, wiki=wiki, version=version, verbose=(not silent))
    if not graph_path.exists():
        graph_path.mkdir()
    with msg.loading('dump to disk...'):
        graph.dump(graph_path)
    # Pin the graph metadata to the current spikex major.minor release.
    spikex_ver = '.'.join(spikex_version.split('.')[:2])
    meta = get_meta()
    meta['name'] = graph_name
    meta['wiki'] = wiki
    meta['version'] = graph.version
    meta['spikex_version'] = f'>={spikex_ver}'
    meta['fullname'] = f'{graph_name}-{spikex_ver}'
    meta['sources'].append('Wikipedia')
    graph_path.joinpath('meta.json').write_text(json_dumps(meta, indent=2))
    msg.good(f'Successfully created {graph_name}.')
class TestSolidRunDifferentDirName(unittest.TestCase):
    """SolidRun must derive the run name from the data, not the directory name."""

    def setUp(self):
        # Build a standard SOLiD run directory, then rename it so its
        # on-disk name no longer matches the embedded run name.
        original_dir = TestUtils().make_solid_dir('solid0123__FRAG_BC')
        self.solid_test_dir = original_dir + '_different'
        os.rename(original_dir, self.solid_test_dir)

    def tearDown(self):
        shutil.rmtree(self.solid_test_dir)

    def test_solid_run(self):
        self.solid_run = SolidRun(self.solid_test_dir)
        self.assertTrue(self.solid_run)
        # Run name still comes from the run data, not the renamed folder.
        self.assertEqual(self.solid_run.run_name, 'solid0123__FRAG_BC')
class OptionPlotoptionsPyramidSonificationContexttracksMappingTime(Options):
    """Generated option accessors for a sonification context-track ``time``
    mapping configuration.

    Each option is a getter/setter property pair. In the original file the
    ``@property`` / ``@<name>.setter`` decorators were missing, so each
    same-named setter definition silently shadowed its getter; the
    decorators are restored here.
    """

    @property
    def mapFunction(self):
        """Configured mapping function (``None`` when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Configured ``mapTo`` target (``None`` when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Configured maximum value (``None`` when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Configured minimum value (``None`` when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Configured ``within`` value (``None`` when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_log():
    """configure_logging must recreate the log directory and write exactly
    one line per log call."""
    with TemporaryDirectory() as tmpdir, mock.patch('Util.LOG_DIRECTORY', tmpdir):
        basename = 'test.log'
        # Remove the directory so configure_logging has to recreate it.
        os.rmdir(tmpdir)
        util.configure_logging(basename)
        util.sdlog.info('info level log entry')
        util.sdlog.warning('error level log entry')
        util.sdlog.error('error level log entry')
        path = os.path.join(tmpdir, basename)
        # Close the log file deterministically instead of leaking the handle.
        with open(path) as log_file:
            count = len(log_file.readlines())
        assert count == 3
def test_ot_span_without_auto_attach(tracer: Tracer):
    """A span created via ``start_span`` + ``use_span`` (not auto-attached)
    must still belong to the surrounding transaction, and spans opened under
    it must get it as their parent."""
    with tracer.start_as_current_span('test'):
        detached = tracer.start_span('testspan', kind=SpanKind.CONSUMER)
        with tracer.start_as_current_span('testspan2'):
            pass
        with trace.use_span(detached, end_on_exit=True):
            with tracer.start_as_current_span('testspan3'):
                pass
    client = tracer.client
    transaction = client.events[constants.TRANSACTION][0]
    recorded = client.events[constants.SPAN]
    # Spans are recorded in end order: testspan2 first, then testspan3,
    # then the detached testspan.
    outer = recorded[2]
    child_a = recorded[0]
    child_b = recorded[1]
    assert outer['name'] == 'testspan'
    assert outer['transaction_id'] == transaction['id']
    assert outer['parent_id'] == transaction['id']
    assert child_a['name'] == 'testspan2'
    assert child_a['transaction_id'] == transaction['id']
    assert child_a['parent_id'] == transaction['id']
    assert child_b['name'] == 'testspan3'
    assert child_b['transaction_id'] == transaction['id']
    assert child_b['parent_id'] == outer['id']
def main():
    """Wire the channels, agents and the periodic action generator together,
    then hand control to the app."""
    device_actions = app.channel(value_type=DeviceAction)
    stage1_to_stage2 = app.channel(value_type=DeviceAction)
    dead_letters = app.channel(value_type=DeadLetter)
    # Leader-only timer feeds generated actions into the first stage.
    app.timer(interval=3, on_leader=True)(partial(action_generator, device_actions))
    app.agent(dead_letters, name='dead-letter-agent')(deadletter_agent)
    app.agent(device_actions, name='stage1-agent', sink=[stage1_to_stage2])(partial(stage1_agent, dead_letters))
    app.agent(stage1_to_stage2, name='stage2-agent')(partial(stage2_agent, dead_letters))
    app.main()
class OptionSeriesArearangeSonificationContexttracksMappingFrequency(Options):
    """Generated option accessors for a sonification context-track
    ``frequency`` mapping configuration.

    Each option is a getter/setter property pair. In the original file the
    ``@property`` / ``@<name>.setter`` decorators were missing, so each
    same-named setter definition silently shadowed its getter; the
    decorators are restored here.
    """

    @property
    def mapFunction(self):
        """Configured mapping function (``None`` when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Configured ``mapTo`` target (``None`` when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Configured maximum value (``None`` when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Configured minimum value (``None`` when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Configured ``within`` value (``None`` when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class BoolArg:
    """Coerce a string argument to a bool and render it as an HTML checkbox."""

    def __call__(self, arg):
        """Return False for empty values and the words none/0/off/false
        (case-insensitive); True otherwise."""
        if not arg:
            return False
        if arg.lower() in ('none', '0', 'off', 'false'):
            return False
        return True

    def html(self, name, default, _):
        """Render a hidden '0' input plus a checkbox named *name*, checked
        when *default* is truthy (strings are coerced via __call__)."""
        if isinstance(default, str):
            default = self(default)
        checked = ' checked="checked"' if default else ''
        hidden = '<input name="%s" type="hidden" value="0">' % name
        checkbox = ('<input name="%s" id="%s" aria-labeledby="%s %s" type="checkbox" value="1"%s>'
                    % (name, name, name + '_id', name + '_description', checked))
        return hidden + '\n' + checkbox
# NOTE(review): the bare '.django_db' below looks like a truncated
# '@pytest.mark.django_db' decorator — confirm against the original file.
.django_db
def test_new_awards_month(client, monkeypatch, add_award_recipients, elasticsearch_award_index):
    """New-award counts grouped by month: first over one fiscal year, then
    over three fiscal years, for a fixed recipient."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    test_payload = {'group': 'month', 'filters': {'time_period': [{'start_date': '2008-10-01', 'end_date': '2010-09-30'}], 'recipient_id': '63248e89-7fb7-2d51-4085-d9-R'}}
    expected_results = []
    # FY2009: 15 new awards expected in month 8, 1 in month 10, 0 elsewhere.
    for i in range(1, 13):
        new_award_count = 0
        if (i == 8):
            new_award_count = 15
        elif (i == 10):
            new_award_count = 1
        expected_results.append({'time_period': {'fiscal_year': '2009', 'month': str(i)}, 'new_award_count_in_period': new_award_count})
    # FY2010: no new awards in any month.
    for i in range(1, 13):
        new_award_count = 0
        expected_results.append({'time_period': {'fiscal_year': '2010', 'month': str(i)}, 'new_award_count_in_period': new_award_count})
    expected_response = {'group': 'month', 'results': expected_results, 'messages': [get_time_period_message()]}
    resp = client.post(get_new_awards_over_time_url(), content_type='application/json', data=json.dumps(test_payload))
    assert (resp.status_code == 200)
    assert (resp.data['group'] == 'month')
    assert (expected_response == resp.data)
    # Widen the window to three fiscal years (FY2008-FY2010).
    test_payload['filters']['time_period'] = [{'start_date': '2007-10-01', 'end_date': '2010-09-30'}]
    expected_results = []
    # FY2008: 2 new awards expected in month 4.
    for i in range(1, 13):
        new_award_count = 0
        if (i == 4):
            new_award_count = 2
        expected_results.append({'time_period': {'fiscal_year': '2008', 'month': str(i)}, 'new_award_count_in_period': new_award_count})
    # FY2009: same distribution as above (15 in month 8, 1 in month 10).
    for i in range(1, 13):
        new_award_count = 0
        if (i == 8):
            new_award_count = 15
        elif (i == 10):
            new_award_count = 1
        expected_results.append({'time_period': {'fiscal_year': '2009', 'month': str(i)}, 'new_award_count_in_period': new_award_count})
    # FY2010: still no new awards.
    for i in range(1, 13):
        new_award_count = 0
        expected_results.append({'time_period': {'fiscal_year': '2010', 'month': str(i)}, 'new_award_count_in_period': new_award_count})
    expected_response = {'group': 'month', 'results': expected_results, 'messages': [get_time_period_message()]}
    resp = client.post(get_new_awards_over_time_url(), content_type='application/json', data=json.dumps(test_payload))
    assert (resp.status_code == 200)
    assert (resp.data['group'] == 'month')
    assert (expected_response == resp.data)
class FluentSender(object):
    """Buffered sender of msgpack-encoded events to a Fluentd endpoint.

    Events are written to ``host:port`` over TCP, or to a unix domain socket
    when ``host`` starts with ``unix://``. Data that cannot be sent is kept
    in ``self.pendings``; once it exceeds ``bufmax`` bytes it is handed to
    ``buffer_overflow_handler`` (if any) and dropped. The last send error is
    stored per thread and exposed via the ``last_error`` property.
    """

    def __init__(self, tag, host='localhost', port=24224, bufmax=((1 * 1024) * 1024), timeout=3.0, verbose=False, buffer_overflow_handler=None, nanosecond_precision=False, msgpack_kwargs=None, **kwargs):
        self.tag = tag
        self.host = host
        self.port = port
        self.bufmax = bufmax
        self.timeout = timeout
        self.verbose = verbose
        self.buffer_overflow_handler = buffer_overflow_handler
        self.nanosecond_precision = nanosecond_precision
        self.msgpack_kwargs = ({} if (msgpack_kwargs is None) else msgpack_kwargs)
        # Lazily connected; created on first send via _reconnect().
        self.socket = None
        # Bytes that failed to send and are awaiting retry.
        self.pendings = None
        self.lock = threading.Lock()
        self._closed = False
        # Errors are tracked per thread so concurrent emitters don't clobber
        # each other's last_error.
        self._last_error_threadlocal = threading.local()

    def emit(self, label, data):
        """Emit *data* under ``tag.label`` stamped with the current time."""
        if self.nanosecond_precision:
            cur_time = EventTime(time.time())
        else:
            cur_time = int(time.time())
        return self.emit_with_time(label, cur_time, data)

    def emit_with_time(self, label, timestamp, data):
        """Emit *data* under ``tag.label`` with an explicit *timestamp*.

        Returns True when the packet was sent (or buffered successfully was
        not needed), False otherwise.
        """
        if (self.nanosecond_precision and isinstance(timestamp, float)):
            timestamp = EventTime(timestamp)
        try:
            bytes_ = self._make_packet(label, timestamp, data)
        except Exception as e:
            # Unserializable payload: record the error and emit a CRITICAL
            # placeholder event instead of losing the failure silently.
            self.last_error = e
            bytes_ = self._make_packet(label, timestamp, {'level': 'CRITICAL', 'message': "Can't output to log", 'traceback': traceback.format_exc()})
        return self._send(bytes_)

    @property
    def last_error(self):
        """Last exception recorded by the *current thread* (or ``None``)."""
        return getattr(self._last_error_threadlocal, 'exception', None)

    @last_error.setter
    def last_error(self, err):
        # NOTE: the decorators here were corrupted in the original source
        # (a bare '_error.setter' line above a plain function); restored so
        # that 'self.last_error = e' actually stores into the thread-local.
        self._last_error_threadlocal.exception = err

    def clear_last_error(self, _thread_id=None):
        """Forget the current thread's recorded error, if any."""
        if hasattr(self._last_error_threadlocal, 'exception'):
            delattr(self._last_error_threadlocal, 'exception')

    def close(self):
        """Flush pending data (best effort) and close the connection.

        Idempotent; pending bytes that cannot be flushed are passed to the
        overflow handler.
        """
        with self.lock:
            if self._closed:
                return
            self._closed = True
            if self.pendings:
                try:
                    self._send_data(self.pendings)
                except Exception:
                    self._call_buffer_overflow_handler(self.pendings)
            self._close()
            self.pendings = None

    def _make_packet(self, label, timestamp, data):
        """Return the msgpack-encoded (tag[.label], timestamp, data) tuple."""
        if label:
            tag = '.'.join((self.tag, label))
        else:
            tag = self.tag
        packet = (tag, timestamp, data)
        if self.verbose:
            print(packet)
        return msgpack.packb(packet, **self.msgpack_kwargs)

    def _send(self, bytes_):
        """Thread-safe send; refuses to send after close()."""
        with self.lock:
            if self._closed:
                return False
            return self._send_internal(bytes_)

    def _send_internal(self, bytes_):
        # Prepend anything still pending so event order is preserved.
        if self.pendings:
            self.pendings += bytes_
            bytes_ = self.pendings
        try:
            self._send_data(bytes_)
            self.pendings = None
            return True
        except socket.error as e:
            self.last_error = e
            # Close on error: the socket state is unknown after a failure.
            self._close()
            if (self.pendings and (len(self.pendings) > self.bufmax)):
                self._call_buffer_overflow_handler(self.pendings)
                self.pendings = None
            else:
                self.pendings = bytes_
            return False

    def _check_recv_side(self):
        """Detect a half-closed connection by peeking at the receive side."""
        try:
            self.socket.settimeout(0.0)
            try:
                recvd = self.socket.recv(4096)
            except socket.error as recv_e:
                # EWOULDBLOCK means "nothing to read", i.e. still connected.
                if (recv_e.errno != errno.EWOULDBLOCK):
                    raise
                return
            if (recvd == b''):
                raise socket.error(errno.EPIPE, 'Broken pipe')
        finally:
            self.socket.settimeout(self.timeout)

    def _send_data(self, bytes_):
        """Send *bytes_* completely, reconnecting first if needed."""
        self._reconnect()
        bytes_to_send = len(bytes_)
        bytes_sent = 0
        self._check_recv_side()
        while (bytes_sent < bytes_to_send):
            sent = self.socket.send(bytes_[bytes_sent:])
            if (sent == 0):
                raise socket.error(errno.EPIPE, 'Broken pipe')
            bytes_sent += sent
            self._check_recv_side()

    def _reconnect(self):
        """Open the TCP or unix-domain socket if not already connected."""
        if (not self.socket):
            try:
                if self.host.startswith('unix://'):
                    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
                    sock.settimeout(self.timeout)
                    sock.connect(self.host[len('unix://'):])
                else:
                    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                    sock.settimeout(self.timeout)
                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
                    sock.connect((self.host, self.port))
            except Exception as e:
                try:
                    sock.close()
                except Exception:
                    pass
                raise e
            else:
                self.socket = sock

    def _call_buffer_overflow_handler(self, pending_events):
        """Invoke the user handler; a failing handler must never propagate."""
        try:
            if self.buffer_overflow_handler:
                self.buffer_overflow_handler(pending_events)
        except Exception:
            pass

    def _close(self):
        """Shut down and close the socket, swallowing socket errors."""
        try:
            sock = self.socket
            if sock:
                try:
                    try:
                        sock.shutdown(socket.SHUT_RDWR)
                    except socket.error:
                        pass
                finally:
                    try:
                        sock.close()
                    except socket.error:
                        pass
        finally:
            self.socket = None

    def __enter__(self):
        return self

    def __exit__(self, typ, value, traceback):
        try:
            self.close()
        except Exception as e:
            self.last_error = e
class serienRecSendeTermine(serienRecBaseScreen, Screen, HelpableScreen):
    def __init__(self, session, seriesName, seriesWLID, seriesFSID):
        """Screen listing the upcoming transmissions of one series.

        :param session: enigma2 session used to open sub-screens.
        :param seriesName: display name of the series.
        :param seriesWLID: series id used to query the SeriesServer
            (0/falsy disables fetching; see searchEvents).
        :param seriesFSID: series id used as the database key for
            markers/timers.
        """
        serienRecBaseScreen.__init__(self, session)
        Screen.__init__(self, session)
        HelpableScreen.__init__(self)
        self.database = SRDatabase(serienRecDataBaseFilePath)
        self.session = session
        self.picload = ePicLoad()
        self.seriesName = seriesName
        # Timers already created for this series, used to tag list entries.
        self.addedEpisodes = self.database.getTimerForSeries(seriesFSID, False)
        self.seriesWLID = seriesWLID
        self.seriesFSID = seriesFSID
        self.skin = None
        # Key bindings with their German help texts (shown by HelpableScreen).
        self['actions'] = HelpableActionMap(self, 'SerienRecorderActions', {'ok': (self.keyOK, 'Umschalten ausgewahlter Sendetermin aktiviert/deaktiviert'), 'cancel': (self.keyCancel, 'Zuruck zur Serien-Marker-Ansicht'), 'left': (self.keyLeft, 'Zur vorherigen Seite blattern'), 'right': (self.keyRight, 'Zur nachsten Seite blattern'), 'up': (self.keyUp, 'Eine Zeile nach oben'), 'down': (self.keyDown, 'Eine Zeile nach unten'), 'red': (self.keyRed, 'Zuruck zur Serien-Marker-Ansicht'), 'green': (self.keyGreen, 'Timer fur aktivierte Sendetermine erstellen'), 'yellow': (self.keyYellow, 'Filter umschalten (aktive Sender / Marker-Sender / alle)'), 'blue': (self.keyBlue, 'Ansicht Timer-Liste offnen'), 'menu': (self.recSetup, 'Menu fur globale Einstellungen offnen'), 'startTeletext': (self.wunschliste, 'Informationen zur ausgewahlten Serie auf Wunschliste anzeigen'), '0': (self.readLogFile, 'Log-File des letzten Suchlaufs anzeigen'), '3': (self.showProposalDB, 'Liste der Serien/Staffel-Starts anzeigen'), '4': (self.serieInfo, 'Informationen zur ausgewahlten Serie anzeigen'), '6': (self.showConflicts, 'Liste der Timer-Konflikte anzeigen'), '7': (self.showWishlist, 'Merkzettel (vorgemerkte Folgen) anzeigen')}, (- 1))
        self.helpList[0][2].sort()
        self['helpActions'] = ActionMap(['SerienRecorderActions'], {'displayHelp': self.showHelp, 'displayHelp_long': self.showManual}, 0)
        # Channel filter mode: 1 = active channels (see keyYellow for 0/2).
        self.filterMode = 1
        self.title_txt = 'aktive Sender'
        self.changesMade = False
        self.setupSkin()
        self.sendetermine_list = []
        # Blocks key handlers until the first search finished.
        self.loading = True
        self.onLayoutFinish.append(self.searchEvents)
        self.onClose.append(self.__onClose)
        self.onLayoutFinish.append(self.setSkinProperties)
    def callHelpAction(self, *args):
        # Delegate straight to HelpableScreen's implementation.
        HelpableScreen.callHelpAction(self, *args)
def setSkinProperties(self):
super(self.__class__, self).setSkinProperties()
self['text_red'].setText('Abbrechen')
self['text_ok'].setText('Auswahl')
if (self.filterMode == 1):
self['text_yellow'].setText('Filter umschalten')
self.title_txt = 'aktive Sender'
elif (self.filterMode == 2):
self['text_yellow'].setText('Filter ausschalten')
self.title_txt = 'Marker Sender'
else:
self['text_yellow'].setText('Filter einschalten')
self.title_txt = 'alle'
self['text_blue'].setText('Timer-Liste')
super(self.__class__, self).startDisplayTimer()
    def setupSkin(self):
        """Initialise the skin, the transmission list widget and the static
        UI elements of this screen."""
        self.skin = None
        InitSkin(self)
        # Multi-content listbox rendering one transmission per row
        # (rows are built by buildList_termine).
        self.chooseMenuList = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
        self.chooseMenuList.l.setFont(0, gFont('Regular', (20 + int(config.plugins.serienRec.listFontsize.value))))
        self.chooseMenuList.l.setItemHeight(int((50 * skinFactor)))
        self['menu_list'] = self.chooseMenuList
        self['menu_list'].show()
        self['title'].setText('Lade Web-Sender / Box-Sender...')
        if config.plugins.serienRec.showCover.value:
            self['cover'].show()
        # With "show all buttons" disabled, each element is shown explicitly.
        if (not config.plugins.serienRec.showAllButtons.value):
            self['bt_red'].show()
            self['bt_green'].show()
            self['bt_ok'].show()
            self['bt_yellow'].show()
            self['bt_blue'].show()
            self['bt_exit'].show()
            self['bt_text'].show()
            self['bt_info'].show()
            self['bt_menu'].show()
            self['text_red'].show()
            self['text_green'].show()
            self['text_ok'].show()
            self['text_yellow'].show()
            self['text_blue'].show()
            self['text_0'].show()
            self['text_1'].show()
            self['text_2'].show()
            self['text_3'].show()
            self['text_4'].show()
def serieInfo(self):
if self.loading:
return
if self.seriesWLID:
from .SerienRecorderSeriesInfoScreen import serienRecShowInfo
self.session.open(serienRecShowInfo, self.seriesName, self.seriesWLID, self.seriesFSID)
def wunschliste(self):
super(self.__class__, self).wunschliste(self.seriesFSID)
def setupClose(self, result):
if (not result[2]):
self.close()
else:
if result[0]:
if config.plugins.serienRec.timeUpdate.value:
from .SerienRecorderCheckForRecording import checkForRecordingInstance
checkForRecordingInstance.initialize(self.session, False, False)
if result[1]:
self.searchEvents()
def searchEvents(self, result=None):
self['title'].setText(("Suche ' %s '" % self.seriesName))
print(("[SerienRecorder] suche ' %s '" % self.seriesName))
transmissions = None
if self.seriesWLID:
if (self.seriesWLID != 0):
from .SerienRecorder import getCover
getCover(self, self.seriesName, self.seriesFSID)
if (self.filterMode == 0):
webChannels = []
elif (self.filterMode == 1):
webChannels = self.database.getActiveChannels()
else:
webChannels = self.database.getMarkerChannels(self.seriesFSID)
try:
transmissions = SeriesServer().doGetTransmissions(self.seriesWLID, 0, webChannels)
except:
transmissions = None
else:
transmissions = None
self.resultsEvents(transmissions)
def getFilteredTransmissions(transmissions, addedEpisodes, database, seriesFSID):
filteredTransmissions = []
dirList = {}
marginList = {}
seriesSeason = None
fromEpisode = None
try:
(serienTitle, seriesURL, seriesSeason, seriesChannel, fromEpisode, numberOfRecordings, seriesEnabled, excludedWeekdays, skipSeriesServer, markerType, fsID, forceRecording) = database.getMarkers(config.plugins.serienRec.BoxID.value, config.plugins.serienRec.NoOfRecords.value, [seriesFSID])[0]
except:
SRLogger.writeLog('Fehler beim Filtern nach Staffel', True)
for (seriesName, channel, startTime, endTime, season, episode, title, status) in transmissions:
seasonAllowed = serienRecSendeTermine.isSeasonAllowed(database, seriesFSID, season, episode, seriesSeason, fromEpisode)
if ((config.plugins.serienRec.seasonFilter.value == '1') and (not seasonAllowed)):
continue
seasonEpisodeString = ('S%sE%s' % (str(season).zfill(2), str(episode).zfill(2)))
bereits_vorhanden = False
if config.plugins.serienRec.sucheAufnahme.value:
if (not (season in dirList)):
dirList[season] = getDirname(database, seriesName, seriesFSID, season)
(dirname, dirname_serie) = dirList[season]
if str(episode).isdigit():
if (int(episode) == 0):
bereits_vorhanden = ((STBHelpers.countEpisodeOnHDD(dirname, seasonEpisodeString, seriesName, True, title) and True) or False)
else:
bereits_vorhanden = ((STBHelpers.countEpisodeOnHDD(dirname, seasonEpisodeString, seriesName, True) and True) or False)
else:
bereits_vorhanden = ((STBHelpers.countEpisodeOnHDD(dirname, seasonEpisodeString, seriesName, True) and True) or False)
if bereits_vorhanden:
addedType = 1
else:
if (not (channel in marginList)):
marginList[channel] = database.getMargins(seriesFSID, channel, config.plugins.serienRec.margin_before.value, config.plugins.serienRec.margin_after.value)
(margin_before, margin_after) = marginList[channel]
start_unixtime = (startTime - (int(margin_before) * 60))
if serienRecSendeTermine.isTimerAdded(addedEpisodes, channel, season, episode, int(start_unixtime), title):
addedType = 2
elif serienRecSendeTermine.isAlreadyAdded(addedEpisodes, season, episode, title):
addedType = 3
else:
addedType = 0
if ((not config.plugins.serienRec.timerFilter.value) or (config.plugins.serienRec.timerFilter.value and (addedType == 0))):
filteredTransmissions.append([seriesName, channel, startTime, endTime, season, episode, title, status, addedType, seasonAllowed])
return filteredTransmissions
def resultsEvents(self, transmissions):
if (transmissions is None):
self['title'].setText(("Fehler beim Abrufen der Termine fur ' %s '" % self.seriesName))
return
self.sendetermine_list = []
if self.changesMade:
self.addedEpisodes = self.database.getTimerForSeries(self.seriesFSID, False)
self.sendetermine_list = serienRecSendeTermine.getFilteredTransmissions(transmissions, self.addedEpisodes, self.database, self.seriesFSID)
if len(self.sendetermine_list):
self['text_green'].setText('Timer erstellen')
self.chooseMenuList.setList(list(map(self.buildList_termine, self.sendetermine_list)))
self.loading = False
self['title'].setText(("%s Sendetermine fur ' %s ' gefunden. (%s)" % (str(len(self.sendetermine_list)), self.seriesName, self.title_txt)))
def buildList_termine(entry):
(serien_name, sender, start, end, staffel, episode, title, status, addedType, seasonAllowed) = entry
seasonEpisodeString = ('S%sE%s' % (str(staffel).zfill(2), str(episode).zfill(2)))
weekdays = ['Mo', 'Di', 'Mi', 'Do', 'Fr', 'Sa', 'So']
transmissionTime = time.localtime(start)
datum = time.strftime((weekdays[transmissionTime.tm_wday] + ', %d.%m.%Y'), transmissionTime)
startTime = time.strftime('%H:%M', transmissionTime)
serienRecMainPath = os.path.dirname(__file__)
imageMinus = ('%s/images/minus.png' % serienRecMainPath)
imagePlus = ('%s/images/plus.png' % serienRecMainPath)
imageNone = ('%s/images/black.png' % serienRecMainPath)
if (int(status) == 0):
leftImage = imageMinus
else:
leftImage = imagePlus
imageHDD = imageNone
imageTimer = imageNone
if ((not seasonAllowed) and (config.plugins.serienRec.seasonFilter.value == '2')):
titleColor = parseColor('grey').argb()
titleColorSelected = titleColor
elif (addedType == 1):
titleColor = None
titleColorSelected = None
imageHDD = ('%simages/hdd_icon.png' % serienRecMainPath)
elif (addedType == 2):
titleColor = parseColor('blue').argb()
titleColorSelected = 39367
imageTimer = ('%simages/timer.png' % serienRecMainPath)
elif (addedType == 3):
titleColor = parseColor('green').argb()
titleColorSelected = titleColor
else:
titleColor = parseColor('red').argb()
titleColorSelected = titleColor
foregroundColor = parseColor('foreground').argb()
return [entry, (eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, (5 * skinFactor), (15 * skinFactor), (16 * skinFactor), (16 * skinFactor), loadPNG(leftImage)), (eListboxPythonMultiContent.TYPE_TEXT, (40 * skinFactor), (3 * skinFactor), (240 * skinFactor), (26 * skinFactor), 0, (RT_HALIGN_LEFT | RT_VALIGN_CENTER), sender), (eListboxPythonMultiContent.TYPE_TEXT, (40 * skinFactor), (29 * skinFactor), (230 * skinFactor), (18 * skinFactor), 0, (RT_HALIGN_LEFT | RT_VALIGN_CENTER), ('%s - %s' % (datum, startTime)), foregroundColor, foregroundColor), (eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, (265 * skinFactor), (7 * skinFactor), (30 * skinFactor), (22 * skinFactor), loadPNG(imageTimer)), (eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, (265 * skinFactor), (30 * skinFactor), (30 * skinFactor), (22 * skinFactor), loadPNG(imageHDD)), (eListboxPythonMultiContent.TYPE_TEXT, (300 * skinFactor), (3 * skinFactor), (565 * skinFactor), (44 * skinFactor), 0, (RT_HALIGN_LEFT | RT_VALIGN_CENTER), ('%s - %s' % (seasonEpisodeString, title)), titleColor, titleColorSelected)]
def isAlreadyAdded(addedEpisodes, season, episode, title=None):
result = False
seasonEpisodeString = ('S%sE%s' % (str(season).zfill(2), str(episode).zfill(2)))
if (seasonEpisodeString != 'S00E00'):
title = None
if (not title):
for addedEpisode in addedEpisodes:
if ((addedEpisode[0] == season) and (addedEpisode[1] == episode)):
result = True
break
else:
for addedEpisode in addedEpisodes:
if ((addedEpisode[0] == season) and (addedEpisode[1] == episode) and (addedEpisode[2] == title)):
result = True
break
return result
def isTimerAdded(addedEpisodes, sender, season, episode, start_unixtime, title=None):
result = False
if (not title):
for addedEpisode in addedEpisodes:
if ((addedEpisode[0] == season) and (addedEpisode[1] == episode) and (addedEpisode[3] == sender.lower()) and ((int(start_unixtime) - (int(STBHelpers.getEPGTimeSpan()) * 60)) <= addedEpisode[4] <= (int(start_unixtime) + (int(STBHelpers.getEPGTimeSpan()) * 60)))):
result = True
break
else:
for addedEpisode in addedEpisodes:
if ((((addedEpisode[0] == season) and (addedEpisode[1] == episode)) or (addedEpisode[2] == title)) and (addedEpisode[3] == sender.lower()) and ((int(start_unixtime) - (int(STBHelpers.getEPGTimeSpan()) * 60)) <= addedEpisode[4] <= (int(start_unixtime) + (int(STBHelpers.getEPGTimeSpan()) * 60)))):
result = True
break
return result
def countSelectedTransmissionForTimerCreation(self):
result = 0
for (serien_name, sender, start_unixtime, end_unixtime, staffel, episode, title, status, addedType, seasonAllowed) in self.sendetermine_list:
if (int(status) == 1):
result += 1
return result
def getTimes(self):
changesMade = False
if ((len(self.sendetermine_list) != 0) and (self.countSelectedTransmissionForTimerCreation() != 0)):
(activatedTimer, deactivatedTimer) = serienRecSendeTermine.prepareTimer(self.database, self.filterMode, self.seriesWLID, self.seriesFSID, self.sendetermine_list)
from .SerienRecorderLogScreen import serienRecReadLog
self.session.open(serienRecReadLog)
if ((activatedTimer > 0) or (deactivatedTimer > 0)):
changesMade = True
else:
self['title'].setText('Keine Sendetermine ausgewahlt.')
print('[SerienRecorder] keine Sendetermine ausgewahlt.')
return changesMade
    def createTimer(database, filterMode, wlid, fsid, params, force=True):
        """Create one STB timer for a single transmission.

        Tries the preferred channel first, then (if configured) the
        alternative channel, and finally re-adds the timer deactivated when a
        conflict was reported. Returns ``(TimerOK, activatedTimer,
        deactivatedTimer)``.

        NOTE(review): no ``self`` parameter — presumably a stripped
        ``@staticmethod``; it is only ever called via the class attribute.
        """
        activatedTimer = 0
        deactivatedTimer = 0
        # force=False short-circuits without creating anything.
        if (not force):
            return (False, activatedTimer, deactivatedTimer)
        else:
            (serien_name, sender, start_unixtime, margin_before, margin_after, end_unixtime, label_serie, staffel, episode, title, dirname, preferredChannel, useAlternativeChannel, vpsSettings, tags, addToDatabase, autoAdjust, epgSeriesName, timerSeriesName, kindOfTimer) = params
            (webChannel, stbChannel, stbRef, altstbChannel, altstbRef, status) = database.getChannelInfo(sender, fsid, filterMode)
            TimerOK = False
            if (stbChannel == ''):
                SRLogger.writeLog(("' %s ' - Kein Box-Sender gefunden ' %s '" % (serien_name, webChannel)))
            elif (int(status) == 0):
                SRLogger.writeLog(("' %s ' - Box-Sender deaktiviert ' %s '" % (serien_name, webChannel)))
            else:
                from .SerienRecorderTimer import serienRecTimer, serienRecBoxTimer
                timer = serienRecTimer()
                timer_name = serienRecTimer.getTimerName(serien_name, staffel, episode, title, timerSeriesName, 0)
                timer_description = serienRecTimer.getTimerDescription(serien_name, staffel, episode, title)
                # Preferred channel first; the other becomes the fallback ref.
                if (preferredChannel == 1):
                    timer_stbRef = stbRef
                    timer_altstbRef = altstbRef
                else:
                    timer_stbRef = altstbRef
                    timer_altstbRef = stbRef
                if ((len(epgSeriesName) == 0) or (epgSeriesName == serien_name)):
                    epgSeriesName = ''
                # Optionally align start/end with the matching EPG event, then
                # re-apply the margins around the EPG times.
                if database.getUpdateFromEPG(fsid, config.plugins.serienRec.eventid.value):
                    (eit, start_unixtime_eit, end_unixtime_eit) = STBHelpers.getStartEndTimeFromEPG(start_unixtime, end_unixtime, margin_before, serien_name, title, epgSeriesName, timer_stbRef)
                    if (eit > 0):
                        start_unixtime_eit = (int(start_unixtime_eit) - (int(margin_before) * 60))
                        end_unixtime_eit = (int(end_unixtime_eit) + (int(margin_after) * 60))
                else:
                    eit = 0
                    start_unixtime_eit = start_unixtime
                    end_unixtime_eit = end_unixtime
                konflikt = ''
                result = serienRecBoxTimer.addTimer(timer_stbRef, str(start_unixtime_eit), str(end_unixtime_eit), timer_name, timer_description, eit, False, dirname, vpsSettings, tags, autoAdjust, kindOfTimer, False, None)
                if result['result']:
                    timer.addTimerToDB(serien_name, wlid, fsid, staffel, episode, title, str(start_unixtime_eit), timer_stbRef, webChannel, eit, addToDatabase)
                    activatedTimer += 1
                    TimerOK = True
                else:
                    konflikt = result['message']
                # Second attempt on the alternative channel.
                if ((not TimerOK) and useAlternativeChannel):
                    if ((len(epgSeriesName) == 0) or (epgSeriesName == serien_name)):
                        epgSeriesName = ''
                    if database.getUpdateFromEPG(fsid, config.plugins.serienRec.eventid.value):
                        (alt_eit, alt_start_unixtime_eit, alt_end_unixtime_eit) = STBHelpers.getStartEndTimeFromEPG(start_unixtime, end_unixtime, margin_before, serien_name, title, epgSeriesName, timer_altstbRef)
                    else:
                        alt_eit = 0
                        alt_start_unixtime_eit = start_unixtime
                        alt_end_unixtime_eit = end_unixtime
                    alt_start_unixtime_eit = (int(alt_start_unixtime_eit) - (int(margin_before) * 60))
                    alt_end_unixtime_eit = (int(alt_end_unixtime_eit) + (int(margin_after) * 60))
                    result = serienRecBoxTimer.addTimer(timer_altstbRef, str(alt_start_unixtime_eit), str(alt_end_unixtime_eit), timer_name, timer_description, alt_eit, False, dirname, vpsSettings, tags, autoAdjust, kindOfTimer, False, None)
                    if result['result']:
                        konflikt = None
                        timer.addTimerToDB(serien_name, wlid, fsid, staffel, episode, title, str(alt_start_unixtime_eit), timer_altstbRef, webChannel, alt_eit, addToDatabase)
                        activatedTimer += 1
                        TimerOK = True
                    else:
                        konflikt = result['message']
                # Last resort: log the conflict and add the timer deactivated.
                if ((not TimerOK) and konflikt):
                    SRLogger.writeLog(("' %s ' - ACHTUNG! %s" % (label_serie, konflikt)), True)
                    dbMessage = result['message'].replace('In Konflikt stehende Timer vorhanden!', '').strip()
                    result = serienRecBoxTimer.addTimer(timer_stbRef, str(start_unixtime_eit), str(end_unixtime_eit), timer_name, timer_description, eit, True, dirname, vpsSettings, tags, autoAdjust, kindOfTimer, False, None)
                    if result['result']:
                        timer.addTimerToDB(serien_name, wlid, fsid, staffel, episode, title, str(start_unixtime_eit), timer_stbRef, webChannel, eit, addToDatabase, False)
                        deactivatedTimer += 1
                        TimerOK = True
                        database.addTimerConflict(dbMessage, start_unixtime_eit, webChannel)
            return (TimerOK, activatedTimer, deactivatedTimer)
    def prepareTimer(database, filterMode, wlid, fsid, sendetermine):
        """Create timers for all selected entries of ``sendetermine``.

        For each entry with status == 1 the margins, VPS/tag/adjust settings
        and the target directory are resolved, existing timers/recordings are
        counted against the configured number of recordings, and
        ``createTimer`` is invoked (forced when the episode already exists
        and forceManualRecording is set). Returns ``(activatedTimer,
        deactivatedTimer)``.

        NOTE(review): no ``self`` parameter — presumably a stripped
        ``@staticmethod``; it is only ever called via the class attribute.
        """
        activatedTimer = 0
        deactivatedTimer = 0
        lt = time.localtime()
        uhrzeit = time.strftime('%d.%m.%Y - %H:%M:%S', lt)
        print(("' Manuelle Timererstellung aus Sendeterminen um %s '" % uhrzeit))
        SRLogger.writeLog(("\n' Manuelle Timererstellung aus Sendeterminen um %s '" % uhrzeit), True)
        for (serien_name, sender, start_unixtime, end_unixtime, staffel, episode, title, status, addedType, seasonAllowed) in sendetermine:
            if (int(status) == 1):
                seasonEpisodeString = ('S%sE%s' % (str(staffel).zfill(2), str(episode).zfill(2)))
                label_serie = ('%s - %s - %s' % (serien_name, seasonEpisodeString, title))
                # Extend the transmission window by the configured margins.
                (margin_before, margin_after) = database.getMargins(fsid, sender, config.plugins.serienRec.margin_before.value, config.plugins.serienRec.margin_after.value)
                start_unixtime = (int(start_unixtime) - (int(margin_before) * 60))
                end_unixtime = (int(end_unixtime) + (int(margin_after) * 60))
                vpsSettings = database.getVPS(fsid, sender)
                tags = database.getTags(fsid)
                addToDatabase = database.getAddToDatabase(fsid)
                autoAdjust = database.getAutoAdjust(fsid, sender)
                kindOfTimer = database.getKindOfTimer(fsid, config.plugins.serienRec.kindOfTimer.value)
                epgSeriesName = database.getMarkerEPGName(fsid)
                timerSeriesName = database.getMarkerTimerName(fsid)
                (dirname, dirname_serie) = getDirname(database, serien_name, fsid, staffel)
                print('[SerienRecorder] Check configured recording directories')
                try:
                    SRLogger.writeLog('\nPrufe konfigurierte Aufnahmeverzeichnisse:', True)
                    recordDirectories = database.getRecordDirectories(config.plugins.serienRec.savetopath.value)
                    for directory in recordDirectories:
                        SRLogger.writeLog((" ' %s '" % directory), True)
                        os.path.exists(directory)
                    SRLogger.writeLog('\n', True)
                except:
                    SRLogger.writeLog('Es konnten nicht alle Aufnahmeverzeichnisse gefunden werden', True)
                (NoOfRecords, preferredChannel, useAlternativeChannel) = database.getPreferredMarkerChannels(fsid, config.plugins.serienRec.useAlternativeChannel.value, config.plugins.serienRec.NoOfRecords.value)
                params = (serien_name, sender, start_unixtime, margin_before, margin_after, end_unixtime, label_serie, staffel, episode, title, dirname, preferredChannel, bool(useAlternativeChannel), vpsSettings, tags, addToDatabase, autoAdjust, epgSeriesName, timerSeriesName, kindOfTimer)
                timerExists = False
                # Count existing timers either per bouquet or in total,
                # depending on the bouquet preference settings.
                if (config.plugins.serienRec.selectBouquets.value and config.plugins.serienRec.preferMainBouquet.value):
                    (primary_bouquet_active, secondary_bouquet_active) = database.isBouquetActive(sender)
                    if str(episode).isdigit():
                        if (int(episode) == 0):
                            # Episode 0: the title disambiguates the episode.
                            (count_manually, count_primary_bouquet, count_secondary_bouquet) = database.getNumberOfTimersByBouquet(fsid, str(staffel), str(episode), title)
                        else:
                            (count_manually, count_primary_bouquet, count_secondary_bouquet) = database.getNumberOfTimersByBouquet(fsid, str(staffel), str(episode))
                    else:
                        (count_manually, count_primary_bouquet, count_secondary_bouquet) = database.getNumberOfTimersByBouquet(fsid, str(staffel), str(episode))
                    if ((count_manually >= NoOfRecords) or ((count_primary_bouquet >= NoOfRecords) or (secondary_bouquet_active and (count_secondary_bouquet >= NoOfRecords)) or (primary_bouquet_active and (count_primary_bouquet >= NoOfRecords)))):
                        timerExists = True
                else:
                    if str(episode).isdigit():
                        if (int(episode) == 0):
                            bereits_vorhanden = database.getNumberOfTimers(fsid, str(staffel), str(episode), title, searchOnlyActiveTimers=True)
                            bereits_vorhanden_HDD = STBHelpers.countEpisodeOnHDD(dirname, seasonEpisodeString, serien_name, False, title)
                        else:
                            bereits_vorhanden = database.getNumberOfTimers(fsid, str(staffel), str(episode), searchOnlyActiveTimers=True)
                            bereits_vorhanden_HDD = STBHelpers.countEpisodeOnHDD(dirname, seasonEpisodeString, serien_name, False)
                    else:
                        bereits_vorhanden = database.getNumberOfTimers(fsid, str(staffel), str(episode), searchOnlyActiveTimers=True)
                        bereits_vorhanden_HDD = STBHelpers.countEpisodeOnHDD(dirname, seasonEpisodeString, serien_name, False)
                    if ((bereits_vorhanden >= NoOfRecords) or (bereits_vorhanden_HDD >= NoOfRecords)):
                        timerExists = True
                if (not timerExists):
                    (TimerDone, onTimer, offTimer) = serienRecSendeTermine.createTimer(database, filterMode, wlid, fsid, params)
                else:
                    # Already recorded/scheduled: only force when configured.
                    SRLogger.writeLog(("' %s ' Staffel/Episode bereits vorhanden ' %s '" % (serien_name, seasonEpisodeString)))
                    (TimerDone, onTimer, offTimer) = serienRecSendeTermine.createTimer(database, filterMode, wlid, fsid, params, config.plugins.serienRec.forceManualRecording.value)
                activatedTimer += onTimer
                deactivatedTimer += offTimer
                if TimerDone:
                    STBHelpers.createDirectory(fsid, 0, dirname, dirname_serie)
        SRLogger.writeLog(('Es wurde(n) %s Timer erstellt.' % str(activatedTimer)), True)
        print(('[SerienRecorder] Es wurde(n) %s Timer erstellt.' % str(activatedTimer)))
        if (deactivatedTimer > 0):
            SRLogger.writeLog(('%s Timer wurde(n) wegen Konflikten deaktiviert erstellt!' % str(deactivatedTimer)), True)
            print(('[SerienRecorder] %s Timer wurde(n) wegen Konflikten deaktiviert erstellt!' % str(deactivatedTimer)))
        SRLogger.writeLog("\n' Manuelle Timererstellung aus Sendeterminen beendet '", True)
        print("' Manuelle Timererstellung aus Sendeterminen beendet '")
        return (activatedTimer, deactivatedTimer)
def isSeasonAllowed(database, seriesFSID, season, episode, markerSeasons, fromEpisode):
    """Decide whether a (season, episode) pair may be recorded.

    Marker conventions: -2 blocks everything, -1 means "from the highest
    marker season on", 0 together with -1 allows all.  Season 0 entries are
    gated by ``fromEpisode``; non-numeric seasons fall back to the series'
    "specials allowed" flag in the database.
    """
    # No season markers and no episode threshold -> nothing restricts recording.
    if not markerSeasons and not fromEpisode:
        return True
    if -2 in markerSeasons:
        return False
    if -1 in markerSeasons and 0 in markerSeasons:
        return True
    if str(season).isdigit():
        season_no = int(season)
        if season_no == 0:
            # Specials (season 0) are allowed from a configured episode on.
            return str(episode).isdigit() and int(episode) >= int(fromEpisode)
        if season_no in markerSeasons:
            return True
        if -1 in markerSeasons:
            # "-1" marker: allow every season at or above the highest marker.
            return season_no >= max(markerSeasons)
        return False
    # Non-numeric season string: defer to the per-series specials setting.
    if database.getSpecialsAllowed(seriesFSID):
        return True
    return False
def keyOK(self):
    """Toggle the record flag (column 7) of the selected row and refresh the list."""
    if self.loading or self['menu_list'].getCurrent() is None:
        return
    row_index = self['menu_list'].getSelectedIndex()
    if self.sendetermine_list:
        row = self.sendetermine_list[row_index]
        # Flip between '0' and '1' (stored as strings).
        row[7] = '1' if int(row[7]) == 0 else '0'
    self.chooseMenuList.setList(list(map(self.buildList_termine, self.sendetermine_list)))
def keyLeft(self):
    """Scroll the transmissions list one page up."""
    menu = self['menu_list']
    menu.pageUp()
def keyRight(self):
    """Scroll the transmissions list one page down."""
    menu = self['menu_list']
    menu.pageDown()
def keyDown(self):
    """Move the selection one row down."""
    menu = self['menu_list']
    menu.down()
def keyUp(self):
    """Move the selection one row up."""
    menu = self['menu_list']
    menu.up()
def keyRed(self):
    """Close the screen, reporting whether any changes were made."""
    result = self.changesMade
    self.close(result)
def keyGreen(self):
    """Re-read the time settings; refresh the event search only on change."""
    self.changesMade = self.getTimes()
    if not self.changesMade:
        return
    self.searchEvents()
def keyYellow(self):
    """Cycle the channel filter (all -> active -> marker) and reload transmissions."""
    self.sendetermine_list = []
    self.loading = True
    self.chooseMenuList.setList(list(map(self.buildList_termine, self.sendetermine_list)))
    # For each current mode: (next mode, yellow-button label, list title).
    transitions = {
        0: (1, 'Filter umschalten', 'aktive Sender'),
        1: (2, 'Filter ausschalten', 'Marker Sender'),
    }
    next_mode, label, title = transitions.get(self.filterMode, (0, 'Filter einschalten', 'alle'))
    self.filterMode = next_mode
    self['text_yellow'].setText(label)
    self.title_txt = title
    print("[SerienRecorder] suche ' %s '" % self.seriesName)
    self['title'].setText("Suche ' %s '" % self.seriesName)
    print(self.seriesWLID)
    # Channel subset to query, depending on the freshly selected mode.
    if self.filterMode == 1:
        webChannels = self.database.getActiveChannels()
    elif self.filterMode == 2:
        webChannels = self.database.getMarkerChannels(self.seriesFSID)
    else:
        webChannels = []
    try:
        transmissions = SeriesServer().doGetTransmissions(self.seriesWLID, 0, webChannels)
    except:
        transmissions = None
    self.resultsEvents(transmissions)
def keyBlue(self):
    """Open the timer list screen; refresh the event search when it returns."""
    from .SerienRecorderTimerListScreen import serienRecTimerListScreen
    open_with_callback = self.session.openWithCallback
    open_with_callback(self.searchEvents, serienRecTimerListScreen)
def __onClose(self):
    """Stop and release the display timer when the screen closes."""
    timer = self.displayTimer
    if timer:
        timer.stop()
        self.displayTimer = None
def keyCancel(self):
    """Cancel and close the screen, reporting whether any changes were made."""
    made = self.changesMade
    self.close(made)
class NamedProperty(object):
    """Expose a COM named property as a subscriptable / callable object.

    `get`, `put` and `putref` are the property's accessor descriptors (any of
    which may be None when the property lacks that accessor); `disp` is the
    owning dispatch object whose `_comobj._invoke` performs the COM call.
    """

    def __init__(self, disp, get, put, putref):
        self.disp = disp
        self.get = get
        self.put = put
        self.putref = putref

    def __getitem__(self, arg):
        """Read the property; a tuple argument spreads into multiple indices."""
        getter = self.get
        if getter is None:
            raise TypeError('unsubscriptable object')
        invoke = self.disp._comobj._invoke
        if isinstance(arg, tuple):
            return invoke(getter.memid, getter.invkind, 0, *arg)
        if arg == _all_slice:
            # Full-slice access reads the property without index arguments.
            return invoke(getter.memid, getter.invkind, 0)
        return invoke(getter.memid, getter.invkind, 0, arg)

    def __call__(self, *args):
        """Call the property getter directly with the supplied arguments."""
        getter = self.get
        if getter is None:
            raise TypeError('object is not callable')
        return self.disp._comobj._invoke(getter.memid, getter.invkind, 0, *args)

    def __setitem__(self, name, value):
        """Write the property, preferring putref for COM objects, put otherwise."""
        if self.put is None and self.putref is None:
            raise TypeError('object does not support item assignment')
        if comtypes._is_object(value):
            descr = self.putref or self.put
        else:
            descr = self.put or self.putref
        invoke = self.disp._comobj._invoke
        if isinstance(name, tuple):
            invoke(descr.memid, descr.invkind, 0, *(name + (value,)))
        elif name == _all_slice:
            invoke(descr.memid, descr.invkind, 0, value)
        else:
            invoke(descr.memid, descr.invkind, 0, name, value)

    def __iter__(self):
        """Named properties are not iterable; fail loudly instead of hanging."""
        raise TypeError('%r is not iterable' % self.disp)
def test_module_loading3():
    """Assets of dependent modules load depth-first: m1 before m3 before m2."""
    clear_test_classes()
    asset_store = AssetStore()
    session = SessionTester('', asset_store)
    # Three fake modules; m2 depends on m3, which depends on m1.
    module1 = FakeModule(asset_store, 'foo.m1')
    module2 = FakeModule(asset_store, 'foo.m2')
    module3 = FakeModule(asset_store, 'foo.m3')
    ComponentA = module2.make_component_class('Ma')
    module2.deps = add_prefix(['foo.m3'])
    module3.deps = add_prefix(['foo.m1'])
    session._register_component(ComponentA(flx_session=session))
    assert session.assets_js == add_prefix(['foo.m1.js', 'foo.m3.js', 'foo.m2.js'])
class TestDynamoGateway(unittest.TestCase):
    """Unit tests for DynamoDBGateway against a mocked boto3 client.

    FIX: setUp previously declared an extra ``BotoClient`` parameter left
    over from a stripped ``@patch('boto3.client')`` decorator (only the bare
    ``('boto3.client')`` string expression remained above it), so unittest's
    zero-argument ``setUp()`` call would fail with a TypeError.  The gateway
    client is now replaced with a MagicMock directly, which is all these
    tests rely on.
    """
    TEST_ACCESS_KEY_ID = 'test-access-key-id'
    TEST_ACCESS_KEY_DATA = 'test-access-key-data'
    TEST_REGION = 'us-west-2'
    TEST_TABLE_NAME = 'test_table'

    def setUp(self) -> None:
        """Create a gateway and swap its client for a mock so no AWS calls happen."""
        self.gw = DynamoDBGateway(self.TEST_REGION, self.TEST_ACCESS_KEY_ID, self.TEST_ACCESS_KEY_DATA)
        self.gw.client = MagicMock()
        self.serializer = TypeSerializer()
        self.deserializer = TypeDeserializer()

    def test_put_item(self) -> None:
        """put_item serializes plain values and guards against overwriting."""
        test_key_name = 'instance_id'
        test_key_value = '111'
        test_attribute_name = 'instance_name'
        test_attribute_value = 'test'
        test_item = {test_key_name: test_key_value, test_attribute_name: test_attribute_value}
        # DynamoDB wire format: strings become {'S': value}.
        expected_item = {test_key_name: {'S': test_key_value}, test_attribute_name: {'S': test_attribute_value}}
        self.gw.put_item(self.TEST_TABLE_NAME, test_item)
        self.gw.client.put_item.assert_called_with(TableName=self.TEST_TABLE_NAME, Item=expected_item, ConditionExpression='attribute_not_exists(instance_id)')

    def test_get_item(self) -> None:
        """get_item deserializes the DynamoDB response into plain values."""
        test_key_name = 'instance_id'
        test_key_value = '111'
        test_attribute_name = 'instance_name'
        test_attribute_value = 'test'
        client_return = {'Item': {test_key_name: {'S': test_key_value}, test_attribute_name: {'S': test_attribute_value}}, 'ResponseMetadata': {'RequestId': 'JSSDCD5NEGJ5VBSI7QO36GENVRVV4KQNSO5AEMVJF66Q9ASUAAJG', 'HTTPStatusCode': 200, 'HTTPHeaders': {'server': 'Server', 'date': 'Mon, 22 Aug 2022 17:15:26 GMT', 'content-type': 'application/x-amz-json-1.0', 'x-amzn-requestid': 'JSSDCD5NEGJ5VBSI7QO36GENVRVV4KQNSO5AEMVJF66Q9ASUAAJG', 'x-amz-crc32': '', 'via': 'HTTP/1.1 52.94.28.254:443 (fwdproxy2/234ced5316cce89d49ae 66.220.149.19)', 'x-connected-to': '52.94.28.254', 'x-fb-ip-type': 'allow_default', 'connection': 'keep-alive', 'content-length': '85'}, 'RetryAttempts': 0}}
        expected_return = {test_key_name: test_key_value, test_attribute_name: test_attribute_value}
        self.gw.client.get_item = MagicMock(return_value=client_return)
        res = self.gw.get_item(table_name=self.TEST_TABLE_NAME, key_name=test_key_name, key_value=test_key_value)
        self.assertEqual(res, expected_return)

    def test_delete_item(self) -> None:
        """delete_item passes a serialized key to the client."""
        test_key_name = 'instance_id'
        test_key_value = '111'
        expected_key = {test_key_name: self.serializer.serialize(test_key_value)}
        self.gw.delete_item(table_name=self.TEST_TABLE_NAME, key_name=test_key_name, key_value=test_key_value)
        self.gw.client.delete_item.assert_called_with(TableName=self.TEST_TABLE_NAME, Key=expected_key)

    def test_update(self):
        """update_item builds a SET expression and requires the item to exist."""
        test_key_name = 'instance_id'
        test_key_value = '111'
        test_attribute_name = 'instance_name'
        test_new_attribute_value = 'test_update'
        test_boto3_key = {test_key_name: self.serializer.serialize(test_key_value)}
        test_update_expression = f'SET {test_attribute_name} = :new_data'
        test_boto3_attributes = {':new_data': self.serializer.serialize(test_new_attribute_value)}
        self.gw.update_item(table_name=self.TEST_TABLE_NAME, key_name=test_key_name, key_value=test_key_value, attribute_name=test_attribute_name, new_value=test_new_attribute_value)
        self.gw.client.update_item.assert_called_with(TableName=self.TEST_TABLE_NAME, Key=test_boto3_key, UpdateExpression=test_update_expression, ExpressionAttributeValues=test_boto3_attributes, ConditionExpression='attribute_exists(instance_id)')
def version_dialog(logging_level=logging.WARNING, mode=2):
    """Open the Anima version dialog inside Maya.

    Sets up the database connection and Qt bindings first, then launches the
    dialog parented to the Maya main window.
    """
    from anima.utils import do_db_setup
    do_db_setup()
    set_qt_lib()
    from anima.ui.dialogs import version_dialog as vd
    from anima.dcc import mayaEnv
    maya = mayaEnv.Maya()
    import pymel
    # Environment name carries the Maya major version, e.g. "Maya2022".
    maya.name = 'Maya%s' % str(pymel.versions.current())[:4]
    logger.setLevel(logging_level)
    vd.UI(environment=maya, parent=mayaEnv.get_maya_main_window(), mode=mode)
def reg(func_name: str, func: Callable=None) -> Callable:
    """Register *func* under *func_name* in BACKEND_FUNCTIONS.

    Works both as a direct call (``reg("name", fn)``) and as a decorator
    factory (``@reg("name")``).  Raises RuntimeError if the name is taken.
    """
    if func_name in BACKEND_FUNCTIONS:
        raise RuntimeError('{name} function has already been registered.'.format(name=func_name))

    def _do_reg(func):
        BACKEND_FUNCTIONS[func_name] = func
        return func

    if func is None:
        # Decorator-factory form: the wrapped function registers on application.
        return _do_reg
    # BUG FIX: the direct-call form previously returned *func* without ever
    # registering it; route it through _do_reg so both forms register.
    return _do_reg(func)
class TabsExtraViewWrapperCommand(sublime_plugin.WindowCommand):
    """Focus the tab identified by (group, index) and run a command on it."""

    def run(self, command, group=-1, index=-1, args=None):
        """Locate the target view, focus it, then forward *command* with *args*."""
        if args is None:
            args = {}
        # Both coordinates must be explicit; otherwise do nothing.
        if group < 0 or index < 0:
            return
        target = get_group_view(self.window, group, index)
        if target is None:
            return
        self.window.focus_view(target)
        self.window.run_command(command, args)
class OptionPlotoptionsHeatmapSonificationDefaultspeechoptionsMappingRate(Options):
    """Speech-rate mapping options for heatmap sonification.

    FIX: every getter/setter pair below shared one name, so each setter
    definition silently shadowed its getter; restored the
    @property / @<name>.setter decorators that this accessor pattern implies.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class CustomDateFormatsTest(unittest.TestCase):
    """Date parsing tests: ISO 8601 handling and DateDetector template matching."""
    def setUp(self):
        """Prepare the mocked time environment before each test."""
        unittest.TestCase.setUp(self)
        setUpMyTime()
    def tearDown(self):
        """Restore the real time environment after each test."""
        unittest.TestCase.tearDown(self)
        tearDownMyTime()
    def testIso8601(self):
        """iso8601.getDate: valid timestamps, invalid inputs, and TZ offsets."""
        date = datetime.datetime.utcfromtimestamp(iso8601.getDate('2007-01-25T12:00:00Z')[0])
        self.assertEqual(date, datetime.datetime(2007, 1, 25, 12, 0))
        # Non-string inputs must raise; malformed strings return None.
        self.assertRaises(TypeError, iso8601.getDate, None)
        self.assertRaises(TypeError, iso8601.getDate, date)
        self.assertEqual(iso8601.getDate(''), None)
        self.assertEqual(iso8601.getDate('Z'), None)
        self.assertEqual(iso8601.getDate('2007-01-01T120:00:00Z'), None)
        self.assertEqual(iso8601.getDate('2007-13-01T12:00:00Z'), None)
        # Timezone offsets in +HHMM, +HH:MM, -HHMM and -HH forms.
        date = datetime.datetime.utcfromtimestamp(iso8601.getDate('2007-01-25T12:00:00+0400')[0])
        self.assertEqual(date, datetime.datetime(2007, 1, 25, 8, 0))
        date = datetime.datetime.utcfromtimestamp(iso8601.getDate('2007-01-25T12:00:00+04:00')[0])
        self.assertEqual(date, datetime.datetime(2007, 1, 25, 8, 0))
        date = datetime.datetime.utcfromtimestamp(iso8601.getDate('2007-01-25T12:00:00-0400')[0])
        self.assertEqual(date, datetime.datetime(2007, 1, 25, 16, 0))
        date = datetime.datetime.utcfromtimestamp(iso8601.getDate('2007-01-25T12:00:00-04')[0])
        self.assertEqual(date, datetime.datetime(2007, 1, 25, 16, 0))
    def testAmbiguousDatePattern(self):
        """Each triple is (expected match or False/None, template or None, log line)."""
        defDD = DateDetector()
        defDD.addDefaultTemplate()
        for (matched, dp, line) in (('Jan 23 21:59:59', None, 'Test failure Jan 23 21:59:59 for 192.0.2.1'), (False, None, 'Test failure TestJan 23 21:59:59.011 2015 for 192.0.2.1'), (False, None, 'Test failure Jan 23 21:59: for 192.0.2.1'), ('Aug 8 11:25:50', None, 'Aug 8 11:25:50 20030f2329b8 Authentication failed from 192.0.2.1'), ('Aug 8 11:25:50', None, '[Aug 8 11:25:50] 20030f2329b8 Authentication failed from 192.0.2.1'), ('Aug 8 11:25:50 2014', None, 'Aug 8 11:25:50 2014 20030f2329b8 Authentication failed from 192.0.2.1'), ('20:00:00 01.02.2003', '%H:%M:%S %d.%m.%Y$', '192.0.2.1 at 20:00:00 01.02.2003'), ('[20:00:00 01.02.2003]', '\\[%H:%M:%S %d.%m.%Y\\]', '192.0.2.1[20:00:00 01.02.2003]'), ('[20:00:00 01.02.2003]', '\\[%H:%M:%S %d.%m.%Y\\]', '[20:00:00 01.02.2003]192.0.2.1'), ('[20:00:00 01.02.2003]', '\\[%H:%M:%S %d.%m.%Y\\]$', '192.0.2.1[20:00:00 01.02.2003]'), ('[20:00:00 01.02.2003]', '^\\[%H:%M:%S %d.%m.%Y\\]', '[20:00:00 01.02.2003]192.0.2.1'), ('[17/Jun/2011 17:00:45]', '^\\[%d/%b/%Y %H:%M:%S\\]', '[17/Jun/2011 17:00:45] Attempt, IP address 192.0.2.1'), ('[17/Jun/2011 17:00:45]', '\\[%d/%b/%Y %H:%M:%S\\]', 'Attempt [17/Jun/2011 17:00:45] IP address 192.0.2.1'), ('[17/Jun/2011 17:00:45]', '\\[%d/%b/%Y %H:%M:%S\\]', 'Attempt IP address 192.0.2.1, date: [17/Jun/2011 17:00:45]'), (False, '%H:%M:%S %d.%m.%Y', '192.0.2.1x20:00:00 01.02.2003'), (False, '%H:%M:%S %d.%m.%Y', '20:00:00 01.02.2003x192.0.2.1'), ('20:00:00 01.02.2003', '**%H:%M:%S %d.%m.%Y**', '192.0.2.1x20:00:00 01.02.2003'), ('20:00:00 01.02.2003', '**%H:%M:%S %d.%m.%Y**', '20:00:00 01.02.2003x192.0.2.1'), ('*20:00:00 01.02.2003*', '\\**%H:%M:%S %d.%m.%Y\\**', 'test*20:00:00 01.02.2003*test'), ('20:00:00 01.02.2003', '%H:%M:%S %d.%m.%Y', '192.0.2.1 20:00:00 01.02.2003'), ('20:00:00 01.02.2003', '%H:%M:%S %d.%m.%Y', '20:00:00 01.02.2003 192.0.2.1'), (None, '%Y-%Exm-%Exd %ExH:%ExM:%ExS', '0000-12-30 00:00:00 - 2003-12-30 00:00:00'), ('2003-12-30 00:00:00', '%ExY-%Exm-%Exd %ExH:%ExM:%ExS', '0000-12-30 
00:00:00 - 2003-12-30 00:00:00'), ('2003-12-30 00:00:00', None, '0000-12-30 00:00:00 - 2003-12-30 00:00:00'), ('200333 010203', '%Y%m%d %H%M%S', 'text:200333 010203 | date: 010203'), (' 010203', '%ExY%Exm%Exd %ExH%ExM%ExS', 'text:200333 010203 | date: 010203'), (' 010203', None, 'text:200333 010203 | date: 010203'), (' 000000', '%ExY%Exm%Exd %ExH%ExM%ExS', ' 010203 - 000000'), (None, '{^LN-BEG}%ExY%Exm%Exd %ExH%ExM%ExS', ' 010203 - 000000'), (' 010203', '{^LN-BEG}%ExY%Exm%Exd %ExH%ExM%ExS', ' 010203 - 000000'), ('', '{^LN-BEG}%ExY%Exm%Exd%ExH%ExM%ExS**', ''), ('', '{^LN-BEG}%ExY%Exm%Exd%ExH%ExM%ExS**', '#'), ('', '{^LN-BEG}%ExY%Exm%Exd%ExH%ExM%ExS**', '##'), ('', '{^LN-BEG}%ExY%Exm%Exd%ExH%ExM%ExS', '[]'), ((.0 - 3600), '{^LN-BEG}%ExY-%Exm-%Exd %ExH:%ExM:%ExS(?: %z)?', '[2003-12-30 01:02:03] server ...'), ((.0 - 3600), '{^LN-BEG}%ExY-%Exm-%Exd %ExH:%ExM:%ExS(?: %Z)?', '[2003-12-30 01:02:03] server ...'), (.0, '{^LN-BEG}%ExY-%Exm-%Exd %ExH:%ExM:%ExS(?: %z)?', '[2003-12-30 01:02:03 UTC] server ...'), (.0, '{^LN-BEG}%ExY-%Exm-%Exd %ExH:%ExM:%ExS(?: %Z)?', '[2003-12-30 01:02:03 UTC] server ...'), (.0, '{^LN-BEG}%ExY-%Exm-%Exd %ExH:%ExM:%ExS(?: %z)?', '[2003-12-30 01:02:03 Z] server ...'), (.0, '{^LN-BEG}%ExY-%Exm-%Exd %ExH:%ExM:%ExS(?: %z)?', '[2003-12-30 01:02:03 +0000] server ...'), (.0, '{^LN-BEG}%ExY-%Exm-%Exd %ExH:%ExM:%ExS(?: %Z)?', '[2003-12-30 01:02:03 Z] server ...')):
            logSys.debug('== test: %r', (matched, dp, line))
            if (dp is None):
                dd = defDD
            else:
                dd = DateDetector()
                dd.appendTemplate(dp)
            date = dd.getTime(line)
            if matched:
                self.assertTrue(date)
                # String expectation checks the matched text; numeric checks the epoch.
                if isinstance(matched, str):
                    self.assertEqual(matched, date[1].group(1))
                else:
                    self.assertEqual(matched, date[0])
            else:
                self.assertEqual(date, None)
    def testVariousFormatSpecs(self):
        """Exercise less common strptime format specifiers via DateDetector."""
        for (matched, dp, line) in ((.0, '^%B %Exd %I:%ExM:%ExS**', 'January 23 12:59:59'), (.0, '^%y %U %A %ExH:%ExM:%ExS**', '01 11 Wednesday 21:59:59'), (.0, '^%y %W %A %ExH:%ExM:%ExS**', '01 11 Wednesday 21:59:59'), (.0, '^%y %W %w %ExH:%ExM:%ExS**', '01 11 0 21:59:59'), (.0, '^%y %W %w %ExH:%ExM:%ExS**', '01 11 6 21:59:59'), (.0, '^%ExH:%ExM:%ExS**', '21:59:59'), (.0, '^%ExH:%ExM:%ExS**', '00:00:01'), (.0, '^%m/%d %ExH:%ExM:%ExS**', '09/01 21:59:59'), (.0, '^%Y-%m-%d**', '2004-09-01'), (.0, '^%Y-%m-%d%z**', '2004-09-01Z')):
            logSys.debug('== test: %r', (matched, dp, line))
            dd = DateDetector()
            dd.appendTemplate(dp)
            date = dd.getTime(line)
            if matched:
                self.assertTrue(date)
                if isinstance(matched, str):
                    self.assertEqual(matched, date[1].group(1))
                else:
                    self.assertEqual(matched, date[0])
            else:
                self.assertEqual(date, None)
def test_serialize_attrs():
    """Round-trip model attrs through serialize_attr / deserialize_attr,
    then verify custom per-type handlers registered for SerializableAttr.

    FIX: the two custom handlers were preceded by bare
    ``_attr.register(SerializableAttr)`` expressions — stripped decorators —
    so they were never actually registered; restored
    ``@serialize_attr.register`` / ``@deserialize_attr.register``.
    """
    fwd = (lambda model, X, is_train: (X, (lambda dY: dY)))
    attrs = {'test': 'foo'}
    model1 = Model('test', fwd, attrs=attrs).initialize()
    # Default path: plain values are msgpack-serialized.
    bytes_attr = serialize_attr(model1.attrs['test'], attrs['test'], 'test', model1)
    assert (bytes_attr == srsly.msgpack_dumps('foo'))
    model2 = Model('test', fwd, attrs={'test': ''})
    result = deserialize_attr(model2.attrs['test'], bytes_attr, 'test', model2)
    assert (result == 'foo')

    @serialize_attr.register(SerializableAttr)
    def serialize_attr_custom(_, value, name, model):
        return value.to_bytes()

    @deserialize_attr.register(SerializableAttr)
    def deserialize_attr_custom(_, value, name, model):
        return SerializableAttr().from_bytes(value)

    attrs = {'test': SerializableAttr()}
    model3 = Model('test', fwd, attrs=attrs)
    # Custom path: dispatch on the attr's type picks the handlers above.
    bytes_attr = serialize_attr(model3.attrs['test'], attrs['test'], 'test', model3)
    assert (bytes_attr == b'foo')
    model4 = Model('test', fwd, attrs=attrs)
    assert (model4.attrs['test'].value == 'foo')
    result = deserialize_attr(model4.attrs['test'], bytes_attr, 'test', model4)
    assert (result.value == 'foo from bytes')
class OptionSeriesLollipopSonificationTracksMapping(Options):
    """Sonification track mapping options for lollipop series.

    FIX: the ``text`` setter shared its name with the getter and shadowed it;
    restored the @property / @text.setter pair.  The sub-option accessors are
    left as plain methods since each appears only once (no shadowing); they
    may also have been properties upstream — confirm before relying on the
    call style.
    """

    def frequency(self) -> 'OptionSeriesLollipopSonificationTracksMappingFrequency':
        return self._config_sub_data('frequency', OptionSeriesLollipopSonificationTracksMappingFrequency)

    def gapBetweenNotes(self) -> 'OptionSeriesLollipopSonificationTracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionSeriesLollipopSonificationTracksMappingGapbetweennotes)

    def highpass(self) -> 'OptionSeriesLollipopSonificationTracksMappingHighpass':
        return self._config_sub_data('highpass', OptionSeriesLollipopSonificationTracksMappingHighpass)

    def lowpass(self) -> 'OptionSeriesLollipopSonificationTracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionSeriesLollipopSonificationTracksMappingLowpass)

    def noteDuration(self) -> 'OptionSeriesLollipopSonificationTracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionSeriesLollipopSonificationTracksMappingNoteduration)

    def pan(self) -> 'OptionSeriesLollipopSonificationTracksMappingPan':
        return self._config_sub_data('pan', OptionSeriesLollipopSonificationTracksMappingPan)

    def pitch(self) -> 'OptionSeriesLollipopSonificationTracksMappingPitch':
        return self._config_sub_data('pitch', OptionSeriesLollipopSonificationTracksMappingPitch)

    def playDelay(self) -> 'OptionSeriesLollipopSonificationTracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesLollipopSonificationTracksMappingPlaydelay)

    def rate(self) -> 'OptionSeriesLollipopSonificationTracksMappingRate':
        return self._config_sub_data('rate', OptionSeriesLollipopSonificationTracksMappingRate)

    @property
    def text(self):
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    def time(self) -> 'OptionSeriesLollipopSonificationTracksMappingTime':
        return self._config_sub_data('time', OptionSeriesLollipopSonificationTracksMappingTime)

    def tremolo(self) -> 'OptionSeriesLollipopSonificationTracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionSeriesLollipopSonificationTracksMappingTremolo)

    def volume(self) -> 'OptionSeriesLollipopSonificationTracksMappingVolume':
        return self._config_sub_data('volume', OptionSeriesLollipopSonificationTracksMappingVolume)
class CallStack():
    """Simple LIFO stack of FunctionCall frames."""
    # Backing list; the top of the stack is the last element.
    _stack: List[FunctionCall]

    def __init__(self) -> None:
        self._stack = []

    def push(self, function_call: FunctionCall) -> None:
        """Push a call frame.  (FIX: the parameter was named ``FunctionCall``,
        shadowing the type itself; renamed — positional callers unaffected.)"""
        self._stack.append(function_call)

    def pop(self) -> FunctionCall:
        """Remove and return the top frame; raises IndexError when empty."""
        return self._stack.pop()

    def peek(self) -> Optional[FunctionCall]:
        """Return the top frame without removing it, or None when empty."""
        return self._stack[-1] if self._stack else None
@pytest.fixture(scope='function')
def aircall_connection_config(db: Session, aircall_config, aircall_secrets) -> Generator:
    """Yield a ConnectionConfig for the Aircall SaaS connector; deleted on teardown.

    FIX: restored the ``@pytest.fixture`` decorator — only its argument list
    ``(scope='function')`` remained, which is not valid Python on its own.
    """
    fides_key = aircall_config['fides_key']
    connection_config = ConnectionConfig.create(db=db, data={'key': fides_key, 'name': fides_key, 'connection_type': ConnectionType.saas, 'access': AccessLevel.write, 'secrets': aircall_secrets, 'saas_config': aircall_config})
    yield connection_config
    connection_config.delete(db)
class ViewChooser(HasTraits):
    """Traits UI dialog for choosing a workbench view from a tree of categories."""
    # The workbench window whose views are listed.
    window = Instance('pyface.workbench.api.WorkbenchWindow')
    # Currently selected tree node (category or view; hence Any).
    selected = Any()
    # The chosen view; only set when the selection really is an IView.
    view = Instance(IView)
    # Declarative dialog layout: window tree -> categories -> views; OK is
    # enabled only once a concrete view is selected.
    traits_ui_view = View(Item(name='window', editor=TreeEditor(nodes=[WorkbenchWindowTreeNode(auto_open=True, label='=Views', rename=False, copy=False, delete=False, insert=False, menu=None), TreeNode(node_for=[Category], auto_open=True, children='views', label='name', rename=False, copy=False, delete=False, insert=False, menu=None), IViewTreeNode(auto_open=False, label='name', rename=False, copy=False, delete=False, insert=False, menu=None)], editable=False, hide_root=True, selected='selected', show_icons=True), show_label=False), buttons=[Action(name='OK', enabled_when='view is not None'), 'Cancel'], resizable=True, style='custom', title='Show View', width=0.2, height=0.4)
    def _selected_changed(self, old, new):
        """Trait change handler: accept the selection only if it is an IView
        (assigning a non-view raises TraitError, which resets `view`)."""
        try:
            self.view = new
        except TraitError:
            self.view = None
        return
class AdCreativeDocsTestCase(DocsTestCase):
    """Docs snippets for AdCreative endpoints."""

    def setUp(self):
        """Create one creative and remember its id for the doc examples."""
        new_creative = self.create_creative(1)
        DocsDataStore.set('creative_id', new_creative.get_id())

    def test_get_ad_preview(self):
        """Fetch a right-column preview for the stored creative."""
        creative_id = DocsDataStore.get('creative_id')
        ad_creative = AdCreative(creative_id)
        preview = ad_creative.get_ad_preview(params={'ad_format': 'RIGHT_COLUMN_STANDARD'})
        self.store_response(preview)
def parse_sdescs_and_recogs(sender, candidates, string, search_mode=False, case_sensitive=True, fallback=None):
    """Resolve sdesc/recog references in an emote string against *candidates*.

    Each candidate is matched by its recog (as seen by *sender*), its sdesc,
    its key, and its aliases.  In emote mode the function returns
    ``(string, mapping)`` where markers are replaced by ``{#id}`` keys and
    *mapping* maps those keys to the matched objects; unresolved or ambiguous
    references raise EmoteError (unless *fallback* substitutes).  In
    ``search_mode`` the first marker is resolved and the matching object(s)
    are returned as a list.
    """
    # Build (object, searchable text) pairs for all ways each candidate can be named.
    candidate_map = []
    for obj in candidates:
        if hasattr(sender, 'recog'):
            if (recog := sender.recog.get(obj)):
                candidate_map.append((obj, recog))
        if hasattr(obj, 'sdesc'):
            candidate_map.append((obj, obj.sdesc.get()))
        else:
            candidate_map.append((obj, obj.key))
        candidate_map.extend([(obj, alias) for alias in obj.aliases.all()])
    # Strip escaped refs and escape literal braces so format keys stay unambiguous.
    string = _RE_REF.sub('\\1', string)
    string = _RE_LEFT_BRACKETS.sub('{{', string)
    string = _RE_RIGHT_BRACKETS.sub('}}', string)
    mapping = {}
    errors = []
    obj = None
    nmatches = 0
    # Replace self-references first; they always resolve to the sender.
    for self_match in list(_RE_SELF_REF.finditer(string)):
        matched = self_match.group()
        case = (_get_case_ref(matched.lstrip(_PREFIX)) if case_sensitive else '')
        key = f'#{sender.id}{case}'
        string = _RE_SELF_REF.sub(f'{{{key}}}', string, count=1)
        mapping[key] = sender
    # Process object markers right-to-left so replacements don't shift offsets.
    for marker_match in reversed(list(_RE_OBJ_REF_START.finditer(string))):
        (num_identifier, _) = marker_match.groups('')
        match_index = marker_match.start()
        head = string[:match_index]
        tail = string[(match_index + 1):]
        if search_mode:
            # One-shot query over all words following the marker.
            rquery = ''.join([(('\\b(' + re.escape(word.strip(punctuation))) + ').*') for word in iter(tail.split())])
            matches = ((re.search(rquery, text, _RE_FLAGS), obj, text) for (obj, text) in candidate_map)
            bestmatches = [(obj, match.group()) for (match, obj, text) in matches if match]
        else:
            # Grow the word sequence greedily until it no longer matches anything.
            word_list = []
            bestmatches = []
            tail = re.split('(\\W)', tail)
            iend = 0
            for (i, item) in enumerate(tail):
                if (not item.isalpha()):
                    continue
                word_list.append(item)
                rquery = ''.join([(('\\b(' + re.escape(word)) + ').*') for word in word_list])
                matches = ((re.search(rquery, text, _RE_FLAGS), obj, text) for (obj, text) in candidate_map)
                matches = [(obj, match.group()) for (match, obj, text) in matches if match]
                if (len(matches) == 0):
                    break
                bestmatches = matches
                iend = i
            matched_text = ''.join(tail[1:iend])
            tail = ''.join(tail[(iend + 1):])
        nmatches = len(bestmatches)
        if (not nmatches):
            obj = None
            nmatches = 0
        elif (nmatches == 1):
            (obj, match_str) = bestmatches[0]
        elif all(((bestmatches[0][0].id == obj.id) for (obj, text) in bestmatches)):
            # All hits are the same object (matched via several texts): unambiguous.
            (obj, match_str) = bestmatches[0]
            nmatches = 1
        else:
            # An explicit numeric prefix (e.g. 2-tall man) selects among duplicates.
            inum = (min(max(0, (int(num_identifier) - 1)), (nmatches - 1)) if num_identifier else None)
            if (inum is not None):
                (obj, match_str) = bestmatches[inum]
                nmatches = 1
            else:
                obj = bestmatches
        if search_mode:
            # Search mode only resolves the first marker.
            break
        elif (nmatches == 0):
            if fallback:
                string = f'{head}{fallback}{tail}'
            else:
                errors.append(_EMOTE_NOMATCH_ERROR.format(ref=marker_match.group()))
        elif (nmatches == 1):
            case = (_get_case_ref(marker_match.group()) if case_sensitive else '')
            key = f'#{obj.id}{case}'
            string = f'{head}{{{key}}}{tail}'
            mapping[key] = obj
        else:
            # Ambiguous: report every candidate with its disambiguation number.
            refname = marker_match.group()
            reflist = ['{num}{sep}{name} ({text}{key})'.format(num=(inum + 1), sep=_NUM_SEP, name=_RE_PREFIX.sub('', refname), text=text, key=(f' ({sender.key})' if (sender == ob) else '')) for (inum, (ob, text)) in enumerate(obj)]
            errors.append(_EMOTE_MULTIMATCH_ERROR.format(ref=marker_match.group(), reflist='\n '.join(reflist)))
    if search_mode:
        if (nmatches == 0):
            return []
        elif (nmatches == 1):
            return [obj]
        else:
            return [tup[0] for tup in obj]
    if errors:
        raise EmoteError('\n'.join(errors))
    return (string, mapping)
class OptionSeriesFunnelSonificationTracksActivewhen(Options):
    """Activation conditions for funnel-series sonification tracks.

    FIX: every getter/setter pair below shared one name, so each setter
    definition silently shadowed its getter; restored the
    @property / @<name>.setter decorators that this accessor pattern implies.
    """

    @property
    def crossingDown(self):
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class Follows(models.Model):
    """Directed follow relation: `fan` follows `follow` (both are Users)."""
    # NOTE(review): the verbose_name values below (',' and '') look like
    # stripped/garbled labels (likely originally non-ASCII text) — restore
    # them from upstream before generating migrations.
    follow = models.ForeignKey(User, on_delete=models.CASCADE, related_name='follow', verbose_name=',')
    fan = models.ForeignKey(User, on_delete=models.CASCADE, related_name='fan', verbose_name='')
    add_time = models.DateTimeField(auto_now_add=True, verbose_name='')
    def __str__(self):
        """Represent the relation by the followed user's id."""
        return str(self.follow.id)
    class Meta():
        # Order by followed user, descending.
        ordering = ('-follow',)
@pytest.fixture(scope='function')
def adobe_campaign_secrets(saas_config):
    """Assemble Adobe Campaign connector secrets, preferring values from the
    SaaS config and falling back to the local ``secrets`` store.

    FIX: restored the ``@pytest.fixture`` decorator — only its argument list
    ``(scope='function')`` remained, which is not valid Python on its own.
    """
    return {
        'domain': pydash.get(saas_config, 'adobe_campaign.domain') or secrets['domain'],
        'organization': pydash.get(saas_config, 'adobe_campaign.organization') or secrets['organization'],
        'organization_id': pydash.get(saas_config, 'adobe_campaign.organization_id') or secrets['organization_id'],
        'client_id': pydash.get(saas_config, 'adobe_campaign.client_id') or secrets['client_id'],
        'client_secret': pydash.get(saas_config, 'adobe_campaign.client_secret') or secrets['client_secret'],
        'technical_account_id': pydash.get(saas_config, 'adobe_campaign.technical_account_id') or secrets['technical_account_id'],
        'private_key': pydash.get(saas_config, 'adobe_campaign.private_key') or secrets['private_key'],
        'namespace': pydash.get(saas_config, 'adobe_campaign.namespace') or secrets['namespace'],
        'regulation': pydash.get(saas_config, 'adobe_campaign.regulation') or secrets['regulation'],
    }
def make_schema(model, class_name=None, fields=None, options=None, BaseSchema=BaseAutoSchema):
    """Dynamically build a marshmallow-sqlalchemy schema class for *model*.

    The generated Meta enables instance loading, relationships and foreign
    keys, extended by *options*; extra schema *fields* are merged into the
    class body.  If a schema for the model is already registered in
    ``schema_map``, it is used as the base instead of *BaseSchema*.
    """
    class_name = class_name or '{0}Schema'.format(model.__name__)
    meta_options = utils.extend(
        {
            'model': model,
            'sqla_session': models.db.session,
            'load_instance': True,
            'include_relationships': True,
            'include_fk': True,
        },
        options or {},
    )
    Meta = type('Meta', (object,), meta_options)
    # Single dict lookup instead of the previous double .get() call.
    base_schema = schema_map.get(model.__name__) or BaseSchema
    return type(class_name, (base_schema,), utils.extend({'Meta': Meta}, fields or {}))
@pytest.mark.parametrize(
    'config_str, expect_forward_init, expect_num_loaded, error',
    [
        ('SURFACE MY_PARAM OUTPUT_FILE:surf.irap INIT_FILES:surf%d.irap BASE_SURFACE:surf0.irap', False, 1, ''),
        ('SURFACE MY_PARAM OUTPUT_FILE:surf.irap INIT_FILES:../../../surf%d.irap BASE_SURFACE:surf0.irap FORWARD_INIT:True', True, 1, ''),
        ('SURFACE MY_PARAM OUTPUT_FILE:surf.irap INIT_FILES:../../../surf.irap BASE_SURFACE:surf0.irap FORWARD_INIT:True', True, 1, ''),
        ('SURFACE MY_PARAM OUTPUT_FILE:surf.irap INIT_FILES:surf%d.irap BASE_SURFACE:surf0.irap FORWARD_INIT:True', True, 0, "Failed to initialize parameter 'MY_PARAM' in file surf0.irap"),
        ('SURFACE MY_PARAM OUTPUT_FILE:surf.irap INIT_FILES:surf.irap BASE_SURFACE:surf0.irap FORWARD_INIT:True', True, 0, "Failed to initialize parameter 'MY_PARAM' in file surf.irap"),
    ],
)
def test_surface_param(storage, tmpdir, config_str, expect_forward_init, expect_num_loaded, error, caplog):
    """SURFACE parameter: forward-init behavior and storage round-trip.

    FIX: restored the ``@pytest.mark.parametrize`` decorator whose
    ``@pytest.mark`` prefix had been stripped, leaving a bare (syntactically
    invalid) ``.parametrize(...)`` expression above the function.
    """
    with tmpdir.as_cwd():
        config = dedent('\n JOBNAME my_name%d\n NUM_REALIZATIONS 1\n ')
        config += config_str
        expect_surface = Surface(nx=2, ny=2, xinc=1, yinc=1, xstart=1, ystart=1, angle=0)
        for i in range(4):
            expect_surface[i] = float(i)
        expect_surface.write('surf.irap')
        expect_surface.write('surf0.irap')
        with open('config.ert', mode='w', encoding='utf-8') as fh:
            fh.writelines(config)
        (ensemble_config, fs) = create_runpath(storage, 'config.ert')
        assert (ensemble_config['MY_PARAM'].forward_init is expect_forward_init)
        assert (load_from_forward_model('config.ert', fs) == expect_num_loaded)
        assert (error in ''.join(caplog.messages))
        if (expect_num_loaded > 0):
            if expect_forward_init:
                # Forward-init surfaces must not be written on the first run.
                assert (not Path('simulations/realization-0/iter-0/surf.irap').exists())
            create_runpath(storage, 'config.ert', ensemble=fs, iteration=1)
            expected_iter = (1 if expect_forward_init else 0)
            actual_surface = Surface(f'simulations/realization-0/iter-{expected_iter}/surf.irap')
            assert (actual_surface == expect_surface)
        if (expect_num_loaded > 0):
            arr = fs.load_parameters('MY_PARAM', 0)['values'].values.T
            assert (arr.flatten().tolist() == [0.0, 1.0, 2.0, 3.0])
        else:
            with pytest.raises(KeyError, match="No dataset 'MY_PARAM' in storage for realization 0"):
                fs.load_parameters('MY_PARAM', 0)['values']
def upgrade():
    """Alembic upgrade: add the custom_form_translates table and the
    custom_forms.main_language column."""
    op.create_table(
        'custom_form_translates',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('custom_form_id', sa.Integer(), nullable=True),
        sa.Column('language_code', sa.String(), nullable=False),
        sa.Column('form_id', sa.String(), nullable=False),
        sa.ForeignKeyConstraint(['custom_form_id'], ['custom_forms.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
    )
    op.add_column('custom_forms', sa.Column('main_language', sa.String(), nullable=True))
def extractAoitenshilnHomeBlog(item):
    """Build a release message for known tags; None for non-chapter items,
    False when no tag matches."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (feed tag, series name, translation type)
    known_series = [
        ('Warm Place', 'Warm Place', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, series_name, tl_type in known_series:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def download_save(country_code: str, transfer_code: str, confirmation_code: str, game_version: str) -> requests.Response:
    """POST a save-transfer reception request to the nyanko save server.

    The country code 'jp' is normalised to 'ja' as the server expects.
    Raises a generic Exception (chained) on any request failure.
    """
    region = country_code.replace('jp', 'ja')
    url = get_nyanko_save_url() + '/v2/transfers/' + transfer_code + '/reception'
    payload = get_client_info(region, game_version)
    payload['pin'] = confirmation_code
    # Serialize compactly: the server expects no spaces in the JSON body.
    body = json.dumps(payload).replace(' ', '')
    headers = {'content-type': 'application/json', 'accept-encoding': 'gzip', 'connection': 'keep-alive', 'user-agent': 'Dalvik/2.1.0 (Linux; U; Android 9; SM-G955F Build/N2G48B)'}
    try:
        return requests.post(url, data=body, headers=headers)
    except requests.exceptions.RequestException as err:
        raise Exception('Error getting save: ' + str(err)) from err
# NOTE(review): the next line appears to be a stripped decorator call — the
# '@' and the decorator name were lost (cf. the diff_strategy.sma1 / .sma2 /
# .series attribute reads below, which expect a decorator to attach these
# parameters to the function). As written it is a syntax error; reconstruct
# the decorator before use.
(sma1=50, sma2=1000, series=None)
def diff_strategy(ohlcv):
    """SMA crossover strategy: enter on a death cross (sma1 crossing below
    sma2), exit on a golden cross; returns (entries, exits, figures)."""
    series = diff_strategy.series
    sma1 = series.rolling(diff_strategy.sma1).mean()
    sma2 = series.rolling(diff_strategy.sma2).mean()
    # Cross detection: condition holds now but was inverted on the previous bar.
    entries = ((sma1 < sma2) & (sma1.shift() > sma2.shift()))
    exits = ((sma1 > sma2) & (sma1.shift() < sma2.shift()))
    figures = {'figures': {'indicator': {'value': series, 'sma1': sma1, 'sma2': sma2}}}
    return (entries, exits, figures)
def extractArtegTheBear(item):
    """Build a release message for 'WATTT' items; None for non-chapter
    entries, False when the tag is absent."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    if 'WATTT' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
def get_formated_speaking_rate(speaking_rate: int):
    """Map a speaking-rate percentage to a rate multiplier.

    Input is clamped to [-100, 100].  Non-negative rates map linearly onto
    [1.0, 2.0]; negative rates use half the slope, mapping onto [0.5, 1.0).
    """
    clamped = min(100, max(-100, speaking_rate))
    if clamped >= 0:
        return 1 + clamped / 100
    # Negative side: -100 -> 0.5, approaching 1.0 as the rate approaches 0.
    return 1 - (-0.5 * clamped) / 100
class OptionSeriesGaugeSonificationTracksMappingNoteduration(Options):
    """Note-duration mapping options for gauge-series sonification tracks.

    FIX: every getter/setter pair below shared one name, so each setter
    definition silently shadowed its getter; restored the
    @property / @<name>.setter decorators that this accessor pattern implies.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesColumnrangeSonificationContexttracksActivewhen(Options):
    """'activeWhen' options for columnrange sonification context tracks.

    NOTE(review): the original had getter/setter pairs with identical names
    and no decorators (setters shadowed getters); ``@property`` /
    ``@<name>.setter`` decorators are restored here -- confirm against the
    upstream wrapper library.
    """

    @property
    def crossingDown(self):
        # No default configured.
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class ExaHttpThread(threading.Thread):
    """Thread wrapping an ExaTCPServer that serves HTTP transport for a
    single Exasol client connection, relaying data through a pipe pair."""

    def __init__(self, ipaddr, port, compression, encryption):
        # The server owns the pipe pair; expose both ends on the thread so
        # the caller can stream data without touching the server directly.
        self.server = ExaTCPServer((ipaddr, port), ExaHttpRequestHandler, compression=compression, encryption=encryption)
        self.read_pipe = self.server.read_pipe
        self.write_pipe = self.server.write_pipe
        # Exception captured in run(); re-raised by join_with_exc().
        self.exc = None
        super().__init__()

    def exa_address(self):
        # 'ip:port' as seen by the Exasol cluster.
        # NOTE(review): reads like it was meant to be a @property -- confirm
        # against callers before changing.
        return f'{self.server.exa_address_ipaddr}:{self.server.exa_address_port}'

    def run(self):
        # Serve requests until the first client has been handled or
        # termination is requested; capture any failure for the joiner and
        # always close the server socket.
        try:
            while ((self.server.total_clients == 0) and (not self.server.is_terminated)):
                self.server.handle_request()
        except BaseException as e:
            self.exc = e
        finally:
            self.server.server_close()

    def join(self, timeout=None):
        # Unblock a pending GET before joining so the thread can exit.
        self.server.can_finish_get.set()
        super().join(timeout)

    def join_with_exc(self):
        """Join the thread and re-raise any exception captured in run()."""
        self.join()
        if self.exc:
            raise self.exc

    def terminate(self):
        """Request shutdown and close both pipe ends to unblock peers."""
        self.server.is_terminated = True
        self.server.can_finish_get.set()
        self.write_pipe.close()
        self.read_pipe.close()
class OptionSeriesArcdiagramTooltip(Options):
    """Tooltip options for arc-diagram series.

    Defaults shown are the values passed to ``_config_get``. NOTE(review):
    the original had getter/setter pairs with identical names and no
    decorators (setters shadowed getters); ``@property`` /
    ``@<name>.setter`` decorators are restored here -- confirm against the
    upstream wrapper library.
    """

    @property
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')

    @clusterFormat.setter
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateTimeLabelFormats(self) -> 'OptionSeriesArcdiagramTooltipDatetimelabelformats':
        # Nested sub-options object (read-only accessor).
        return self._config_sub_data('dateTimeLabelFormats', OptionSeriesArcdiagramTooltipDatetimelabelformats)

    @property
    def distance(self):
        return self._config_get(6)

    @distance.setter
    def distance(self, num: float):
        self._config(num, js_type=False)

    @property
    def followPointer(self):
        return self._config_get(True)

    @followPointer.setter
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def followTouchMove(self):
        return self._config_get(True)

    @followTouchMove.setter
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def footerFormat(self):
        return self._config_get('')

    @footerFormat.setter
    def footerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def format(self):
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def headerFormat(self):
        return self._config_get('<span style="font-size: 0.8em">{series.name}</span><br/>')

    @headerFormat.setter
    def headerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nodeFormat(self):
        return self._config_get('{point.name}: <b>{point.sum}</b><br/>')

    @nodeFormat.setter
    def nodeFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nodeFormatter(self):
        return self._config_get(None)

    @nodeFormatter.setter
    def nodeFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointFormat(self):
        return self._config_get('{point.fromNode.name} {point.toNode.name}: <b>{point.weight}</b><br/>')

    @pointFormat.setter
    def pointFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointFormatter(self):
        return self._config_get(None)

    @pointFormatter.setter
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valuePrefix(self):
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)

    @property
    def xDateFormat(self):
        return self._config_get(None)

    @xDateFormat.setter
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
def _run_main(argv, *, main):
    """Shared entry point: hide GPUs from TF, seed JAX, prepare the work
    directory and (optionally) wandb, then dispatch to *main*."""
    del argv  # unused; required by the absl app.run signature
    # Keep TensorFlow off the GPU so JAX owns all accelerator memory.
    tf.config.experimental.set_visible_devices([], 'GPU')
    config = FLAGS.config
    workdir = FLAGS.workdir
    rng = jax.random.PRNGKey(config.seed)
    logging.info('RNG: %s', rng)
    os.makedirs(workdir, exist_ok=True)
    if config.log_to_wandb:
        wandb.init(config=config, project=config.wandb_project, entity=config.wandb_entity, dir=workdir)
    main(rng=rng, config=config, workdir=workdir)
    if config.log_to_wandb:
        wandb.finish()
@dataclass
class MarkerHandler:
    """Records marker decisions and replays their cached results.

    NOTE(review): the ``@dataclass`` decorator was restored -- without it the
    ``field(default_factory=dict)`` line leaves a bare Field object on the
    class and no __init__ is generated; the annotated fields and the file's
    existing ``field`` usage indicate this was a dataclass.
    """
    decision_context: DecisionContext
    marker_name: str
    # Cached results keyed by marker id; populated on first execution and
    # consulted during replay.
    mutable_marker_results: Dict[str, MarkerResult] = field(default_factory=dict)

    def record_mutable_marker(self, id: str, event_id: int, data: bytes, access_count: int):
        """Record a marker decision; flag an existing cached result as replayed."""
        marker = MarkerData.create(id=id, event_id=event_id, data=data, access_count=access_count)
        if id in self.mutable_marker_results:
            self.mutable_marker_results[id].replayed = True
        else:
            self.mutable_marker_results[id] = MarkerResult(data=data)
        self.decision_context.record_marker(self.marker_name, marker.get_header(), data)

    def set_data(self, id, data: bytes):
        """Overwrite the cached result for *id* (not marked replayed)."""
        self.mutable_marker_results[id] = MarkerResult(data=data)

    def mark_replayed(self, id):
        """Mark the cached result for *id* as replayed."""
        self.mutable_marker_results[id].replayed = True

    def handle(self, id: str, func) -> Optional[bytes]:
        """Return the cached/replayed data for *id*, or compute and record it.

        During replay, a cached-but-not-yet-replayed result is re-recorded.
        Returns None when replaying with no cached result, or when *func*
        produces a falsy value.
        """
        event_id = self.decision_context.decider.next_decision_event_id
        result: MarkerResult = self.mutable_marker_results.get(id)
        if result or self.decision_context.is_replaying():
            if result:
                if self.decision_context.is_replaying() and (not result.replayed):
                    self.record_mutable_marker(id, event_id, result.data, 0)
                return result.data
            else:
                return None
        else:
            to_store = func()
            if to_store:
                data = to_store
                self.record_mutable_marker(id, event_id, data, 0)
                return to_store
            # Falsy result: nothing recorded, implicit None returned.

    def get_marker_data_from_history(self, event_id: int, marker_id: str, expected_access_count: int) -> Optional[bytes]:
        """Fetch recorded marker data from history, or None when the event is
        missing, is not a marker, or does not match name/id/access count."""
        event: HistoryEvent = self.decision_context.decider.get_optional_decision_event(event_id)
        if (not event) or (event.event_type != EventType.MarkerRecorded):
            return None
        attributes: MarkerRecordedEventAttributes = event.marker_recorded_event_attributes
        name = attributes.marker_name
        if self.marker_name != name:
            return None
        marker_data = MarkerInterface.from_event_attributes(attributes)
        if (marker_id != marker_data.get_id()) or (marker_data.get_access_count() > expected_access_count):
            return None
        return marker_data.get_data()
def test_create_client_and_secret_no_roles(db, config):
    """A freshly created client gets the requested scopes but no roles or systems."""
    security = config.security
    new_client, secret = ClientDetail.create_client_and_secret(
        db,
        security.oauth_client_id_length_bytes,
        security.oauth_client_secret_length_bytes,
        scopes=['user:create', 'user:read'],
    )
    assert new_client.hashed_secret is not None
    expected_hash = hash_with_salt(
        secret.encode(security.encoding),
        new_client.salt.encode(security.encoding),
    )
    assert expected_hash == new_client.hashed_secret
    assert new_client.scopes == ['user:create', 'user:read']
    assert new_client.roles == []
    assert new_client.systems == []
class SampleGUIClientWindow(QMainWindow):
    """Demo Qt window driving a SocketClientThread: a spinning circle, a
    'Do it!' button that queues a connect/send/receive/close sequence, and a
    log pane polled from the client's reply queue."""

    def __init__(self, parent=None):
        super(SampleGUIClientWindow, self).__init__(parent)
        self.create_main_frame()
        self.create_client()
        self.create_timers()

    def create_main_frame(self):
        # Layout: circle animation | button | log pane, side by side.
        self.circle_widget = CircleWidget()
        self.doit_button = QPushButton('Do it!')
        self.doit_button.clicked.connect(self.on_doit)
        self.log_widget = LogWidget()
        hbox = QHBoxLayout()
        hbox.addWidget(self.circle_widget)
        hbox.addWidget(self.doit_button)
        hbox.addWidget(self.log_widget)
        main_frame = QWidget()
        main_frame.setLayout(hbox)
        self.setCentralWidget(main_frame)

    def create_client(self):
        # Background worker thread; commands go in via cmd_q, replies come
        # back via reply_q (polled by a timer, never blocking the GUI).
        self.client = SocketClientThread()
        self.client.start()

    def create_timers(self):
        # 25 ms animation tick; 100 ms poll of the client reply queue.
        self.circle_timer = QTimer(self)
        self.circle_timer.timeout.connect(self.circle_widget.next)
        self.circle_timer.start(25)
        self.client_reply_timer = QTimer(self)
        self.client_reply_timer.timeout.connect(self.on_client_reply_timer)
        self.client_reply_timer.start(100)

    def on_doit(self):
        # Queue a full request cycle; the worker executes it asynchronously.
        self.client.cmd_q.put(ClientCommand(ClientCommand.CONNECT, SERVER_ADDR))
        self.client.cmd_q.put(ClientCommand(ClientCommand.SEND, 'hello'))
        self.client.cmd_q.put(ClientCommand(ClientCommand.RECEIVE))
        self.client.cmd_q.put(ClientCommand(ClientCommand.CLOSE))

    def on_client_reply_timer(self):
        # Non-blocking poll; Queue.Empty just means nothing to log yet.
        # NOTE(review): 'Queue.Empty' implies Python 2 ('import Queue');
        # on Python 3 this must be queue.Empty -- confirm target version.
        try:
            reply = self.client.reply_q.get(block=False)
            status = ('SUCCESS' if (reply.type == ClientReply.SUCCESS) else 'ERROR')
            self.log(('Client reply %s: %s' % (status, reply.data)))
        except Queue.Empty:
            pass

    def log(self, msg):
        # NOTE(review): time.clock() was removed in Python 3.8 -- on Python 3
        # this needs time.perf_counter(); fine on Python 2.
        timestamp = ('[%010.3f]' % time.clock())
        self.log_widget.append(((timestamp + ' ') + str(msg)))
def parse_csv(data: str, delimeter: str=',') -> list[list[str]]:
    """Split comment-stripped text into rows of delimiter-separated fields,
    dropping empty items and empty rows."""
    stripped = remove_comments(data)
    rows = [line.split(delimeter) for line in stripped.split('\n')]
    rows = remove_empty_items(rows)
    return [row for row in rows if row != []]
def input_from_editor(init_content, editor):
    """Open *editor* on a temp file seeded with *init_content* and return the
    edited text.

    Raises:
        CannotOpenEditorError: the editor executable could not be found.
    """
    import subprocess
    with CustomTemporaryFile() as filename:
        with open(filename, 'w') as handle:
            handle.write(init_content)
        try:
            subprocess.call([editor, filename])
        except FileNotFoundError:
            raise CannotOpenEditorError('Cannot open editor', editor=editor)
        with open(filename) as handle:
            return handle.read()
def create_or_update_measure(measure_def, end_date):
    """Create or update a Measure row from its JSON definition dict.

    Strips whitespace from all string values, fills in metadata, and builds
    the numerator/denominator SQL fragments according to the declared
    numerator_type / denominator_type ('custom', 'list_size',
    'star_pu_antibiotics', or one of the bnf_* shorthands).

    Returns the saved Measure instance.
    """
    measure_id = measure_def['id']
    v = arrays_to_strings(measure_def)
    for k, val in v.items():
        if isinstance(val, str):
            v[k] = val.strip()
    try:
        m = Measure.objects.get(id=measure_id)
    except Measure.DoesNotExist:
        m = Measure(id=measure_id)
    # Basic metadata. (The original assigned title/description twice; the
    # redundant second assignments were removed.)
    m.title = v['title']
    m.description = v['description']
    m.why_it_matters = v['why_it_matters']
    m.name = v['name']
    m.tags = v['tags']
    m.tags_focus = v.get('tags_focus', [])
    m.numerator_short = v['numerator_short']
    m.denominator_short = v['denominator_short']
    m.url = v['url']
    m.is_cost_based = v['is_cost_based']
    m.is_percentage = v['is_percentage']
    m.low_is_good = v['low_is_good']
    m.include_in_alerts = v.get('include_in_alerts', True)
    # --- Numerator ---
    m.numerator_type = v['numerator_type']
    if m.numerator_type == 'custom':
        # Fully custom SQL fragments supplied by the definition.
        m.numerator_columns = v['numerator_columns']
        m.numerator_from = v['numerator_from']
        m.numerator_where = v['numerator_where']
        m.numerator_bnf_codes_query = v.get('numerator_bnf_codes_query')
        m.numerator_is_list_of_bnf_codes = v.get('numerator_is_list_of_bnf_codes', True)
        if m.numerator_is_list_of_bnf_codes:
            m.numerator_bnf_codes = get_num_or_denom_bnf_codes(m, 'numerator', end_date)
    else:
        # Shorthand types: aggregate one prescribing column over a BNF-code list.
        if m.numerator_type == 'bnf_items':
            m.numerator_columns = 'SUM(items) AS numerator'
        elif m.numerator_type == 'bnf_quantity':
            m.numerator_columns = 'SUM(quantity) AS numerator'
        elif m.numerator_type == 'bnf_cost':
            m.numerator_columns = 'SUM(actual_cost) AS numerator'
        else:
            assert False, measure_id
        m.numerator_from = '{hscic}.normalised_prescribing'
        m.numerator_bnf_codes_filter = v.get('numerator_bnf_codes_filter')
        m.numerator_bnf_codes_query = v.get('numerator_bnf_codes_query')
        m.numerator_bnf_codes = get_bnf_codes(m.numerator_bnf_codes_query, m.numerator_bnf_codes_filter)
        m.numerator_where = build_where(m.numerator_bnf_codes)
        m.numerator_is_list_of_bnf_codes = True
    # --- Denominator ---
    m.denominator_type = v['denominator_type']
    if m.denominator_type == 'custom':
        m.denominator_columns = v['denominator_columns']
        m.denominator_from = v['denominator_from']
        m.denominator_where = v['denominator_where']
        m.denominator_bnf_codes_query = v.get('denominator_bnf_codes_query')
        # Only prescribing-based denominators can carry a BNF-code list.
        if m.denominator_from and ('normalised_prescribing' in m.denominator_from):
            m.denominator_is_list_of_bnf_codes = v.get('denominator_is_list_of_bnf_codes', True)
        else:
            m.denominator_is_list_of_bnf_codes = False
        m.denominator_bnf_codes = get_num_or_denom_bnf_codes(m, 'denominator', end_date)
    elif m.denominator_type == 'list_size':
        m.denominator_columns = 'SUM(total_list_size / 1000.0) AS denominator'
        m.denominator_from = '{hscic}.practice_statistics'
        m.denominator_where = '1 = 1'
        m.denominator_bnf_codes_query = None
        m.denominator_is_list_of_bnf_codes = False
    elif m.denominator_type == 'star_pu_antibiotics':
        m.denominator_columns = "CAST(JSON_EXTRACT(MAX(star_pu), '$.oral_antibacterials_item') AS FLOAT64) AS denominator"
        m.denominator_from = '{hscic}.practice_statistics'
        m.denominator_where = '1 = 1'
        m.denominator_bnf_codes_query = None
        m.denominator_is_list_of_bnf_codes = False
    else:
        if m.denominator_type == 'bnf_items':
            m.denominator_columns = 'SUM(items) AS denominator'
        elif m.denominator_type == 'bnf_quantity':
            m.denominator_columns = 'SUM(quantity) AS denominator'
        elif m.denominator_type == 'bnf_cost':
            m.denominator_columns = 'SUM(actual_cost) AS denominator'
        else:
            assert False, measure_id
        m.denominator_from = '{hscic}.normalised_prescribing'
        m.denominator_bnf_codes_filter = v.get('denominator_bnf_codes_filter')
        m.denominator_bnf_codes_query = v.get('denominator_bnf_codes_query')
        m.denominator_bnf_codes = get_bnf_codes(m.denominator_bnf_codes_query, m.denominator_bnf_codes_filter)
        m.denominator_where = build_where(m.denominator_bnf_codes)
        m.denominator_is_list_of_bnf_codes = True
    if not v.get('no_analyse_url'):
        m.analyse_url = build_analyse_url(m)
    m.save()
    return m
class PidFile():
    """Context manager writing a ``<process_name>.pid`` file on entry and
    removing it on exit, to guard against running the same process twice."""

    def __init__(self, process_name: str, path: Path) -> None:
        self.filepath = path / (process_name + '.pid')
        self._running = False

    def __enter__(self) -> 'PidFile':
        """Create the pidfile exclusively and record the current PID.

        Returns self (improvement: the original returned None), so
        ``with PidFile(...) as pf:`` binds the instance.

        Raises:
            FileExistsError: pidfile already present (process likely running).
            RuntimeError: this context was already entered.
            IOError: the pidfile's parent directory does not exist.
        """
        if self.filepath.exists():
            raise FileExistsError(f'File {self.filepath} already exists -- cannot run the same process twice')
        elif self._running:
            raise RuntimeError('Cannot enter the same PidFile context twice')
        if not self.filepath.parent.is_dir():
            raise IOError(f"Cannot create pidfile in base directory that doesn't exist: {self.filepath}")
        self._running = True
        # 'x' mode is exclusive-create, so a concurrent starter fails here
        # even if it raced past the exists() check above.
        with self.filepath.open('x') as pidfile:
            pidfile.write(str(os.getpid()) + '\n')
        return self

    def __exit__(self, exc_type=None, exc_value=None, exc_tb=None):
        # Always remove the pidfile, even when the body raised.
        self.filepath.unlink()
        self._running = False
class RetentionFilter(ReprMixIn):
    """Filters backup versions by a retention policy such as
    'latest3,hours24,days30': keep the oldest version per time bucket and
    dismiss the rest.

    NOTE(review): ``@classmethod`` restored on ``_parse_rules`` -- its first
    parameter is named ``cls`` and it only touches ``cls._valid_categories``.
    """

    _valid_categories = ('latest', 'hours', 'days', 'weeks', 'months', 'years')

    @classmethod
    def _parse_rules(cls, rules_spec: str) -> OrderedDict:
        """Parse a comma-separated '<category><count>' spec into an
        OrderedDict ordered by ``_valid_categories``.

        Raises ValueError for unknown/duplicate categories or malformed
        tokens (and UsageError for non-positive counts).
        """
        tokens = rules_spec.split(',')
        rules_dict: Dict[(str, int)] = {}
        for token in tokens:
            if not token:
                raise ValueError('Empty retention policy element.')
            match = re.search('^([a-z]+)([0-9]+)$', token)
            if match:
                category = match.group(1)
                timecount = int(match.group(2))
                if category not in cls._valid_categories:
                    raise ValueError('Time category {} in retention policy is invalid.'.format(category))
                if category in rules_dict:
                    raise ValueError('Time category {} listed more than once in retention policy.'.format(category))
                if timecount <= 0:
                    # NOTE(review): UsageError here vs ValueError elsewhere -- confirm intended.
                    raise UsageError('Count of time category {} must be a positive integer.'.format(category))
                rules_dict[category] = timecount
                continue
            raise ValueError('Invalid retention policy element {}.'.format(token))
        # Re-emit in canonical category order (finest first).
        rules: OrderedDict[(str, int)] = OrderedDict()
        for category in cls._valid_categories:
            if category in rules_dict:
                rules[category] = rules_dict[category]
        return rules

    def __init__(self, rules_spec: str, reference_time: datetime.datetime=None, tz: datetime.tzinfo=None) -> None:
        # Default to local timezone; naive reference times are taken as UTC.
        self.tz = tz if tz is not None else dateutil.tz.tzlocal()
        if reference_time is None:
            self.reference_time = datetime.datetime.now(tz=self.tz)
        elif reference_time.tzinfo is None:
            self.reference_time = reference_time.replace(tzinfo=datetime.timezone.utc)
        else:
            self.reference_time = reference_time
        self.rules = self._parse_rules(rules_spec)
        logger.debug('Retention filter set up with reference time {} and rules {}.'.format(self.reference_time.isoformat(timespec='seconds'), self.rules))

    def filter(self, versions: Union[(Sequence[Version], Set[Version])]) -> List[Version]:
        """Return the versions to dismiss (kept versions are the complement)."""
        return self._filter(versions)[0]

    def _filter(self, versions: Union[(Sequence[Version], Set[Version])]) -> Tuple[(List[Version], Dict[(str, Dict[(int, List[Version])])])]:
        """Partition versions into (dismissed, kept-by-category-and-bucket)."""
        categories = [category for category in self.rules.keys() if (category != 'latest')]
        versions_by_category: Dict[(str, Dict[(int, List[Version])])] = {}
        versions_by_category_remaining: Dict[(str, Dict[(int, List[Version])])] = {}
        for category in categories:
            versions_by_category[category] = defaultdict(list)
            versions_by_category_remaining[category] = {}
        # Newest first, so 'latest' can slice from the front.
        versions = list(versions)
        versions.sort(key=(lambda version: version.date), reverse=True)
        if 'latest' in self.rules:
            logger.debug('Keeping {} latest versions.'.format(self.rules['latest']))
            versions_by_category_remaining['latest'] = {0: versions[:self.rules['latest']]}
            del versions[:self.rules['latest']]
        dismissed_versions = []
        for version in versions:
            try:
                td = _Timedelta(version.date.replace(tzinfo=datetime.timezone.utc), self.reference_time, tz=self.tz)
            except ValueError as exception:
                logger.warning('Version {}: {}.'.format(version.uid, exception))
                continue
            logger.debug('Time and time delta for version {} are {} and {}.'.format(version.uid, version.date.isoformat(timespec='seconds'), td))
            # First matching (finest-grained configured) category wins.
            for category in categories:
                timecount = getattr(td, category)
                if timecount <= self.rules[category]:
                    logger.debug('Found matching category {}, timecount {}.'.format(category, timecount))
                    versions_by_category[category][timecount].append(version)
                    break
            else:
                dismissed_versions.append(version)
                logger.debug("Dismissing version, it doesn't fit into any category.")
        # Within each bucket keep only the oldest version; dismiss the rest.
        for category in categories:
            for timecount in versions_by_category[category]:
                dismissed_versions.extend(versions_by_category[category][timecount][:(- 1)])
                versions_by_category_remaining[category][timecount] = versions_by_category[category][timecount][(- 1):]
        return (dismissed_versions, versions_by_category_remaining)
class filter_test_case(unittest.TestCase):
    def test_filter(self):
        """_filter rejects non-callable predicates and returns a new dict
        containing only the matching entries."""
        source = {'a': 1, 'b': 2, 'c': '4', 'e': '5', 'f': 6, 'g': 7}
        with self.assertRaises(ValueError):
            _filter(source, True)
        filtered = _filter(source, lambda key, val: isinstance(val, int))
        self.assertFalse(filtered is source)
        self.assertEqual(filtered, {'a': 1, 'b': 2, 'f': 6, 'g': 7})
def test_node_data_request_with_extra_unrequested_nodes():
    """Validation must fail when the response contains nodes never requested."""
    node_keys, nodes = mk_node_data(10)
    validator = GetNodeDataValidator(node_keys)
    requested = tuple(zip(node_keys, nodes))
    # Fresh nodes guaranteed not to be among the requested ones.
    extra_nodes = tuple(set(mk_node() for _ in range(10)).difference(nodes))
    extra = tuple((keccak(node), node) for node in extra_nodes)
    with pytest.raises(ValidationError):
        validator.validate_result(requested + extra)
def encode_field(types, name, field_type, value):
    """Encode one EIP-712 struct field as an (abi_type, encodable_value) pair.

    Dynamic types (nested structs, bytes, string, arrays) are reduced to a
    32-byte keccak hash; atomic ABI types pass through after an
    encodability check.

    Raises:
        ValueError: if value is None.
        TypeError: if value's type or shape does not match field_type.
    """
    if (value is None):
        raise ValueError(f'Missing value for field {name} of type {field_type}')
    # Nested struct type: hash its fully-encoded data.
    if (field_type in types):
        return ('bytes32', keccak(encode_data(field_type, types, value)))
    if (field_type == 'bytes'):
        if (not isinstance(value, bytes)):
            raise TypeError(f'Value of field `{name}` ({value}) is of the type `{type(value)}`, but expected bytes value')
        return ('bytes32', keccak(value))
    if (field_type == 'string'):
        if (not isinstance(value, str)):
            raise TypeError(f'Value of field `{name}` ({value}) is of the type `{type(value)}`, but expected string value')
        return ('bytes32', keccak(text=value))
    if is_array_type(field_type):
        # Check each fixed dimension against the actual nested-list shape;
        # dynamic dimensions (empty arrlist entries) are skipped.
        array_dimensions = get_array_dimensions(value)
        parsed_field_type = parse(field_type)
        for i in range(len(array_dimensions)):
            if (len(parsed_field_type.arrlist[i]) == 0):
                continue
            if (array_dimensions[i] != parsed_field_type.arrlist[i][0]):
                raise TypeError(f"Array data `{value}` has dimensions `{array_dimensions}` whereas the schema has dimensions `{tuple(map((lambda x: (x[0] if x else 'dynamic')), parsed_field_type.arrlist))}`")
        # Recurse with one array level stripped off the type string.
        field_type_of_inside_array = field_type[:field_type.rindex('[')]
        field_type_value_pairs = [encode_field(types, name, field_type_of_inside_array, item) for item in value]
        if value:
            (data_types, data_hashes) = zip(*field_type_value_pairs)
        else:
            # Empty array: hash of the empty encoding.
            (data_types, data_hashes) = ([], [])
        return ('bytes32', keccak(encode(data_types, data_hashes)))
    if (not is_encodable_type(field_type)):
        raise TypeError(f'Received Invalid type `{field_type}` in field `{name}`')
    # Atomic ABI type: pass through unchanged if the value fits the type.
    if is_encodable(field_type, value):
        return (field_type, value)
    else:
        raise TypeError(f'Value of `{name}` ({value}) is not encodable as type `{field_type}`. If the base type is correct, verify that the value does not exceed the specified size for the type.')
@pytest.mark.django_db
def test_is_registered_any_way_with_is_attendee_false_and_is_registered_false_should_return_false(mocker, user1, event1):
    """is_registered_any_way is False when both underlying checks are False,
    and each check is consulted exactly once.

    NOTE(review): the pytest marker line was garbled to a bare '.django_db';
    restored as @pytest.mark.django_db.
    """
    mock_is_attendee = mocker.patch('manager.templatetags.filters.is_attendee')
    mock_is_registered = mocker.patch('manager.templatetags.filters.is_registered')
    mock_is_attendee.return_value = False
    mock_is_registered.return_value = False
    assert not filters.is_registered_any_way(user1, event1.event_slug)
    assert mock_is_attendee.called
    assert mock_is_registered.called
    mock_is_attendee.assert_called_once_with(user1, event1.event_slug)
    mock_is_registered.assert_called_once_with(user1, event1.event_slug)
class Solution(object):
    def hasGroupsSizeX(self, deck):
        """Return True iff the deck can be partitioned into groups of some
        common size X >= 2, each group containing identical cards -- i.e. the
        gcd of all card multiplicities is at least 2.

        Ported from Python 2: `fractions.gcd` (removed in 3.9) -> `math.gcd`,
        `dict.iteritems()` -> `.items()`, and an explicit empty-deck guard
        (the old code relied on py2's `None < 2` comparison).
        """
        import math
        from collections import Counter
        if not deck:
            return False
        g = 0  # gcd(0, c) == c, so the first count seeds the accumulator
        for count in Counter(deck).values():
            # A multiplicity below 2 (and a gcd that drops below 2) can
            # never recover, since gcd is non-increasing.
            if count < 2:
                return False
            g = math.gcd(g, count)
        return g >= 2
@pytest.mark.integration
@pytest.mark.ledger
def test_multiple_signatures_transaction_missing_pubkeys():
    """Building a multi-signer transaction without pubkeys must raise.

    NOTE(review): the marker lines were garbled to bare '.integration' /
    '.ledger'; restored as @pytest.mark.* markers.
    """
    fetchai_api = FetchAIApi(**FETCHAI_TESTNET_CONFIG)
    coins = [Coin(denom='DENOM', amount='1234')]
    msg_send = MsgSend(from_address=str('from'), to_address=str('to'), amount=coins)
    send_msg_packed = ProtoAny()
    send_msg_packed.Pack(msg_send, type_url_prefix='/')
    with pytest.raises(RuntimeError, match='Only transaction with one signer can be generated without pubkeys'):
        fetchai_api._get_transaction(account_numbers=[1, 2], from_addresses=['adr1', 'adr2'], chain_id='chain_id', tx_fee=coins, gas=1234, memo='MEMO', sequences=[1, 2], msgs=[send_msg_packed, send_msg_packed])
def test_job(mock_webapi, monkeypatch, tmp_path):
    """Exercise the Job container lifecycle against the mocked web API."""
    monkeypatch.setattr('tidy3d.web.api.container.Job.load', lambda *args, **kwargs: True)
    simulation = make_sim()
    job = Job(simulation=simulation, task_name=TASK_NAME, folder_name=PROJECT_NAME)
    _ = job.run(path=str(tmp_path / 'web_test_tmp.json'))
    _ = job.status
    job.estimate_cost()
    _ = job.delete
    assert job.real_cost() == FLEX_UNIT
def extractSandysreadreceiptsWeeblyCom(item):
    """Map tagged releases to release messages.

    Returns None for previews or chapterless items, a release message for the
    first matching tag, and False when no known tag is present.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tagmap.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class NuReleaseItem(Base):
    """SQLAlchemy model for a NovelUpdates release item scraped from the feed."""
    __tablename__ = 'nu_release_item'
    # BigInteger PK, downgraded to Integer on SQLite (no bigint autoincrement there).
    id = Column(BigInteger().with_variant(Integer, 'sqlite'), primary_key=True)
    # Resolved destination behind the outbound wrapper URL (filled in later).
    actual_target = Column(Text)
    page_title = Column(Text)
    # NOTE(review): default is the string '0' on an Integer column -- most
    # backends coerce it, but an int literal was probably intended; confirm.
    fetch_tries = Column(Integer, default='0')
    seriesname = Column(Text, nullable=False, index=True)
    releaseinfo = Column(Text)
    groupinfo = Column(Text, nullable=False, index=True)
    referrer = Column(Text, nullable=False)
    # The NU redirect wrapper URL; unique per release row.
    outbound_wrapper = Column(Text, nullable=False, unique=True)
    release_date = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
    # datetime.min sentinel means "not yet seen by the fetcher".
    first_seen = Column(DateTime, nullable=False, default=datetime.datetime.min)
    __table_args__ = (UniqueConstraint('seriesname', 'releaseinfo', 'groupinfo', 'outbound_wrapper'),)
class Solution():
    def increasingTriplet(self, nums: List[int]) -> bool:
        """Return True if nums contains indices i < j < k with
        nums[i] < nums[j] < nums[k], using a single pass with two running
        minima."""
        first = None
        second = None
        for value in nums:
            if first is None or value <= first:
                # Smallest tail of any length-1 increasing subsequence so far.
                first = value
            elif second is None or value <= second:
                # Smallest tail of any length-2 increasing subsequence so far.
                second = value
            else:
                # value extends a length-2 subsequence: triplet found.
                return True
        return False
def test_upload01_upload_a_file(dash_duo, testfile10Mb_csv):
    """Upload a 10 Mb file through the uploader component and verify the
    callback output and the file written to disk.

    NOTE(review): the XPath attribute tests had been garbled to "[='...']";
    restored as "[@id='...']".
    """
    app = import_app('usage')
    dash_duo.start_server(app)
    upload = dash_duo.find_element('#dash-uploader')
    upload_input = upload.find_element(By.XPATH, "//input[@id='dash-uploader-upload']")
    upload_input.send_keys(str(testfile10Mb_csv))
    upload_label = upload.find_element(By.XPATH, '//label')
    # Wait until the component label reports completion.
    wait = WebDriverWait(dash_duo._driver, 10)
    wait.until(EC.text_to_be_present_in_element((By.XPATH, "//div[@id='dash-uploader']/*/label"), 'Completed'))
    callback_output = dash_duo.find_element('#callback-output')
    uploaded_file = Path(callback_output.find_element(By.XPATH, '//ul').text)
    assert uploaded_file.name == testfile10Mb_csv.name
    assert uploaded_file.exists()
    assert uploaded_file.stat().st_size == testfile10Mb_csv.stat().st_size
    # Clean up the per-upload directory created by the component.
    shutil.rmtree(uploaded_file.parent)
class GetReceiptsTracker(BasePerformanceTracker[(GetReceiptsV65, ReceiptsBundles)]):
    """Performance tracker for GetReceipts request/response exchanges."""

    def _get_request_size(self, request: GetReceiptsV65) -> Optional[int]:
        # Number of block hashes requested.
        return len(request.payload)

    def _get_result_size(self, result: ReceiptsBundles) -> int:
        # Number of receipt bundles returned.
        return len(result)

    def _get_result_item_count(self, result: ReceiptsBundles) -> int:
        # Total receipts across all bundles; the trie data is ignored.
        return sum(len(receipts) for receipts, _trie_data in result)
def test_extract_datetime_features_from_specified_variables(df_datetime, df_datetime_transformed):
    """DatetimeFeatures must extract features only from the requested
    variables, leaving the remaining columns untouched and ordering the new
    feature columns by the requested variable order."""
    # Single variable passed as a plain string.
    X = DatetimeFeatures(variables='date_obj1').fit_transform(df_datetime)
    pd.testing.assert_frame_equal(X, df_datetime_transformed[((vars_non_dt + ['datetime_range', 'date_obj2', 'time_obj']) + [('date_obj1' + feat) for feat in feat_names_default])], check_dtype=False)
    # Two variables passed as a list.
    X = DatetimeFeatures(variables=['datetime_range', 'date_obj2']).fit_transform(df_datetime)
    pd.testing.assert_frame_equal(X, df_datetime_transformed[((vars_non_dt + ['date_obj1', 'time_obj']) + [(var + feat) for var in ['datetime_range', 'date_obj2'] for feat in feat_names_default])], check_dtype=False)
    # Same two variables in swapped order: output feature order must follow.
    X = DatetimeFeatures(variables=['date_obj2', 'date_obj1']).fit_transform(df_datetime)
    pd.testing.assert_frame_equal(X, df_datetime_transformed[((vars_non_dt + ['datetime_range', 'time_obj']) + [(var + feat) for var in ['date_obj2', 'date_obj1'] for feat in feat_names_default])], check_dtype=False)
    # Extracting from the index with an explicit feature subset.
    # NOTE(review): dates_idx_dt is not a parameter -- presumably a
    # module-level frame defined elsewhere in this test module; confirm.
    X = DatetimeFeatures(variables='index', features_to_extract=['month', 'day_of_month']).fit_transform(dates_idx_dt)
    pd.testing.assert_frame_equal(X, pd.concat([dates_idx_dt, pd.DataFrame([[2, 27], [2, 28], [3, 1], [3, 2]], index=dates_idx_dt.index, columns=['month', 'day_of_month'])], axis=1), check_dtype=False)
def check_same_event(room_ids):
    """Ensure all given rooms belong to a single event, then check access to it.

    Raises:
        ForbiddenError: the rooms span more than one event.
    """
    rooms = Microlocation.query.filter(Microlocation.id.in_(room_ids)).all()
    event_ids = {room.event_id for room in rooms}
    if len(event_ids) > 1:
        raise ForbiddenError({'pointer': '/data/relationships/rooms'}, 'Video Stream can only be created/edited with rooms of a single event')
    check_event_access(event_ids.pop())
@pytest.mark.parametrize(['operation', 'possible_results'], [(_plus(_plus(_c_i32(7), _c_i32(11)), _c_i32(42)), {_plus(_plus(_c_i32(0), _c_i32(0)), _c_i32(60)), _plus(_plus(_c_i32(0), _c_i32(60)), _c_i32(0)), _plus(_plus(_c_i32(60), _c_i32(0)), _c_i32(0))}), (_plus(_plus(_var_i32('a'), _c_i32(2)), _plus(_var_i32('b'), _c_i32(3))), {_plus(_plus(_var_i32('a'), _c_i32(5)), _plus(_var_i32('b'), _c_i32(0))), _plus(_plus(_var_i32('a'), _c_i32(0)), _plus(_var_i32('b'), _c_i32(5)))}), (_mul(_mul(_c_i32(7), _c_i32(11)), _c_i32(2)), {_mul(_mul(_c_i32(1), _c_i32(1)), _c_i32(154)), _mul(_mul(_c_i32(1), _c_i32(154)), _c_i32(1)), _mul(_mul(_c_i32(154), _c_i32(1)), _c_i32(1))}), (_mul(_mul(_var_i32('a'), _c_i32(2)), _mul(_var_i32('b'), _c_i32(3))), {_mul(_mul(_var_i32('a'), _c_i32(6)), _mul(_var_i32('b'), _c_i32(1))), _mul(_mul(_var_i32('a'), _c_i32(1)), _mul(_var_i32('b'), _c_i32(6)))}), (_mul_us(_mul_us(_c_i32(7), _c_i32(11)), _c_i32(2)), {_mul_us(_mul_us(_c_i32(1), _c_i32(1)), _c_i32(154)), _mul_us(_mul_us(_c_i32(1), _c_i32(154)), _c_i32(1)), _mul_us(_mul_us(_c_i32(154), _c_i32(1)), _c_i32(1))}), (_mul_us(_mul_us(_var_i32('a'), _c_i32(2)), _mul_us(_var_i32('b'), _c_i32(3))), {_mul_us(_mul_us(_var_i32('a'), _c_i32(6)), _mul_us(_var_i32('b'), _c_i32(1))), _mul_us(_mul_us(_var_i32('a'), _c_i32(1)), _mul_us(_var_i32('b'), _c_i32(6)))}), (_bit_and(_bit_and(_c_i32(7), _c_i32(11)), _c_i32(2)), {_bit_and(_bit_and(_c_i32((- 1)), _c_i32((- 1))), _c_i32(2)), _bit_and(_bit_and(_c_i32((- 1)), _c_i32(2)), _c_i32((- 1))), _bit_and(_bit_and(_c_i32(2), _c_i32((- 1))), _c_i32((- 1)))}), (_bit_and(_bit_and(_var_i32('a'), _c_i32(2)), _bit_and(_var_i32('b'), _c_i32(3))), {_bit_and(_bit_and(_var_i32('a'), _c_i32(2)), _bit_and(_var_i32('b'), _c_i32((- 1)))), _bit_and(_bit_and(_var_i32('a'), _c_i32((- 1))), _bit_and(_var_i32('b'), _c_i32(2)))}), (_bit_xor(_bit_xor(_c_i32(7), _c_i32(11)), _c_i32(2)), {_bit_xor(_bit_xor(_c_i32(0), _c_i32(0)), _c_i32(14)), _bit_xor(_bit_xor(_c_i32(0), _c_i32(14)), _c_i32(0)), 
_bit_xor(_bit_xor(_c_i32(14), _c_i32(0)), _c_i32(0))}), (_bit_xor(_bit_xor(_var_i32('a'), _c_i32(2)), _bit_xor(_var_i32('b'), _c_i32(3))), {_bit_xor(_bit_xor(_var_i32('a'), _c_i32(1)), _bit_xor(_var_i32('b'), _c_i32(0))), _bit_xor(_bit_xor(_var_i32('a'), _c_i32(0)), _bit_xor(_var_i32('b'), _c_i32(1)))}), (_bit_or(_bit_or(_c_i32(7), _c_i32(11)), _c_i32(2)), {_bit_or(_bit_or(_c_i32(0), _c_i32(0)), _c_i32(15)), _bit_or(_bit_or(_c_i32(0), _c_i32(15)), _c_i32(0)), _bit_or(_bit_or(_c_i32(15), _c_i32(0)), _c_i32(0))}), (_bit_or(_bit_or(_var_i32('a'), _c_i32(2)), _bit_or(_var_i32('b'), _c_i32(3))), {_bit_or(_bit_or(_var_i32('a'), _c_i32(3)), _bit_or(_var_i32('b'), _c_i32(0))), _bit_or(_bit_or(_var_i32('a'), _c_i32(0)), _bit_or(_var_i32('b'), _c_i32(3)))})])
def test_collect_terms(operation: Operation, possible_results: set[Expression]):
    """Applying CollapseNestedConstants to a fixpoint must land on one of the
    expected constant-collapsed forms.

    NOTE(review): the parametrize line had been garbled to a bare
    '.parametrize(...)'; restored as @pytest.mark.parametrize.
    """
    collect_terms = CollapseNestedConstants()
    for i in range(100):
        substitutions = collect_terms.apply(operation)
        if not substitutions:
            # Fixpoint reached: no more substitutions available.
            break
        for replacee, replacement in substitutions:
            new_operation = operation.accept(SubstituteVisitor.identity(replacee, replacement))
            if new_operation is not None:
                operation = new_operation
    else:
        raise RuntimeError('Max iterations exceeded')
    assert operation in possible_results
class ScarletMist(Skill):
    """Boss active skill: targets the user plus up to N other living
    characters, where N scales with the number of living accomplices."""
    associated_action = ScarletMistAction
    skill_category = ['character', 'active', 'once', 'boss']
    game: THBattleRole

    def check(self) -> bool:
        # Usable only when no cards are attached to the skill.
        return (not len(self.associated_cards))

    def target(self, src: Character, tl: Sequence[Character]):
        """Clamp the target list to src plus up to n living targets.

        n counts living accomplices: all accomplice roles minus the dead
        ones. Returns (targets, valid) per the Skill.target convention --
        NOTE(review): valid is bool(len(tl)) after inserting src, so it
        appears to always be True; confirm intended.
        """
        g = self.game
        from thb.thbrole import THBRoleRole
        n = sum(((i == THBRoleRole.ACCOMPLICE) for i in g.roles.values()))
        n -= sum(((ch.dead and (g.roles[ch.player] == THBRoleRole.ACCOMPLICE)) for ch in g.players))
        # Keep only living targets and force src to the front of the list.
        tl = [t for t in tl if (not t.dead)]
        try:
            tl.remove(src)
        except ValueError:
            pass
        tl.insert(0, src)
        return (tl[:(n + 1)], bool(len(tl)))
def ensure_base_types(f):
    """Decorator converting pysnmp/pyasn1 keyword arguments to plain Python
    types before calling *f*: ASN.1 scalars become strings (hex prefixes
    stripped), transport addresses become their IP string, keys are
    snake_cased, and nested dicts are converted recursively.

    NOTE(review): the inner decorator line had been garbled to a bare
    '(f)'; restored as '@wraps(f)'.
    """
    def to_base_types(item):
        # SnmpEngine -> its engine ID, then fall through to the scalar case.
        if isinstance(item, engine.SnmpEngine):
            item = item.snmpEngineID
        if isinstance(item, (univ.Integer, univ.OctetString, univ.ObjectIdentifier)):
            item = item.prettyPrint()
            # prettyPrint() may render octet strings as '0x...'; drop the prefix.
            if item.startswith('0x'):
                item = item[2:]
            return item
        if isinstance(item, (udp.UdpTransportAddress, udp6.Udp6TransportAddress)):
            # Keep only the IP part of the (ip, port) transport address.
            return str(item[0])
        return item

    def to_dct(dct):
        # Recursively convert keys (snake_cased) and values.
        items = {}
        for (k, v) in dct.items():
            k = to_base_types(k)
            k = camel2snake(k)
            if isinstance(v, dict):
                v = to_dct(v)
            else:
                v = to_base_types(v)
            items[k] = v
        return items

    @wraps(f)
    def decorated_function(*args, **kwargs):
        return f(*args, **to_dct(kwargs))
    return decorated_function
def get_headers(inquiry_code: str, data: str) -> dict[(str, Any)]:
    """Build the nyanko API request headers, including the HMAC-SHA256
    signature and current timestamp for the given inquiry code and payload."""
    headers = {
        'accept-encoding': 'gzip',
        'connection': 'keep-alive',
        'content-type': 'application/json',
        'nyanko-signature': generate_nyanko_signature(inquiry_code, data),
        'nyanko-timestamp': str(get_current_time()),
        'nyanko-signature-version': '1',
        'nyanko-signature-algorithm': 'HMACSHA256',
        'user-agent': 'Dalvik/2.1.0 (Linux; U; Android 9; SM-G955F Build/N2G48B)',
    }
    return headers
def duplicate_path_fragments(url, dup_max=3):
    """Return True when any path segment of *url* repeats *dup_max* or more
    times (a common symptom of crawler URL loops like /a/a/a/...).

    Args:
        url: absolute or relative URL whose path component is inspected.
        dup_max: repetition threshold; default 3.
    """
    from collections import Counter  # stdlib; counts segments in one pass
    path = urllib.parse.urlparse(url).path
    segment_counts = Counter(pathlib.Path(path).parts)
    return any(count >= dup_max for count in segment_counts.values())
@default_renderer(wrap_type=TestValueMeanError)
class TestValueMeanErrorRenderer(TestRenderer):
    """HTML renderer for TestValueMeanError: adds a mean-error distribution
    plot with the test condition and current mean error overlaid.

    NOTE(review): the decorator line had been garbled to a bare
    '_renderer(wrap_type=...)'; restored as '@default_renderer(...)' --
    confirm the registration helper's name against the surrounding module.
    """

    def render_html(self, obj: TestValueMeanError) -> TestHtmlInfo:
        info = super().render_html(obj)
        metric_result = obj.metric.get_result()
        me_hist_for_plot = metric_result.me_hist_for_plot
        hist_curr = me_hist_for_plot.current
        hist_ref = me_hist_for_plot.reference
        fig = plot_distr_with_cond_perc_button(
            hist_curr=hist_curr,
            hist_ref=hist_ref,
            xaxis_name='',
            yaxis_name='count',
            yaxis_name_perc='percent',
            color_options=self.color_options,
            to_json=False,
            condition=obj.get_condition(),
            value=metric_result.current.mean_error,
            value_name='current mean error',
        )
        info.with_details('', plotly_figure(title='', figure=fig))
        return info
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.