content stringlengths 0 1.55M |
|---|
<import_from_stmt>functools partial<import_stmt>torch<import_stmt>torch.nn<as>nn<import_stmt>torch.nn.functional<as>F<import_from_stmt>mmcv.cnn build_conv_layer<import_from_stmt>mmcv.runner load_checkpoint<import_from_stmt>mmedit.models.common PixelShufflePack ResidualBlockNoBN make_layer <import_from_stmt>mmedit.models.registry BACKBONES<import_from_stmt>mmedit.utils get_root_logger<line_sep># Use partial to specify some default arguments
_conv3x3_layer=partial(build_conv_layer dict(type='Conv2d') kernel_size=3 padding=1)<line_sep>_conv1x1_layer=partial(build_conv_layer dict(type='Conv2d') kernel_size=1 padding=0)<class_stmt>SFE(nn.Module)<block_start>"""Structural Feature Encoder
Backbone of Texture Transformer Network for Image Super-Resolution.
Args:
in_channels (int): Number of channels in the input image
mid_channels (int): Channel number of intermediate features
num_blocks (int): Block number in the trunk network
res_scale (float): Used to scale the residual in residual block.
Default: 1.
"""<def_stmt>__init__ self in_channels mid_channels num_blocks res_scale<block_start>super().__init__()<line_sep>self.num_blocks=num_blocks<line_sep>self.conv_first=_conv3x3_layer(in_channels mid_channels)<line_sep>self.body=make_layer(ResidualBlockNoBN num_blocks mid_channels=mid_channels res_scale=res_scale)<line_sep>self.conv_last=_conv3x3_layer(mid_channels mid_channels)<block_end><def_stmt>forward self x<block_start>"""Forward function.
Args:
x (Tensor): Input tensor with shape (n, c, h, w).
Returns:
Tensor: Forward results.
"""<line_sep>x1=x=F.relu(self.conv_first(x))<line_sep>x=self.body(x)<line_sep>x=self.conv_last(x)<line_sep>x=x+x1<line_sep><return>x<block_end><block_end><class_stmt>CSFI2(nn.Module)<block_start>"""Cross-Scale Feature Integration between 1x and 2x features.
Cross-Scale Feature Integration in Texture Transformer Network for
Image Super-Resolution.
It is cross-scale feature integration between 1x and 2x features.
For example, `conv2to1` means conv layer from 2x feature to 1x
feature. Down-sampling is achieved by conv layer with stride=2,
and up-sampling is achieved by bicubic interpolate and conv layer.
Args:
mid_channels (int): Channel number of intermediate features
"""<def_stmt>__init__ self mid_channels<block_start>super().__init__()<line_sep>self.conv1to2=_conv1x1_layer(mid_channels mid_channels)<line_sep>self.conv2to1=_conv3x3_layer(mid_channels mid_channels stride=2)<line_sep>self.conv_merge1=_conv3x3_layer(mid_channels<times>2 mid_channels)<line_sep>self.conv_merge2=_conv3x3_layer(mid_channels<times>2 mid_channels)<block_end><def_stmt>forward self x1 x2<block_start>"""Forward function.
Args:
x1 (Tensor): Input tensor with shape (n, c, h, w).
x2 (Tensor): Input tensor with shape (n, c, 2h, 2w).
Returns:
x1 (Tensor): Output tensor with shape (n, c, h, w).
x2 (Tensor): Output tensor with shape (n, c, 2h, 2w).
"""<line_sep>x12=F.interpolate(x1 scale_factor=2 mode='bicubic' align_corners=<false>)<line_sep>x12=F.relu(self.conv1to2(x12))<line_sep>x21=F.relu(self.conv2to1(x2))<line_sep>x1=F.relu(self.conv_merge1(torch.cat((x1 x21) dim=1)))<line_sep>x2=F.relu(self.conv_merge2(torch.cat((x2 x12) dim=1)))<line_sep><return>x1 x2<block_end><block_end><class_stmt>CSFI3(nn.Module)<block_start>"""Cross-Scale Feature Integration between 1x, 2x, and 4x features.
Cross-Scale Feature Integration in Texture Transformer Network for
Image Super-Resolution.
It is cross-scale feature integration between 1x and 2x features.
For example, `conv2to1` means conv layer from 2x feature to 1x
feature. Down-sampling is achieved by conv layer with stride=2,
and up-sampling is achieved by bicubic interpolate and conv layer.
Args:
mid_channels (int): Channel number of intermediate features
"""<def_stmt>__init__ self mid_channels<block_start>super().__init__()<line_sep>self.conv1to2=_conv1x1_layer(mid_channels mid_channels)<line_sep>self.conv1to4=_conv1x1_layer(mid_channels mid_channels)<line_sep>self.conv2to1=_conv3x3_layer(mid_channels mid_channels stride=2)<line_sep>self.conv2to4=_conv1x1_layer(mid_channels mid_channels)<line_sep>self.conv4to1_1=_conv3x3_layer(mid_channels mid_channels stride=2)<line_sep>self.conv4to1_2=_conv3x3_layer(mid_channels mid_channels stride=2)<line_sep>self.conv4to2=_conv3x3_layer(mid_channels mid_channels stride=2)<line_sep>self.conv_merge1=_conv3x3_layer(mid_channels<times>3 mid_channels)<line_sep>self.conv_merge2=_conv3x3_layer(mid_channels<times>3 mid_channels)<line_sep>self.conv_merge4=_conv3x3_layer(mid_channels<times>3 mid_channels)<block_end><def_stmt>forward self x1 x2 x4<block_start>"""Forward function.
Args:
x1 (Tensor): Input tensor with shape (n, c, h, w).
x2 (Tensor): Input tensor with shape (n, c, 2h, 2w).
x4 (Tensor): Input tensor with shape (n, c, 4h, 4w).
Returns:
x1 (Tensor): Output tensor with shape (n, c, h, w).
x2 (Tensor): Output tensor with shape (n, c, 2h, 2w).
x4 (Tensor): Output tensor with shape (n, c, 4h, 4w).
"""<line_sep>x12=F.interpolate(x1 scale_factor=2 mode='bicubic' align_corners=<false>)<line_sep>x12=F.relu(self.conv1to2(x12))<line_sep>x14=F.interpolate(x1 scale_factor=4 mode='bicubic' align_corners=<false>)<line_sep>x14=F.relu(self.conv1to4(x14))<line_sep>x21=F.relu(self.conv2to1(x2))<line_sep>x24=F.interpolate(x2 scale_factor=2 mode='bicubic' align_corners=<false>)<line_sep>x24=F.relu(self.conv2to4(x24))<line_sep>x41=F.relu(self.conv4to1_1(x4))<line_sep>x41=F.relu(self.conv4to1_2(x41))<line_sep>x42=F.relu(self.conv4to2(x4))<line_sep>x1=F.relu(self.conv_merge1(torch.cat((x1 x21 x41) dim=1)))<line_sep>x2=F.relu(self.conv_merge2(torch.cat((x2 x12 x42) dim=1)))<line_sep>x4=F.relu(self.conv_merge4(torch.cat((x4 x14 x24) dim=1)))<line_sep><return>x1 x2 x4<block_end><block_end><class_stmt>MergeFeatures(nn.Module)<block_start>"""Merge Features. Merge 1x, 2x, and 4x features.
Final module of Texture Transformer Network for Image Super-Resolution.
Args:
mid_channels (int): Channel number of intermediate features
out_channels (int): Number of channels in the output image
"""<def_stmt>__init__ self mid_channels out_channels<block_start>super().__init__()<line_sep>self.conv1to4=_conv1x1_layer(mid_channels mid_channels)<line_sep>self.conv2to4=_conv1x1_layer(mid_channels mid_channels)<line_sep>self.conv_merge=_conv3x3_layer(mid_channels<times>3 mid_channels)<line_sep>self.conv_last1=_conv3x3_layer(mid_channels mid_channels<floordiv>2)<line_sep>self.conv_last2=_conv1x1_layer(mid_channels<floordiv>2 out_channels)<block_end><def_stmt>forward self x1 x2 x4<block_start>"""Forward function.
Args:
x1 (Tensor): Input tensor with shape (n, c, h, w).
x2 (Tensor): Input tensor with shape (n, c, 2h, 2w).
x4 (Tensor): Input tensor with shape (n, c, 4h, 4w).
Returns:
x (Tensor): Output tensor with shape (n, c_out, 4h, 4w).
"""<line_sep>x14=F.interpolate(x1 scale_factor=4 mode='bicubic' align_corners=<false>)<line_sep>x14=F.relu(self.conv1to4(x14))<line_sep>x24=F.interpolate(x2 scale_factor=2 mode='bicubic' align_corners=<false>)<line_sep>x24=F.relu(self.conv2to4(x24))<line_sep>x=F.relu(self.conv_merge(torch.cat((x4 x14 x24) dim=1)))<line_sep>x=self.conv_last1(x)<line_sep>x=self.conv_last2(x)<line_sep>x=torch.clamp(x -1 1)<line_sep><return>x<block_end><block_end>@BACKBONES.register_module()<class_stmt>TTSRNet(nn.Module)<block_start>"""TTSR network structure (main-net) for reference-based super-resolution.
Paper: Learning Texture Transformer Network for Image Super-Resolution
Adapted from 'https://github.com/researchmm/TTSR.git'
'https://github.com/researchmm/TTSR'
Copyright permission at 'https://github.com/researchmm/TTSR/issues/38'.
Args:
in_channels (int): Number of channels in the input image
out_channels (int): Number of channels in the output image
mid_channels (int): Channel number of intermediate features.
Default: 64
num_blocks (tuple[int]): Block numbers in the trunk network.
Default: (16, 16, 8, 4)
res_scale (float): Used to scale the residual in residual block.
Default: 1.
"""<def_stmt>__init__ self in_channels out_channels mid_channels=64 texture_channels=64 num_blocks=(16 16 8 4) res_scale=1.0<block_start>super().__init__()<line_sep>self.texture_channels=texture_channels<line_sep>self.sfe=SFE(in_channels mid_channels num_blocks[0] res_scale)<line_sep># stage 1
self.conv_first1=_conv3x3_layer(4<times>texture_channels+mid_channels mid_channels)<line_sep>self.res_block1=make_layer(ResidualBlockNoBN num_blocks[1] mid_channels=mid_channels res_scale=res_scale)<line_sep>self.conv_last1=_conv3x3_layer(mid_channels mid_channels)<line_sep># up-sampling 1 -> 2
self.up1=PixelShufflePack(in_channels=mid_channels out_channels=mid_channels scale_factor=2 upsample_kernel=3)<line_sep># stage 2
self.conv_first2=_conv3x3_layer(2<times>texture_channels+mid_channels mid_channels)<line_sep>self.csfi2=CSFI2(mid_channels)<line_sep>self.res_block2_1=make_layer(ResidualBlockNoBN num_blocks[2] mid_channels=mid_channels res_scale=res_scale)<line_sep>self.res_block2_2=make_layer(ResidualBlockNoBN num_blocks[2] mid_channels=mid_channels res_scale=res_scale)<line_sep>self.conv_last2_1=_conv3x3_layer(mid_channels mid_channels)<line_sep>self.conv_last2_2=_conv3x3_layer(mid_channels mid_channels)<line_sep># up-sampling 2 -> 3
self.up2=PixelShufflePack(in_channels=mid_channels out_channels=mid_channels scale_factor=2 upsample_kernel=3)<line_sep># stage 3
self.conv_first3=_conv3x3_layer(texture_channels+mid_channels mid_channels)<line_sep>self.csfi3=CSFI3(mid_channels)<line_sep>self.res_block3_1=make_layer(ResidualBlockNoBN num_blocks[3] mid_channels=mid_channels res_scale=res_scale)<line_sep>self.res_block3_2=make_layer(ResidualBlockNoBN num_blocks[3] mid_channels=mid_channels res_scale=res_scale)<line_sep>self.res_block3_3=make_layer(ResidualBlockNoBN num_blocks[3] mid_channels=mid_channels res_scale=res_scale)<line_sep>self.conv_last3_1=_conv3x3_layer(mid_channels mid_channels)<line_sep>self.conv_last3_2=_conv3x3_layer(mid_channels mid_channels)<line_sep>self.conv_last3_3=_conv3x3_layer(mid_channels mid_channels)<line_sep># end, merge features
self.merge_features=MergeFeatures(mid_channels out_channels)<block_end><def_stmt>forward self x soft_attention textures<block_start>"""Forward function.
Args:
x (Tensor): Input tensor with shape (n, c, h, w).
soft_attention (Tensor): Soft-Attention tensor with shape
(n, 1, h, w).
textures (Tuple[Tensor]): Transferred HR texture tensors.
[(N, C, H, W), (N, C/2, 2H, 2W), ...]
Returns:
Tensor: Forward results.
"""<assert_stmt>textures[-1].shape[1]<eq>self.texture_channels<line_sep>x1=self.sfe(x)<line_sep># stage 1
x1_res=torch.cat((x1 textures[0]) dim=1)<line_sep>x1_res=self.conv_first1(x1_res)<line_sep># soft-attention
x1=x1+x1_res<times>soft_attention<line_sep>x1_res=self.res_block1(x1)<line_sep>x1_res=self.conv_last1(x1_res)<line_sep>x1=x1+x1_res<line_sep># stage 2
x21=x1<line_sep>x22=self.up1(x1)<line_sep>x22=F.relu(x22)<line_sep>x22_res=torch.cat((x22 textures[1]) dim=1)<line_sep>x22_res=self.conv_first2(x22_res)<line_sep># soft-attention
x22_res=x22_res<times>F.interpolate(soft_attention scale_factor=2 mode='bicubic' align_corners=<false>)<line_sep>x22=x22+x22_res<line_sep>x21_res,x22_res=self.csfi2(x21 x22)<line_sep>x21_res=self.res_block2_1(x21_res)<line_sep>x22_res=self.res_block2_2(x22_res)<line_sep>x21_res=self.conv_last2_1(x21_res)<line_sep>x22_res=self.conv_last2_2(x22_res)<line_sep>x21=x21+x21_res<line_sep>x22=x22+x22_res<line_sep># stage 3
x31=x21<line_sep>x32=x22<line_sep>x33=self.up2(x22)<line_sep>x33=F.relu(x33)<line_sep>x33_res=torch.cat((x33 textures[2]) dim=1)<line_sep>x33_res=self.conv_first3(x33_res)<line_sep># soft-attention
x33_res=x33_res<times>F.interpolate(soft_attention scale_factor=4 mode='bicubic' align_corners=<false>)<line_sep>x33=x33+x33_res<line_sep>x31_res,x32_res,x33_res=self.csfi3(x31 x32 x33)<line_sep>x31_res=self.res_block3_1(x31_res)<line_sep>x32_res=self.res_block3_2(x32_res)<line_sep>x33_res=self.res_block3_3(x33_res)<line_sep>x31_res=self.conv_last3_1(x31_res)<line_sep>x32_res=self.conv_last3_2(x32_res)<line_sep>x33_res=self.conv_last3_3(x33_res)<line_sep>x31=x31+x31_res<line_sep>x32=x32+x32_res<line_sep>x33=x33+x33_res<line_sep>x=self.merge_features(x31 x32 x33)<line_sep><return>x<block_end><def_stmt>init_weights self pretrained=<none> strict=<true><block_start>"""Init weights for models.
Args:
pretrained (str, optional): Path for pretrained weights. If given
None, pretrained weights will not be loaded. Defaults to None.
strict (boo, optional): Whether strictly load the pretrained model.
Defaults to True.
"""<if_stmt>isinstance(pretrained str)<block_start>logger=get_root_logger()<line_sep>load_checkpoint(self pretrained strict=strict logger=logger)<block_end><elif_stmt>pretrained<is><none><block_start><pass># use default initialization
<block_end><else_stmt><block_start><raise>TypeError('"pretrained" must be a str or None. '<concat>f'But received {type(pretrained)}.')<block_end><block_end><block_end> |
<import_from_stmt>django.conf settings<def_stmt>_ellipse_bbox x y height<block_start>x<augmul>settings.RENDER_SCALE<line_sep>y<augmul>settings.RENDER_SCALE<line_sep>y=height-y<line_sep><return>((x-2 y-2) (x+2 y+2))<block_end><def_stmt>_line_coords from_point to_point height<block_start><return>(from_point.x<times>settings.RENDER_SCALE height-(from_point.y<times>settings.RENDER_SCALE) to_point.x<times>settings.RENDER_SCALE height-(to_point.y<times>settings.RENDER_SCALE))<block_end> |
# pylint: disable=unused-argument, no-self-use
<import_from_stmt>marshmallow Schema fields EXCLUDE post_load<import_from_stmt>draft_kings.response.objects.draft_group ContestType League Game DraftGroup DraftGroupResponse<class_stmt>ContestTypeSchema(Schema)<block_start><class_stmt>Meta<block_start>unknown=EXCLUDE<block_end>contestTypeId=fields.Int(attribute="contest_type_id" missing=<none>)<line_sep>gameType=fields.Str(attribute="game_type" missing=<none>)<line_sep>@post_load<def_stmt>make_contest_type self data **kwargs<block_start><return>ContestType(**data)<block_end><block_end><class_stmt>LeagueSchema(Schema)<block_start><class_stmt>Meta<block_start>unknown=EXCLUDE<block_end>leagueAbbreviation=fields.Str(attribute="league_abbreviation" missing=<none>)<line_sep>leagueId=fields.Int(attribute="league_id" missing=<none>)<line_sep>leagueName=fields.Str(attribute="league_name" missing=<none>)<line_sep>@post_load<def_stmt>make_league self data **kwargs<block_start><return>League(**data)<block_end><block_end><class_stmt>GameSchema(Schema)<block_start><class_stmt>Meta<block_start>unknown=EXCLUDE<block_end>awayTeamId=fields.Int(attribute="away_team_id" missing=<none>)<line_sep>description=fields.Str(attribute="description" missing=<none>)<line_sep>gameId=fields.Int(attribute="game_id" missing=<none>)<line_sep>homeTeamId=fields.Int(attribute="home_team_id" missing=<none>)<line_sep>location=fields.Str(attribute="location" missing=<none>)<line_sep>name=fields.Str(attribute="name" missing=<none>)<line_sep>startDate=fields.AwareDateTime(attribute="start_date" missing=<none>)<line_sep>status=fields.Str(attribute="status" missing=<none>)<line_sep>@post_load<def_stmt>make_game self data **kwargs<block_start><return>Game(**data)<block_end><block_end><class_stmt>DraftGroupSchema(Schema)<block_start><class_stmt>Meta<block_start>unknown=EXCLUDE<block_end>contestType=fields.Nested(ContestTypeSchema attribute="contest_type" required=<true>)<line_sep>draftGroupId=fields.Int(attribute="draft_group_id" 
missing=<none>)<line_sep>draftGroupState=fields.Str(attribute="draft_group_state" missing=<none>)<line_sep>games=fields.List(fields.Nested(GameSchema required=<true>) attribute="games" missing=[])<line_sep>leagues=fields.List(fields.Nested(LeagueSchema required=<true>) attribute="leagues" missing=[])<line_sep>maxStartTime=fields.AwareDateTime(attribute="max_start_time" missing=<none>)<line_sep>minStartTime=fields.AwareDateTime(attribute="min_start_time" missing=<none>)<line_sep>sportId=fields.Int(attribute="sport_id" missing=<none>)<line_sep>startTimeType=fields.Str(attribute="start_time_type" missing=<none>)<line_sep>@post_load<def_stmt>make_draft_group self data **kwargs<block_start><return>DraftGroup(**data)<block_end><block_end><class_stmt>DraftGroupResponseSchema(Schema)<block_start><class_stmt>Meta<block_start>unknown=EXCLUDE<block_end>draftGroup=fields.Nested(DraftGroupSchema attribute="draft_group" required=<true>)<line_sep>@post_load<def_stmt>make_draft_group_response self data **kwargs<block_start><return>DraftGroupResponse(**data)<block_end><block_end># pylint: enable=unused-argument, no-self-use
|
# -*- coding: utf-8 -*-
"""
Microsoft-Windows-WMPNSS-PublicAPI
GUID : 614696c9-85af-4e64-b389-d2c0db4ff87b
"""<import_from_stmt>construct Int8sl Int8ul Int16ul Int16sl Int32sl Int32ul Int64sl Int64ul Bytes Double Float32l Struct<import_from_stmt>etl.utils WString CString SystemTime Guid<import_from_stmt>etl.dtyp Sid<import_from_stmt>etl.parsers.etw.core Etw declare guid<line_sep>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=100 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_100_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=101 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_101_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=102 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_102_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=103 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_103_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=104 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_104_0(Etw)<block_start>pattern=Struct("LibraryName"/WString "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=105 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_105_0(Etw)<block_start>pattern=Struct("LibraryName"/WString "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=106 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_106_0(Etw)<block_start>pattern=Struct("LibraryName"/WString "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=107 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_107_0(Etw)<block_start>pattern=Struct("LibraryName"/WString 
"HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=108 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_108_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=109 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_109_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=110 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_110_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=111 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_111_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=112 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_112_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=113 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_113_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=114 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_114_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=115 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_115_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=116 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_116_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") 
event_id=117 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_117_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=118 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_118_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=119 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_119_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=120 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_120_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=121 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_121_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=122 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_122_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=123 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_123_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=124 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_124_0(Etw)<block_start>pattern=Struct("MACAddress"/WString "FriendlyName"/WString "Authorize"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=125 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_125_0(Etw)<block_start>pattern=Struct("MACAddress"/WString "FriendlyName"/WString "Authorize"/Int8ul 
"HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=126 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_126_0(Etw)<block_start>pattern=Struct("MACAddress"/WString "Authorize"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=127 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_127_0(Etw)<block_start>pattern=Struct("MACAddress"/WString "Authorize"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=128 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_128_0(Etw)<block_start>pattern=Struct("Devices"/Int64ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=129 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_129_0(Etw)<block_start>pattern=Struct("Devices"/Int64ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=130 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_130_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=131 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_131_0(Etw)<block_start>pattern=Struct("Enable"/Int8ul "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=132 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_132_0(Etw)<block_start>pattern=Struct("DeviceID"/WString "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=133 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_133_0(Etw)<block_start>pattern=Struct("DeviceID"/WString "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=134 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_134_0(Etw)<block_start>pattern=Struct("SecurityGroup"/WString 
"HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=135 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_135_0(Etw)<block_start>pattern=Struct("SecurityGroup"/WString "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=136 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_136_0(Etw)<block_start>pattern=Struct("SecurityGroup"/WString "HResult"/Int32ul)<block_end>@declare(guid=guid("614696c9-85af-4e64-b389-d2c0db4ff87b") event_id=137 version=0)<class_stmt>Microsoft_Windows_WMPNSS_PublicAPI_137_0(Etw)<block_start>pattern=Struct("SecurityGroup"/WString "HResult"/Int32ul)<block_end> |
<import_from_stmt>types SimpleNamespace<import_stmt>pytest<import_from_stmt>mock patch MagicMock<import_from_stmt>backend.lambdas.tasks.check_queue_size handler<line_sep>pytestmark=[pytest.mark.unit pytest.mark.task]<line_sep>@patch("backend.lambdas.tasks.check_queue_size.sqs")<def_stmt>test_it_returns_correct_queue_size mock_resource<block_start>mock_queue=MagicMock()<line_sep>mock_resource.Queue.return_value=mock_queue<line_sep>mock_queue.attributes={"ApproximateNumberOfMessages":"4" "ApproximateNumberOfMessagesNotVisible":"2" }<line_sep>event={"QueueUrl":"queue_url"}<line_sep>resp=handler(event SimpleNamespace())<assert_stmt>{"Visible":4 "NotVisible":2 "Total":6}<eq>resp<block_end> |
<import_stmt>pandas<as>pd<import_from_stmt>tensortrade.feed Stream<import_from_stmt>tests.utils.ops assert_op<def_stmt>test_add # (left, right) : (Stream, Stream)
<block_start>s1=Stream.source([3 -4 6 -7 2 -6] dtype="float")<line_sep>s2=Stream.source([-3 4 -6 7 -2 6] dtype="float")<line_sep>w1=s1.add(s2).rename("w1")<line_sep>w2=(s1+s2).rename("w2")<line_sep>assert_op([w1 w2] 6<times>[0])<line_sep># (left, right) : (Stream, float)
s1=Stream.source([1 2 3 4 5 6] dtype="float")<line_sep>s2=1<line_sep>w1=s1.add(s2).rename("w1")<line_sep>w2=(s1+s2).rename("w2")<line_sep>assert_op([w1 w2] [2 3 4 5 6 7])<block_end><def_stmt>test_radd # (left, right) : (float, Stream)
<block_start>s1=1<line_sep>s2=Stream.source([1 2 3 4 5 6] dtype="float")<line_sep>w=(s1+s2).rename("w")<line_sep>assert_op([w] [2 3 4 5 6 7])<block_end><def_stmt>test_sub <block_start>expected=[0 1 2 3 4 5]<line_sep># (left, right) : (Stream, Stream)
s1=Stream.source([1 2 3 4 5 6] dtype="float")<line_sep>s2=Stream.source([1 1 1 1 1 1] dtype="float")<line_sep>w1=s1.sub(s2).rename("w1")<line_sep>w2=(s1-s2).rename("w2")<line_sep>assert_op([w1 w2] expected)<line_sep># (left, right) : (Stream, float)
w1=s1.sub(1).rename("w1")<line_sep>w2=(s1-1).rename("w2")<line_sep>assert_op([w1 w2] expected)<block_end><def_stmt>test_rsub # (left, right) : (float, Stream)
<block_start>s1=6<line_sep>s2=Stream.source([1 2 3 4 5 6] dtype="float")<line_sep>w=(s1-s2).rename("w")<line_sep>assert_op([w] [5 4 3 2 1 0])<block_end><def_stmt>test_mul <block_start>expected=[2 4 6 8 10 12]<line_sep># (left, right) : (Stream, Stream)
s1=Stream.source([1 2 3 4 5 6] dtype="float")<line_sep>s2=Stream.source([2 2 2 2 2 2] dtype="float")<line_sep>w1=s1.mul(s2).rename("w1")<line_sep>w2=(s1<times>s2).rename("w2")<line_sep>assert_op([w1 w2] expected)<line_sep># (left, right) : (Stream, float)
w1=s1.mul(2).rename("w1")<line_sep>w2=(s1<times>2).rename("w2")<line_sep>assert_op([w1 w2] expected)<block_end><def_stmt>test_rmul <block_start>expected=[2 4 6 8 10 12]<line_sep># (left, right) : (Stream, Stream)
s=Stream.source([1 2 3 4 5 6] dtype="float")<line_sep># (left, right) : (Stream, float)
w=(2<times>s).rename("w")<line_sep>assert_op([w] expected)<block_end><def_stmt>test_div <block_start>expected=[1 2 3 4 5 6]<line_sep># (left, right) : (Stream, Stream)
s1=Stream.source([2 4 6 8 10 12] dtype="float")<line_sep>s2=Stream.source([2 2 2 2 2 2] dtype="float")<line_sep>w1=s1.div(s2).rename("w1")<line_sep>w2=(s1/s2).rename("w2")<line_sep>assert_op([w1 w2] expected)<line_sep># (left, right) : (Stream, float)
w1=s1.div(2).rename("w1")<line_sep>w2=(s1/2).rename("w2")<line_sep>assert_op([w1 w2] expected)<block_end><def_stmt>test_rdiv <block_start>expected=[6 3 2 3/2 6/5 1]<line_sep># (left, right) : (Stream, Stream)
s=Stream.source([2 4 6 8 10 12] dtype="float")<line_sep># (left, right) : (Stream, float)
w=(12/s).rename("w")<line_sep>assert_op([w] expected)<block_end><def_stmt>test_abs <block_start>s=Stream.source([3 -4 6 -7 2 -6] dtype="float")<line_sep>s1=s.abs().rename("s1")<line_sep>s2=abs(s).rename("s2")<line_sep>assert_op([s1 s2] [3 4 6 7 2 6])<block_end><def_stmt>test_neg <block_start>s=Stream.source([3 -4 6 -7 2 -6] dtype="float")<line_sep>s1=s.neg().rename("s1")<line_sep>s2=(-s).rename("s2")<line_sep>assert_op([s1 s2] [-3 4 -6 7 -2 6])<block_end><def_stmt>test_pow <block_start>array=[1 -2 3 -4 5 -6]<line_sep>s=Stream.source(array dtype="float")<line_sep>s1=s.pow(3).rename("s1")<line_sep>s2=(s<power>3).rename("s2")<line_sep>expected=list(pd.Series(array)<power>3)<line_sep>assert_op([s1 s2] expected)<block_end> |
<import_from_stmt>django.core.paginator Paginator EmptyPage PageNotAnInteger<import_from_stmt>django forms<import_from_stmt>django.utils.safestring mark_safe<import_from_stmt>django.forms widgets<class_stmt>HorizontalRadioSelect(widgets.RadioSelect)<block_start>input_type='radio'<line_sep>template_name='radio-horizontal.html'<line_sep>option_template_name='django/forms/widgets/radio_option.html'<block_end><class_stmt>NoBulletsRadioSelect(widgets.RadioSelect)<block_start>input_type='radio'<line_sep>template_name='radio-nobullets.html'<line_sep>option_template_name='django/forms/widgets/radio_option.html'<block_end># class HorizontalRenderer(forms.RadioSelect.renderer):
# def render(self):
# return mark_safe(u'\n'.join([u'%s' % w for w in self]))
#
# class NoBulletsRenderer(forms.RadioSelect.renderer):
# def render(self):
# return mark_safe(u'<br />\n'.join([u'%s' % w for w in self]))
<def_stmt>store_session_form session form_class data<block_start>session[form_class.__name__]=data<block_end><def_stmt>get_session_form session form_class **kwargs<block_start><if_stmt>session.get(form_class.__name__)<block_start>form=form_class(session[form_class.__name__] **kwargs)<line_sep>form.is_valid()<del_stmt>session[form_class.__name__]<block_end><else_stmt><block_start>form=form_class(**kwargs)<block_end><return>form<block_end><def_stmt>dict_pack struct_tuple data_tuple<block_start>pack=[]<for_stmt>data data_tuple<block_start>index=0<line_sep>packed_data={}<for_stmt>key struct_tuple<block_start>packed_data.update({key:data[index]})<line_sep>index<augadd>1<block_end>pack<augadd>[packed_data]<block_end><return>tuple(pack)<block_end><def_stmt>choicify choice_dict_list database_value human_readable_value<block_start>choice_list=[]<for_stmt>d choice_dict_list<block_start>choice_list.append((d[database_value] d[human_readable_value]))<block_end><return>tuple(choice_list)<block_end><def_stmt>get_page object_list page_size page_number<block_start>paginator=Paginator(object_list page_size)<try_stmt><block_start>page=paginator.page(page_number)<block_end><except_stmt>PageNotAnInteger<block_start>page=paginator.page(1)<block_end><except_stmt>EmptyPage<block_start>page=paginator.page(paginator.num_pages)<block_end><return>page<block_end> |
import threading
import logging
import time


class ThreadHelperException(Exception):
    """Raised for errors in :class:`ThreadHelper` usage."""
    pass


class ThreadHelper(threading.Thread):
    """Base frame for the worker threads used in the framework.

    Provides cooperative suspend/resume/shutdown on top of
    ``threading.Thread``.  Subclasses are expected to override :meth:`run`
    and poll ``self.running`` / ``self.shutDownBool`` the same way the
    default implementation does.
    """

    # ------------------------------------------------------------------ #
    # initialization                                                      #
    # ------------------------------------------------------------------ #
    def __init__(self):
        """Initialize synchronization primitives and thread state."""
        threading.Thread.__init__(self)
        # protects the running / shutDownBool flags
        self.mutex = threading.Lock()
        # signaled by resume()/shutDown() to wake a suspended thread
        self.eventWakeup = threading.Event()
        self.running = True
        self.shutDownBool = False
        # daemon thread: dies with the main process
        # (modern attribute spelling of the deprecated setDaemon(True))
        self.daemon = True

    # ------------------------------------------------------------------ #
    # suspend                                                             #
    # ------------------------------------------------------------------ #
    def suspend(self):
        """Suspend the thread; run() blocks until resume()/shutDown()."""
        with self.mutex:
            self.running = False
            # Re-arm the wakeup event so the next wait() actually blocks.
            # Bug fix: previously the Event stayed set after the first
            # resume(), so every later suspend() left the thread spinning
            # instead of sleeping.
            self.eventWakeup.clear()

    # ------------------------------------------------------------------ #
    # resume                                                              #
    # ------------------------------------------------------------------ #
    def resume(self):
        """Resume a suspended thread."""
        with self.mutex:
            if not self.running:
                self.running = True
                self.eventWakeup.set()

    # ------------------------------------------------------------------ #
    # shutDown                                                            #
    # ------------------------------------------------------------------ #
    def shutDown(self):
        """Request thread termination, waking the thread first if suspended."""
        with self.mutex:
            self.shutDownBool = True
        if not self.running:
            self.resume()

    # ------------------------------------------------------------------ #
    # run                                                                 #
    # ------------------------------------------------------------------ #
    def run(self):
        """Default worker loop; subclasses normally override this."""
        logging.debug("Start the default thread")
        while not self.shutDownBool:
            if self.running:
                logging.debug("Default thread executed")
                time.sleep(0.05)
            else:
                logging.debug('Default thread wait')
                self.eventWakeup.wait()
                logging.debug('Default thread resumed')
        logging.debug("Shut down the default thread")
<import_from_future_stmt> division<import_from_stmt>past.utils old_div<line_sep>#===============================================================================
# SCG Scaled conjugate gradient optimization.
#
# Copyright (c) <NAME> (1996-2001)
# updates by <NAME> 2013
#
# Permission is granted for anyone to copy, use, or modify these
# programs and accompanying documents for purposes of research or
# education, provided this copyright notice is retained, and note is
# made of any changes that have been made.
#
# These programs and documents are distributed without any warranty,
# express or implied. As the programs were written for research
# purposes only, they have not been tested to the degree that would be
# advisable in any important application. All use of these programs is
# entirely at the user's own risk."
#===============================================================================
from math import sqrt
import numpy as np
import logging


def run(f, x, args=(), niters=100, gradcheck=False, display=0, flog=False,
        pointlog=False, scalelog=False, tolX=1.0e-8, tolO=1.0e-8, eval=None):
    '''Scaled conjugate gradient optimization.

    Minimizes the objective evaluated by ``f``; a Python port of Nabney's
    MATLAB ``scg`` (the ``options(8)`` / ``flog`` stubs below are leftovers
    from that port).

    Args:
        f: callable ``f(x, *args)`` returning ``(value, gradient)``.
        x: initial parameter vector (numpy array).
        args: extra positional arguments forwarded to ``f`` (and ``eval``).
        niters: maximum number of iterations.
        gradcheck: accepted for API compatibility; currently a no-op.
        display: if truthy, log progress through the module logger.
        flog / pointlog / scalelog: accepted for API compatibility; the
            corresponding logging is stubbed out (``pass``).
        tolX: termination tolerance on the parameter step.
        tolO: termination tolerance on the objective decrease.
        eval: optional callable ``eval(x, *args) -> (value, time)`` run after
            every successful step.  NOTE(review): shadows the builtin
            ``eval``; kept for interface compatibility.

    Returns:
        ``(x, listF)`` or, when ``eval`` is given,
        ``(x, listF, evalList, time)``, where ``listF`` is the history of
        objective values at accepted steps.
    '''
    if display:
        logging.getLogger(__name__).info('***** starting optimization (SCG) *****')
    nparams = len(x)
    # Check gradients
    if gradcheck:
        pass  # gradient checking not implemented in this port
    eps = 1.0e-4
    sigma0 = 1.0e-4
    result = f(x, *args)
    fold = result[0]     # Initial function value.
    fnow = fold
    funcCount = 1        # Increment function evaluation counter.
    gradnew = result[1]  # Initial gradient.
    gradold = gradnew
    gradCount = 1        # Increment gradient evaluation counter.
    d = -gradnew         # Initial search direction.
    success = 1          # Force calculation of directional derivs.
    nsuccess = 0         # nsuccess counts number of successes.
    beta = 1.0           # Initial scale parameter.
    betamin = 1.0e-15    # Lower bound on scale.
    betamax = 1.0e50     # Upper bound on scale.
    j = 1                # j counts number of iterations.
    if flog:
        pass
        # flog(j, :) = fold;
    if pointlog:
        pass
        # pointlog(j, :) = x;
    # Main optimization loop.
    listF = [fold]
    if eval is not None:
        evalue, timevalue = eval(x, *args)
        evalList = [evalue]
        time = [timevalue]
    while (j <= niters):
        # Calculate first and second directional derivatives.
        if (success == 1):
            mu = np.dot(d, gradnew)
            if (mu >= 0):
                # d is not a descent direction; restart along -gradient.
                d = -gradnew
                mu = np.dot(d, gradnew)
            kappa = np.dot(d, d)
            if (kappa < eps):
                # Search direction is (numerically) zero -- converged.
                logging.getLogger(__name__).info("FNEW: " + str(fnow))
                # options(8) = fnow
                if eval is not None:
                    return x, listF, evalList, time
                else:
                    return x, listF
            # Finite-difference estimate of the curvature along d.
            sigma = old_div(sigma0, sqrt(kappa))
            xplus = x + sigma * d
            gplus = f(xplus, *args)[1]
            gradCount += 1
            theta = old_div((np.dot(d, (gplus - gradnew))), sigma)
        # Increase effective curvature and evaluate step size alpha.
        # NOTE(review): theta/kappa/mu persist from the last successful
        # iteration when success == 0 (matches the MATLAB original).
        delta = theta + beta * kappa
        if (delta <= 0):
            delta = beta * kappa
            beta = beta - old_div(theta, kappa)
        alpha = old_div(-mu, delta)
        # Calculate the comparison ratio.
        xnew = x + alpha * d
        fnew = f(xnew, *args)[0]
        funcCount += 1
        Delta = 2 * (fnew - fold) / (alpha * mu)
        if (Delta >= 0):
            # Step accepted.
            success = 1
            nsuccess += 1
            x = xnew
            fnow = fnew
            listF.append(fnow)
            if eval is not None:
                evalue, timevalue = eval(x, *args)
                evalList.append(evalue)
                time.append(timevalue)
        else:
            # Step rejected; keep the old point.
            success = 0
            fnow = fold
        if flog:
            # Store relevant variables
            # flog(j) = fnow;  # Current function value
            pass
        if pointlog:
            # pointlog(j,:) = x;  # Current position
            pass
        if scalelog:
            # scalelog(j) = beta;  # Current scale parameter
            pass
        if display > 0:
            logging.getLogger(__name__).info(
                '***** Cycle %4d Error %11.6f Scale %e', j, fnow, beta)
        if (success == 1):
            # Test for termination
            # (bitwise & works here because both operands are booleans)
            if ((max(abs(alpha * d)) < tolX) & (abs(fnew - fold) < tolO)):
                # options(8) = fnew;
                if eval is not None:
                    return x, listF, evalList, time
                else:
                    return x, listF
            else:
                # Update variables for new position
                fold = fnew
                gradold = gradnew
                gradnew = f(x, *args)[1]
                gradCount += 1
                # If the gradient is zero then we are done.
                if (np.dot(gradnew, gradnew) == 0):
                    # options(8) = fnew;
                    if eval is not None:
                        return x, listF, evalList, time
                    else:
                        return x, listF
        # Adjust beta according to comparison ratio.
        if (Delta < 0.25):
            beta = min(4.0 * beta, betamax)
        if (Delta > 0.75):
            beta = max(0.5 * beta, betamin)
        # Update search direction using Polak-Ribiere formula, or re-start
        # in direction of negative gradient after nparams steps.
        if (nsuccess == nparams):
            d = -gradnew
            nsuccess = 0
        else:
            if (success == 1):
                gamma = old_div(np.dot((gradold - gradnew), gradnew), (mu))
                d = gamma * d - gradnew
        j += 1
    # If we get here, then we haven't terminated in the given number of
    # iterations.
    # options(8) = fold;
    if (display):
        logging.getLogger(__name__).info("maximum number of iterations reached")
    if eval is not None:
        return x, listF, evalList, time
    else:
        return x, listF
import logging
import os
import time

import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV  # NOTE(review): imported but unused here

logging.getLogger().setLevel(logging.INFO)
logging.getLogger().addHandler(logging.StreamHandler())

SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))


@pytest.fixture(scope="module")
def started_cluster():
    # Spins up a ClickHouse instance with Hive support and seeds the Hive
    # tables via prepare_hive_data.sh; torn down when the module finishes.
    # NOTE(review): if ClickHouseCluster(__file__) itself raises, `cluster`
    # is unbound and the finally clause raises NameError -- confirm upstream.
    try:
        cluster = ClickHouseCluster(__file__)
        cluster.add_instance(
            "h0_0_0",
            main_configs=["configs/config.xml"],
            extra_configs=["configs/hdfs-site.xml", "data/prepare_hive_data.sh"],
            with_hive=True,
        )
        logging.info("Starting cluster ...")
        cluster.start()
        cluster.copy_file_to_container(
            "roottesthivequery_hdfs1_1",
            "/ClickHouse/tests/integration/test_hive_query/data/prepare_hive_data.sh",
            "/prepare_hive_data.sh",
        )
        cluster.exec_in_container(
            "roottesthivequery_hdfs1_1", ["bash", "-c", "bash /prepare_hive_data.sh"]
        )
        yield cluster
    finally:
        cluster.shutdown()


def test_create_parquet_table(started_cluster):
    """Creating a Hive(parquet) table succeeds (retried: Hive may still be warming up)."""
    logging.info("Start testing creating hive table ...")
    node = started_cluster.instances["h0_0_0"]
    test_passed = False
    for i in range(10):
        node.query("set input_format_parquet_allow_missing_columns = true")
        result = node.query(
            """
            DROP TABLE IF EXISTS default.demo_parquet;
            CREATE TABLE default.demo_parquet (`id` Nullable(String), `score` Nullable(Int32), `day` Nullable(String)) ENGINE = Hive('thrift://hivetest:9083', 'test', 'demo') PARTITION BY(day)
            """
        )
        logging.info("create result {}".format(result))
        if result.strip() == "":
            test_passed = True
            break
        time.sleep(60)
    assert test_passed


def test_create_parquet_table_1(started_cluster):
    """Creating a multi-key partitioned Hive(parquet) table succeeds."""
    logging.info("Start testing creating hive table ...")
    node = started_cluster.instances["h0_0_0"]
    # NOTE(review): unlike test_create_parquet_table, `test_passed` is never
    # initialized before the loop; if all 10 attempts fail the final assert
    # raises NameError instead of failing cleanly -- should be `test_passed = False`.
    for i in range(10):
        node.query("set input_format_parquet_allow_missing_columns = true")
        result = node.query(
            """
            DROP TABLE IF EXISTS default.demo_parquet_parts;
            CREATE TABLE default.demo_parquet_parts (`id` Nullable(String), `score` Nullable(Int32), `day` Nullable(String), `hour` String) ENGINE = Hive('thrift://hivetest:9083', 'test', 'parquet_demo') PARTITION BY(day, hour);
            """
        )
        logging.info("create result {}".format(result))
        if result.strip() == "":
            test_passed = True
            break
        time.sleep(60)
    assert test_passed


def test_create_orc_table(started_cluster):
    """Creating a Hive(ORC) table succeeds."""
    logging.info("Start testing creating hive table ...")
    node = started_cluster.instances["h0_0_0"]
    test_passed = False
    for i in range(10):
        result = node.query(
            """
            DROP TABLE IF EXISTS default.demo_orc;
            CREATE TABLE default.demo_orc (`id` Nullable(String), `score` Nullable(Int32), `day` Nullable(String)) ENGINE = Hive('thrift://hivetest:9083', 'test', 'demo_orc') PARTITION BY(day)
            """
        )
        logging.info("create result {}".format(result))
        if result.strip() == "":
            test_passed = True
            break
        time.sleep(60)
    assert test_passed


def test_create_text_table(started_cluster):
    """Creating a Hive(text) table with a trivial partition key succeeds."""
    logging.info("Start testing creating hive table ...")
    node = started_cluster.instances["h0_0_0"]
    result = node.query(
        """
        DROP TABLE IF EXISTS default.demo_text;
        CREATE TABLE default.demo_text (`id` Nullable(String), `score` Nullable(Int32), `day` Nullable(String)) ENGINE = Hive('thrift://hivetest:9083', 'test', 'demo_text') PARTITION BY (tuple())
        """
    )
    logging.info("create result {}".format(result))
    assert result.strip() == ""


def test_parquet_groupby(started_cluster):
    """GROUP BY over the parquet-backed Hive table returns the expected counts."""
    logging.info("Start testing groupby ...")
    node = started_cluster.instances["h0_0_0"]
    result = node.query(
        """
    SELECT day, count(*) FROM default.demo_parquet group by day order by day
    """
    )
    expected_result = """2021-11-01	1
2021-11-05	2
2021-11-11	1
2021-11-16	2
"""
    assert result == expected_result


def test_parquet_in_filter(started_cluster):
    """Partition pruning with an IN filter on the hour partition key."""
    logging.info("Start testing groupby ...")
    node = started_cluster.instances["h0_0_0"]
    result = node.query(
        """
    SELECT count(*) FROM default.demo_parquet_parts where day = '2021-11-05' and hour in ('00')
    """
    )
    expected_result = """2
"""
    logging.info("query result:{}".format(result))
    assert result == expected_result


def test_orc_groupby(started_cluster):
    """GROUP BY over the ORC-backed Hive table returns the expected counts."""
    logging.info("Start testing groupby ...")
    node = started_cluster.instances["h0_0_0"]
    result = node.query(
        """
    SELECT day, count(*) FROM default.demo_orc group by day order by day
    """
    )
    expected_result = """2021-11-01	1
2021-11-05	2
2021-11-11	1
2021-11-16	2
"""
    assert result == expected_result


@pytest.mark.parametrize(
    "table,use_local_cache_for_remote_storage,enable_orc_file_minmax_index,enable_orc_stripe_minmax_index",
    [
        pytest.param(
            "demo_orc_no_cache_no_index", "false", "false", "false",
            id="demo_orc_no_cache_no_index",
        ),
        pytest.param(
            "demo_orc_with_cache_no_index", "true", "false", "false",
            id="demo_orc_with_cache_no_index",
        ),
        pytest.param(
            "demo_orc_no_cache_file_index", "false", "true", "false",
            id="demo_orc_no_cache_file_index",
        ),
        pytest.param(
            "demo_orc_with_cache_file_index", "true", "true", "false",
            id="demo_orc_with_cache_file_index",
        ),
        pytest.param(
            "demo_orc_no_cache_stripe_index", "false", "true", "true",
            id="demo_orc_no_cache_stripe_index",
        ),
        pytest.param(
            "demo_orc_with_cache_stripe_index", "true", "true", "true",
            id="demo_orc_with_cache_stripe_index",
        ),
    ],
)
def test_orc_minmax_index(
    started_cluster,
    table,
    use_local_cache_for_remote_storage,
    enable_orc_file_minmax_index,
    enable_orc_stripe_minmax_index,
):
    """Range filter results are identical across all ORC min/max-index and cache settings.

    The query runs twice so the second pass exercises the local cache when enabled.
    """
    node = started_cluster.instances["h0_0_0"]
    result = node.query(
        """
        DROP TABLE IF EXISTS default.{table};
        CREATE TABLE default.{table} (`id` Nullable(String), `score` Nullable(Int32), `day` Nullable(String)) ENGINE = Hive('thrift://hivetest:9083', 'test', 'demo_orc') PARTITION BY(day)
        SETTINGS enable_orc_file_minmax_index = {enable_orc_file_minmax_index}, enable_orc_stripe_minmax_index = {enable_orc_stripe_minmax_index};
        """.format(
            table=table,
            enable_orc_file_minmax_index=enable_orc_file_minmax_index,
            enable_orc_stripe_minmax_index=enable_orc_stripe_minmax_index,
        )
    )
    assert result.strip() == ""

    for i in range(2):
        result = node.query(
            """
            SELECT day, id, score FROM default.{table} where day >= '2021-11-05' and day <= '2021-11-16' and score >= 15 and score <= 30 order by day, id
            SETTINGS use_local_cache_for_remote_storage = {use_local_cache_for_remote_storage}
            """.format(
                table=table,
                use_local_cache_for_remote_storage=use_local_cache_for_remote_storage,
            )
        )

        assert (
            result
            == """2021-11-05	abd	15
2021-11-16	aaa	22
"""
        )


@pytest.mark.parametrize(
    "table,use_local_cache_for_remote_storage,enable_parquet_rowgroup_minmax_index",
    [
        pytest.param(
            "demo_parquet_no_cache_no_index", "false", "false",
            id="demo_parquet_no_cache_no_index",
        ),
        pytest.param(
            "demo_parquet_with_cache_no_index", "true", "false",
            id="demo_parquet_with_cache_no_index",
        ),
        pytest.param(
            "demo_parquet_no_cache_rowgroup_index", "false", "true",
            id="demo_parquet_no_cache_rowgroup_index",
        ),
        pytest.param(
            "demo_parquet_with_cache_rowgroup_index", "true", "true",
            id="demo_parquet_with_cache_rowgroup_index",
        ),
    ],
)
def test_parquet_minmax_index(
    started_cluster,
    table,
    use_local_cache_for_remote_storage,
    enable_parquet_rowgroup_minmax_index,
):
    """Range filter results are identical across parquet row-group-index and cache settings."""
    node = started_cluster.instances["h0_0_0"]
    result = node.query(
        """
        DROP TABLE IF EXISTS default.{table};
        CREATE TABLE default.{table} (`id` Nullable(String), `score` Nullable(Int32), `day` Nullable(String)) ENGINE = Hive('thrift://hivetest:9083', 'test', 'demo') PARTITION BY(day)
        SETTINGS enable_parquet_rowgroup_minmax_index = {enable_parquet_rowgroup_minmax_index}
        """.format(
            table=table,
            enable_parquet_rowgroup_minmax_index=enable_parquet_rowgroup_minmax_index,
        )
    )
    assert result.strip() == ""

    for i in range(2):
        result = node.query(
            """
            SELECT day, id, score FROM default.{table} where day >= '2021-11-05' and day <= '2021-11-16' and score >= 15 and score <= 30 order by day, id
            SETTINGS use_local_cache_for_remote_storage = {use_local_cache_for_remote_storage}
            """.format(
                table=table,
                use_local_cache_for_remote_storage=use_local_cache_for_remote_storage,
            )
        )

        assert (
            result
            == """2021-11-05	abd	15
2021-11-16	aaa	22
"""
        )


def test_hive_columns_prunning(started_cluster):
    """Partition filter alone (no hour) still returns the right row count."""
    logging.info("Start testing groupby ...")
    node = started_cluster.instances["h0_0_0"]
    result = node.query(
        """
    SELECT count(*) FROM default.demo_parquet_parts where day = '2021-11-05'
    """
    )
    expected_result = """4
"""
    logging.info("query result:{}".format(result))
    assert result == expected_result


def test_text_count(started_cluster):
    # NOTE(review): despite the name, this queries default.demo_orc rather
    # than default.demo_text -- looks like a copy/paste slip; confirm intent.
    node = started_cluster.instances["h0_0_0"]
    result = node.query(
        """
    SELECT day, count(*) FROM default.demo_orc group by day order by day SETTINGS format_csv_delimiter = '\x01'
    """
    )
    expected_result = """2021-11-01	1
2021-11-05	2
2021-11-11	1
2021-11-16	2
"""
    assert result == expected_result


def test_parquet_groupby_with_cache(started_cluster):
    """Same GROUP BY as test_parquet_groupby, run after the cache is warm."""
    logging.info("Start testing groupby ...")
    node = started_cluster.instances["h0_0_0"]
    result = node.query(
        """
    SELECT day, count(*) FROM default.demo_parquet group by day order by day
    """
    )
    expected_result = """2021-11-01	1
2021-11-05	2
2021-11-11	1
2021-11-16	2
"""
    assert result == expected_result


def test_parquet_groupby_by_hive_function(started_cluster):
    """GROUP BY through the hive() table function instead of a created table."""
    logging.info("Start testing groupby ...")
    node = started_cluster.instances["h0_0_0"]
    result = node.query(
        """
    SELECT day, count(*) FROM hive('thrift://hivetest:9083', 'test', 'demo', '`id` Nullable(String), `score` Nullable(Int32), `day` Nullable(String)', 'day') group by day order by day
    """
    )
    expected_result = """2021-11-01	1
2021-11-05	2
2021-11-11	1
2021-11-16	2
"""
    assert result == expected_result


def test_cache_read_bytes(started_cluster):
    """Local-cache read counter becomes non-zero after reading through the cache."""
    node = started_cluster.instances["h0_0_0"]
    result = node.query(
        """
        CREATE TABLE IF NOT EXISTS default.demo_parquet_1 (`id` Nullable(String), `score` Nullable(Int32), `day` Nullable(String)) ENGINE = Hive('thrift://hivetest:9083', 'test', 'demo') PARTITION BY(day)
        """
    )
    test_passed = False
    for i in range(10):
        result = node.query(
            """
            SELECT * FROM default.demo_parquet_1 settings input_format_parquet_allow_missing_columns = true
            """
        )
        node.query("system flush logs")
        result = node.query(
            "select sum(ProfileEvent_ExternalDataSourceLocalCacheReadBytes) from system.metric_log where ProfileEvent_ExternalDataSourceLocalCacheReadBytes > 0"
        )
        if result.strip() == "0":
            logging.info("ProfileEvent_ExternalDataSourceLocalCacheReadBytes == 0")
            time.sleep(10)
            continue
        test_passed = True
        break
    assert test_passed


def test_cache_dir_use(started_cluster):
    """Both configured local cache directories actually receive files."""
    node = started_cluster.instances["h0_0_0"]
    result0 = node.exec_in_container(
        ["bash", "-c", "ls /tmp/clickhouse_local_cache | wc -l"]
    )
    result1 = node.exec_in_container(
        ["bash", "-c", "ls /tmp/clickhouse_local_cache1 | wc -l"]
    )
    assert result0 != "0" and result1 != "0"
import FWCore.ParameterSet.Config as cms

# ---- Digitisers ------------------------------------------------------------
# SiStrip raw-to-digi conversion; read raw data from the 'source' product.
from EventFilter.SiStripRawToDigi.SiStripDigis_cfi import *
siStripDigis.ProductLabel = 'source'

# SiPixel raw-to-digi conversion; likewise driven from 'source'.
from EventFilter.SiPixelRawToDigi.SiPixelRawToDigi_cfi import *
siPixelDigis.InputLabel = 'source'

# ---- Local reconstruction (cosmics) ----------------------------------------
from RecoLocalTracker.Configuration.RecoLocalTracker_Cosmics_cff import *
# DefaultClusterizer.ConditionsLabel = ''  # not needed: '' is the default

# ---- Track reconstruction (cosmics, P5) ------------------------------------
from RecoTracker.Configuration.RecoTrackerP5_cff import *

# ---- Beam spot --------------------------------------------------------------
from RecoVertex.BeamSpotProducer.BeamSpot_cff import *

# Reconstruction sequence consumed by cosmic-run DQM: digitise both tracker
# subdetectors, compute the beam spot, run local reco, then P5 CTF tracking.
# (offlineBeamSpot, trackerlocalreco and ctftracksP5 come from the star
# imports above.)
RecoForDQMCosmic = cms.Sequence(
    siPixelDigis * siStripDigis * offlineBeamSpot * trackerlocalreco * ctftracksP5
)
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
      /       /
"""
# NOTE: auto-generated Twilio helper-library code (call events subresource);
# regenerate rather than editing by hand.

from twilio.base import values
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page


class EventList(ListResource):
    # List resource over /Accounts/{sid}/Calls/{sid}/Events.json

    def __init__(self, version, account_sid, call_sid):
        """
        Initialize the EventList

        :param Version version: Version that contains the resource
        :param account_sid: The SID of the Account that created this resource
        :param call_sid: The unique string that identifies this resource

        :returns: twilio.rest.api.v2010.account.call.event.EventList
        :rtype: twilio.rest.api.v2010.account.call.event.EventList
        """
        super(EventList, self).__init__(version)

        # Path Solution
        self._solution = {'account_sid': account_sid, 'call_sid': call_sid, }
        self._uri = '/Accounts/{account_sid}/Calls/{call_sid}/Events.json'.format(**self._solution)

    def stream(self, limit=None, page_size=None):
        """
        Streams EventInstance records from the API as a generator stream.
        This operation lazily loads records as efficiently as possible until the limit
        is reached.
        The results are returned as a generator, so this operation is memory efficient.

        :param int limit: Upper limit for the number of records to return. stream()
                          guarantees to never return more than limit.  Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records.  If no page_size is defined
                              but a limit is defined, stream() will attempt to read the
                              limit with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.api.v2010.account.call.event.EventInstance]
        """
        limits = self._version.read_limits(limit, page_size)

        page = self.page(page_size=limits['page_size'], )

        return self._version.stream(page, limits['limit'])

    def list(self, limit=None, page_size=None):
        """
        Lists EventInstance records from the API as a list.
        Unlike stream(), this operation is eager and will load `limit` records into
        memory before returning.

        :param int limit: Upper limit for the number of records to return. list() guarantees
                          never to return more than limit.  Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records.  If no page_size is defined
                              but a limit is defined, list() will attempt to read the limit
                              with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.api.v2010.account.call.event.EventInstance]
        """
        return list(self.stream(limit=limit, page_size=page_size, ))

    def page(self, page_token=values.unset, page_number=values.unset,
             page_size=values.unset):
        """
        Retrieve a single page of EventInstance records from the API.
        Request is executed immediately

        :param str page_token: PageToken provided by the API
        :param int page_number: Page Number, this value is simply for client state
        :param int page_size: Number of records to return, defaults to 50

        :returns: Page of EventInstance
        :rtype: twilio.rest.api.v2010.account.call.event.EventPage
        """
        data = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })

        response = self._version.page(method='GET', uri=self._uri, params=data, )

        return EventPage(self._version, response, self._solution)

    def get_page(self, target_url):
        """
        Retrieve a specific page of EventInstance records from the API.
        Request is executed immediately

        :param str target_url: API-generated URL for the requested results page

        :returns: Page of EventInstance
        :rtype: twilio.rest.api.v2010.account.call.event.EventPage
        """
        response = self._version.domain.twilio.request('GET', target_url, )

        return EventPage(self._version, response, self._solution)

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Api.V2010.EventList>'


class EventPage(Page):
    # One page of EventInstance records plus the path context to build them.

    def __init__(self, version, response, solution):
        """
        Initialize the EventPage

        :param Version version: Version that contains the resource
        :param Response response: Response from the API
        :param account_sid: The SID of the Account that created this resource
        :param call_sid: The unique string that identifies this resource

        :returns: twilio.rest.api.v2010.account.call.event.EventPage
        :rtype: twilio.rest.api.v2010.account.call.event.EventPage
        """
        super(EventPage, self).__init__(version, response)

        # Path Solution
        self._solution = solution

    def get_instance(self, payload):
        """
        Build an instance of EventInstance

        :param dict payload: Payload response from the API

        :returns: twilio.rest.api.v2010.account.call.event.EventInstance
        :rtype: twilio.rest.api.v2010.account.call.event.EventInstance
        """
        return EventInstance(
            self._version,
            payload,
            account_sid=self._solution['account_sid'],
            call_sid=self._solution['call_sid'],
        )

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Api.V2010.EventPage>'


class EventInstance(InstanceResource):
    # Single call event: the request Twilio received and the response it sent.

    def __init__(self, version, payload, account_sid, call_sid):
        """
        Initialize the EventInstance

        :returns: twilio.rest.api.v2010.account.call.event.EventInstance
        :rtype: twilio.rest.api.v2010.account.call.event.EventInstance
        """
        super(EventInstance, self).__init__(version)

        # Marshaled Properties
        self._properties = {'request': payload.get('request'), 'response': payload.get('response'), }

        # Context
        self._context = None
        self._solution = {'account_sid': account_sid, 'call_sid': call_sid, }

    @property
    def request(self):
        """
        :returns: Call Request.
        :rtype: dict
        """
        return self._properties['request']

    @property
    def response(self):
        """
        :returns: Call Response with Events.
        :rtype: dict
        """
        return self._properties['response']

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Api.V2010.EventInstance>'
<import_stmt>os<import_stmt>sys<import_stmt>time<import_from_stmt>monk.pip_unit_tests.pytorch.test_optimizer_sgd test_optimizer_sgd<import_from_stmt>monk.pip_unit_tests.pytorch.test_optimizer_nesterov_sgd test_optimizer_nesterov_sgd<import_from_stmt>monk.pip_unit_tests.pytorch.test_optimizer_rmsprop test_optimizer_rmsprop<import_from_stmt>monk.pip_unit_tests.pytorch.test_optimizer_momentum_rmsprop test_optimizer_momentum_rmsprop<import_from_stmt>monk.pip_unit_tests.pytorch.test_optimizer_adam test_optimizer_adam<import_from_stmt>monk.pip_unit_tests.pytorch.test_optimizer_adamax test_optimizer_adamax<import_from_stmt>monk.pip_unit_tests.pytorch.test_optimizer_adamw test_optimizer_adamw<import_from_stmt>monk.pip_unit_tests.pytorch.test_optimizer_adadelta test_optimizer_adadelta<import_from_stmt>monk.pip_unit_tests.pytorch.test_optimizer_adagrad test_optimizer_adagrad<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_l1 test_loss_l1<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_l2 test_loss_l2<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_l2 test_loss_l2<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_softmax_crossentropy test_loss_softmax_crossentropy<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_crossentropy test_loss_crossentropy<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_sigmoid_binary_crossentropy test_loss_sigmoid_binary_crossentropy<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_binary_crossentropy test_loss_binary_crossentropy<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_kldiv test_loss_kldiv<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_poisson_nll test_loss_poisson_nll<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_huber test_loss_huber<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_hinge test_loss_hinge<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_squared_hinge test_loss_squared_hinge<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_multimargin 
test_loss_multimargin<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_squared_multimargin test_loss_squared_multimargin<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_multilabelmargin test_loss_multilabelmargin<import_from_stmt>monk.pip_unit_tests.pytorch.test_loss_multilabelsoftmargin test_loss_multilabelsoftmargin<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_convolution1d test_layer_convolution1d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_convolution2d test_layer_convolution2d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_convolution3d test_layer_convolution3d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_transposed_convolution1d test_layer_transposed_convolution1d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_transposed_convolution2d test_layer_transposed_convolution2d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_transposed_convolution3d test_layer_transposed_convolution3d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_max_pooling1d test_layer_max_pooling1d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_max_pooling2d test_layer_max_pooling2d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_max_pooling3d test_layer_max_pooling3d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_average_pooling1d test_layer_average_pooling1d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_average_pooling2d test_layer_average_pooling2d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_average_pooling3d test_layer_average_pooling3d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_global_max_pooling1d test_layer_global_max_pooling1d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_global_max_pooling2d test_layer_global_max_pooling2d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_global_max_pooling3d test_layer_global_max_pooling3d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_global_average_pooling1d 
test_layer_global_average_pooling1d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_global_average_pooling2d test_layer_global_average_pooling2d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_global_average_pooling3d test_layer_global_average_pooling3d<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_batch_normalization test_layer_batch_normalization<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_instance_normalization test_layer_instance_normalization<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_layer_normalization test_layer_layer_normalization<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_identity test_layer_identity<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_fully_connected test_layer_fully_connected<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_dropout test_layer_dropout<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_flatten test_layer_flatten<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_relu test_activation_relu<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_sigmoid test_activation_sigmoid<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_tanh test_activation_tanh<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_softplus test_activation_softplus<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_softsign test_activation_softsign<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_elu test_activation_elu<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_leaky_relu test_activation_leaky_relu<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_prelu test_activation_prelu<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_selu test_activation_selu<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_hardshrink test_activation_hardshrink<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_hardtanh 
test_activation_hardtanh<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_logsigmoid test_activation_logsigmoid<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_relu6 test_activation_relu6<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_rrelu test_activation_rrelu<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_celu test_activation_celu<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_softshrink test_activation_softshrink<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_tanhshrink test_activation_tanhshrink<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_threshold test_activation_threshold<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_softmin test_activation_softmin<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_softmax test_activation_softmax<import_from_stmt>monk.pip_unit_tests.pytorch.test_activation_logsoftmax test_activation_logsoftmax<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_concatenate test_layer_concatenate<import_from_stmt>monk.pip_unit_tests.pytorch.test_layer_add test_layer_add<import_from_stmt>monk.pip_unit_tests.pytorch.test_block_resnet_v1 test_block_resnet_v1<import_from_stmt>monk.pip_unit_tests.pytorch.test_block_resnet_v2 test_block_resnet_v2<import_from_stmt>monk.pip_unit_tests.pytorch.test_block_resnet_v1_bottleneck test_block_resnet_v1_bottleneck<import_from_stmt>monk.pip_unit_tests.pytorch.test_block_resnet_v2_bottleneck test_block_resnet_v2_bottleneck<import_from_stmt>monk.pip_unit_tests.pytorch.test_block_resnext test_block_resnext<import_from_stmt>monk.pip_unit_tests.pytorch.test_block_mobilenet_v2_linear_bottleneck test_block_mobilenet_v2_linear_bottleneck<import_from_stmt>monk.pip_unit_tests.pytorch.test_block_mobilenet_v2_inverted_linear_bottleneck test_block_mobilenet_v2_inverted_linear_bottleneck<import_from_stmt>monk.pip_unit_tests.pytorch.test_block_squeezenet_fire 
def run_functionality_tests():
    """Run every pip functionality test for the pytorch backend.

    Progress lines ("Running k/11") and per-test completion counts are
    printed to the console, while the tests' own verbose output is
    redirected to ``test_logs.txt`` (truncated for the first test,
    appended for the rest).  A full summary — totals, timing, and
    per-failure details — is written to the log, a condensed summary to
    the console, and the scratch ``workspace`` directory is removed.

    Fixes over the original hand-unrolled version:
    - the eleven duplicated run/redirect/report stanzas are replaced by
      one loop over the ordered test list;
    - "Running 1/11" now reaches the console like every other progress
      line (it was previously printed after stdout had already been
      redirected into the log file);
    - the blank separator line missing after test 6's status is restored;
    - log file handles are closed via ``with`` instead of being leaked,
      and stdout is restored even if a test function raises.
    """
    orig_stdout = sys.stdout
    print("Running Tests...")

    # Shared accumulator threaded through every test function.
    system_dict = {
        "total_tests": 0,
        "successful_tests": 0,
        "failed_tests_lists": [],
        "failed_tests_exceptions": [],
        "skipped_tests_lists": [],
    }

    # Same order as the original unrolled sequence (1/11 .. 11/11).
    tests = [
        test_default_train,
        test_default_eval_infer,
        test_update_copy_from,
        test_update_normal,
        test_update_eval_infer,
        test_expert_train,
        test_expert_eval_infer,
        test_switch_default,
        test_switch_expert,
        test_compare,
        test_analyse,
    ]

    start = time.time()
    for num, test in enumerate(tests, 1):
        print("Running {}/{}".format(num, len(tests)))
        # First test truncates the log; subsequent tests append to it.
        with open("test_logs.txt", 'w' if num == 1 else 'a') as log:
            sys.stdout = log
            try:
                system_dict = test(system_dict)
            finally:
                sys.stdout = orig_stdout  # always restore, even on error
        print("Tests Completed - {}".format(system_dict["total_tests"]))
        # NOTE: "Succesful" typo kept on purpose — output may be grepped.
        print("Tests Succesful - {}".format(system_dict["successful_tests"]))
        print("")
    end = time.time()

    num_failed = len(system_dict["failed_tests_lists"])
    num_skipped = len(system_dict["skipped_tests_lists"])

    # Full summary (including per-failure details) goes to the log file.
    with open("test_logs.txt", 'a') as log:
        sys.stdout = log
        try:
            print("Total Tests - {}".format(system_dict["total_tests"]))
            print("Time Taken - {} sec".format(end - start))
            print("Num Successful Tests - {}".format(
                system_dict["successful_tests"]))
            print("Num Failed Tests - {}".format(num_failed))
            print("Num Skipped Tests - {}".format(num_skipped))
            print("")
            for i in range(num_failed):
                print("{}. Failed Test:".format(i + 1))
                print("Name - {}".format(system_dict["failed_tests_lists"][i]))
                print("Error - {}".format(
                    system_dict["failed_tests_exceptions"][i]))
                print("")
            print("Skipped Tests List - {}".format(
                system_dict["skipped_tests_lists"]))
            print("")
        finally:
            sys.stdout = orig_stdout

    # Condensed summary for the console.
    print("Total Tests - {}".format(system_dict["total_tests"]))
    print("Time Taken - {} sec".format(end - start))
    print("Num Successful Tests - {}".format(system_dict["successful_tests"]))
    print("Num Failed Tests - {}".format(num_failed))
    print("Num Skipped Tests - {}".format(num_skipped))
    print("See test_logs.txt for errors")
    print("")

    os.system("rm -r workspace")  # clean up the experiments' scratch dir
{}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_optimizer_nesterov_sgd(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_optimizer_rmsprop(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_optimizer_adam(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_optimizer_adamax(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_optimizer_adamw(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running 
{}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_optimizer_adadelta(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_optimizer_adagrad(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_l1(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_l2(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_softmax_crossentropy(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_crossentropy(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - 
{}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_sigmoid_binary_crossentropy(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_binary_crossentropy(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_kldiv(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_poisson_nll(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_huber(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_hinge(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - 
{}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_squared_hinge(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_multimargin(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_squared_multimargin(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_multilabelmargin(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_loss_multilabelsoftmargin(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running 
{}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_convolution1d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_convolution2d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_convolution3d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_transposed_convolution1d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_transposed_convolution2d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_transposed_convolution3d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - 
{}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_max_pooling1d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_max_pooling2d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_max_pooling3d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_average_pooling1d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_average_pooling2d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_average_pooling3d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - 
{}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_global_max_pooling1d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_global_max_pooling2d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_global_max_pooling3d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_global_average_pooling1d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_global_average_pooling2d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running 
{}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_global_average_pooling3d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_batch_normalization(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_instance_normalization(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_layer_normalization(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_identity(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_fully_connected(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - 
{}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_dropout(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_flatten(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_relu(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_sigmoid(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_tanh(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_softplus(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - 
{}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_softsign(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_elu(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_leaky_relu(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_prelu(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_selu(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running 
{}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_hardshrink(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_hardtanh(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_logsigmoid(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_relu6(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_rrelu(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_celu(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - 
{}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_softshrink(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_tanhshrink(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_threshold(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_softmin(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_softmax(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_activation_logsoftmax(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - 
{}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_concatenate(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_layer_add(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_resnet_v1(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_resnet_v2(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_resnet_v1_bottleneck(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running 
{}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_resnet_v2_bottleneck(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_resnext(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_mobilenet_v2_linear_bottleneck(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_mobilenet_v2_inverted_linear_bottleneck(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_squeezenet_fire(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_densenet(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - 
{}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_conv_bn_relu(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_inception_a(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_inception_b(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_inception_c(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_inception_d(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - {}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>print("Running {}/<num>".format(exp_num))<line_sep>exp_num<augadd>1<line_sep>system_dict=test_block_inception_e(system_dict)<line_sep>sys.stdout=origstdout<line_sep>print("Tests Completed - 
{}".format(system_dict["total_tests"]))<line_sep>print("Tests Succesful - {}".format(system_dict["successful_tests"]))<line_sep>print("")<line_sep>sys.stdout=open("test_logs.txt" 'a')<line_sep>end=time.time()<line_sep>print("Total Tests - {}".format(system_dict["total_tests"]))<line_sep>print("Time Taken - {} sec".format(end-start))<line_sep>print("Num Successful Tests - {}".format(system_dict["successful_tests"]))<line_sep>print("Num Failed Tests - {}".format(len(system_dict["failed_tests_lists"])))<line_sep>print("Num Skipped Tests - {}".format(len(system_dict["skipped_tests_lists"])))<line_sep>print("")<for_stmt>i range(len(system_dict["failed_tests_lists"]))<block_start>print("{}. Failed Test:".format(i+1))<line_sep>print("Name - {}".format(system_dict["failed_tests_lists"][i]))<line_sep>print("Error - {}".format(system_dict["failed_tests_exceptions"][i]))<line_sep>print("")<line_sep><block_end>print("Skipped Tests List - {}".format(system_dict["skipped_tests_lists"]))<line_sep>print("")<line_sep>sys.stdout=origstdout<line_sep>print("Total Tests - {}".format(system_dict["total_tests"]))<line_sep>print("Time Taken - {} sec".format(end-start))<line_sep>print("Num Successful Tests - {}".format(system_dict["successful_tests"]))<line_sep>print("Num Failed Tests - {}".format(len(system_dict["failed_tests_lists"])))<line_sep>print("Num Skipped Tests - {}".format(len(system_dict["skipped_tests_lists"])))<line_sep>print("See test_logs.txt for errors")<line_sep>print("")<line_sep>os.system("rm -r workspace")<line_sep><block_end> |
##########################################################################
#
# Copyright (c) 2021, Cinesite VFX Ltd. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of <NAME> nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
<import_stmt>Gaffer<import_stmt>GafferScene<line_sep>Gaffer.Metadata.registerNode(GafferScene.BoundQuery "description" """
Queries a particular location in a scene and outputs the bound.
""" "layout:activator:spaceIsRelative" <lambda>node:node["space"].getValue()<eq>GafferScene.BoundQuery.Space.Relative plugs={"scene":["description" """
The scene to query the bounds for.
"""] "location":["description" """
The location within the scene to query the bound at.
> Note : If the location does not exist then the query will not be
> performed and all outputs will be set to their default values.
""" "plugValueWidget:type" "GafferSceneUI.ScenePathPlugValueWidget" "scenePathPlugValueWidget:scene" "scene" "nodule:type" ""] "space":["description" """
The space to query the bound in.
""" "preset:Local" GafferScene.BoundQuery.Space.Local "preset:World" GafferScene.BoundQuery.Space.World "preset:Relative" GafferScene.BoundQuery.Space.Relative "plugValueWidget:type" "GafferUI.PresetsPlugValueWidget" "nodule:type" ""] "relativeLocation":["description" """
The location within the scene to use for relative space mode.
> Note : If the location does not exist then the query will not be
> performed and all outputs will be set to their default values.
""" "plugValueWidget:type" "GafferSceneUI.ScenePathPlugValueWidget" "scenePathPlugValueWidget:scene" "scene" "layout:activator" "spaceIsRelative" "nodule:type" ""] "bound":["description" """
Bounding box at specified location in specified space.
""" "layout:section" "Settings.Outputs"] "center":["description" """
Center point vector of the requested bound.
""" "layout:section" "Settings.Outputs"] "size":["description" """
Size vector of the requested bound.
""" "layout:section" "Settings.Outputs"] })<line_sep> |
# -----------------------------------------------------------------------------
# calc.py
#
# A simple calculator with variables. This is from O'Reilly's
# "Lex and Yacc", p. 63.
# -----------------------------------------------------------------------------
<import_stmt>sys<line_sep>sys.path.insert(0 "../..")<if_stmt>sys.version_info[0]<ge>3<block_start>raw_input=input<block_end>tokens=('NAME' 'NUMBER' 'PLUS' 'MINUS' 'TIMES' 'DIVIDE' 'EQUALS' 'LPAREN' 'RPAREN' )<line_sep># Tokens
t_PLUS=r'\+'<line_sep>t_MINUS=r'-'<line_sep>t_TIMES=r'\*'<line_sep>t_DIVIDE=r'/'<line_sep>t_EQUALS=r'='<line_sep>t_LPAREN=r'\('<line_sep>t_RPAREN=r'\)'<line_sep>t_NAME=r'[a-zA-Z_][a-zA-Z0-9_]*'<def_stmt>t_NUMBER t<block_start>r'\d+'<try_stmt><block_start>t.value=int(t.value)<block_end><except_stmt>ValueError<block_start>print("Integer value too large %s"%t.value)<line_sep>t.value=0<block_end><return>t<block_end>t_ignore=" \t"<def_stmt>t_newline t<block_start>r'\n+'<line_sep>t.lexer.lineno<augadd>t.value.count("\n")<block_end><def_stmt>t_error t<block_start>print("Illegal character '%s'"%t.value[0])<line_sep>t.lexer.skip(1)<block_end># Build the lexer
<import_stmt>ply.lex<as>lex<line_sep>lex.lex(optimize=1)<line_sep># Parsing rules
precedence=(('left' 'PLUS' 'MINUS') ('left' 'TIMES' 'DIVIDE') ('right' 'UMINUS') )<line_sep># dictionary of names
names={}<def_stmt>p_statement_assign t<block_start>'statement : NAME EQUALS expression'<line_sep>names[t[1]]=t[3]<block_end><def_stmt>p_statement_expr t<block_start>'statement : expression'<line_sep>print(t[1])<block_end><def_stmt>p_expression_binop t<block_start>'''expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression'''<if_stmt>t[2]<eq>'+'<block_start>t[0]=t[1]+t[3]<block_end><elif_stmt>t[2]<eq>'-'<block_start>t[0]=t[1]-t[3]<block_end><elif_stmt>t[2]<eq>'*'<block_start>t[0]=t[1]<times>t[3]<block_end><elif_stmt>t[2]<eq>'/'<block_start>t[0]=t[1]/t[3]<block_end><elif_stmt>t[2]<eq>'<'<block_start>t[0]=t[1]<l>t[3]<block_end><block_end><def_stmt>p_expression_uminus t<block_start>'expression : MINUS expression %prec UMINUS'<line_sep>t[0]=-t[2]<block_end><def_stmt>p_expression_group t<block_start>'expression : LPAREN expression RPAREN'<line_sep>t[0]=t[2]<block_end><def_stmt>p_expression_number t<block_start>'expression : NUMBER'<line_sep>t[0]=t[1]<block_end><def_stmt>p_expression_name t<block_start>'expression : NAME'<try_stmt><block_start>t[0]=names[t[1]]<block_end><except_stmt>LookupError<block_start>print("Undefined name '%s'"%t[1])<line_sep>t[0]=0<block_end><block_end><def_stmt>p_error t<block_start><if_stmt>t<block_start>print("Syntax error at '%s'"%t.value)<block_end><else_stmt><block_start>print("Syntax error at EOF")<block_end><block_end><import_stmt>ply.yacc<as>yacc<line_sep>yacc.yacc(optimize=1)<while_stmt>1<block_start><try_stmt><block_start>s=raw_input('calc > ')<block_end><except_stmt>EOFError<block_start><break><block_end>yacc.parse(s)<block_end> |
<import_stmt>unittest<import_stmt>plotly.graph_objs<as>go<class_stmt>TestPlotly(unittest.TestCase)<block_start><def_stmt>test_figure self<block_start>trace={'x':[1 2] 'y':[1 3]}<line_sep>data=[trace]<line_sep>go.Figure(data=data)<block_end><block_end> |
# (C) Datadog, Inc. 2021-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
<import_stmt>click<import_from_stmt>...console CONTEXT_SETTINGS<import_from_stmt>.pdh pdh<line_sep>ALL_COMMANDS=[pdh]<line_sep>@click.group(context_settings=CONTEXT_SETTINGS short_help='Windows utilities')<def_stmt>windows <block_start><pass><block_end><for_stmt>command ALL_COMMANDS<block_start>windows.add_command(command)<block_end> |
<import_from_stmt>pkg_resources get_distribution<line_sep>__version__=get_distribution("django-robots").version<line_sep> |
<import_from_stmt>convlab2.policy.ppo.multiwoz.ppo_policy PPOPolicy<line_sep> |
<import_stmt>numpy<as>np<import_from_stmt>torch nn<import_from_stmt>torch.nn functional<as>F<import_stmt>torch<import_from_stmt>torchvision models<import_stmt>torchvision<line_sep>__all__=['ResNet_IR']<class_stmt>ResNet_IR(nn.Module)<block_start><def_stmt>__init__ self args<block_start>super().__init__()<if_stmt>args.backbone<eq>'resnet18'<block_start>self.backbone=models.resnet18(pretrained=<true>)<line_sep>last_channels=512<block_end><elif_stmt>args.backbone<eq>'resnet34'<block_start>self.backbone=models.resnet34(pretrained=<true>)<line_sep>last_channels=512<block_end><elif_stmt>args.backbone<eq>'resnet50'<block_start>self.backbone=models.resnet50(pretrained=<true>)<line_sep>last_channels=2048<block_end><elif_stmt>args.backbone<eq>'resnet101'<block_start>self.backbone=models.resnet101(pretrained=<true>)<line_sep>last_channels=2048<block_end><elif_stmt>args.backbone<eq>'resnet152'<block_start>self.backbone=models.resnet152(pretrained=<true>)<line_sep>last_channels=2048<block_end>self.features=nn.Sequential(self.backbone.conv1 self.backbone.bn1 self.backbone.relu self.backbone.layer1 self.backbone.layer2 self.backbone.layer3 self.backbone.layer4)<line_sep>self.bn1=nn.BatchNorm2d(last_channels)<line_sep>self.dropout=nn.Dropout2d(0.5)<line_sep>self.fc=nn.Linear(8<times>8<times>last_channels args.num_features)<line_sep>self.bn2=nn.BatchNorm1d(args.num_features)<block_end><def_stmt>freeze_bn self<block_start><for_stmt>m self.features.modules()<block_start><if_stmt>isinstance(m nn.BatchNorm2d)<block_start>m.weight.requires_grad=<false><line_sep>m.bias.requires_grad=<false><block_end><block_end><block_end><def_stmt>forward self x<block_start>x=self.features(x)<line_sep>x=self.bn1(x)<line_sep>x=self.dropout(x)<line_sep>x=x.view(x.shape[0] -1)<line_sep>x=self.fc(x)<line_sep>output=self.bn2(x)<line_sep><return>output<block_end><block_end> |
<import_stmt>json<import_stmt>os<import_from_stmt>geojson Polygon<import_from_stmt>kuwala.modules.common polyfill_polygon<line_sep># Get the aggregated number of a specific POI category per H3 index at a given resolution
<def_stmt>get_pois_by_category_in_h3 sp category resolution polygon_coords<block_start>polygon_cells=<none><if_stmt>polygon_coords<block_start>polygon_coords=json.loads(polygon_coords)<line_sep>polygon=Polygon(polygon_coords)<line_sep>polygon_cells=list(polyfill_polygon(polygon resolution=resolution))<block_end># noinspection SqlNoDataSourceInspection
query='''
CALL {
MATCH (pc:PoiCategory)<-[:BELONGS_TO]-(po:PoiOSM)-[:BELONGS_TO]->(p:Poi)-[:LOCATED_AT]->(h:H3Index)
'''+f'''
WITH p, pc, io.kuwala.h3.h3ToParent(h.h3Index, {resolution}) AS h3_index
WHERE {f'h3_index IN {polygon_cells} AND'<if>polygon_cells<else>''} pc.name = '{category}'
RETURN p
UNION
MATCH (pc:PoiCategory)<-[:BELONGS_TO]-(pg:PoiGoogle)-[b:BELONGS_TO]->(p:Poi)-[:LOCATED_AT]->(h:H3Index)
WITH p, pc, io.kuwala.h3.h3ToParent(h.h3Index, {resolution}) AS h3_index
WHERE
{f'h3_index IN {polygon_cells} AND'<if>polygon_cells<else>''}
b.confidence >= 0.8 AND
pc.name = '{category}'
RETURN p
'''+'''}
WITH p
MATCH (p)-[:LOCATED_AT]->(h:H3Index)
'''+f'''WITH p, io.kuwala.h3.h3ToParent(h.h3Index, {resolution}) AS h3_index
RETURN h3_index, COUNT(p) AS number_of_{category}
'''<line_sep>url=os.getenv('NEO4J_HOST')<or>'bolt://localhost:7687'<line_sep><return>sp.read.format("org.neo4j.spark.DataSource").option("url" url).option("authentication.type" "basic").option("authentication.basic.username" "neo4j").option("authentication.basic.password" "password").option("query" query).load()<block_end> |
# coding=utf-8
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Megatron Module"""<import_stmt>torch<class_stmt>MegatronModule(torch.nn.Module)<block_start>"""Megatron specific extentions of torch Module."""<def_stmt>__init__ self<block_start>super(MegatronModule self).__init__()<block_end><def_stmt>state_dict_for_save_checkpoint self destination=<none> prefix='' keep_vars=<false><block_start>"""Use this function to override the state dict for
saving checkpoints."""<line_sep><return>self.state_dict(destination prefix keep_vars)<block_end><block_end> |
# License: BSD 3 clause
# -*- coding: utf8 -*-
<import_stmt>unittest<import_from_stmt>tick.base.build.base standard_normal_cdf standard_normal_inv_cdf<import_from_stmt>scipy.stats norm<import_stmt>numpy<as>np<import_from_stmt>numpy.random normal uniform<class_stmt>Test(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>self.size=10<block_end><def_stmt>test_standard_normal_cdf self<block_start>"""...Test normal cumulative distribution function
"""<line_sep>tested_sample=normal(size=self.size)<line_sep>actual=np.array([standard_normal_cdf(s)<for>s tested_sample])<line_sep>expected=norm.cdf(tested_sample)<line_sep>np.testing.assert_almost_equal(actual expected decimal=7)<block_end><def_stmt>test_standard_normal_inv_cdf self<block_start>"""...Test inverse of normal cumulative distribution function
"""<line_sep>tested_sample=uniform(size=self.size)<line_sep>actual=np.array([standard_normal_inv_cdf(s)<for>s tested_sample])<line_sep>expected=norm.ppf(tested_sample)<line_sep>np.testing.assert_almost_equal(actual expected decimal=7)<line_sep>actual_array=np.empty(self.size)<line_sep>standard_normal_inv_cdf(tested_sample actual_array)<line_sep>np.testing.assert_almost_equal(actual_array expected decimal=7)<block_end><block_end> |
"""
Welcome to your first Halite-II bot!
This bot's name is Settler. It's purpose is simple (don't expect it to win complex games :) ):
1. Initialize game
2. If a ship is not docked and there are unowned planets
2.a. Try to Dock in the planet if close enough
2.b If not, go towards the planet
Note: Please do not place print statements here as they are used to communicate with the Halite engine. If you need
to log anything use the logging module.
"""<line_sep># Let's start by importing the Halite Starter Kit so we can interface with the Halite engine
<import_stmt>hlt<import_stmt>numpy<import_stmt>math<import_stmt>gc<import_stmt>hlt.entity<import_stmt>hlt.collision<import_stmt>logging<import_stmt>time<import_stmt>random<line_sep># GAME START
# Here we define the bot's name as Settler and initialize the game, including communication with the Halite engine.
game=hlt.Game("MyBot16")<line_sep>initialized=<false><line_sep>first_dock=<false><line_sep>cos=[math.cos(math.radians(x))<for>x range(360)]<line_sep>sin=[math.sin(math.radians(x))<for>x range(360)]<def_stmt>compute_dist dx dy<block_start><return>numpy.sqrt(dx<times>dx+dy<times>dy)<block_end><def_stmt>compute_square_dist dx dy<block_start><return>dx<times>dx+dy<times>dy<block_end><def_stmt>custom_intersect_segment_circle start end circle * fudge=0.5# threshold = 2 * hlt.constants.MAX_SPEED + fudge + circle.radius
# if numpy.abs(start.x - circle.x) > threshold or numpy.abs(start.y - circle.y) > threshold:
# return False
<block_start>dx=end.x-start.x<line_sep>dy=end.y-start.y<line_sep>a=dx<power>2+dy<power>2<line_sep>b=-2<times>(start.x<power>2-start.x<times>end.x-start.x<times>circle.x+end.x<times>circle.x+start.y<power>2-start.y<times>end.y-start.y<times>circle.y+end.y<times>circle.y)<line_sep>c=(start.x-circle.x)<power>2+(start.y-circle.y)<power>2<if_stmt>a<eq>0.0# Start and end are the same point
<block_start><return>start.calculate_distance_between(circle)<le>circle.radius+fudge<block_end># Time along segment when closest to the circle (vertex of the quadratic)
t=min(-b/(2<times>a) 1.0)<if_stmt>t<l>0<block_start><return><false><block_end>closest_x=start.x+dx<times>t<line_sep>closest_y=start.y+dy<times>t<line_sep>closest_distance=hlt.entity.Position(closest_x closest_y).calculate_distance_between(circle)<line_sep><return>closest_distance<le>circle.radius+fudge<block_end>SKIP_THRESHOLD=(hlt.constants.MAX_SPEED+1.1)<power>2<def_stmt>exists_obstacles_between ship target all_planets all_ships all_my_ships_moves ignore=()<block_start>obstacles=[]<line_sep>entities=([]<if>issubclass(hlt.entity.Planet ignore)<else>all_planets)+([]<if>issubclass(hlt.entity.Ship ignore)<else>all_ships)+([]<if>issubclass(hlt.entity.Ship ignore)<else>all_my_ships_moves)<if_stmt><not>issubclass(hlt.entity.Planet ignore)<block_start><for_stmt>foreign_entity all_planets<block_start><if_stmt>foreign_entity<eq>ship<or>foreign_entity<eq>target<block_start><continue><block_end><if_stmt>custom_intersect_segment_circle(ship target foreign_entity fudge=ship.radius+0.1)<block_start><return><true><block_end><block_end><block_end><if_stmt><not>issubclass(hlt.entity.Ship ignore)<block_start><for_stmt>foreign_entity all_ships+all_my_ships_moves<block_start><if_stmt>foreign_entity<eq>ship<or>foreign_entity<eq>target<block_start><continue><block_end><if_stmt>compute_square_dist(foreign_entity.x-ship.x foreign_entity.y-ship.y)<g>SKIP_THRESHOLD<block_start><continue><block_end><if_stmt>custom_intersect_segment_circle(ship target foreign_entity fudge=ship.radius+0.1)<block_start><return><true><block_end><block_end><block_end><return><false><block_end><def_stmt>custom_navigate ship target game_map max_speed min_speed speed_decay step all_planets all_ships all_my_ships_moves avoid_obstacles=<true> max_corrections=90 angular_step=1 ignore_ships=<false> ignore_planets=<false> suicide=<false># Assumes a position, not planet (as it would go to the center of the planet otherwise)
<block_start><if_stmt>max_corrections<le>0<block_start><return>999999 <none> <none><block_end><if_stmt><not>suicide<block_start>distance=ship.calculate_distance_between(target)-target.radius-ship.radius<block_end><else_stmt><block_start>distance=ship.calculate_distance_between(target)<block_end>angle=int(ship.calculate_angle_between(target))<line_sep>ignore=()<if><not>(ignore_ships<or>ignore_planets)<else>hlt.entity.Ship<if>(ignore_ships<and><not>ignore_planets)<else>hlt.entity.Planet<if>(ignore_planets<and><not>ignore_ships)<else>hlt.entity.Entity<if_stmt>avoid_obstacles<and>exists_obstacles_between(ship target all_planets all_ships all_my_ships_moves ignore)<block_start>new_angle=angle+angular_step<while_stmt>new_angle<ge>360<block_start>new_angle<augsub>360<block_end><while_stmt>new_angle<l>0<block_start>new_angle<augadd>360<block_end>new_target_dx=cos[int(new_angle)]<times>distance<line_sep>new_target_dy=sin[int(new_angle)]<times>distance<line_sep>new_target=hlt.entity.Position(ship.x+new_target_dx ship.y+new_target_dy)<line_sep><return>custom_navigate(ship new_target game_map max_speed min_speed speed_decay step+1 all_planets all_ships all_my_ships_moves <true> max_corrections-1 angular_step ignore_ships ignore_planets suicide)<block_end># TODO formulize this better
speed=max(max_speed-step<times>speed_decay min_speed)<line_sep>speed=speed<if>(distance<ge>speed)<else>distance-0.1<line_sep>final_target_dx=cos[int(angle)]<times>speed<line_sep>final_target_dy=sin[int(angle)]<times>speed<line_sep>final_target=hlt.entity.Position(ship.x+final_target_dx ship.y+final_target_dy)<line_sep>final_target.radius=ship.radius<line_sep><return>step final_target ship.thrust(speed angle)<block_end># parameters
# --- Strategy tuning parameters (hand-tuned) -------------------------------
ANGULAR_STEP = 6                                 # degrees tried per navigation correction
MAX_SPEED = hlt.constants.MAX_SPEED
MIN_SPEED = hlt.constants.MAX_SPEED * 0.5
SPEED_DECAY = 0.0
MAX_CORRECTIONS = 30                             # max course corrections per navigate call
MIN_OPPONENT_DIST_TO_DOCK = 25.0                 # don't dock with an undocked enemy this close
MIN_OPPONENT_DIST_TO_TARGET_PLANET = 25.0
DOCKED_BONUS = 0.0
PLANET_BONUS = 10.0
UNDOCKED_BONUS = -100.0
MAX_OPPONENT_SHIP_TARGET_CNT = 4                 # cap on my ships chasing one enemy ship
MAX_MY_SHIP_TARGET_CNT = 4                       # cap on my ships defending one ally ship
PLANET_DOCKED_ALLIES_BONUS = 40.0
OPPONENT_SHIP_CLOSE_TO_MY_DOCKED_BONUS = 40.0
MAX_DIST_TO_TARGET_OPPONENT_UNDOCKED_SHIP = 15.0
#PLANET_CAPACITY_BONUS =
#UNDOCKED_OPPONENT_CLOSE_TO_MY_DOCKED_BONUS = 10.0
PLANET_NEARBY_PLANET_MAX_BONUS = 36.0
PLANET_NEARBY_PLANET_BIAS = 3.0
PLANET_NEARBY_PLANET_SLOPE = 0.25
SUICIDE_UNDOCKED_OPPONENT_DIST = 15.0
ALL_IN_DIST = 50.0
PLANET_FAR_FROM_CENTER_BONUS = 1.0
MAX_PLANET_FAR_FROM_CENTER_BONUS = 70.0
SUICIDE_HEALTH_MULT = 1.0
CLOSE_OPPONENT_DIST = 12.0
CLOSE_ALLY_DIST = 5.0
DOUBLE_NAVIGATE_SHIP_CNT = 999


def planet_nearby_empty_planet_score(dist_matrix, planet_owner, planet_capacity):
    """Score each planet by the unowned docking capacity clustered around it.

    Args:
        dist_matrix: (P, P) pairwise planet-to-planet distances.
        planet_owner: (P,) owner id per planet, -1 for unowned.
        planet_capacity: (P,) docking spots per planet.

    Returns:
        (P,) array: per-planet bonus, clipped to PLANET_NEARBY_PLANET_MAX_BONUS.
    """
    # Linear proximity weight, zero beyond BIAS/SLOPE distance.
    proximity = numpy.maximum(
        0.0, PLANET_NEARBY_PLANET_BIAS - dist_matrix * PLANET_NEARBY_PLANET_SLOPE)
    # Capacity per planet, zeroed for planets that already have an owner.
    empty_capacity = (planet_owner == -1) * planet_capacity
    # Pairwise product of empty capacities, weighted by proximity.
    pair_score = (empty_capacity[numpy.newaxis, :]
                  * empty_capacity[:, numpy.newaxis]
                  * proximity)
    return numpy.minimum(PLANET_NEARBY_PLANET_MAX_BONUS,
                         numpy.sum(pair_score, axis=0))


#PLANET_DOCK_SYNERGE_BONUS = 5.0
# TODOS
# 2. parameter tuning
# 5. collide to planets?
# 6. if timeout, move ship to center of the enemies or allies?
# 7. Add our own planet in target to be more defensive
# 8. count ships of I and opponent to figure out who's winning. If even, be more defensive
# 9. if I have more ships, collide to opponent planet
# 10. go to my ally when there's more enemy
# 11. if you are a lone warrior, far away from my docked ship, and many enemies in your target but no allies, get back
# 12. In a 4P game, be more defensive
# 13. Defend early game rush
# 14. Create a pivot
early_game_all_in=0<while_stmt><true># TURN START
<block_start>st=time.time()<line_sep># Update the map for the new turn and get the latest version
game_map=game.update_map()<line_sep># Here we define the set of commands to be sent to the Halite engine at the end of the turn
command_queue=[]<line_sep># initialize game info
<if_stmt><not>initialized<block_start>my_id=game_map.my_id<line_sep>me=game_map.get_me()<line_sep>width=game_map.width<line_sep>height=game_map.height<line_sep>initialized=<true><block_end># cache players, planets and ships
all_players_ids=game_map._players.keys()<line_sep>num_players=len(all_players_ids)<line_sep>all_planets=game_map.all_planets()<line_sep>all_my_ships=game_map.get_me().all_ships()<line_sep>num_my_ships=len(all_my_ships)<line_sep>all_opponent_ships=[]<for_stmt>pid all_players_ids<block_start><if_stmt>my_id<ne>pid<block_start>all_opponent_ships<augadd>game_map.get_player(pid).all_ships()<block_end><block_end>num_opponent_ships=len(all_opponent_ships)<line_sep>all_ships=all_my_ships+all_opponent_ships<line_sep># cache coordinates and misc
all_my_ships_x=numpy.array([v.x<for>v all_my_ships])<line_sep>all_my_ships_y=numpy.array([v.y<for>v all_my_ships])<line_sep>all_my_ships_center_x=numpy.mean(all_my_ships_x)<line_sep>all_my_ships_center_y=numpy.mean(all_my_ships_y)<line_sep>all_opponent_ships_x=numpy.array([v.x<for>v all_opponent_ships])<line_sep>all_opponent_ships_y=numpy.array([v.y<for>v all_opponent_ships])<line_sep>all_opponent_ships_center_x=numpy.mean(all_opponent_ships_x)<line_sep>all_opponent_ships_center_y=numpy.mean(all_opponent_ships_y)<line_sep>all_planets_x=numpy.array([v.x<for>v all_planets])<line_sep>all_planets_y=numpy.array([v.y<for>v all_planets])<line_sep>my_ships_status=numpy.array([v.docking_status<for>v all_my_ships])<line_sep>num_my_undocked_ships=numpy.sum(my_ships_status<eq>hlt.entity.Ship.DockingStatus.UNDOCKED)<line_sep>opponent_ships_status=numpy.array([v.docking_status<for>v all_opponent_ships])<line_sep>num_opponent_undocked_ships=numpy.sum(opponent_ships_status<eq>hlt.entity.Ship.DockingStatus.UNDOCKED)<line_sep>planet_owner=numpy.array([-1<if>v.owner<is><none><else>v.owner.id<for>v all_planets])<def_stmt>compute_dist_matrix x1 y1 x2 y2<block_start>dx=x1[: numpy.newaxis]-x2[numpy.newaxis :]<line_sep>dy=y1[: numpy.newaxis]-y2[numpy.newaxis :]<line_sep><return>numpy.sqrt(dx<times>dx+dy<times>dy)<block_end>my_ship_dist_matrix=compute_dist_matrix(all_my_ships_x all_my_ships_y all_my_ships_x all_my_ships_y)<line_sep>ship_dist_matrix=compute_dist_matrix(all_my_ships_x all_my_ships_y all_opponent_ships_x all_opponent_ships_y)<line_sep>planet_dist_matrix=compute_dist_matrix(all_my_ships_x all_my_ships_y all_planets_x all_planets_y)<line_sep>planet_planet_dist_matrix=compute_dist_matrix(all_planets_x all_planets_y all_planets_x all_planets_y)<line_sep>closest_opponent_ship=numpy.min(ship_dist_matrix axis=1)<line_sep>closest_undocked_opponent_ship=numpy.min(ship_dist_matrix+99999999.0<times>(opponent_ships_status<ne>hlt.entity.Ship.DockingStatus.UNDOCKED)[numpy.newaxis :] 
axis=1)<line_sep>cnt_too_close_to_dock_opponent=numpy.sum((ship_dist_matrix<l>MIN_OPPONENT_DIST_TO_DOCK)<times>((my_ships_status<eq>hlt.entity.Ship.DockingStatus.DOCKED)|(my_ships_status<eq>hlt.entity.Ship.DockingStatus.DOCKING))[: numpy.newaxis] axis=0)<line_sep>cnt_too_close_to_dock_ally=numpy.sum((ship_dist_matrix<l>MIN_OPPONENT_DIST_TO_DOCK)<times>((my_ships_status<eq>hlt.entity.Ship.DockingStatus.DOCKED)|(my_ships_status<eq>hlt.entity.Ship.DockingStatus.DOCKING))[: numpy.newaxis] axis=1)<line_sep>close_opponent_ship_cnt=numpy.sum((ship_dist_matrix<l>CLOSE_OPPONENT_DIST)<times>(opponent_ships_status<eq>hlt.entity.Ship.DockingStatus.UNDOCKED)[numpy.newaxis :] axis=1)<line_sep>close_ally_ship_cnt=numpy.sum((my_ship_dist_matrix<l>CLOSE_ALLY_DIST) axis=1)<line_sep>cnt_too_close_to_dock_closest_ally=numpy.zeros(len(all_my_ships) dtype=numpy.int)<for_stmt>i range(len(all_opponent_ships))<block_start><if_stmt>opponent_ships_status[i]<eq>hlt.entity.Ship.DockingStatus.UNDOCKED# TODO optimize this
<block_start>k=numpy.argmin(ship_dist_matrix[: i]+99999999.0<times>((my_ships_status<eq>hlt.entity.Ship.DockingStatus.UNDOCKED)|(my_ships_status<eq>hlt.entity.Ship.DockingStatus.UNDOCKING)))<if_stmt>ship_dist_matrix[k][i]<l>MIN_OPPONENT_DIST_TO_DOCK<block_start>cnt_too_close_to_dock_closest_ally[k]<augadd>1<block_end><block_end><block_end>planet_capacity=numpy.array([p.num_docking_spots<for>p all_planets])<line_sep>planet_docked_cnt=numpy.array([len(p._docked_ship_ids)<for>p all_planets])#TODO does this include docking ships?
planet_remaining_cnt=planet_capacity-planet_docked_cnt<line_sep># my ship target scores
my_ship_score=numpy.array([0.0]<times>len(all_my_ships))<line_sep>my_ship_score<augadd>OPPONENT_SHIP_CLOSE_TO_MY_DOCKED_BONUS<times>cnt_too_close_to_dock_closest_ally<line_sep>my_ship_score<augadd>-99999999.0<times>(cnt_too_close_to_dock_closest_ally<eq>0)<line_sep>my_ship_max_target_cnt=numpy.minimum(MAX_MY_SHIP_TARGET_CNT cnt_too_close_to_dock_closest_ally)<line_sep># opponent ship target scores
opponent_ship_score=numpy.array([0.0]<times>len(all_opponent_ships))<line_sep>opponent_ship_score<augadd>OPPONENT_SHIP_CLOSE_TO_MY_DOCKED_BONUS<times>cnt_too_close_to_dock_opponent<line_sep>opponent_ship_score<augadd>UNDOCKED_BONUS<times>((opponent_ships_status<eq>hlt.entity.Ship.DockingStatus.UNDOCKED)|(opponent_ships_status<eq>hlt.entity.Ship.DockingStatus.UNDOCKING))<line_sep>opponent_ship_score<augadd>DOCKED_BONUS<times>((opponent_ships_status<eq>hlt.entity.Ship.DockingStatus.DOCKED)|(opponent_ships_status<eq>hlt.entity.Ship.DockingStatus.DOCKING))<line_sep>opponent_ship_max_target_cnt=numpy.array([MAX_OPPONENT_SHIP_TARGET_CNT]<times>len(all_opponent_ships))<line_sep># planet target scores
planet_score=numpy.array([PLANET_BONUS]<times>len(all_planets))<if_stmt><not>first_dock<and>num_players<eq>2<block_start>planet_score[numpy.argmin(planet_dist_matrix[0])]<augadd>20.0# so that all ships go to the same planet at the beginning
<block_end>planet_score[(planet_owner<eq>my_id)]<augadd>PLANET_DOCKED_ALLIES_BONUS<if_stmt>num_players<eq>2<block_start>planet_score<augadd>planet_nearby_empty_planet_score(planet_planet_dist_matrix planet_owner planet_capacity)<block_end><elif_stmt>num_players<g>2<block_start>planet_score<augadd>numpy.minimum(MAX_PLANET_FAR_FROM_CENTER_BONUS PLANET_FAR_FROM_CENTER_BONUS<times>(compute_dist(all_planets_x-width/2.0 all_planets_y-height/2.0)))<block_end>planet_max_target_cnt=planet_remaining_cnt.copy()<line_sep>my_ship_target_cnt=numpy.array([0]<times>len(all_my_ships))<line_sep>opponent_ship_target_cnt=numpy.array([0]<times>len(all_opponent_ships))<line_sep>planet_target_cnt=numpy.array([0]<times>len(all_planets))<line_sep>my_ship_target_available=my_ship_target_cnt<l>my_ship_max_target_cnt<line_sep>opponent_ship_target_available=opponent_ship_target_cnt<l>opponent_ship_max_target_cnt<line_sep>planet_target_available=planet_target_cnt<l>planet_max_target_cnt<line_sep># Early game exception
<if_stmt>early_game_all_in<eq>0<block_start><if_stmt>len(all_my_ships)<ne>3<or>len(all_opponent_ships)<ne>3<or>num_players<g>2<or>numpy.sum(my_ships_status<ne>hlt.entity.Ship.DockingStatus.UNDOCKED)<eq>3<block_start>early_game_all_in=2<block_end><if_stmt>numpy.min(ship_dist_matrix)<l>ALL_IN_DIST<block_start>early_game_all_in=1<block_end><block_end><if_stmt>early_game_all_in<eq>1<block_start>opponent_ship_score<augadd>1.0e9<block_end># compute scores of all edges
scores=[0.0]<times>(len(all_my_ships)<times>(1+len(all_planets)+len(all_opponent_ships)+len(all_my_ships)))<line_sep>len_scores=0<for_stmt>k range(len(all_my_ships))<block_start>ed=time.time()<if_stmt>ed-st<g>1.7<block_start><break><block_end>ship=all_my_ships[k]<if_stmt>ship.docking_status<ne>ship.DockingStatus.UNDOCKED<block_start><continue><block_end><if_stmt><not>early_game_all_in<eq>1<block_start>opponent_too_close_to_target_planet=<false><if>closest_undocked_opponent_ship[k]<g>MIN_OPPONENT_DIST_TO_TARGET_PLANET<else><true><line_sep>opponent_too_close_to_dock=<false><if>closest_undocked_opponent_ship[k]<g>MIN_OPPONENT_DIST_TO_DOCK<else><true><for_stmt>i range(len(all_planets))<block_start>planet=all_planets[i]<if_stmt>planet.owner<eq><none><or>planet.owner.id<eq>my_id<block_start>dist_score=-(planet_dist_matrix[k][i]-planet.radius)<line_sep># TODO move this to planet_score
opponent_score=-99999999.0<if>opponent_too_close_to_target_planet<else>0.0# TODO opponent_score # TODO geographical_score
total_score=planet_score[i]+dist_score+opponent_score<line_sep>scores[len_scores]=(total_score k i 'planet')<line_sep>len_scores<augadd>1<if_stmt>ship.can_dock(planet)<and><not>opponent_too_close_to_dock<block_start>total_score=99999999.0<line_sep>scores[len_scores]=(total_score k i 'dock')<line_sep>len_scores<augadd>1<block_end><block_end><else_stmt># TODO: suicide to opponent planet when I got more ships
<block_start><pass><block_end><block_end><block_end><for_stmt>i range(len(all_my_ships))<block_start><if_stmt>my_ships_status[i]<eq>hlt.entity.Ship.DockingStatus.UNDOCKED<or>my_ships_status[i]<eq>hlt.entity.Ship.DockingStatus.UNDOCKING<block_start><continue><block_end>mship=all_my_ships[i]<line_sep>dist_score=-(my_ship_dist_matrix[k][i]-mship.radius)<line_sep>total_score=my_ship_score[i]+dist_score<line_sep>scores[len_scores]=(total_score k i 'my_ship')<line_sep>len_scores<augadd>1<block_end><for_stmt>i range(len(all_opponent_ships))<block_start><if_stmt>ship_dist_matrix[k][i]<g>MAX_DIST_TO_TARGET_OPPONENT_UNDOCKED_SHIP<and>opponent_ships_status[i]<eq>hlt.entity.Ship.DockingStatus.UNDOCKED<and><not>early_game_all_in<eq>1<block_start><continue><block_end>oship=all_opponent_ships[i]<line_sep>dist_score=-(ship_dist_matrix[k][i]-oship.radius)<line_sep># TODO geograpihcal_score
total_score=opponent_ship_score[i]+dist_score<line_sep>scores[len_scores]=(total_score k i 'opponent_ship')<line_sep>len_scores<augadd>1<block_end><block_end># choose action in decreasing score order
all_my_ships_moves_from=[]<line_sep>all_my_ships_moves_to=[]<line_sep>ship_used=numpy.array([<false>]<times>len(all_my_ships))<line_sep>scores=sorted(scores[:len_scores] reverse=<true>)<for_stmt>i range(len(scores))<block_start>ed=time.time()<if_stmt>ed-st<g>1.7<block_start><break><block_end>ship_idx=scores[i][1]<line_sep>my_ship=all_my_ships[ship_idx]<line_sep>target_idx=scores[i][2]<line_sep>action=scores[i][3]<if_stmt>ship_used[ship_idx]<block_start><continue><block_end>command=<none><if_stmt>action<eq>'dock'<block_start><if_stmt><not>planet_target_available[target_idx]<block_start><continue><block_end>target=all_planets[target_idx]<line_sep>command=my_ship.dock(target)<line_sep>first_dock=<true><line_sep>planet_target_cnt[target_idx]<augadd>1<if_stmt>planet_target_cnt[target_idx]<ge>planet_max_target_cnt[target_idx]<block_start>planet_target_available[target_idx]=<false><block_end><block_end><elif_stmt>action<eq>'planet'<block_start><if_stmt><not>planet_target_available[target_idx]<block_start><continue><block_end>target=all_planets[target_idx]<line_sep># rand_angle = random.randint(0, 359)
# rand_dist = random.uniform(0.0, radius
# rand_target = hlt.entity.Position(target.x +
step,ship_move,command=custom_navigate(my_ship target game_map MAX_SPEED MIN_SPEED SPEED_DECAY 0 all_planets all_ships all_my_ships_moves_to avoid_obstacles=<true> max_corrections=MAX_CORRECTIONS angular_step=ANGULAR_STEP ignore_ships=<false> ignore_planets=<false> suicide=<false>)<if_stmt>step<ne>0<and>num_my_ships<l>DOUBLE_NAVIGATE_SHIP_CNT<block_start>step2,ship_move2,command2=custom_navigate(my_ship target game_map MAX_SPEED MIN_SPEED SPEED_DECAY 0 all_planets all_ships all_my_ships_moves_to avoid_obstacles=<true> max_corrections=MAX_CORRECTIONS angular_step=-ANGULAR_STEP ignore_ships=<false> ignore_planets=<false> suicide=<false>)<if_stmt>step2<l>step<block_start>ship_move=ship_move2<line_sep>command=command2<block_end><block_end><if_stmt>(ship_move<is><not><none>)<and>(command<is><not><none>)# TODO refactor this
<block_start>collide=<false><for_stmt>j range(len(all_my_ships_moves_from))<block_start>end=hlt.entity.Position(ship_move.x-(all_my_ships_moves_to[j].x-all_my_ships_moves_from[j].x) ship_move.y-(all_my_ships_moves_to[j].y-all_my_ships_moves_from[j].y))<line_sep>end.radius=my_ship.radius<if_stmt>custom_intersect_segment_circle(my_ship end all_my_ships_moves_from[j] fudge=my_ship.radius+0.1)<block_start>collide=<true><line_sep><break><block_end><block_end><if_stmt><not>collide<block_start>all_my_ships_moves_to.append(ship_move)<line_sep>all_my_ships_moves_from.append(my_ship)<line_sep>planet_target_cnt[target_idx]<augadd>1<if_stmt>planet_target_cnt[target_idx]<ge>planet_max_target_cnt[target_idx]<block_start>planet_target_available[target_idx]=<false><block_end><block_end><else_stmt><block_start>command=<none><line_sep>ship_move=<none><block_end><block_end><block_end><elif_stmt>action<eq>'my_ship'<block_start><if_stmt><not>my_ship_target_available[target_idx]<block_start><continue><block_end>target=all_my_ships[target_idx]<line_sep>suicide=<false><line_sep>step,ship_move,command=custom_navigate(my_ship target game_map MAX_SPEED MIN_SPEED SPEED_DECAY 0 all_planets all_ships all_my_ships_moves_to avoid_obstacles=<true> max_corrections=MAX_CORRECTIONS angular_step=ANGULAR_STEP ignore_ships=<false> ignore_planets=<false> suicide=suicide)<if_stmt>step<ne>0<and>num_my_ships<l>DOUBLE_NAVIGATE_SHIP_CNT<block_start>step2,ship_move2,command2=custom_navigate(my_ship target game_map MAX_SPEED MIN_SPEED SPEED_DECAY 0 all_planets all_ships all_my_ships_moves_to avoid_obstacles=<true> max_corrections=MAX_CORRECTIONS angular_step=-ANGULAR_STEP ignore_ships=<false> ignore_planets=<false> suicide=suicide)<if_stmt>step2<l>step<block_start>ship_move=ship_move2<line_sep>command=command2<block_end><block_end><if_stmt>(ship_move<is><not><none>)<and>(command<is><not><none>)<block_start>collide=<false><for_stmt>j 
range(len(all_my_ships_moves_from))<block_start>end=hlt.entity.Position(ship_move.x-(all_my_ships_moves_to[j].x-all_my_ships_moves_from[j].x) ship_move.y-(all_my_ships_moves_to[j].y-all_my_ships_moves_from[j].y))<line_sep>end.radius=my_ship.radius<if_stmt>custom_intersect_segment_circle(my_ship end all_my_ships_moves_from[j] fudge=my_ship.radius+0.1)<block_start>collide=<true><line_sep><break><block_end><block_end><if_stmt><not>collide<block_start>all_my_ships_moves_to.append(ship_move)<line_sep>all_my_ships_moves_from.append(my_ship)<line_sep>my_ship_target_cnt[target_idx]<augadd>1<if_stmt>my_ship_target_cnt[target_idx]<ge>my_ship_max_target_cnt[target_idx]<block_start>my_ship_target_available[target_idx]=<false><block_end><block_end><else_stmt><block_start>command=<none><line_sep>ship_move=<none><block_end><block_end><block_end><elif_stmt>action<eq>'opponent_ship'<block_start><if_stmt><not>opponent_ship_target_available[target_idx]<block_start><continue><block_end>target=all_opponent_ships[target_idx]<line_sep>suicide=<false><line_sep>ignore_ships=<false><if_stmt><not>early_game_all_in<eq>1<block_start><if_stmt>my_ship.health<le>SUICIDE_HEALTH_MULT<times>hlt.constants.WEAPON_DAMAGE<times>float(close_opponent_ship_cnt[ship_idx])/float(close_ally_ship_cnt[ship_idx])<or>(opponent_ships_status[target_idx]<eq>hlt.entity.Ship.DockingStatus.DOCKED<and>closest_undocked_opponent_ship[ship_idx]<l>SUICIDE_UNDOCKED_OPPONENT_DIST)<block_start>suicide=<true><line_sep>ignore_ships=<true><block_end><block_end><else_stmt><block_start><if_stmt>my_ship.health<le>SUICIDE_HEALTH_MULT<times>hlt.constants.WEAPON_DAMAGE<times>float(close_opponent_ship_cnt[ship_idx])/float(close_ally_ship_cnt[ship_idx])<block_start>suicide=<true><line_sep>ignore_ships=<true><block_end><block_end>step,ship_move,command=custom_navigate(my_ship target game_map MAX_SPEED MIN_SPEED SPEED_DECAY 0 all_planets all_ships all_my_ships_moves_to avoid_obstacles=<true> max_corrections=MAX_CORRECTIONS 
angular_step=ANGULAR_STEP ignore_ships=ignore_ships ignore_planets=<false> suicide=suicide)<if_stmt>step<ne>0<and>num_my_ships<l>DOUBLE_NAVIGATE_SHIP_CNT<block_start>step2,ship_move2,command2=custom_navigate(my_ship target game_map MAX_SPEED MIN_SPEED SPEED_DECAY 0 all_planets all_ships all_my_ships_moves_to avoid_obstacles=<true> max_corrections=MAX_CORRECTIONS angular_step=-ANGULAR_STEP ignore_ships=ignore_ships ignore_planets=<false> suicide=suicide)<if_stmt>step2<l>step<block_start>ship_move=ship_move2<line_sep>command=command2<block_end><block_end><if_stmt>(ship_move<is><not><none>)<and>(command<is><not><none>)<block_start>collide=<false><for_stmt>j range(len(all_my_ships_moves_from))<block_start>end=hlt.entity.Position(ship_move.x-(all_my_ships_moves_to[j].x-all_my_ships_moves_from[j].x) ship_move.y-(all_my_ships_moves_to[j].y-all_my_ships_moves_from[j].y))<line_sep>end.radius=my_ship.radius<if_stmt>custom_intersect_segment_circle(my_ship end all_my_ships_moves_from[j] fudge=my_ship.radius+0.1)<block_start>collide=<true><line_sep><break><block_end><block_end><if_stmt><not>collide<block_start>all_my_ships_moves_to.append(ship_move)<line_sep>all_my_ships_moves_from.append(my_ship)<line_sep>opponent_ship_target_cnt[target_idx]<augadd>1<if_stmt>opponent_ship_target_cnt[target_idx]<ge>opponent_ship_max_target_cnt[target_idx]<block_start>opponent_ship_target_available[target_idx]=<false><block_end><block_end><else_stmt><block_start>command=<none><line_sep>ship_move=<none><block_end><block_end><block_end><else_stmt><block_start><assert_stmt><false><block_end><if_stmt>command<is><not><none><block_start>ship_used[ship_idx]=<true><line_sep>command_queue.append(command)<block_end><block_end># logging.info('my_id ' + str(my_id))
# for i in range(len(all_planets)):
# planet = all_planets[i]
# logging.info(planet.owner)
# Send our set of commands to the Halite engine for this turn
game.send_command_queue(command_queue)<line_sep># TURN END
<block_end># GAME END
|
#! /usr/bin/env python
<import_stmt>sys<import_stmt>getopt<import_stmt>os.path<import_stmt>logging<import_stmt>random<line_sep># Intrapackage imports
# Make the package under ../.. importable when run from a source checkout.
libpath = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
if os.path.isdir(libpath):
    sys.path.insert(0, libpath)

import pyx12
import pyx12.error_handler  # fix: used by deidentify_file but was never imported
import pyx12.x12file
import pyx12.x12context
import pyx12.params
import pyx12.segment
from collections import namedtuple

__author__ = '<NAME>'
__version__ = '1.0'
__date__ = '2015-02-12'

"""
De-identify 834 Enrollment file
Not production ready
"""

VERBOSE = 0
logger = logging.getLogger()
sub_idx = 0

# One member's identifying fields, as scrubbed/substituted by a deidentifier.
Demographic = namedtuple('Demographic', 'primaryId, ssn, \
medicaidId, dob, dod, firstname, lastname, middlename, street, street2, county')


class FakeDeidentify(object):
    """Deidentifier that maps every member to one fixed fake identity."""

    def __init__(self):
        pass

    def getDeidentified(self, primaryId, datatree):
        """Return a constant fake Demographic for *primaryId*."""
        demo = Demographic(primaryId, '99999999', '009999999', '19500101', '',
                           'Joe', 'Smith', '', '123 Elm', '', '99')
        return demo


class RandomDeidentify(object):
    """Deidentifier that creates a random identity per member and caches it,
    so the same primaryId always maps to the same fake identity within a run."""

    def __init__(self):
        self.identities = {}

    def getDeidentified(self, primaryId, datatree):
        """Return the cached (or newly generated) random Demographic."""
        if primaryId in self.identities:
            return self.identities[primaryId]
        demo = Demographic(
            primaryId="{0:0>10}".format(random.randint(1000, 99999999999)),
            ssn="{0:0>9}".format(random.randint(10000, 999999999)),
            medicaidId="{0:0>10}".format(random.randint(1000, 99999999999)),
            dob='19520101',
            dod='',
            firstname='AA',
            lastname='Smith',
            middlename='',
            street="{0} Oak".format(random.randint(10, 9999)),
            street2='',
            county='98')
        self.identities[primaryId] = demo
        return demo


def deidentify_file(fd_in):
    """Stream an 834 file from *fd_in*, scrub each 2000 loop, and print the
    resulting segments to stdout.

    NOTE(review): a writer to 'newfile.txt' is created but the actual write is
    commented out — output currently goes to stdout only.
    """
    param = pyx12.params.params()
    errh = pyx12.error_handler.errh_null()
    src = pyx12.x12context.X12ContextReader(param, errh, fd_in)
    #deident = FakeDeidentify()
    deident = RandomDeidentify()
    with open('newfile.txt', 'w', encoding='ascii') as fd_out:
        wr = pyx12.x12file.X12Writer(fd_out)
        for datatree in src.iter_segments('2000'):
            if datatree.id == '2000':
                scrub2000(datatree, deident)
            for seg1 in datatree.iterate_segments():
                #wr.Write(seg1['segment'].format())
                print((seg1['segment'].format()))


def scrub2000(loop_sub, deident):
    """Replace identifying fields of one 2000 (member) loop in place."""
    primaryId = loop_sub.get_value('2100A/NM109')
    demo = deident.getDeidentified(primaryId, loop_sub)
    loop_sub.set_value('INS12', demo.dod)
    loop_sub.set_value('REF[0F]02', demo.primaryId)
    loop_sub.set_value('2100A/NM103', demo.lastname)
    loop_sub.set_value('2100A/NM104', demo.firstname)
    loop_sub.set_value('2100A/NM105', demo.middlename)
    loop_sub.set_value('2100A/NM109', demo.medicaidId)
    loop_sub.set_value('2100A/N301', demo.street)
    loop_sub.set_value('2100A/N302', demo.street2)
    loop_sub.set_value('2100A/N406', demo.county)
    loop_sub.set_value('2100A/DMG02', demo.dob)


def usage():
    """Print version and command-line usage to stdout."""
    pgm_nme = os.path.basename(sys.argv[0])
    sys.stdout.write('%s %s (%s)\n' % (pgm_nme, __version__, __date__))
    sys.stdout.write('usage: %s [options] source_file\n' % (pgm_nme))
    sys.stdout.write('\noptions:\n')
    sys.stdout.write(' -h Help\n')
    sys.stdout.write(' -d Debug mode\n')
    # NOTE(review): -o is advertised here but not accepted by getopt below.
    sys.stdout.write(' -o output_directory \n')


def main():
    """Parse options, configure logging, and de-identify each input file.

    Returns True on success, False on bad usage or a missing input file.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'dhv')
    except getopt.error:
        usage()
        return False
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    stdout_hdlr = logging.StreamHandler()
    stdout_hdlr.setFormatter(formatter)
    logger.addHandler(stdout_hdlr)
    logger.setLevel(logging.INFO)
    for o, a in opts:
        if o == '-h':
            usage()
            return True
        if o == '-d':
            logger.setLevel(logging.DEBUG)
        if o == '-v':
            logger.setLevel(logging.DEBUG)
    for file_in in args:
        if not os.path.isfile(file_in):
            logger.error('File %s was not found' % (file_in))
            usage()
            return False
        #file_name = os.path.basename(file_in)
        # fix: close the input file deterministically
        with open(file_in, 'r', encoding='ascii') as fd_in:
            deidentify_file(fd_in)
    return True


if __name__ == '__main__':
    sys.exit(not main())
# -*- coding: utf-8 -*-
"""
Copyright ©2017. The Regents of the University of California (Regents). All Rights Reserved.
Permission to use, copy, modify, and distribute this software and its documentation for educational,
research, and not-for-profit purposes, without fee and without a signed licensing agreement, is
hereby granted, provided that the above copyright notice, this paragraph and the following two
paragraphs appear in all copies, modifications, and distributions. Contact The Office of Technology
Licensing, UC Berkeley, 2150 Shattuck Avenue, Suite 510, Berkeley, CA 94720-1620, (510) 643-
7201, <EMAIL>, http://ipira.berkeley.edu/industry-info for commercial licensing opportunities.
IN NO EVENT SHALL REGENTS BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, SPECIAL,
INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS, ARISING OUT OF
THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF REGENTS HAS BEEN
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED
HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE
MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
"""<line_sep># Keys for easy lookups in HDF5 databases
METRICS_KEY='metrics'<line_sep>OBJECTS_KEY='objects'<line_sep>MESH_KEY='mesh'<line_sep>SDF_KEY='sdf'<line_sep>GRASPS_KEY='grasps'<line_sep>GRIPPERS_KEY='grippers'<line_sep>NUM_GRASPS_KEY='num_grasps'<line_sep>LOCAL_FEATURES_KEY='local_features'<line_sep>GLOBAL_FEATURES_KEY='global_features'<line_sep>SHOT_FEATURES_KEY='shot'<line_sep>RENDERED_IMAGES_KEY='rendered_images'<line_sep>SENSOR_DATA_KEY='sensor_data'<line_sep>STP_KEY='stable_poses'<line_sep>CATEGORY_KEY='category'<line_sep>MASS_KEY='mass'<line_sep>CONVEX_PIECES_KEY='convex_pieces'<line_sep>CREATION_KEY='time_created'<line_sep>DATASETS_KEY='datasets'<line_sep>DATASET_KEY='dataset'<line_sep># data keys for easy access
SDF_DATA_KEY='data'<line_sep>SDF_ORIGIN_KEY='origin'<line_sep>SDF_RES_KEY='resolution'<line_sep>SDF_POSE_KEY='pose'<line_sep>SDF_SCALE_KEY='scale'<line_sep>SDF_FRAME_KEY='frame'<line_sep>MESH_VERTICES_KEY='vertices'<line_sep>MESH_TRIANGLES_KEY='triangles'<line_sep>MESH_NORMALS_KEY='normals'<line_sep>MESH_POSE_KEY='pose'<line_sep>MESH_SCALE_KEY='scale'<line_sep>MESH_DENSITY_KEY='density'<line_sep>LOCAL_FEATURE_NUM_FEAT_KEY='num_features'<line_sep>LOCAL_FEATURE_DESC_KEY='descriptors'<line_sep>LOCAL_FEATURE_RF_KEY='rfs'<line_sep>LOCAL_FEATURE_POINT_KEY='points'<line_sep>LOCAL_FEATURE_NORMAL_KEY='normals'<line_sep>SHOT_FEATURES_KEY='shot'<line_sep>FEATURE_KEY='feature'<line_sep>NUM_STP_KEY='num_stable_poses'<line_sep>POSE_KEY='pose'<line_sep>STABLE_POSE_PROB_KEY='p'<line_sep>STABLE_POSE_ROT_KEY='r'<line_sep>STABLE_POSE_PT_KEY='x0'<line_sep>NUM_GRASPS_KEY='num_grasps'<line_sep>GRASP_KEY='grasp'<line_sep>GRASP_ID_KEY='id'<line_sep>GRASP_TYPE_KEY='type'<line_sep>GRASP_CONFIGURATION_KEY='configuration'<line_sep>GRASP_RF_KEY='frame'<line_sep>GRASP_TIMESTAMP_KEY='timestamp'<line_sep>GRASP_METRICS_KEY='metrics'<line_sep>GRASP_FEATURES_KEY='features'<line_sep>GRASP_FEATURE_NAME_KEY='name'<line_sep>GRASP_FEATURE_TYPE_KEY='type'<line_sep>GRASP_FEATURE_VECTOR_KEY='vector'<line_sep>NUM_IMAGES_KEY='num_images'<line_sep>IMAGE_KEY='image'<line_sep>IMAGE_DATA_KEY='image_data'<line_sep>IMAGE_FRAME_KEY='image_frame'<line_sep>CAM_POS_KEY='cam_pos'<line_sep>CAM_ROT_KEY='cam_rot'<line_sep>CAM_INT_PT_KEY='cam_int_pt'<line_sep>CAM_FRAME_KEY='cam_frame'<line_sep># Extras
RENDERED_IMAGE_TYPES=['segmask' 'depth' 'scaled_depth']<line_sep># Metadata
METADATA_KEY='metadata'<line_sep>METADATA_TYPE_KEY='type'<line_sep>METADATA_DESC_KEY='description'<line_sep>METADATA_FUNC_KEY='func'<line_sep># Connected components
CONNECTED_COMPONENTS_KEY='connected_components'<line_sep> |
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional

import onnxruntime as ort


def get_sess(model_path: Optional[os.PathLike] = None, sess_conf: dict = None):
    """Create an ONNX Runtime inference session for *model_path*.

    Args:
        model_path: Path to the .onnx model file.
        sess_conf: Optional configuration dict with keys:
            "device": device string; a value containing "gpu" selects GPU
                providers, anything else falls back to CPU.
            "use_trt": truthy to prefer TensorRT over CUDA on GPU.
            "cpu_threads": intra-op thread count (optional).

    Returns:
        ort.InferenceSession: the configured session.
    """
    sess_options = ort.SessionOptions()
    sess_options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_ALL
    sess_options.execution_mode = ort.ExecutionMode.ORT_SEQUENTIAL

    # fix: indexing a None config crashed; treat missing config as empty.
    sess_conf = sess_conf or {}
    # fix: `providers` was unbound (UnboundLocalError) when "device" was
    # neither gpu-like nor exactly "cpu"; default to CPU so unknown devices
    # degrade gracefully.
    providers = ['CPUExecutionProvider']
    if "gpu" in sess_conf.get("device", "cpu"):
        # fastspeech2/mb_melgan can't use trt now!
        if sess_conf.get("use_trt"):
            providers = ['TensorrtExecutionProvider']
        else:
            providers = ['CUDAExecutionProvider']
    if "cpu_threads" in sess_conf:
        sess_options.intra_op_num_threads = sess_conf["cpu_threads"]
    sess = ort.InferenceSession(
        model_path, providers=providers, sess_options=sess_options)
    return sess
# coding: utf8
"""
This software is licensed under the Apache 2 license, quoted below.
Copyright 2014 Crystalnix Limited
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
"""<import_stmt>logging<import_stmt>os<import_from_stmt>django.core.mail.message EmailMessage<import_from_stmt>django.core.files.storage default_storage<import_from_stmt>omaha_server.celery app<import_from_stmt>feedback.models Feedback<line_sep>logger=logging.getLogger(__name__)<line_sep>email_body_tmpl="""
Description: %s
Page URL: %s
User email: %s
User IP: %s
Feedback JSON data: %s
"""<line_sep>@app.task(name='tasks.send_email_feedback' ignore_result=<true> max_retries=12 bind=<true>)<def_stmt>send_email_feedback self feedback_pk sender recipents<block_start><try_stmt><block_start>feedback=Feedback.objects.get(pk=feedback_pk)<block_end><except_stmt>Feedback.DoesNotExist<as>exc<block_start>logger.error('Failed processing_crash_dump' exc_info=<true> extra=dict(crash_pk=feedback_pk))<line_sep><raise>self.retry(exc=exc countdown=2<power>send_email_feedback.request.retries)<block_end>recipients=[x.strip()<for>x recipents.split(',')]<line_sep>body=email_body_tmpl%(feedback.description feedback.page_url feedback.email feedback.ip feedback.feedback_data )<line_sep>email=EmailMessage("Feedback # %s"%feedback_pk body sender recipients)<line_sep>attachments=[feedback.screenshot feedback.blackbox feedback.system_logs feedback.attached_file]<for_stmt>attach attachments<block_start><if_stmt>attach<block_start>email.attach(os.path.basename(attach.name) attach.read())<block_end><block_end>email.send()<block_end> |
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import

from future import standard_library
standard_library.install_aliases()
from past.utils import old_div
import rlpy
import numpy as np
from hyperopt import hp

# Hyperopt search space used when tuning this experiment's hyperparameters.
param_space = {
    'kernel_resolution': hp.loguniform("kernel_resolution", np.log(5), np.log(50)),
    'discover_threshold': hp.loguniform("discover_threshold", np.log(1e4), np.log(1e8)),
    'lambda_': hp.uniform("lambda_", 0., 1.),
    'boyan_N0': hp.loguniform("boyan_N0", np.log(1e1), np.log(1e5)),
    'initial_learn_rate': hp.loguniform("initial_learn_rate", np.log(5e-2), np.log(1)),
}


def make_experiment(exp_id=1,
                    path="./Results/Temp/{domain}/{agent}/{representation}/",
                    discover_threshold=88044.,
                    boyan_N0=64502,
                    lambda_=0.43982644088,
                    initial_learn_rate=0.920244401,
                    kernel_resolution=11.6543336229):
    """Build a BicycleRiding Q-Learning experiment using a kernelized iFDD
    representation with an eGreedy policy.

    Args:
        exp_id: Experiment/seed id.
        path: Output path template for results.
        discover_threshold: iFDD feature-discovery threshold.
        boyan_N0: Boyan learn-rate decay parameter.
        lambda_: Eligibility-trace decay.
        initial_learn_rate: Initial learning rate.
        kernel_resolution: Divisor of the state-space range per dimension,
            used to derive the kernel width.

    Returns:
        rlpy.Experiments.Experiment: the configured (unrun) experiment.
    """
    opt = {
        "exp_id": exp_id,
        "path": path,
        "max_steps": 150000,
        "num_policy_checks": 30,
        "checks_per_policy": 1,
    }
    active_threshold = 0.01
    max_base_feat_sim = 0.5
    sparsify = 1

    domain = rlpy.Domains.BicycleRiding()
    opt["domain"] = domain
    # Kernel width: per-dimension state range divided by the resolution.
    state_range = domain.statespace_limits[:, 1] - domain.statespace_limits[:, 0]
    kernel_width = old_div(state_range, kernel_resolution)
    representation = rlpy.Representations.KernelizediFDD(
        domain,
        sparsify=sparsify,
        kernel=rlpy.Representations.linf_triangle_kernel,
        kernel_args=[kernel_width],
        active_threshold=active_threshold,
        discover_threshold=discover_threshold,
        normalization=True,
        max_active_base_feat=10,
        max_base_feat_sim=max_base_feat_sim)
    policy = rlpy.Policies.eGreedy(representation, epsilon=0.1)
    # agent = SARSA(representation,policy,domain,initial_learn_rate=initial_learn_rate,
    #               lambda_=.0, learn_rate_decay_mode="boyan", boyan_N0=boyan_N0)
    opt["agent"] = rlpy.Agents.Q_Learning(
        policy,
        representation,
        discount_factor=domain.discount_factor,
        lambda_=lambda_,
        initial_learn_rate=initial_learn_rate,
        learn_rate_decay_mode="boyan",
        boyan_N0=boyan_N0)
    return rlpy.Experiments.Experiment(**opt)


if __name__ == '__main__':
    from rlpy.Tools.run import run_profiled
    # run_profiled(make_experiment)
    experiment = make_experiment(1)
    experiment.run(visualize_learning=True, visualize_performance=True)
    experiment.plot()
    # experiment.save()
#
# Copyright (C) <NAME>, <NAME>, and <NAME>, 2016
#
# Distributed under the same BSD license as Scipy.
#
# adapted from scipy's cython version
<import_stmt>numpy<as>np<import_stmt>numpy.random<as>random<line_sep>#pythran export directed_hausdorff(float64[:,:], float64[:,:], int)
#pythran export directed_hausdorff_noshuffle(float64[:,:], float64[:,:])
#runas import numpy as np; x = np.arange((100 * 100.)).reshape(100,-1); y = np.ones((100,100)) * 3; directed_hausdorff_noshuffle(x, y)
<def_stmt>directed_hausdorff ar1 ar2 seed=0<block_start>N1,data_dims=ar1.shape<line_sep>N2=ar2.shape[0]<line_sep>i_store=j_store=i_ret=j_ret=0<line_sep># shuffling the points in each array generally increases the likelihood of
# an advantageous break in the inner search loop and never decreases the
# performance of the algorithm
random.seed(seed)<line_sep>resort1=np.arange(N1)<line_sep>resort2=np.arange(N2)<line_sep>random.shuffle(resort1)<line_sep>random.shuffle(resort2)<line_sep>ar1=np.asarray(ar1)[resort1]<line_sep>ar2=np.asarray(ar2)[resort2]<line_sep>cmax=0<for_stmt>i range(N1)<block_start>cmin=np.inf<for_stmt>j range(N2)<block_start>d=np.sum((ar1[i]-ar2[j])<power>2)<line_sep># faster performance with square of distance
# avoid sqrt until very end
<if_stmt>d<l>cmax# break out of `for j` loop
<block_start><break><block_end><if_stmt>d<l>cmin# always true on first iteration of for-j loop
<block_start>cmin=d<line_sep>i_store=i<line_sep>j_store=j<block_end><block_end><else_stmt># always true on first iteration of for-j loop, after that only
# if d >= cmax
<block_start><if_stmt>cmin<ne>np.inf<and>cmin<g>cmax<block_start>cmax=cmin<line_sep>i_ret=i_store<line_sep>j_ret=j_store<block_end><block_end><block_end><return>np.sqrt(cmax) resort1[i_ret] resort2[j_ret]<block_end><def_stmt>directed_hausdorff_noshuffle ar1 ar2 seed=0<block_start>N1,data_dims=ar1.shape<line_sep>N2=ar2.shape[0]<line_sep>i_store=j_store=i_ret=j_ret=0<line_sep>resort1=np.arange(N1)<line_sep>resort2=np.arange(N2)<line_sep>ar1=np.asarray(ar1)[resort1]<line_sep>ar2=np.asarray(ar2)[resort2]<line_sep>cmax=0<for_stmt>i range(N1)<block_start>cmin=np.inf<for_stmt>j range(N2)<block_start>d=np.sum((ar1[i]-ar2[j])<power>2)<line_sep># faster performance with square of distance
# avoid sqrt until very end
<if_stmt>d<l>cmax# break out of `for j` loop
<block_start><break><block_end><if_stmt>d<l>cmin# always true on first iteration of for-j loop
<block_start>cmin=d<line_sep>i_store=i<line_sep>j_store=j<block_end><block_end><else_stmt># always true on first iteration of for-j loop, after that only
# if d >= cmax
<block_start><if_stmt>cmin<ne>np.inf<and>cmin<g>cmax<block_start>cmax=cmin<line_sep>i_ret=i_store<line_sep>j_ret=j_store<block_end><block_end><block_end><return>np.sqrt(cmax) resort1[i_ret] resort2[j_ret]<block_end> |
# -*- coding: utf-8 -*-
<import_from_stmt>unittest.mock patch<import_from_stmt>django.db transaction<import_from_stmt>django.test override_settings TestCase TransactionTestCase<import_from_stmt>ralph.assets.tests.factories ConfigurationClassFactory EthernetFactory <import_from_stmt>ralph.data_center.models BaseObjectCluster DataCenterAsset<import_from_stmt>ralph.dns.dnsaas DNSaaS<import_from_stmt>ralph.dns.forms DNSRecordForm RecordType<import_from_stmt>ralph.dns.publishers _get_txt_data_to_publish_to_dnsaas<import_from_stmt>ralph.dns.views add_errors DNSaaSIntegrationNotEnabledError DNSView <import_from_stmt>ralph.networks.tests.factories IPAddressFactory<import_from_stmt>ralph.virtual.models VirtualServer<import_from_stmt>ralph.virtual.tests.factories VirtualServerFactory<class_stmt>TestGetDnsRecords(TestCase)<block_start>@patch.object(DNSaaS '_get_oauth_token')<def_stmt>setUp self mocked<block_start>mocked.return_value='token'<line_sep>self.dnsaas=DNSaaS()<block_end>@patch.object(DNSaaS 'get_api_result')<def_stmt>test_return_empty_when_api_returns_empty self mocked<block_start>mocked.return_value=[]<line_sep>found_dns=self.dnsaas.get_dns_records(['192.168.0.1'])<line_sep>self.assertEqual(found_dns [])<block_end><def_stmt>test_return_empty_when_no_ipaddress self<block_start>found_dns=self.dnsaas.get_dns_records([])<line_sep>self.assertEqual(found_dns [])<block_end>@patch.object(DNSaaS 'get_api_result')<def_stmt>test_return_dns_records_when_api_returns_records self mocked<block_start>data={'content':'127.0.0.3' 'name':'1.test.pl' 'type':'A' 'id':1}<line_sep>mocked.return_value=[data]<line_sep>found_dns=self.dnsaas.get_dns_records(['192.168.0.1'])<line_sep>self.assertEqual(len(found_dns) 1)<line_sep>self.assertEqual(found_dns[0]['content'] data['content'])<line_sep>self.assertEqual(found_dns[0]['name'] data['name'])<line_sep>self.assertEqual(found_dns[0]['type'] RecordType.a)<block_end>@override_settings(DNSAAS_URL='http://dnsaas.com/')<def_stmt>test_build_url 
self<block_start>self.assertEqual(self.dnsaas.build_url('domains') 'http://dnsaas.com/api/domains/')<block_end>@override_settings(DNSAAS_URL='http://dnsaas.com/')<def_stmt>test_build_url_with_version self<block_start>self.assertEqual(self.dnsaas.build_url('domains') 'http://dnsaas.com/api/domains/')<block_end>@override_settings(DNSAAS_URL='http://dnsaas.com/')<def_stmt>test_build_url_with_id self<block_start>self.assertEqual(self.dnsaas.build_url('domains' id=1) 'http://dnsaas.com/api/domains/1/')<block_end>@override_settings(DNSAAS_URL='http://dnsaas.com/')<def_stmt>test_build_url_with_get_params self<block_start>self.assertEqual(self.dnsaas.build_url('domains' get_params=[('name' 'ralph')]) 'http://dnsaas.com/api/domains/?name=ralph')<block_end>@override_settings(DNSAAS_URL='http://dnsaas.com/')<def_stmt>test_build_url_with_id_and_get_params self<block_start>self.assertEqual(self.dnsaas.build_url('domains' id=1 get_params=[('name' 'ralph')]) 'http://dnsaas.com/api/domains/1/?name=ralph')<block_end><block_end><class_stmt>TestDNSView(TestCase)<block_start>@override_settings(ENABLE_DNSAAS_INTEGRATION=<false>)<def_stmt>test_dnsaasintegration_disabled self<block_start><with_stmt>self.assertRaises(DNSaaSIntegrationNotEnabledError)<block_start>DNSView()<block_end><block_end>@override_settings(ENABLE_DNSAAS_INTEGRATION=<true>)@patch('ralph.dns.views.DNSaaS._get_oauth_token')<def_stmt>test_dnsaasintegration_enabled self _get_oauth_token_mock# should not raise exception
<block_start>_get_oauth_token_mock.return_value='token'<line_sep>DNSView()<block_end><block_end><class_stmt>TestGetTXTDataToPublishToDNSaaS(TestCase)<block_start>@classmethod<def_stmt>setUpClass cls<block_start><import_from_stmt>ralph.data_center.tests.factories ClusterFactory DataCenterAssetFactory RackFactory <line_sep>super().setUpClass()<line_sep>cls.dc_asset=DataCenterAssetFactory(hostname='ralph0.allegro.pl' service_env__service__name='service' service_env__environment__name='test' model__name='DL360' model__manufacturer__name='Asus' model__category__name='ATS' rack=RackFactory(name='Rack #100' server_room__name='Server Room A' server_room__data_center__name='DC1' ) position=1 slot_no='1' configuration_path__class_name='www' configuration_path__module__name='ralph' )<line_sep>cls.dc_ip=IPAddressFactory(base_object=cls.dc_asset ethernet=EthernetFactory(base_object=cls.dc_asset) )<line_sep>IPAddressFactory(base_object=cls.dc_asset ethernet=EthernetFactory(base_object=cls.dc_asset) is_management=<true> )<line_sep>cls.virtual_server=VirtualServerFactory(hostname='s000.local' configuration_path=ConfigurationClassFactory(class_name='worker' module__name='auth') service_env__service__name='service' service_env__environment__name='prod' type__name='Xen' parent=DataCenterAssetFactory(hostname='parent' model__name='DL380p' model__manufacturer__name='Brother' model__category__name='Database Machine' rack=RackFactory(name='Rack #101' server_room__name='Server Room B' server_room__data_center__name='DC2' ) position=1 slot_no='1' ) )<line_sep># refresh virtual server to get parent as BaseObject, not
# DataCenterAsset
cls.vs_ip=IPAddressFactory(base_object=cls.virtual_server ethernet=EthernetFactory(base_object=cls.virtual_server) )<line_sep>cls.virtual_server=VirtualServer.objects.get(pk=cls.virtual_server.id)<line_sep>cluster=ClusterFactory(hostname='' type__name='Application' configuration_path__class_name='www' configuration_path__module__name='ralph' service_env__service__name='service' service_env__environment__name='preprod' )<line_sep>cls.boc_1=BaseObjectCluster.objects.create(cluster=cluster base_object=DataCenterAssetFactory(rack=RackFactory() position=1 ))<line_sep>cls.boc_2=BaseObjectCluster.objects.create(cluster=cluster base_object=DataCenterAssetFactory(rack=RackFactory(server_room__data_center__name='DC2' server_room__name='Server Room B' name='Rack #101' ) position=1 ) is_master=<true>)<line_sep>cls.cluster=ClusterFactory._meta.model.objects.get(pk=cluster)<line_sep>cls.cluster_ip=IPAddressFactory(base_object=cls.cluster ethernet=EthernetFactory(base_object=cls.cluster) )<block_end><def_stmt>test_dc_asset_gets_data_ok self<block_start>data=_get_txt_data_to_publish_to_dnsaas(self.dc_asset)<line_sep>self.assertEqual(data [{'content':'www' 'ips':[self.dc_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'VENTURE' 'service_uid':self.dc_asset.service.uid} {'content':'ralph' 'ips':[self.dc_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'ROLE' 'service_uid':self.dc_asset.service.uid} {'content':'ralph/www' 'ips':[self.dc_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'CONFIGURATION_PATH' 'service_uid':self.dc_asset.service.uid} {'content':'service - test' 'ips':[self.dc_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'SERVICE_ENV' 'service_uid':self.dc_asset.service.uid} {'content':'[ATS] Asus DL360' 'ips':[self.dc_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'MODEL' 'service_uid':self.dc_asset.service.uid} {'content':'DC1 / Server Room A / Rack #100 / 1 / 1' 'ips':[self.dc_ip.address] 'owner':'ralph' 
'target_owner':'ralph' 'purpose':'LOCATION' 'service_uid':self.dc_asset.service.uid}])<block_end><def_stmt>test_dc_asset_without_service_gets_data_ok self<block_start>self.dc_asset.service_env=<none><line_sep>self.dc_asset.save()<line_sep>data=_get_txt_data_to_publish_to_dnsaas(self.dc_asset)<line_sep>self.assertEqual(data [{'content':'www' 'ips':[self.dc_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'VENTURE'} {'content':'ralph' 'ips':[self.dc_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'ROLE'} {'content':'ralph/www' 'ips':[self.dc_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'CONFIGURATION_PATH'} {'content':'[ATS] Asus DL360' 'ips':[self.dc_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'MODEL'} {'content':'DC1 / Server Room A / Rack #100 / 1 / 1' 'ips':[self.dc_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'LOCATION'}])<block_end><def_stmt>test_virtual_server_gets_data_ok self<block_start>data=_get_txt_data_to_publish_to_dnsaas(self.virtual_server)<line_sep>self.assertEqual(data [{'content':'worker' 'ips':[self.vs_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'VENTURE' 'service_uid':self.virtual_server.service.uid} {'content':'auth' 'ips':[self.vs_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'ROLE' 'service_uid':self.virtual_server.service.uid} {'content':'auth/worker' 'ips':[self.vs_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'CONFIGURATION_PATH' 'service_uid':self.virtual_server.service.uid} {'content':'service - prod' 'ips':[self.vs_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'SERVICE_ENV' 'service_uid':self.virtual_server.service.uid} {'content':'Xen' 'ips':[self.vs_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'MODEL' 'service_uid':self.virtual_server.service.uid} {'content':'DC2 / Server Room B / Rack #101 / 1 / 1 / parent' 'ips':[self.vs_ip.address] 'owner':'ralph' 'target_owner':'ralph' 
'purpose':'LOCATION' 'service_uid':self.virtual_server.service.uid}])<block_end><def_stmt>test_virtual_server_without_service_gets_data_ok self<block_start>self.virtual_server.service_env=<none><line_sep>self.virtual_server.save()<line_sep>data=_get_txt_data_to_publish_to_dnsaas(self.virtual_server)<line_sep>self.assertEqual(data [{'content':'worker' 'ips':[self.vs_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'VENTURE'} {'content':'auth' 'ips':[self.vs_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'ROLE'} {'content':'auth/worker' 'ips':[self.vs_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'CONFIGURATION_PATH'} {'content':'Xen' 'ips':[self.vs_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'MODEL'} {'content':'DC2 / Server Room B / Rack #101 / 1 / 1 / parent' 'ips':[self.vs_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'LOCATION'}])<block_end><def_stmt>test_cluster_gets_data_ok self<block_start>data=_get_txt_data_to_publish_to_dnsaas(self.cluster)<line_sep>self.assertEqual(data [{'content':'www' 'ips':[self.cluster_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'VENTURE' 'service_uid':self.cluster.service.uid} {'content':'ralph' 'ips':[self.cluster_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'ROLE' 'service_uid':self.cluster.service.uid} {'content':'ralph/www' 'ips':[self.cluster_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'CONFIGURATION_PATH' 'service_uid':self.cluster.service.uid} {'content':'service - preprod' 'ips':[self.cluster_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'SERVICE_ENV' 'service_uid':self.cluster.service.uid} {'content':'Application' 'ips':[self.cluster_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'MODEL' 'service_uid':self.cluster.service.uid} {'content':'DC2 / Server Room B / Rack #101 / 1' 'ips':[self.cluster_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'LOCATION' 
'service_uid':self.cluster.service.uid}])<block_end><def_stmt>test_cluster_without_service_gets_data_ok self<block_start>self.cluster.service_env=<none><line_sep>self.cluster.save()<line_sep>data=_get_txt_data_to_publish_to_dnsaas(self.cluster)<line_sep>self.assertEqual(data [{'content':'www' 'ips':[self.cluster_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'VENTURE'} {'content':'ralph' 'ips':[self.cluster_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'ROLE'} {'content':'ralph/www' 'ips':[self.cluster_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'CONFIGURATION_PATH'} {'content':'Application' 'ips':[self.cluster_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'MODEL'} {'content':'DC2 / Server Room B / Rack #101 / 1' 'ips':[self.cluster_ip.address] 'owner':'ralph' 'target_owner':'ralph' 'purpose':'LOCATION'}])<block_end><block_end><class_stmt>TestPublishAutoTXTToDNSaaS(TransactionTestCase)<block_start>@classmethod<def_stmt>setUpClass cls<block_start><import_from_stmt>ralph.data_center.tests.factories DataCenterAssetFactory RackFactory <line_sep>super().setUpClass()<line_sep>cls.dc_asset=DataCenterAssetFactory(hostname='ralph0.allegro.pl' service_env__service__name='service' service_env__environment__name='test' model__name='DL360' model__manufacturer__name='Asus' model__category__name='ATS' rack=RackFactory(name='Rack #100' server_room__name='Server Room A' server_room__data_center__name='DC1' ) position=1 slot_no='1' configuration_path__class_name='www' configuration_path__module__name='ralph' )<line_sep>cls.dc_ip=IPAddressFactory(base_object=cls.dc_asset ethernet=EthernetFactory(base_object=cls.dc_asset) )<line_sep>IPAddressFactory(base_object=cls.dc_asset ethernet=EthernetFactory(base_object=cls.dc_asset) is_management=<true> 
)<block_end>@override_settings(DNSAAS_AUTO_TXT_RECORD_TOPIC_NAME='dnsaas_auto_txt_record')@patch('ralph.dns.publishers.publish')<def_stmt>test_publishing_auto_txt_data_when_dc_asset_updated self publish_mock# fetch clean instance
<block_start>dc_asset=DataCenterAsset.objects.get(pk=self.dc_asset)<with_stmt>transaction.atomic()<block_start>dc_asset.save()<block_end>self.assertEqual(publish_mock.call_count 1)<line_sep>publish_data=publish_mock.call_args[0][1]<line_sep># owner could be non-deterministic, depending on order of tests
# and it's not part of this test to check its correctness
<for_stmt>data_dict publish_data<block_start>data_dict.pop('owner')<block_end>self.assertCountEqual(publish_data [{'content':'www' 'ips':[self.dc_ip.address] 'target_owner':'ralph' 'purpose':'VENTURE' 'service_uid':dc_asset.service.uid} {'content':'ralph' 'ips':[self.dc_ip.address] 'target_owner':'ralph' 'purpose':'ROLE' 'service_uid':dc_asset.service.uid} {'content':'ralph/www' 'ips':[self.dc_ip.address] 'target_owner':'ralph' 'purpose':'CONFIGURATION_PATH' 'service_uid':dc_asset.service.uid} {'content':'service - test' 'ips':[self.dc_ip.address] 'target_owner':'ralph' 'purpose':'SERVICE_ENV' 'service_uid':dc_asset.service.uid} {'content':'[ATS] Asus DL360' 'ips':[self.dc_ip.address] 'target_owner':'ralph' 'purpose':'MODEL' 'service_uid':dc_asset.service.uid} {'content':'DC1 / Server Room A / Rack #100 / 1 / 1' 'ips':[self.dc_ip.address] 'target_owner':'ralph' 'purpose':'LOCATION' 'service_uid':dc_asset.service.uid}])<block_end><block_end><class_stmt>TestDNSForm(TestCase)<block_start><def_stmt>test_unknown_field_goes_to_non_field_errors self<block_start>errors={'errors':[{'reason':'unknown' 'comment':'value'}]}<line_sep>form=DNSRecordForm({})<line_sep>add_errors(form errors)<line_sep>self.assertIn('value' form.non_field_errors())<block_end><block_end> |
<import_stmt>weakref<import_from_stmt>rpython.rtyper.lltypesystem lltype llmemory rffi<import_from_stmt>rpython.rlib.rarithmetic LONG_BIT<class_stmt>GroupType(lltype.ContainerType)<block_start>"""A 'group' that stores static structs together in memory.
On 32-bit platforms, the point is that they can be referenced by a
GroupMemberOffset which only takes 2 bytes (a USHORT), so the total
size of a group is limited to 18 (= the 16 bits in a USHORT, plus 2
bits at the end that are zero and so don't need to be stored).
On 64-bit platforms, we check that the address they end up at is
within the first 32 bits, so that we can store that address in half
a long (i.e. in a UINT).
"""<line_sep>_gckind='raw'<block_end>Group=GroupType()<class_stmt>group(lltype._container)<block_start>_TYPE=Group<line_sep>outdated=<none><def_stmt>__init__ self name<block_start>self.name=name<line_sep>self.members=[]<block_end><def_stmt>add_member self structptr<block_start>TYPE=lltype.typeOf(structptr)<assert_stmt>isinstance(TYPE.TO lltype.Struct)<assert_stmt>TYPE.TO._gckind<eq>'raw'<line_sep>struct=structptr._as_obj()<line_sep>prevgroup=_membership.get(struct)<if_stmt>prevgroup<is><not><none><block_start>prevgroup.outdated=("structure %s was inserted into another group"%(struct ))<block_end><assert_stmt>struct._parentstructure()<is><none><line_sep>index=len(self.members)<line_sep>self.members.append(struct)<line_sep>_membership[struct]=self<line_sep><return>GroupMemberOffset(self index)<block_end><block_end><def_stmt>member_of_group structptr<block_start><return>_membership.get(structptr._as_obj() <none>)<block_end>_membership=weakref.WeakValueDictionary()<if_stmt>LONG_BIT<eq>32<block_start>HALFSHIFT=16<line_sep>HALFWORD=rffi.USHORT<line_sep>r_halfword=rffi.r_ushort<block_end><else_stmt><block_start>HALFSHIFT=32<line_sep>HALFWORD=rffi.UINT<line_sep>r_halfword=rffi.r_uint<block_end><class_stmt>GroupMemberOffset(llmemory.Symbolic)<block_start>"""The offset of a struct inside a group, stored compactly in a HALFWORD
(a USHORT or UINT). Can only be used by the lloperation 'get_group_member'.
"""<def_stmt>annotation self<block_start><import_from_stmt>rpython.annotator model<line_sep><return>model.SomeInteger(knowntype=r_halfword)<block_end><def_stmt>lltype self<block_start><return>HALFWORD<block_end><def_stmt>__init__ self grp memberindex<block_start><assert_stmt>lltype.typeOf(grp)<eq>Group<line_sep>self.grpptr=grp._as_ptr()<line_sep>self.index=memberindex<line_sep>self.member=grp.members[memberindex]._as_ptr()<block_end><def_stmt>__repr__ self<block_start><return>'%s(%s, %s)'%(self.__class__.__name__ self.grpptr self.index)<block_end><def_stmt>__nonzero__ self<block_start><return><true><block_end><def_stmt>_get_group_member self grpptr<block_start><assert_stmt>grpptr<eq>self.grpptr "get_group_member: wrong group!"<line_sep><return>self.member<block_end><def_stmt>_get_next_group_member self grpptr skipoffset# ad-hoc: returns a pointer to the group member that follows this one,
# given information in 'skipoffset' about how much to skip -- which
# is the size of the current member.
<block_start><assert_stmt>grpptr<eq>self.grpptr "get_next_group_member: wrong group!"<assert_stmt>isinstance(skipoffset llmemory.ItemOffset)<assert_stmt>skipoffset.TYPE<eq>lltype.typeOf(self.member).TO<assert_stmt>skipoffset.repeat<eq>1<line_sep><return>self.grpptr._as_obj().members[self.index+1]._as_ptr()<block_end><block_end><class_stmt>CombinedSymbolic(llmemory.Symbolic)<block_start>"""A general-purpose Signed symbolic that combines an unsigned half-word
(USHORT on 32-bit platforms, UINT on 64-bit platforms) and the rest
of the word (typically flags). Only supports extracting the half-word
with 'llop.extract_ushort', and extracting the rest of the word with
'&~0xFFFF' or with a direct masking like '&0x10000' (resp. on 64-bit
platform, with '&~0xFFFFFFFF' or '&0x100000000').
"""<line_sep>__slots__=['lowpart' 'rest']<line_sep>MASK=(1<lshift>HALFSHIFT)-1# 0xFFFF or 0xFFFFFFFF
<def_stmt>annotation self<block_start><import_from_stmt>rpython.annotator model<line_sep><return>model.SomeInteger()<block_end><def_stmt>lltype self<block_start><return>lltype.Signed<block_end><def_stmt>__init__ self lowpart rest<block_start><assert_stmt>(rest&CombinedSymbolic.MASK)<eq>0<line_sep>self.lowpart=lowpart<line_sep>self.rest=rest<block_end><def_stmt>__repr__ self<block_start><return>'<CombinedSymbolic %r|%s>'%(self.lowpart self.rest)<block_end><def_stmt>__nonzero__ self<block_start><return><true><block_end><def_stmt>__and__ self other<block_start><if_stmt>(other&CombinedSymbolic.MASK)<eq>0<block_start><return>self.rest&other<block_end><if_stmt>(other&CombinedSymbolic.MASK)<eq>CombinedSymbolic.MASK<block_start><return>CombinedSymbolic(self.lowpart self.rest&other)<block_end><raise>Exception("other=0x%x"%other)<block_end><def_stmt>__or__ self other<block_start><assert_stmt>(other&CombinedSymbolic.MASK)<eq>0<line_sep><return>CombinedSymbolic(self.lowpart self.rest|other)<block_end><def_stmt>__add__ self other<block_start><assert_stmt>(other&CombinedSymbolic.MASK)<eq>0<line_sep><return>CombinedSymbolic(self.lowpart self.rest+other)<block_end><def_stmt>__sub__ self other<block_start><assert_stmt>(other&CombinedSymbolic.MASK)<eq>0<line_sep><return>CombinedSymbolic(self.lowpart self.rest-other)<block_end><def_stmt>__rshift__ self other<block_start><assert_stmt>other<ge>HALFSHIFT<line_sep><return>self.rest<rshift>other<block_end><def_stmt>__eq__ self other<block_start><if_stmt>(isinstance(other CombinedSymbolic)<and>self.lowpart<is>other.lowpart)<block_start><return>self.rest<eq>other.rest<block_end><else_stmt><block_start><return>NotImplemented<block_end><block_end><def_stmt>__ne__ self other<block_start><if_stmt>(isinstance(other CombinedSymbolic)<and>self.lowpart<is>other.lowpart)<block_start><return>self.rest<ne>other.rest<block_end><else_stmt><block_start><return>NotImplemented<block_end><block_end><block_end> |
# This example code is in the Public Domain (or CC0 licensed, at your option.)
# Unless required by applicable law or agreed to in writing, this
# software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied.
# -*- coding: utf-8 -*-
<import_from_stmt>builtins input<import_stmt>socket<import_stmt>sys<line_sep># ----------- Config ----------
PORT=3333<line_sep>IP_VERSION='IPv4'<line_sep>IPV4='192.168.0.167'<line_sep>IPV6='FE80::32AE:A4FF:FE80:5288'<line_sep># -------------------------------
<if_stmt>IP_VERSION<eq>'IPv4'<block_start>host=IPV4<line_sep>family_addr=socket.AF_INET<block_end><elif_stmt>IP_VERSION<eq>'IPv6'<block_start>host=IPV6<line_sep>family_addr=socket.AF_INET6<block_end><else_stmt><block_start>print('IP_VERSION must be IPv4 or IPv6')<line_sep>sys.exit(1)<block_end><try_stmt><block_start>sock=socket.socket(family_addr socket.SOCK_DGRAM)<block_end><except_stmt>socket.error<as>msg<block_start>print('Failed to create socket')<line_sep>sys.exit()<block_end><while_stmt><true><block_start>msg=input('Enter message to send : ')<try_stmt><block_start>sock.sendto(msg.encode() (host PORT))<line_sep>reply,addr=sock.recvfrom(128)<if_stmt><not>reply<block_start><break><block_end>print('Reply['+addr[0]+':'+str(addr[1])+'] - '+str(reply))<block_end><except_stmt>socket.error<as>msg<block_start>print('Error Code : '+str(msg[0])+' Message: '+msg[1])<line_sep>sys.exit()<block_end><block_end> |
<import_stmt>os<try_stmt><block_start>os.rename('_build/html/_static' '_build/html/static')<line_sep>os.rename('_build/html/_modules' '_build/html/modules')<line_sep>os.rename('_build/html/_sources' '_build/html/sources')<block_end><except_stmt><block_start><pass><block_end>root_dir='_build/html'<for_stmt>directory,subdirectories,files os.walk(root_dir)<block_start><for_stmt>fileName files<block_start><try_stmt><block_start>fileName=os.path.join(directory fileName)<line_sep>file=open(fileName 'r')<line_sep>contents=file.read()<line_sep>file.close()<line_sep>file=open(fileName 'w')<line_sep>replaced_contents=contents.replace('_static' 'static')<line_sep>replaced_contents=replaced_contents.replace('_modules' 'modules')<line_sep>replaced_contents=replaced_contents.replace('_sources' 'sources')<line_sep>file.write(replaced_contents)<block_end><except_stmt><block_start><pass><block_end><block_end><block_end>print("Finished renaming all directories and mentions of directories with underscores")<line_sep> |
<import_stmt>sc2<class_stmt>CompetitiveBot(sc2.BotAI)<block_start><async_keyword><def_stmt>on_start self<block_start>print("Game started")<line_sep># Do things here before the game starts
<block_end><async_keyword><def_stmt>on_step self iteration# Populate this function with whatever your bot should do!
<block_start><pass><block_end><def_stmt>on_end self result<block_start>print("Game ended.")<line_sep># Do things here after the game ends
<block_end><block_end> |
<import_from_future_stmt> annotations<import_from_stmt>typing TYPE_CHECKING<import_stmt>click<import_from_stmt>.output echo ok<import_from_stmt>.common convert_api_errors existing_config_option inject_proxy<import_from_stmt>.core DropboxPath CliException<if_stmt>TYPE_CHECKING<block_start><import_from_stmt>..main Maestral<block_end>@click.command(help="""
Automatically start the sync daemon on login.
A systemd or launchd service will be created to start a sync daemon for the given
configuration on user login.
""" )@click.option("--yes" "-Y" is_flag=<true> default=<false>)@click.option("--no" "-N" is_flag=<true> default=<false>)@existing_config_option<def_stmt>autostart yes:bool no:bool config_name:str<arrow><none><block_start><import_from_stmt>..autostart AutoStart<line_sep>auto_start=AutoStart(config_name)<if_stmt><not>auto_start.implementation<block_start>echo("Autostart is currently not supported for your platform.\n"<concat>"Autostart requires systemd on Linux or launchd on macOS.")<line_sep><return><block_end><if_stmt>yes<or>no<block_start><if_stmt>yes<block_start>auto_start.enable()<line_sep>ok("Enabled start on login.")<block_end><else_stmt><block_start>auto_start.disable()<line_sep>ok("Disabled start on login.")<block_end><block_end><else_stmt><block_start><if_stmt>auto_start.enabled<block_start>echo("Autostart is enabled. Use -N to disable.")<block_end><else_stmt><block_start>echo("Autostart is disabled. Use -Y to enable.")<block_end><block_end><block_end>@click.group(help="View and manage excluded folders.")<def_stmt>excluded <block_start><pass><block_end>@excluded.command(name="list" help="List all excluded files and folders.")@inject_proxy(fallback=<true> existing_config=<true>)<def_stmt>excluded_list m:Maestral<arrow><none><block_start>excluded_items=m.excluded_items<line_sep>excluded_items.sort()<if_stmt>len(excluded_items)<eq>0<block_start>echo("No excluded files or folders.")<block_end><else_stmt><block_start><for_stmt>item excluded_items<block_start>echo(item)<block_end><block_end><block_end>@excluded.command(name="add" help="Add a file or folder to the excluded list and re-sync." 
)@click.argument("dropbox_path" type=DropboxPath())@inject_proxy(fallback=<true> existing_config=<true>)@convert_api_errors<def_stmt>excluded_add m:Maestral dropbox_path:str<arrow><none><block_start><if_stmt>dropbox_path<eq>"/"<block_start><raise>CliException("Cannot exclude the root directory.")<block_end>m.exclude_item(dropbox_path)<line_sep>ok(f"Excluded '{dropbox_path}'.")<block_end>@excluded.command(name="remove" help="""
Remove a file or folder from the excluded list and re-sync.
It is safe to call this method with items which have already been included, they will
not be downloaded again. If the given path lies inside an excluded folder, the parent
folder will be included as well (but no other items inside it).
""" )@click.argument("dropbox_path" type=DropboxPath())@inject_proxy(fallback=<false> existing_config=<true>)@convert_api_errors<def_stmt>excluded_remove m:Maestral dropbox_path:str<arrow><none><block_start><if_stmt>dropbox_path<eq>"/"<block_start><return>echo("The root directory is always included")<block_end>m.include_item(dropbox_path)<line_sep>ok(f"Included '{dropbox_path}'. Now downloading...")<block_end>@click.group(help="Manage desktop notifications.")<def_stmt>notify <block_start><pass><block_end>@notify.command(name="level" help="Get or set the level for desktop notifications." )@click.argument("level_name" required=<false> type=click.Choice(["ERROR" "SYNCISSUE" "FILECHANGE"] case_sensitive=<false>) )@inject_proxy(fallback=<true> existing_config=<true>)<def_stmt>notify_level m:Maestral level_name:str<arrow><none><block_start><import_from_stmt>.. notify<as>_notify<if_stmt>level_name<block_start>m.notification_level=_notify.level_name_to_number(level_name)<line_sep>ok(f"Notification level set to {level_name}.")<block_end><else_stmt><block_start>level_name=_notify.level_number_to_name(m.notification_level)<line_sep>echo(f"Notification level: {level_name}.")<block_end><block_end>@notify.command(name="snooze" help="Snooze desktop notifications of file changes." )@click.argument("minutes" type=click.IntRange(min=0))@inject_proxy(fallback=<true> existing_config=<true>)<def_stmt>notify_snooze m:Maestral minutes:int<arrow><none><block_start>m.notification_snooze=minutes<if_stmt>minutes<g>0<block_start>ok(f"Notifications snoozed for {minutes} min. Set snooze to 0 to reset.")<block_end><else_stmt><block_start>ok("Notifications enabled.")<block_end><block_end> |
"""
Lowering implementation for object mode.
"""<import_from_future_stmt> print_function division absolute_import<import_from_stmt>llvmlite.llvmpy.core Type Constant<import_stmt>llvmlite.llvmpy.core<as>lc<import_stmt>operator<import_from_stmt>. cgutils generators ir types utils<import_from_stmt>.errors ForbiddenConstruct<import_from_stmt>.lowering BaseLower<import_from_stmt>.utils builtins HAS_MATMUL_OPERATOR IS_PY3<line_sep># Issue #475: locals() is unsupported as calling it naively would give
# out wrong results.
_unsupported_builtins=set([locals])<line_sep># Map operators to methods on the PythonAPI class
PYTHON_BINOPMAP={operator.add:("number_add" <false>) operator.sub:("number_subtract" <false>) operator.mul:("number_multiply" <false>) operator.truediv:("number_truedivide" <false>) operator.floordiv:("number_floordivide" <false>) operator.mod:("number_remainder" <false>) operator.pow:("number_power" <false>) operator.lshift:("number_lshift" <false>) operator.rshift:("number_rshift" <false>) operator.and_:("number_and" <false>) operator.or_:("number_or" <false>) operator.xor:("number_xor" <false>) # inplace operators
operator.iadd:("number_add" <true>) operator.isub:("number_subtract" <true>) operator.imul:("number_multiply" <true>) operator.itruediv:("number_truedivide" <true>) operator.ifloordiv:("number_floordivide" <true>) operator.imod:("number_remainder" <true>) operator.ipow:("number_power" <true>) operator.ilshift:("number_lshift" <true>) operator.irshift:("number_rshift" <true>) operator.iand:("number_and" <true>) operator.ior:("number_or" <true>) operator.ixor:("number_xor" <true>) }<if_stmt><not>IS_PY3<block_start>PYTHON_BINOPMAP[operator.div]=("number_divide" <false>)<line_sep>PYTHON_BINOPMAP[operator.idiv]=("number_divide" <true>)<block_end><if_stmt>HAS_MATMUL_OPERATOR<block_start>PYTHON_BINOPMAP[operator.matmul]=("number_matrix_multiply" <false>)<line_sep>PYTHON_BINOPMAP[operator.imatmul]=("number_matrix_multiply" <true>)<block_end>PYTHON_COMPAREOPMAP={operator.eq:'==' operator.ne:'!=' operator.lt:'<' operator.le:'<=' operator.gt:'>' operator.ge:'>=' operator.is_:'is' operator.is_not:'is not' operator.contains:'in'}<class_stmt>PyLower(BaseLower)<block_start>GeneratorLower=generators.PyGeneratorLower<def_stmt>init self# Strings to be frozen into the Environment object
<block_start>self._frozen_strings=set()<line_sep>self._live_vars=set()<block_end><def_stmt>pre_lower self<block_start>super(PyLower self).pre_lower()<line_sep>self.init_pyapi()<line_sep># Pre-computed for later use
<import_from_stmt>.dispatcher OmittedArg<line_sep>self.omitted_typobj=self.pyapi.unserialize(self.pyapi.serialize_object(OmittedArg))<block_end><def_stmt>post_lower self<block_start><pass><block_end><def_stmt>pre_block self block<block_start>self.init_vars(block)<block_end><def_stmt>lower_inst self inst<block_start><if_stmt>isinstance(inst ir.Assign)<block_start>value=self.lower_assign(inst)<line_sep>self.storevar(value inst.target.name)<block_end><elif_stmt>isinstance(inst ir.SetItem)<block_start>target=self.loadvar(inst.target.name)<line_sep>index=self.loadvar(inst.index.name)<line_sep>value=self.loadvar(inst.value.name)<line_sep>ok=self.pyapi.object_setitem(target index value)<line_sep>self.check_int_status(ok)<block_end><elif_stmt>isinstance(inst ir.DelItem)<block_start>target=self.loadvar(inst.target.name)<line_sep>index=self.loadvar(inst.index.name)<line_sep>ok=self.pyapi.object_delitem(target index)<line_sep>self.check_int_status(ok)<block_end><elif_stmt>isinstance(inst ir.SetAttr)<block_start>target=self.loadvar(inst.target.name)<line_sep>value=self.loadvar(inst.value.name)<line_sep>ok=self.pyapi.object_setattr(target self._freeze_string(inst.attr) value)<line_sep>self.check_int_status(ok)<block_end><elif_stmt>isinstance(inst ir.DelAttr)<block_start>target=self.loadvar(inst.target.name)<line_sep>ok=self.pyapi.object_delattr(target self._freeze_string(inst.attr))<line_sep>self.check_int_status(ok)<block_end><elif_stmt>isinstance(inst ir.StoreMap)<block_start>dct=self.loadvar(inst.dct.name)<line_sep>key=self.loadvar(inst.key.name)<line_sep>value=self.loadvar(inst.value.name)<line_sep>ok=self.pyapi.dict_setitem(dct key value)<line_sep>self.check_int_status(ok)<block_end><elif_stmt>isinstance(inst ir.Return)<block_start>retval=self.loadvar(inst.value.name)<if_stmt>self.generator_info# StopIteration
# We own a reference to the "return value", but we
# don't return it.
<block_start>self.pyapi.decref(retval)<line_sep>self.genlower.return_from_generator(self)<line_sep><return><block_end># No need to incref() as the reference is already owned.
self.call_conv.return_value(self.builder retval)<block_end><elif_stmt>isinstance(inst ir.Branch)<block_start>cond=self.loadvar(inst.cond.name)<if_stmt>cond.type<eq>Type.int(1)<block_start>istrue=cond<block_end><else_stmt><block_start>istrue=self.pyapi.object_istrue(cond)<block_end>zero=lc.Constant.null(istrue.type)<line_sep>pred=self.builder.icmp(lc.ICMP_NE istrue zero)<line_sep>tr=self.blkmap[inst.truebr]<line_sep>fl=self.blkmap[inst.falsebr]<line_sep>self.builder.cbranch(pred tr fl)<block_end><elif_stmt>isinstance(inst ir.Jump)<block_start>target=self.blkmap[inst.target]<line_sep>self.builder.branch(target)<block_end><elif_stmt>isinstance(inst ir.Del)<block_start>self.delvar(inst.value)<block_end><elif_stmt>isinstance(inst ir.Raise)<block_start><if_stmt>inst.exception<is><not><none><block_start>exc=self.loadvar(inst.exception.name)<line_sep># A reference will be stolen by raise_object() and another
# by return_exception_raised().
self.incref(exc)<block_end><else_stmt><block_start>exc=<none><block_end>self.pyapi.raise_object(exc)<line_sep>self.return_exception_raised()<block_end><else_stmt><block_start><raise>NotImplementedError(type(inst) inst)<block_end><block_end><def_stmt>lower_assign self inst<block_start>"""
The returned object must have a new reference
"""<line_sep>value=inst.value<if_stmt>isinstance(value (ir.Const ir.FreeVar))<block_start><return>self.lower_const(value.value)<block_end><elif_stmt>isinstance(value ir.Var)<block_start>val=self.loadvar(value.name)<line_sep>self.incref(val)<line_sep><return>val<block_end><elif_stmt>isinstance(value ir.Expr)<block_start><return>self.lower_expr(value)<block_end><elif_stmt>isinstance(value ir.Global)<block_start><return>self.lower_global(value.name value.value)<block_end><elif_stmt>isinstance(value ir.Yield)<block_start><return>self.lower_yield(value)<block_end><elif_stmt>isinstance(value ir.Arg)<block_start>obj=self.fnargs[value.index]<line_sep># When an argument is omitted, the dispatcher hands it as
# _OmittedArg(<default value>)
typobj=self.pyapi.get_type(obj)<line_sep>slot=cgutils.alloca_once_value(self.builder obj)<line_sep>is_omitted=self.builder.icmp_unsigned('==' typobj self.omitted_typobj)<with_stmt>self.builder.if_else(is_omitted likely=<false>)<as>(omitted present)<block_start><with_stmt>present<block_start>self.incref(obj)<line_sep>self.builder.store(obj slot)<block_end><with_stmt>omitted# The argument is omitted => get the default value
<block_start>obj=self.pyapi.object_getattr_string(obj 'value')<line_sep>self.builder.store(obj slot)<block_end><block_end><return>self.builder.load(slot)<block_end><else_stmt><block_start><raise>NotImplementedError(type(value) value)<block_end><block_end><def_stmt>lower_yield self inst<block_start>yp=self.generator_info.yield_points[inst.index]<assert_stmt>yp.inst<is>inst<line_sep>self.genlower.init_generator_state(self)<line_sep># Save live vars in state
# We also need to save live vars that are del'ed afterwards.
y=generators.LowerYield(self yp yp.live_vars|yp.weak_live_vars)<line_sep>y.lower_yield_suspend()<line_sep># Yield to caller
val=self.loadvar(inst.value.name)<line_sep># Let caller own the reference
self.pyapi.incref(val)<line_sep>self.call_conv.return_value(self.builder val)<line_sep># Resumption point
y.lower_yield_resume()<line_sep># None is returned by the yield expression
<return>self.pyapi.make_none()<block_end><def_stmt>lower_binop self expr op inplace=<false><block_start>lhs=self.loadvar(expr.lhs.name)<line_sep>rhs=self.loadvar(expr.rhs.name)<assert_stmt><not>isinstance(op str)<if_stmt>op<in>PYTHON_BINOPMAP<block_start>fname,inplace=PYTHON_BINOPMAP[op]<line_sep>fn=getattr(self.pyapi fname)<line_sep>res=fn(lhs rhs inplace=inplace)<block_end><else_stmt># Assumed to be rich comparison
<block_start>fn=PYTHON_COMPAREOPMAP.get(expr.fn expr.fn)<if_stmt>fn<eq>'in'# 'in' and operator.contains have args reversed
<block_start>lhs,rhs=rhs lhs<block_end>res=self.pyapi.object_richcompare(lhs rhs fn)<block_end>self.check_error(res)<line_sep><return>res<block_end><def_stmt>lower_expr self expr<block_start><if_stmt>expr.op<eq>'binop'<block_start><return>self.lower_binop(expr expr.fn inplace=<false>)<block_end><elif_stmt>expr.op<eq>'inplace_binop'<block_start><return>self.lower_binop(expr expr.fn inplace=<true>)<block_end><elif_stmt>expr.op<eq>'unary'<block_start>value=self.loadvar(expr.value.name)<if_stmt>expr.fn<eq>operator.neg<block_start>res=self.pyapi.number_negative(value)<block_end><elif_stmt>expr.fn<eq>operator.pos<block_start>res=self.pyapi.number_positive(value)<block_end><elif_stmt>expr.fn<eq>operator.not_<block_start>res=self.pyapi.object_not(value)<line_sep>self.check_int_status(res)<line_sep>longval=self.builder.zext(res self.pyapi.long)<line_sep>res=self.pyapi.bool_from_long(longval)<block_end><elif_stmt>expr.fn<eq>operator.invert<block_start>res=self.pyapi.number_invert(value)<block_end><else_stmt><block_start><raise>NotImplementedError(expr)<block_end>self.check_error(res)<line_sep><return>res<block_end><elif_stmt>expr.op<eq>'call'<block_start>argvals=[self.loadvar(a.name)<for>a expr.args]<line_sep>fn=self.loadvar(expr.func.name)<line_sep>args=self.pyapi.tuple_pack(argvals)<if_stmt>expr.vararg# Expand *args
<block_start>new_args=self.pyapi.number_add(args self.loadvar(expr.vararg.name))<line_sep>self.decref(args)<line_sep>args=new_args<block_end><if_stmt><not>expr.kws# No named arguments
<block_start>ret=self.pyapi.call(fn args <none>)<block_end><else_stmt># Named arguments
<block_start>keyvalues=[(k self.loadvar(v.name))<for>k,v expr.kws]<line_sep>kws=self.pyapi.dict_pack(keyvalues)<line_sep>ret=self.pyapi.call(fn args kws)<line_sep>self.decref(kws)<block_end>self.decref(args)<line_sep>self.check_error(ret)<line_sep><return>ret<block_end><elif_stmt>expr.op<eq>'getattr'<block_start>obj=self.loadvar(expr.value.name)<line_sep>res=self.pyapi.object_getattr(obj self._freeze_string(expr.attr))<line_sep>self.check_error(res)<line_sep><return>res<block_end><elif_stmt>expr.op<eq>'build_tuple'<block_start>items=[self.loadvar(it.name)<for>it expr.items]<line_sep>res=self.pyapi.tuple_pack(items)<line_sep>self.check_error(res)<line_sep><return>res<block_end><elif_stmt>expr.op<eq>'build_list'<block_start>items=[self.loadvar(it.name)<for>it expr.items]<line_sep>res=self.pyapi.list_pack(items)<line_sep>self.check_error(res)<line_sep><return>res<block_end><elif_stmt>expr.op<eq>'build_map'<block_start>res=self.pyapi.dict_new(expr.size)<line_sep>self.check_error(res)<for_stmt>k,v expr.items<block_start>key=self.loadvar(k.name)<line_sep>value=self.loadvar(v.name)<line_sep>ok=self.pyapi.dict_setitem(res key value)<line_sep>self.check_int_status(ok)<block_end><return>res<block_end><elif_stmt>expr.op<eq>'build_set'<block_start>items=[self.loadvar(it.name)<for>it expr.items]<line_sep>res=self.pyapi.set_new()<line_sep>self.check_error(res)<for_stmt>it items<block_start>ok=self.pyapi.set_add(res it)<line_sep>self.check_int_status(ok)<block_end><return>res<block_end><elif_stmt>expr.op<eq>'getiter'<block_start>obj=self.loadvar(expr.value.name)<line_sep>res=self.pyapi.object_getiter(obj)<line_sep>self.check_error(res)<line_sep><return>res<block_end><elif_stmt>expr.op<eq>'iternext'<block_start>iterobj=self.loadvar(expr.value.name)<line_sep>item=self.pyapi.iter_next(iterobj)<line_sep>is_valid=cgutils.is_not_null(self.builder item)<line_sep>pair=self.pyapi.tuple_new(2)<with_stmt>self.builder.if_else(is_valid)<as>(then 
otherwise)<block_start><with_stmt>then<block_start>self.pyapi.tuple_setitem(pair 0 item)<block_end><with_stmt>otherwise<block_start>self.check_occurred()<line_sep># Make the tuple valid by inserting None as dummy
# iteration "result" (it will be ignored).
self.pyapi.tuple_setitem(pair 0 self.pyapi.make_none())<block_end><block_end>self.pyapi.tuple_setitem(pair 1 self.pyapi.bool_from_bool(is_valid))<line_sep><return>pair<block_end><elif_stmt>expr.op<eq>'pair_first'<block_start>pair=self.loadvar(expr.value.name)<line_sep>first=self.pyapi.tuple_getitem(pair 0)<line_sep>self.incref(first)<line_sep><return>first<block_end><elif_stmt>expr.op<eq>'pair_second'<block_start>pair=self.loadvar(expr.value.name)<line_sep>second=self.pyapi.tuple_getitem(pair 1)<line_sep>self.incref(second)<line_sep><return>second<block_end><elif_stmt>expr.op<eq>'exhaust_iter'<block_start>iterobj=self.loadvar(expr.value.name)<line_sep>tup=self.pyapi.sequence_tuple(iterobj)<line_sep>self.check_error(tup)<line_sep># Check tuple size is as expected
tup_size=self.pyapi.tuple_size(tup)<line_sep>expected_size=self.context.get_constant(types.intp expr.count)<line_sep>has_wrong_size=self.builder.icmp(lc.ICMP_NE tup_size expected_size)<with_stmt>cgutils.if_unlikely(self.builder has_wrong_size)<block_start>self.return_exception(ValueError)<block_end><return>tup<block_end><elif_stmt>expr.op<eq>'getitem'<block_start>value=self.loadvar(expr.value.name)<line_sep>index=self.loadvar(expr.index.name)<line_sep>res=self.pyapi.object_getitem(value index)<line_sep>self.check_error(res)<line_sep><return>res<block_end><elif_stmt>expr.op<eq>'static_getitem'<block_start>value=self.loadvar(expr.value.name)<line_sep>index=self.context.get_constant(types.intp expr.index)<line_sep>indexobj=self.pyapi.long_from_ssize_t(index)<line_sep>self.check_error(indexobj)<line_sep>res=self.pyapi.object_getitem(value indexobj)<line_sep>self.decref(indexobj)<line_sep>self.check_error(res)<line_sep><return>res<block_end><elif_stmt>expr.op<eq>'getslice'<block_start>target=self.loadvar(expr.target.name)<line_sep>start=self.loadvar(expr.start.name)<line_sep>stop=self.loadvar(expr.stop.name)<line_sep>slicefn=self.get_builtin_obj("slice")<line_sep>sliceobj=self.pyapi.call_function_objargs(slicefn (start stop))<line_sep>self.decref(slicefn)<line_sep>self.check_error(sliceobj)<line_sep>res=self.pyapi.object_getitem(target sliceobj)<line_sep>self.check_error(res)<line_sep><return>res<block_end><elif_stmt>expr.op<eq>'cast'<block_start>val=self.loadvar(expr.value.name)<line_sep>self.incref(val)<line_sep><return>val<block_end><else_stmt><block_start><raise>NotImplementedError(expr)<block_end><block_end><def_stmt>lower_const self const# All constants are frozen inside the environment
<block_start>index=self.env_manager.add_const(const)<line_sep>ret=self.env_manager.read_const(index)<line_sep>self.check_error(ret)<line_sep>self.incref(ret)<line_sep><return>ret<block_end><def_stmt>lower_global self name value<block_start>"""
1) Check global scope dictionary.
2) Check __builtins__.
2a) is it a dictionary (for non __main__ module)
2b) is it a module (for __main__ module)
"""<line_sep>moddict=self.get_module_dict()<line_sep>obj=self.pyapi.dict_getitem(moddict self._freeze_string(name))<line_sep>self.incref(obj)# obj is borrowed
<try_stmt><block_start><if_stmt>value<in>_unsupported_builtins<block_start><raise>ForbiddenConstruct("builtins %s() is not supported"%name loc=self.loc)<block_end><block_end><except_stmt>TypeError# `value` is unhashable, ignore
<block_start><pass><block_end><if_stmt>hasattr(builtins name)<block_start>obj_is_null=self.is_null(obj)<line_sep>bbelse=self.builder.basic_block<with_stmt>self.builder.if_then(obj_is_null)<block_start>mod=self.pyapi.dict_getitem(moddict self._freeze_string("__builtins__"))<line_sep>builtin=self.builtin_lookup(mod name)<line_sep>bbif=self.builder.basic_block<block_end>retval=self.builder.phi(self.pyapi.pyobj)<line_sep>retval.add_incoming(obj bbelse)<line_sep>retval.add_incoming(builtin bbif)<block_end><else_stmt><block_start>retval=obj<with_stmt>cgutils.if_unlikely(self.builder self.is_null(retval))<block_start>self.pyapi.raise_missing_global_error(name)<line_sep>self.return_exception_raised()<block_end><block_end><return>retval<block_end># -------------------------------------------------------------------------
<def_stmt>get_module_dict self<block_start><return>self.env_body.globals<block_end><def_stmt>get_builtin_obj self name# XXX The builtins dict could be bound into the environment
<block_start>moddict=self.get_module_dict()<line_sep>mod=self.pyapi.dict_getitem(moddict self._freeze_string("__builtins__"))<line_sep><return>self.builtin_lookup(mod name)<block_end><def_stmt>builtin_lookup self mod name<block_start>"""
Args
----
mod:
The __builtins__ dictionary or module, as looked up in
a module's globals.
name: str
The object to lookup
"""<line_sep>fromdict=self.pyapi.dict_getitem(mod self._freeze_string(name))<line_sep>self.incref(fromdict)# fromdict is borrowed
bbifdict=self.builder.basic_block<with_stmt>cgutils.if_unlikely(self.builder self.is_null(fromdict))# This happen if we are using the __main__ module
<block_start>frommod=self.pyapi.object_getattr(mod self._freeze_string(name))<with_stmt>cgutils.if_unlikely(self.builder self.is_null(frommod))<block_start>self.pyapi.raise_missing_global_error(name)<line_sep>self.return_exception_raised()<block_end>bbifmod=self.builder.basic_block<block_end>builtin=self.builder.phi(self.pyapi.pyobj)<line_sep>builtin.add_incoming(fromdict bbifdict)<line_sep>builtin.add_incoming(frommod bbifmod)<line_sep><return>builtin<block_end><def_stmt>check_occurred self<block_start>"""
Return if an exception occurred.
"""<line_sep>err_occurred=cgutils.is_not_null(self.builder self.pyapi.err_occurred())<with_stmt>cgutils.if_unlikely(self.builder err_occurred)<block_start>self.return_exception_raised()<block_end><block_end><def_stmt>check_error self obj<block_start>"""
Return if *obj* is NULL.
"""<with_stmt>cgutils.if_unlikely(self.builder self.is_null(obj))<block_start>self.return_exception_raised()<block_end><return>obj<block_end><def_stmt>check_int_status self num ok_value=0<block_start>"""
Raise an exception if *num* is smaller than *ok_value*.
"""<line_sep>ok=lc.Constant.int(num.type ok_value)<line_sep>pred=self.builder.icmp(lc.ICMP_SLT num ok)<with_stmt>cgutils.if_unlikely(self.builder pred)<block_start>self.return_exception_raised()<block_end><block_end><def_stmt>is_null self obj<block_start><return>cgutils.is_null(self.builder obj)<block_end><def_stmt>return_exception_raised self<block_start>"""
Return with the currently raised exception.
"""<line_sep>self.cleanup_vars()<line_sep>self.call_conv.return_exc(self.builder)<block_end><def_stmt>init_vars self block<block_start>"""
Initialize live variables for *block*.
"""<line_sep>self._live_vars=set(self.func_ir.get_block_entry_vars(block))<block_end><def_stmt>_getvar self name ltype=<none><block_start><if_stmt>name<not><in>self.varmap<block_start>self.varmap[name]=self.alloca(name ltype=ltype)<block_end><return>self.varmap[name]<block_end><def_stmt>loadvar self name<block_start>"""
Load the llvm value of the variable named *name*.
"""<line_sep># If this raises then the live variables analysis is wrong
<assert_stmt>name<in>self._live_vars name<line_sep>ptr=self.varmap[name]<line_sep>val=self.builder.load(ptr)<with_stmt>cgutils.if_unlikely(self.builder self.is_null(val))<block_start>self.pyapi.raise_missing_name_error(name)<line_sep>self.return_exception_raised()<block_end><return>val<block_end><def_stmt>delvar self name<block_start>"""
Delete the variable slot with the given name. This will decref
the corresponding Python object.
"""<line_sep># If this raises then the live variables analysis is wrong
self._live_vars.remove(name)<line_sep>ptr=self._getvar(name)# initializes `name` if not already
self.decref(self.builder.load(ptr))<line_sep># This is a safety guard against double decref's, but really
# the IR should be correct and have only one Del per variable
# and code path.
self.builder.store(cgutils.get_null_value(ptr.type.pointee) ptr)<block_end><def_stmt>storevar self value name clobber=<false><block_start>"""
Stores a llvm value and allocate stack slot if necessary.
The llvm value can be of arbitrary type.
"""<line_sep>is_redefine=name<in>self._live_vars<and><not>clobber<line_sep>ptr=self._getvar(name ltype=value.type)<if_stmt>is_redefine<block_start>old=self.builder.load(ptr)<block_end><else_stmt><block_start>self._live_vars.add(name)<block_end><assert_stmt>value.type<eq>ptr.type.pointee (str(value.type) str(ptr.type.pointee))<line_sep>self.builder.store(value ptr)<line_sep># Safe to call decref even on non python object
<if_stmt>is_redefine<block_start>self.decref(old)<block_end><block_end><def_stmt>cleanup_vars self<block_start>"""
Cleanup live variables.
"""<for_stmt>name self._live_vars<block_start>ptr=self._getvar(name)<line_sep>self.decref(self.builder.load(ptr))<block_end><block_end><def_stmt>alloca self name ltype=<none><block_start>"""
Allocate a stack slot and initialize it to NULL.
The default is to allocate a pyobject pointer.
Use ``ltype`` to override.
"""<if_stmt>ltype<is><none><block_start>ltype=self.context.get_value_type(types.pyobject)<block_end><with_stmt>self.builder.goto_block(self.entry_block)<block_start>ptr=self.builder.alloca(ltype name=name)<line_sep>self.builder.store(cgutils.get_null_value(ltype) ptr)<block_end><return>ptr<block_end><def_stmt>incref self value<block_start>self.pyapi.incref(value)<block_end><def_stmt>decref self value<block_start>"""
This is allow to be called on non pyobject pointer, in which case
no code is inserted.
"""<line_sep>lpyobj=self.context.get_value_type(types.pyobject)<if_stmt>value.type<eq>lpyobj<block_start>self.pyapi.decref(value)<block_end><block_end><def_stmt>_freeze_string self string<block_start>"""
Freeze a Python string object into the code.
"""<line_sep><return>self.lower_const(string)<block_end><block_end> |
<import_stmt>collections<import_stmt>logging<import_stmt>os<import_stmt>tempfile<import_from_stmt>pickle loads<import_stmt>leveldb<line_sep>logger=logging.getLogger('mapreduce')<def_stmt>group_by_key iterator<block_start>'''Group identical keys together.
Given a sorted iterator of (key, value) pairs, returns an iterator of
(key1, values), (key2, values).
'''<line_sep>last_key=<none><line_sep>values=[]<for_stmt>key,value iterator<block_start>value=loads(value)<line_sep>key=key.decode()<line_sep>user_key,_=key.rsplit('.' 1)<if_stmt>user_key<ne>last_key<block_start><if_stmt>last_key<is><not><none><block_start><yield>last_key values<block_end>last_key=user_key<line_sep>values=[value]<block_end><else_stmt><block_start>values.append(value)<block_end><block_end><if_stmt>last_key<is><not><none><block_start><yield>last_key values<block_end><block_end><class_stmt>Reducer(object)<block_start><def_stmt>initialize self input_queue tmp_prefix output_class output_prefix shard_idx num_shards<block_start>self.tmp_prefix=tmp_prefix<line_sep>self.output_prefix=output_prefix<line_sep>self.input_queue=input_queue<line_sep>self.output_class=output_class<line_sep>self.shard_idx=shard_idx<line_sep>self.num_shards=num_shards<block_end><def_stmt>reduce_shard self input_db output_db<block_start><for_stmt>idx,(key values) enumerate(group_by_key(input_db.RangeIter()))# if idx % 1000 == 0:
# logger.info('Reducing records=%d key=%s shard=%d', idx, key, self.shard_idx)
<block_start>self.reduce(key values output_db)<block_end><block_end><def_stmt>shuffle self<block_start>os.system('mkdir -p "%s"'%self.tmp_prefix)<line_sep>shuffle_dir=tempfile.mkdtemp(prefix='shard-%05d-of-%05d'%(self.shard_idx self.num_shards) dir=self.tmp_prefix)<line_sep>shuffle_db=leveldb.LevelDB(shuffle_dir)<line_sep>idx=0<while_stmt>1<block_start>next_entry=self.input_queue.get()<if_stmt>next_entry<is><none><block_start><break><block_end>key,value_str=next_entry<line_sep>shuffle_db.Put((key+('.%s'%idx)).encode() value_str)<line_sep>idx<augadd>1<line_sep># if idx % 1000 == 0:
# logger.info('Shuffling records=%d key=%s shard=%d', idx, key, self.shard_idx)
<block_end>output_db=self.output_class.create_writer(self.output_prefix self.shard_idx self.num_shards)<line_sep># logger.debug('Reducer: %s', output_db)
self.reduce_shard(shuffle_db output_db)<line_sep>output_db.flush()<del_stmt>output_db<del_stmt>shuffle_db<line_sep>os.system('rm -rf "%s"'%shuffle_dir)<block_end><def_stmt>reduce self key values output<block_start><raise>NotImplementedError<block_end><def_stmt>reduce_finished self<block_start>'''Called after all values have been reduced.
The result of this call is returned to the caller of `mapreduce`.
'''<line_sep><pass><block_end><block_end><class_stmt>IdentityReducer(Reducer)<block_start><def_stmt>reduce self key values output<block_start><for_stmt>value values<block_start>output.put(key value)<block_end><block_end><block_end><class_stmt>SumReducer(Reducer)<block_start><def_stmt>reduce self key values output<block_start>output.put(key sum([float(v)<for>v values]))<block_end><block_end><class_stmt>ListReducer(Reducer)<block_start><def_stmt>reduce self key values output<block_start>output.put(key list(values))<block_end><block_end><class_stmt>NullReducer(Reducer)<block_start><def_stmt>reduce self key values output<block_start><return><block_end><block_end><def_stmt>pivot_values value_list<block_start>''' Takes a list of (name, value) tuples, and `pivots` them, returning
a dictionary from name -> [values].
This is frequently used when joining a number of inputs together,
where each input is tagged with a table name.
'''<line_sep>intermediate=collections.defaultdict(list)<for_stmt>row value_list<block_start>table_name,val=row<line_sep>intermediate[table_name].append(val)<block_end><return>intermediate<block_end><class_stmt>PivotReducer(Reducer)<block_start><def_stmt>reduce self key values output<block_start>val=pivot_values(values)<line_sep>output.put(key val)<block_end><block_end> |
# stdlib
<import_from_stmt>typing Any<line_sep># third party
<import_from_stmt>sqlalchemy Column<import_from_stmt>sqlalchemy LargeBinary<import_from_stmt>sqlalchemy String<line_sep># relative
<import_from_stmt>. Base<import_from_stmt>..... deserialize<import_from_stmt>..... serialize<class_stmt>Entity(Base)<block_start>__tablename__="entity"<line_sep>name=Column(String(255) primary_key=<true>)<line_sep>entity_bin=Column(LargeBinary(3072))<line_sep>@property<def_stmt>obj self<arrow>Any<block_start><return>deserialize(self.entity_bin from_bytes=<true>)<block_end># TODO: techdebt fix
@obj.setter<def_stmt>obj self value:Any<arrow><none><block_start>self.entity_bin=serialize(value to_bytes=<true>)<block_end><block_end># TODO: techdebt fix
|
<import_stmt>sys<import_stmt>math<import_stmt>numpy<as>np<import_stmt>torch<import_stmt>torch.nn<as>nn<import_stmt>torch.nn.functional<as>F<import_from_stmt>fairseq utils<def_stmt>progressive_max x<block_start>T=x.size(1)<line_sep>x=F.pad(x (T-1 0) 'constant' -1)<line_sep>x=F.max_pool1d(x.unsqueeze(1).float() # shape into B, C, T
T # kernel size
1 # stride
0 # padding
1 # dilation
<false> # ceil_mode
<false> # return indices
)<line_sep><return>x.squeeze(1)<block_end># B, Tt
<def_stmt>logsumexp a b<block_start>m=torch.max(a b)<line_sep><return>torch.log(torch.exp(a-m)+torch.exp(b-m))<block_end><def_stmt>Linear in_features out_features bias=<true><block_start>m=nn.Linear(in_features out_features bias)<line_sep>nn.init.xavier_uniform_(m.weight)<if_stmt>bias<block_start>nn.init.constant_(m.bias 0.)<block_end><return>m<block_end><class_stmt>LLControls(nn.Module)<block_start>"""
LL based controller
"""<def_stmt>__init__ self args controller_dim<block_start>nn.Module.__init__(self)<line_sep>self.gate=nn.Linear(controller_dim 1 bias=<true>)<line_sep>nn.init.normal_(self.gate.weight 0 1/controller_dim)<line_sep>nn.init.constant_(self.gate.bias 0)<line_sep>self.penalty=args.oracle_penalty<line_sep>self.write_right=args.write_right<block_end><def_stmt>get_positions_proba self rw_logits<block_start>"""
Inputs:
rw_logits [log(rho), log(1-rho)] : (Tt, B, Ts, 2)
Returns the probabilities of being at position (t,j) (Tt, B, Ts)
"""<line_sep>Tt,B,Ts,_=rw_logits.size()<line_sep>Mr1=rw_logits[0:1 : :-1 0].exp()<line_sep>Mc1=rw_logits[: : 0:1 1].exp()<line_sep>M=rw_logits[1: : :-1 0].exp()+rw_logits[:-1 : 1: 1].exp()<line_sep>M=torch.cat((Mr1 M) dim=0)<line_sep>M=torch.cat((Mc1 M) dim=-1)<line_sep><return>M<block_end><def_stmt>predict_read_write self x<block_start>""" Returns log(rho), log(1-rho) in B, Tt, Ts, 2 """<line_sep>x=self.gate(x)<line_sep>s=F.logsigmoid(x)<line_sep><return>torch.cat((s s-x) dim=-1).float()<block_end><def_stmt>forward self observations scores<block_start>"""
Inputs:
observations : Input for the controller: B, Tt, Ts, C
Scores : log p(y_t | x<j) : B, Tt, Ts
"""<line_sep>controls=self.predict_read_write(observations)# B,Tt,Ts,2
B,Tt,Ts=scores.size()<with_stmt>torch.no_grad()<block_start><if_stmt>self.penalty# Penalize large contexts:
<block_start>indices=torch.arange(Ts dtype=scores.dtype device=scores.device)/Ts<line_sep>scores=scores-self.penalty<times>indices.unsqueeze(0).unsqueeze(0)<block_end>best_context=scores.max(-1)[1]# B, Tt
best_context=progressive_max(best_context).type_as(best_context)<line_sep>AP=best_context.float().mean(dim=1)/Ts<line_sep>print('AP:' ' '.join(map(<lambda>x:'{:.2f}'.format(x) AP.tolist())))<line_sep>gamma=torch.zeros_like(scores).scatter_(-1 best_context.unsqueeze(-1) 1.0)# B, Tt, Ts
<if_stmt>self.write_right<block_start>gamma=gamma.cumsum(dim=-1)<block_end><block_end># Write beyond the ideal context
<if_stmt>self.write_right<block_start>write=gamma[: 1:]# B, Tt-1, Ts
<block_end><else_stmt><block_start>write=gamma[: 1:].cumsum(dim=-1)# B, Tt-1, Ts
<block_end>read=1-write<line_sep><return>controls[: :-1] gamma read write<block_end><block_end> |
# -*- coding: utf-8 -*-
<import_stmt>unittest.mock<import_from_stmt>girder constants logger<line_sep># Mock the logging methods so that we don't actually write logs to disk,
# and so tests can potentially inspect calls to logging methods.
print(constants.TerminalColor.warning('Mocking Girder log methods.'))<for_stmt>handler logger.handlers<block_start>handler.emit=unittest.mock.MagicMock()<block_end> |
__author__='lada'<import_stmt>os.path<import_from_stmt>blocks.extensions SimpleExtension<import_from_stmt>blocks.serialization secure_dump<line_sep>SAVED_TO="saved_to"<class_stmt>SaveTheBest(SimpleExtension)<block_start>"""Check if a log quantity has the minimum/maximum value so far
and if that is true then save a pickled version of the main loop
to the disk.
The pickled main loop can be later reloaded and training can be
resumed.
Makes a `SAVED_TO` record in the log with the serialization destination
in the case of success and ``None`` in the case of failure. The
value of the record is a tuple of paths to which saving was done
(there can be more than one if the user added a condition
with an argument, see :meth:`do` docs).
Parameters
----------
record_name : str
The name of the record to track.
choose_best : callable, optional
A function that takes the current value and the best so far
and return the best of two. By default :func:`min`, which
corresponds to tracking the minimum value.
path : str
The destination path for pickling.
save_separately : list of str, optional
The list of the main loop's attributes to be pickled separately
to their own files. The paths will be formed by adding the
attribute name preceded by an underscore before the before the
`path` extension. The whole main loop will still be pickled
as usual.
use_cpickle : bool
See docs of :func:`~blocks.serialization.dump`.
Attributes
----------
best_name : str
The name of the status record to keep the best value so far.
Notes
-----
Using pickling for saving the whole main loop object comes with
certain limitations:
* Theano computation graphs build in the GPU-mode cannot be used in
the usual mode (and vice-versa). Therefore using this extension
binds you to using only one kind of device.
"""<def_stmt>__init__ self record_name path choose_best=min save_separately=<none> use_cpickle=<false> **kwargs<block_start>self.record_name=record_name<line_sep>self.best_name="bestsave_"+record_name<line_sep>self.choose_best=choose_best<if_stmt><not>save_separately<block_start>save_separately=[]<block_end>self.path=path<line_sep>self.save_separately=save_separately<line_sep>self.use_cpickle=use_cpickle<line_sep># kwargs.setdefault("after_training", True)
kwargs.setdefault("after_epoch" <true>)<line_sep>super(SaveTheBest self).__init__(**kwargs)<block_end><def_stmt>save_separately_filenames self path<block_start>""" Compute paths for separately saved attributes.
Parameters
----------
path : str
Path to which the main savethebest file is being saved.
Returns
-------
paths : dict
A dictionary mapping attribute names to derived paths
based on the `path` passed in as an argument.
"""<line_sep>root,ext=os.path.splitext(path)<line_sep><return>{attribute:root+"_"+attribute+ext<for>attribute self.save_separately}<block_end><def_stmt>do self which_callback *args<block_start>current_value=self.main_loop.log.current_row.get(self.record_name)<if_stmt>current_value<is><none><block_start><return><block_end>best_value=self.main_loop.status.get(self.best_name <none>)<if_stmt>(best_value<is><none><or>(current_value<ne>best_value<and>self.choose_best(current_value best_value)<eq>current_value))<block_start>self.main_loop.status[self.best_name]=current_value<line_sep># save main_loop
_,from_user=self.parse_args(which_callback args)<try_stmt><block_start>path=self.path<if_stmt>from_user<block_start>path,=from_user<block_end>secure_dump(self.main_loop path use_cpickle=self.use_cpickle)<line_sep>filenames=self.save_separately_filenames(path)<for_stmt>attribute self.save_separately<block_start>secure_dump(getattr(self.main_loop attribute) filenames[attribute] use_cpickle=self.use_cpickle)<block_end><block_end><except_stmt>Exception<block_start>path=<none><line_sep><raise><block_end><finally_stmt><block_start>already_saved_to=self.main_loop.log.current_row.get(SAVED_TO ())<line_sep>self.main_loop.log.current_row[SAVED_TO]=(already_saved_to+(path ))<block_end><block_end><block_end><block_end><import_stmt>logging<line_sep>logger=logging.getLogger(__name__)<line_sep> |
<import_stmt>numpy<as>np<import_stmt>warnings<import_from_stmt>scipy.ndimage.interpolation zoom<import_stmt>torch<import_stmt>math<import_stmt>copy<import_stmt>cv2<import_from_stmt>skimage measure<import_stmt>pandas<as>pd<def_stmt>resample imgs spacing new_spacing order=2<block_start><if_stmt>len(imgs.shape)<eq>3<block_start>new_shape=np.round(imgs.shape<times>spacing/new_spacing)<line_sep>true_spacing=spacing<times>imgs.shape/new_shape<line_sep>resize_factor=new_shape/imgs.shape<with_stmt>warnings.catch_warnings()<block_start>warnings.simplefilter("ignore")<line_sep>imgs=zoom(imgs resize_factor mode='nearest' order=order)<block_end><return>imgs true_spacing resize_factor<block_end><elif_stmt>len(imgs.shape)<eq>4<block_start>n=imgs.shape[-1]<line_sep>newimg=[]<for_stmt>i range(n)<block_start>slice=imgs[: : : i]<line_sep>newslice,true_spacing=resample(slice spacing new_spacing)<line_sep>newimg.append(newslice)<block_end>newimg=np.transpose(np.array(newimg) [1 2 3 0])<line_sep><return>newimg true_spacing<block_end><else_stmt><block_start><raise>ValueError('wrong shape')<block_end><block_end><def_stmt>get_start_ind center_points<block_start>curr_x=center_points[0][0]<line_sep>curr_y=center_points[0][1]<line_sep>curr_z=center_points[0][2]<line_sep>curr_r=3<line_sep>start_ind=-1<line_sep>ellipsis=0.1<for_stmt>i range(1 len(center_points))<block_start>v1=np.array([curr_x curr_y curr_z])<line_sep>v2=np.array([center_points[i][0] center_points[i][1] center_points[i][2]])<line_sep>dist=np.linalg.norm(v1-v2)<if_stmt>(dist-curr_r)<le>ellipsis<and>dist<ge>curr_r<block_start>start_ind=i<line_sep><break><block_end><block_end><return>start_ind<block_end><def_stmt>get_spacing_res2 x spacing_x spacing_new<block_start><return>int(round((x/spacing_x)<times>spacing_new))<block_end><def_stmt>get_world_cood x spacing_x spacing_new<block_start><return>(x/spacing_new)<times>spacing_x<block_end><def_stmt>data_preprocess 
img<block_start>mean_intensity=np.mean(img)<line_sep>std_intensity=np.std(img)<line_sep>upper_bound=np.percentile(img 99.5)<line_sep>lower_bound=np.percentile(img 00.5)<line_sep>img=np.clip(img lower_bound upper_bound)<line_sep># 防止除0
img=(img-mean_intensity)/(std_intensity+1e-9)<line_sep>img=np.array([img])<line_sep>img=torch.from_numpy(img)<line_sep><return>img.unsqueeze(0)<block_end><def_stmt>get_shell fl_Num_Points fl_Radius<block_start>x_list=[]<line_sep>y_list=[]<line_sep>z_list=[]<line_sep>offset=2.0/fl_Num_Points<line_sep>increment=math.pi<times>(3.0-math.sqrt(5.0))<for_stmt>i range(fl_Num_Points)<block_start>z=((i<times>offset)-1.0)+(offset/2.0)<line_sep>r=math.sqrt(1.0-pow(z 2.0))<line_sep>phi=((i+1)%fl_Num_Points)<times>increment<line_sep>x=math.cos(phi)<times>r<line_sep>y=math.sin(phi)<times>r<line_sep>x_list.append(fl_Radius<times>x)<line_sep>y_list.append(fl_Radius<times>y)<line_sep>z_list.append(fl_Radius<times>z)<block_end><return>x_list y_list z_list<block_end><def_stmt>prob_terminates pre_y max_points<block_start>res=torch.sum(-pre_y<times>torch.log2(pre_y))<line_sep><return>res/torch.log2(torch.from_numpy(np.array([max_points])).float())<block_end><def_stmt>get_closer_distance vessel target_point<block_start>min_dis=float("inf")<for_stmt>i range(len(vessel))<block_start>curr_point=vessel[i]<line_sep>dist=np.linalg.norm(target_point-curr_point)<if_stmt>dist<l>min_dis<block_start>min_dis=dist<line_sep>index=i<block_end><block_end><return>min_dis index<block_end><def_stmt>get_distance v1 v2<block_start><return>np.linalg.norm(v1-v2)<block_end><def_stmt>get_angle v1 v2<block_start>cosangle=v1.dot(v2)/(np.linalg.norm(v1)<times>np.linalg.norm(v2))<line_sep>cosangle=np.clip(cosangle -1 1)<line_sep><return>math.degrees(np.arccos(cosangle))<block_end><def_stmt>save_info res:list path:str<block_start>x_list=[]<line_sep>y_list=[]<line_sep>z_list=[]<for_stmt>i range(len(res))<block_start>x_list.append(res[i][0][0])<line_sep>y_list.append(res[i][0][1])<line_sep>z_list.append(res[i][0][2])<block_end>dataframe=pd.DataFrame({'x':x_list 'y':y_list 'z':z_list})<line_sep>dataframe.to_csv(path index=<false> columns=['x' 'y' 'z'] sep=',' float_format='%.5f')<block_end><def_stmt>crop_heart 
input_arr<block_start>'''
In order to remove the influence of pulmonary vessels, we will use threshold method to segment the heart region
:param input_arr: image arr
:return: Data after removing lung areas
'''<line_sep>src_array=copy.deepcopy(input_arr)<line_sep>z,w,h=src_array.shape<line_sep>new_arr=np.zeros((z w h))<line_sep>new_arr<augadd>-1000<line_sep>sum_minr=0<line_sep>sum_minc=0<line_sep>sum_maxr=0<line_sep>sum_maxc=0<for_stmt>k range(z)<block_start>image=src_array[k][: :]<line_sep>ret,thresh=cv2.threshold(image 20 400 cv2.THRESH_BINARY)<line_sep>kernel=cv2.getStructuringElement(cv2.MORPH_ELLIPSE (5 5))<line_sep>opening=cv2.morphologyEx(thresh cv2.MORPH_OPEN kernel anchor=(-1 -1) iterations=4)<line_sep>label_opening=measure.label(opening)<line_sep>regionprops=measure.regionprops(label_opening)<line_sep>max_area=0<line_sep>index=0<for_stmt>i range(len(regionprops))<block_start><if_stmt>regionprops[i].area<g>max_area<block_start>max_area=regionprops[i].area<line_sep>index=i<block_end><block_end>minr,minc,maxr,maxc=regionprops[index].bbox<line_sep>new_arr[k][minr:maxr minc:maxc]=src_array[k][minr:maxr minc:maxc]<line_sep>sum_minr<augadd>minr<line_sep>sum_minc<augadd>minc<line_sep>sum_maxr<augadd>maxr<line_sep>sum_maxc<augadd>maxc<block_end>mean_minr=sum_minr<floordiv>z<line_sep>meam_minc=sum_minc<floordiv>z<line_sep>mean_maxr=sum_maxr<floordiv>z<line_sep>mean_maxc=sum_maxc<floordiv>z<line_sep><return>new_arr meam_minc mean_minr mean_maxc mean_maxr<block_end> |
#%% Demo 4: Simple Image reconstruction
#
#
# This demo will show how a simple image reconstruction can be performed,
# by using OS-SART and FDK
#
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
# This file is part of the TIGRE Toolbox
#
# Copyright (c) 2015, University of Bath and
# CERN-European Organization for Nuclear Research
# All rights reserved.
#
# License: Open Source under BSD.
# See the full license at
# https://github.com/CERN/TIGRE/blob/master/LICENSE
#
# Contact: <EMAIL>
# Codes: https://github.com/CERN/TIGRE/
# Coded by: <NAME>
# --------------------------------------------------------------------------
#%%Initialize
<import_stmt>tigre<import_stmt>numpy<as>np<import_from_stmt>tigre.utilities sample_loader<import_from_stmt>tigre.utilities CTnoise<import_stmt>tigre.algorithms<as>algs<line_sep>#%% Geometry
geo=tigre.geometry_default(high_resolution=<false>)<line_sep>#%% Load data and generate projections
# define angles
angles=np.linspace(0 2<times>np.pi 100)<line_sep># Load thorax phatom data
head=sample_loader.load_head_phantom(geo.nVoxel)<line_sep># generate projections
projections=tigre.Ax(head geo angles)<line_sep># add noise
noise_projections=CTnoise.add(projections Poisson=1e5 Gaussian=np.array([0 10]))<line_sep>#%% Reconstruct image using OS-SART and FDK
# FDK
imgFDK=algs.fdk(noise_projections geo angles)<line_sep># OS-SART
niter=50<line_sep>imgOSSART=algs.ossart(noise_projections geo angles niter)<line_sep>#%% Show the results
tigre.plotimg(np.concatenate([imgFDK imgOSSART] axis=1) dim="z")<line_sep> |
# Copyright 2018 Deep Learning Service of Huawei Cloud. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
<import_from_future_stmt> absolute_import division print_function<import_stmt>os<import_stmt>numpy<as>np<import_from_stmt>moxing.framework file<import_from_stmt>data.yolo_load.detection_dataset Detection_dataset<import_from_stmt>utils.read_image_to_list get_image_list<import_from_stmt>mxnet gluon io nd<def_stmt>_pad_arrs_to_max_length arrs max_gt_box_number pad_axis=0 pad_val=-1<block_start>"""Inner Implementation of the Pad batchify"""<if_stmt><not>isinstance(arrs[0] (nd.NDArray np.ndarray))<block_start>arrs=[np.asarray(ele)<for>ele arrs]<block_end>max_size=max_gt_box_number<line_sep>ret_shape=list(arrs[0].shape)<line_sep>ret_shape[pad_axis]=max_size<line_sep>ret_shape=(len(arrs) )+tuple(ret_shape)<line_sep>ret=nd.full(shape=ret_shape val=pad_val dtype=arrs[0].dtype)<for_stmt>i,arr enumerate(arrs)<block_start><if_stmt>arr.shape[pad_axis]<eq>max_size<block_start>ret[i]=arr<block_end><else_stmt><block_start>slices=[slice(<none>)<for>_ range(arr.ndim)]<line_sep>slices[pad_axis]=slice(0 arr.shape[pad_axis])<line_sep>slices=[slice(i i+1)]+slices<line_sep>ret[tuple(slices)]=arr<block_end><block_end><return>ret<block_end><class_stmt>_train_batchify_fn(object)<block_start><def_stmt>__init__ self max_gt_box_number<block_start>self._max_gt_box_number=max_gt_box_number<block_end><def_stmt>__call__ self data<block_start>"""Collate train data into batch."""<line_sep>img_data=nd.stack(*[item[0]<for>item data])<line_sep>center_targets=nd.stack(*[item[1]<for>item data])<line_sep>scale_targets=nd.stack(*[item[2]<for>item data])<line_sep>weights=nd.stack(*[item[3]<for>item data])<line_sep>objectness=nd.stack(*[item[4]<for>item data])<line_sep>class_targets=nd.stack(*[item[5]<for>item data])<line_sep>gt_bboxes=_pad_arrs_to_max_length([item[6]<for>item data] self._max_gt_box_number pad_axis=0 pad_val=-1)<line_sep>batch_data=io.DataBatch(data=[img_data] label=[gt_bboxes objectness center_targets scale_targets weights 
class_targets])<line_sep><return>batch_data<block_end><block_end><class_stmt>_val_batchify_fn(object)<block_start><def_stmt>__init__ self max_gt_box_number<block_start>self._max_gt_box_number=max_gt_box_number<block_end><def_stmt>__call__ self data<block_start>"""Collate train data into batch."""<line_sep>img_data=nd.stack(*[item[0]<for>item data])<line_sep>gt_bboxes=_pad_arrs_to_max_length([item[1]<for>item data] self._max_gt_box_number pad_axis=0 pad_val=-1)<line_sep>batch_data=io.DataBatch(data=[img_data] label=[gt_bboxes])<line_sep><return>batch_data<block_end><block_end><def_stmt>_get_provide_data next_batch<block_start>next_data=next_batch.data<line_sep><return>[io.DataDesc(name='data' shape=next_data[0].shape)]<block_end><def_stmt>_get_provide_label next_batch gt_boxes_shape=(32 56 4) is_train=<true><block_start>next_label=next_batch.label<if_stmt>is_train<block_start>provide_label=[io.DataDesc(name='gt_boxes' shape=next_label[0].shape) io.DataDesc(name='obj_t' shape=next_label[1].shape) io.DataDesc(name='centers_t' shape=next_label[2].shape) io.DataDesc(name='scales_t' shape=next_label[3].shape) io.DataDesc(name='weights_t' shape=next_label[4].shape) io.DataDesc(name='clas_t' shape=next_label[5].shape)]<block_end><else_stmt><block_start>provide_label=<none><block_end><return>provide_label<block_end><def_stmt>_reset <block_start><pass><block_end><def_stmt>get_data_iter data_path train_file=<none> val_file=<none> split_spec=1 hyper_train={} hyper_val={} **kwargs<block_start>train_set=<none><line_sep>val_set=<none><line_sep>train_list=<none><line_sep>val_list=<none><if_stmt>train_file<is><not><none><block_start><assert_stmt>file.exists(train_file) 'not found train file'<line_sep>train_path=file.read(train_file).split("\n")[0:-1]<line_sep>train_list=[path.replace('\r' '').split(' ')<for>path train_path]<line_sep>train_list=[[os.path.join(data_path path[0]) os.path.join(data_path path[1])]<for>path 
train_list]<block_end><if_stmt>val_file<is><not><none><block_start><assert_stmt>file.exists(val_file) 'not found val file'<line_sep>val_path=file.read(val_file).split("\n")[0:-1]<line_sep>val_list=[path.replace('\r' '').split(' ')<for>path val_path]<line_sep>val_list=[[os.path.join(data_path path[0]) os.path.join(data_path path[1])]<for>path val_list]<block_end><if_stmt>train_file<is><none><and>val_file<is><none><block_start>train_list,val_list,_=get_image_list(data_path split_spec)<block_end><if_stmt>'anchors'<not><in>kwargs<block_start>kwargs['anchors']=[[116 90 156 198 373 326] [30 61 62 45 59 119] [10 13 16 30 33 23]]<block_end><if_stmt>'offsets'<not><in>kwargs<block_start>kwargs['offsets']=[(13 13) (26 26) (52 52)]<block_end><if_stmt>train_list<is><not><none><and>len(train_list)<g>0<block_start>dataset=Detection_dataset(img_list=train_list index_file=hyper_train.get('index_file' <none>) width=hyper_train.get('width' 416) height=hyper_train.get('height' 416) is_train=<true> ** kwargs)<line_sep>max_gt_box_number=max([len(item)<for>item dataset.label_cache])<line_sep>batch_size=hyper_train.get('batch_size' 32)<line_sep>train_set=gluon.data.DataLoader(dataset=dataset batch_size=batch_size shuffle=hyper_train.get('shuffle' <true>) batchify_fn=_train_batchify_fn(max_gt_box_number) last_batch='rollover' num_workers=hyper_train.get('preprocess_threads' 4))<line_sep>next_data_batch=next(iter(train_set))<line_sep>setattr(train_set 'reset' _reset)<line_sep>setattr(train_set 'provide_data' _get_provide_data(next_data_batch))<line_sep>setattr(train_set 'provide_label' _get_provide_label(next_data_batch (batch_size max_gt_box_number 4) is_train=<true>))<block_end><if_stmt>val_list<is><not><none><and>len(val_list)<g>0<block_start><assert_stmt>'index_file'<in>hyper_val<and>file.exists(hyper_val['index_file']) 'not found label name file'<line_sep>dataset=Detection_dataset(img_list=val_list index_file=hyper_val.get('index_file') width=hyper_val.get('width' 416) 
height=hyper_val.get('height' 416) is_train=<false> ** kwargs)<line_sep>max_gt_box_number=max([len(item)<for>item dataset.label_cache])<line_sep>batch_size=hyper_val.get('batch_size' 32)<line_sep>val_set=gluon.data.DataLoader(dataset=dataset batch_size=batch_size shuffle=hyper_val.get('shuffle' <true>) batchify_fn=_val_batchify_fn(max_gt_box_number) last_batch='keep' num_workers=hyper_val.get('preprocess_threads' 4))<line_sep>next_data_batch=next(iter(val_set))<line_sep>setattr(val_set 'reset' _reset)<line_sep>setattr(val_set 'provide_data' _get_provide_data(next_data_batch))<line_sep>setattr(val_set 'provide_label' _get_provide_label(next_data_batch is_train=<false>))<block_end><return>train_set val_set<block_end> |
src=Split('''
uart_test.c
''')<line_sep>component=aos_component('uart_test' src)<line_sep>component.add_cflags('-Wall')<line_sep>component.add_cflags('-Werror')<line_sep> |
# Copyright 2017. <NAME>. All rights reserved
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
<import_stmt>types<import_from_stmt>functools wraps<class_stmt>_PyFunctions(object)<block_start>"""Structure for holding custom user-defined python functions.
Will store a set of functions created by the user. Should not access this directly but rather user the
decorators or setter functions, and use the py_modules class variable to access individual functions. Is divided
up into
synaptic_weight: functions for calcuating synaptic weight.
cell_model: should return NEURON cell hobj.
synapse model: should return a NEURON synapse object.
"""<def_stmt>__init__ self<block_start>self.__syn_weights={}<line_sep>self.__cell_models={}<line_sep>self.__synapse_models={}<line_sep>self.__cell_processors={}<block_end><def_stmt>clear self<block_start>self.__syn_weights.clear()<line_sep>self.__cell_models.clear()<line_sep>self.__synapse_models.clear()<line_sep>self.__cell_processors.clear()<block_end><def_stmt>add_synaptic_weight self name func overwrite=<true><block_start>"""stores synpatic fuction for given name"""<if_stmt>overwrite<or>name<not><in>self.__syn_weights<block_start>self.__syn_weights[name]=func<block_end><block_end>@property<def_stmt>synaptic_weights self<block_start>"""return list of the names of all available synaptic weight functions"""<line_sep><return>self.__syn_weights.keys()<block_end><def_stmt>synaptic_weight self name<block_start>"""return the synpatic weight function"""<line_sep><return>self.__syn_weights[name]<block_end><def_stmt>has_synaptic_weight self name<block_start><return>name<in>self.__syn_weights<block_end><def_stmt>__cell_model_key self directive model_type<block_start><return>(directive model_type)<block_end><def_stmt>add_cell_model self directive model_type func overwrite=<true><block_start>key=self.__cell_model_key(directive model_type)<if_stmt>overwrite<or>key<not><in>self.__cell_models<block_start>self.__cell_models[key]=func<block_end><block_end>@property<def_stmt>cell_models self<block_start><return>self.__cell_models.keys()<block_end><def_stmt>cell_model self directive model_type<block_start><return>self.__cell_models[self.__cell_model_key(directive model_type)]<block_end><def_stmt>has_cell_model self directive model_type<block_start><return>self.__cell_model_key(directive model_type)<in>self.__cell_models<block_end><def_stmt>add_synapse_model self name func overwrite=<true><block_start><if_stmt>overwrite<or>name<not><in>self.__synapse_models<block_start>self.__synapse_models[name]=func<block_end><block_end>@property<def_stmt>synapse_models 
self<block_start><return>self.__synapse_models.keys()<block_end><def_stmt>synapse_model self name<block_start><return>self.__synapse_models[name]<block_end>@property<def_stmt>cell_processors self<block_start><return>self.__cell_processors.keys()<block_end><def_stmt>cell_processor self name<block_start><return>self.__cell_processors[name]<block_end><def_stmt>add_cell_processor self name func overwrite=<true><block_start><if_stmt>overwrite<or>name<not><in>self.__syn_weights<block_start>self.__cell_processors[name]=func<block_end><block_end><def_stmt>__repr__ self<block_start>rstr='{}: {}\n'.format('cell_models' self.cell_models)<line_sep>rstr<augadd>'{}: {}\n'.format('synapse_models' self.synapse_models)<line_sep>rstr<augadd>'{}: {}'.format('synaptic_weights' self.synaptic_weights)<line_sep><return>rstr<block_end><block_end>py_modules=_PyFunctions()<def_stmt>synaptic_weight *wargs **wkwargs<block_start>"""A decorator for registering a function as a synaptic weight function.
To use either::
@synaptic_weight
def weight_function():
...
or::
@synaptic_weight(name='name_in_edge_types')
def weight_function():
...
Once the decorator has been attached and imported the functions will automatically be added to py_modules.
"""<if_stmt>len(wargs)<eq>1<and>callable(wargs[0])# for the case without decorator arguments, grab the function object in wargs and create a decorator
<block_start>func=wargs[0]<line_sep>py_modules.add_synaptic_weight(func.__name__ func)# add function assigned to its original name
@wraps(func)<def_stmt>func_wrapper *args **kwargs<block_start><return>func(*args **kwargs)<block_end><return>func_wrapper<block_end><else_stmt># for the case with decorator arguments
<block_start><assert_stmt>(all(k<in>['name']<for>k wkwargs.keys()))<def_stmt>decorator func# store the function in py_modules but under the name given in the decorator arguments
<block_start>py_modules.add_synaptic_weight(wkwargs['name'] func)<line_sep>@wraps(func)<def_stmt>func_wrapper *args **kwargs<block_start><return>func(*args **kwargs)<block_end><return>func_wrapper<block_end><return>decorator<block_end><block_end><def_stmt>cell_model *wargs **wkwargs<block_start>"""A decorator for registering NEURON cell loader functions."""<if_stmt>len(wargs)<eq>1<and>callable(wargs[0])# for the case without decorator arguments, grab the function object in wargs and create a decorator
<block_start>func=wargs[0]<line_sep>py_modules.add_cell_model(func.__name__ func)# add function assigned to its original name
@wraps(func)<def_stmt>func_wrapper *args **kwargs<block_start><return>func(*args **kwargs)<block_end><return>func_wrapper<block_end><else_stmt># for the case with decorator arguments
<block_start><assert_stmt>(all(k<in>['name']<for>k wkwargs.keys()))<def_stmt>decorator func# store the function in py_modules but under the name given in the decorator arguments
<block_start>py_modules.add_cell_model(wkwargs['name'] func)<line_sep>@wraps(func)<def_stmt>func_wrapper *args **kwargs<block_start><return>func(*args **kwargs)<block_end><return>func_wrapper<block_end><return>decorator<block_end><block_end><def_stmt>synapse_model *wargs **wkwargs<block_start>"""A decorator for registering NEURON synapse loader functions."""<if_stmt>len(wargs)<eq>1<and>callable(wargs[0])# for the case without decorator arguments, grab the function object in wargs and create a decorator
<block_start>func=wargs[0]<line_sep>py_modules.add_synapse_model(func.__name__ func)# add function assigned to its original name
@wraps(func)<def_stmt>func_wrapper *args **kwargs<block_start><return>func(*args **kwargs)<block_end><return>func_wrapper<block_end><else_stmt># for the case with decorator arguments
<block_start><assert_stmt>(all(k<in>['name']<for>k wkwargs.keys()))<def_stmt>decorator func# store the function in py_modules but under the name given in the decorator arguments
<block_start>py_modules.add_synapse_model(wkwargs['name'] func)<line_sep>@wraps(func)<def_stmt>func_wrapper *args **kwargs<block_start><return>func(*args **kwargs)<block_end><return>func_wrapper<block_end><return>decorator<block_end><block_end><def_stmt>add_weight_function func name=<none> overwrite=<true><block_start><assert_stmt>(callable(func))<line_sep>func_name=name<if>name<is><not><none><else>func.__name__<line_sep>py_modules.add_synaptic_weight(func_name func overwrite)<block_end><def_stmt>add_cell_model func directive model_type overwrite=<true><block_start><assert_stmt>(callable(func))<line_sep># func_name = name if name is not None else func.__name__
py_modules.add_cell_model(directive model_type func overwrite)<block_end><def_stmt>add_cell_processor func name=<none> overwrite=<true><block_start><assert_stmt>(callable(func))<line_sep>func_name=name<if>name<is><not><none><else>func.__name__<line_sep>py_modules.add_cell_processor(func_name func overwrite)<block_end><def_stmt>add_synapse_model func name=<none> overwrite=<true><block_start><assert_stmt>(callable(func))<line_sep>func_name=name<if>name<is><not><none><else>func.__name__<line_sep>py_modules.add_synapse_model(func_name func overwrite)<block_end><def_stmt>load_py_modules cell_models=<none> syn_models=<none> syn_weights=<none># py_modules.clear()
<block_start><if_stmt>cell_models<is><not><none><block_start><assert_stmt>(isinstance(cell_models types.ModuleType))<for_stmt>f [cell_models.__dict__.get(f)<for>f dir(cell_models)]<block_start><if_stmt>isinstance(f types.FunctionType)<block_start>py_modules.add_cell_model(f.__name__ f)<block_end><block_end><block_end><if_stmt>syn_models<is><not><none><block_start><assert_stmt>(isinstance(syn_models types.ModuleType))<for_stmt>f [syn_models.__dict__.get(f)<for>f dir(syn_models)]<block_start><if_stmt>isinstance(f types.FunctionType)<block_start>py_modules.add_synapse_model(f.__name__ f)<block_end><block_end><block_end><if_stmt>syn_weights<is><not><none><block_start><assert_stmt>(isinstance(syn_weights types.ModuleType))<for_stmt>f [syn_weights.__dict__.get(f)<for>f dir(syn_weights)]<block_start><if_stmt>isinstance(f types.FunctionType)<block_start>py_modules.add_synaptic_weight(f.__name__ f)<block_end><block_end><block_end><block_end> |
"""Mock hooks."""<import_stmt>os<line_sep>GLOBAL_VALUE=os.getenv("AWS_DEFAULT_REGION")<line_sep> |
# -*- coding: utf-8 -*-
#
"""
https://github.com/RDFLib/pySHACL/issues/12
"""<import_from_stmt>pyshacl validate<line_sep>shacl_file_text="""
@prefix hei: <http://hei.org/customer/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix sh: <http://www.w3.org/ns/shacl#> .
@prefix xml: <http://www.w3.org/XML/1998/namespace> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
hei:HeiAddressShape a sh:NodeShape ;
sh:property [ rdfs:comment "Street constraint" ;
sh:datatype xsd:string ;
sh:minLength 30 ;
sh:path hei:Ship_to_street ] ;
sh:targetClass hei:Hei_customer .
"""<line_sep>data_file_text="""
@prefix hei: <http://hei.org/customer/> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
hei:hei_cust_1281 a hei:Hei_customer ;
rdfs:label "XYZHorecagroothandel" ;
hei:Klant_nummer 1281 ;
hei:Ship_to_City "Middenmeer" ;
hei:Ship_to_postcode "1799 AB" ;
hei:Ship_to_street "Industrieweg" .
"""<def_stmt>test_012_text <block_start>res=validate(data_file_text shacl_graph=shacl_file_text data_graph_format='turtle' shacl_graph_format='turtle' inference='both' debug=<true>)<line_sep>conforms,graph,string=res<assert_stmt><not>conforms<block_end><def_stmt>test_012_graph <block_start><import_from_stmt>rdflib Graph<line_sep>g=Graph()<line_sep>g.parse(data=data_file_text format='turtle')<line_sep>sg=Graph()<line_sep>sg.parse(data=shacl_file_text format='turtle')<line_sep>res=validate(g shacl_graph=sg inference='both' debug=<true>)<line_sep>conforms,graph,string=res<assert_stmt><not>conforms<block_end> |
data="""CO2-H2O 1.030538 0.828472 1.021392 0.895156 1
CO2-N2 0.994140013 1.107654104 1.022709642 1.047578256 1
CO2-O2 1.000000 1.031986 1.000000 1.084460 0
CO2-Ar 1.027147 0.968781 1.001378 1.029710 1
CO2-CO 0.993245 1.068392 1.030855 1.245499 0
H2O-N2 0.954149 0.805147 1.079628 0.733443 1
H2O-O2 0.798046 0.807842 0.972576 0.873460 0.6017
H2O-Ar 0.679104 0.921000 0.940398 1.050952 0
H2O-CO 1.045927 0.823984 1.063348 0.766756 0.9897
N2-O2 0.997190589 0.995157044 0.999521770 0.997082328 0
N2-Ar 0.999442 0.989311 1.006697 1.001549 0
N2-CO 1.002409 0.994100 1.000000 1.001317 0
O2-Ar 0.999039 0.988822 1.006502 1.001341 0
O2-CO 1.000000 1.000000 1.000000 1.000000 0
CO-Ar 1.000000000 0.954215746 1.000000000 1.159720623 0"""<line_sep>namedict=dict(O2='Oxygen' N2='Nitrogen' CO2='CarbonDioxide' CO='CarbonMonoxide' H2O='Water' Ar='Argon')<import_stmt>CoolProp<line_sep>CASdict={namedict[n]:CoolProp.CoolProp.get_fluid_param_string(namedict[n] "CAS")<for>n namedict}<line_sep>functiondict={'CO2-H2O':'CarbonDioxide-Water' 'CO2-N2':'CarbonDioxide-Nitrogen' 'CO2-Ar':'CarbonDioxide-Argon' 'H2O-N2':'GeneralizedAirWater' 'H2O-O2':'GeneralizedAirWater' 'H2O-CO':'GeneralizedAirWater'}<line_sep>out=[]<for_stmt>line data.split('\n')<block_start>pair,betaT,betaV,gammaT,gammaV,F=line.split(' ')<line_sep>n1,n2=pair.split('-')<line_sep>out.append(dict(BibTeX='Gernert-Thesis-2013' F=float(F) betaT=float(betaT) betaV=float(betaV) gammaT=float(gammaT) gammaV=float(gammaV) Name1=namedict[n1] Name2=namedict[n2] CAS1=CASdict[namedict[n1]] CAS2=CASdict[namedict[n2]]))<if_stmt>F<ne>'0'<block_start>out[-1]['function']=functiondict[pair]<block_end><block_end><import_stmt>json sys<line_sep>sys.path.append('..')<import_from_stmt>package_json json_options<line_sep>print(json.dumps(out **json_options))<line_sep> |
# Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Base class for training examples."""<import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<import_from_stmt>base embeddings<line_sep>CONTRACTION_WORDS=set(w+'n'<for>w ['do' 'does' 'did' 'is' 'are' 'was' 'were' 'has' 'have' 'had' 'could' 'would' 'should' 'ca' 'wo' 'ai' 'might'])<class_stmt>Example(object)<block_start><def_stmt>__init__ self words word_vocab char_vocab<block_start>words=words[:]<line_sep># Fix inconsistent tokenization between datasets
<for_stmt>i range(len(words))<block_start><if_stmt>(words[i].lower()<eq>'\'t'<and>i<g>0<and>words[i-1].lower()<in>CONTRACTION_WORDS)<block_start>words[i]=words[i-1][-1]+words[i]<line_sep>words[i-1]=words[i-1][:-1]<block_end><block_end>self.words=([embeddings.START]+[word_vocab[embeddings.normalize_word(w)]<for>w words]+[embeddings.END])<line_sep>self.chars=([[embeddings.MISSING]]+[[char_vocab[c]<for>c embeddings.normalize_chars(w)]<for>w words]+[[embeddings.MISSING]])<block_end><def_stmt>__repr__ self <block_start>inv_char_vocab=embeddings.get_inv_char_vocab()<line_sep><return>' '.join([''.join([inv_char_vocab[c]<for>c w])<for>w self.chars])<block_end><block_end> |
# -*- coding: utf-8 -*-
# Copyright (C) 2013 <NAME>
<import_from_future_stmt> absolute_import unicode_literals print_function division <import_stmt>inspect<try_stmt><block_start><import_from_stmt>mock Mock<block_end><except_stmt>ImportError<block_start><import_from_stmt>unittest.mock Mock<block_end><import_stmt>pytest<import_stmt>six<import_from_stmt>valve.source messages<class_stmt>TestUseDefault(object)<block_start><def_stmt>test_pass_value self<block_start>instance=messages.MessageField("" optional=<false> default_value=5)<line_sep>called=[]<line_sep>@messages.use_default<def_stmt>test instance value values<block_start>called.append(<none>)<assert_stmt>value<eq>5<block_end>test(instance 5)<assert_stmt>called<block_end><def_stmt>test_nonoptional_no_value self<block_start>instance=messages.MessageField("" optional=<false> default_value=5)<line_sep>called=[]<line_sep>@messages.use_default<def_stmt>test instance value values<block_start>called.append(<none>)<assert_stmt>value<eq>5<block_end><with_stmt>pytest.raises(ValueError)<block_start>test(instance)<block_end><assert_stmt><not>called<block_end><def_stmt>test_optional_pass_value self<block_start>instance=messages.MessageField("" optional=<true> default_value=5)<line_sep>called=[]<line_sep>@messages.use_default<def_stmt>test instance value values<block_start>called.append(<none>)<assert_stmt>value<eq>10<block_end>test(instance 10)<assert_stmt>called<block_end><def_stmt>test_optional_no_value self<block_start>instance=messages.MessageField("" optional=<true> default_value=5)<line_sep>called=[]<line_sep>@messages.use_default<def_stmt>test instance value values<block_start>called.append(<none>)<assert_stmt>value<eq>5<block_end>test(instance)<assert_stmt>called<block_end><block_end><class_stmt>TestNeedsBuffer(object)<block_start><def_stmt>test_not_empty self<block_start>called=[]<line_sep>@messages.needs_buffer<def_stmt>test instance buf values<block_start>called.append(<none>)<block_end>test(<none> b"..." 
{})<assert_stmt>called<block_end><def_stmt>test_empty self<block_start>called=[]<line_sep>@messages.needs_buffer<def_stmt>test instance buf values<block_start>called.append(<none>)<block_end><with_stmt>pytest.raises(messages.BufferExhaustedError)<block_start>test(<none> b"" {})<block_end><assert_stmt><not>called<block_end><block_end><class_stmt>TestMessageField(object)<block_start><def_stmt>test_default_little_endian self<block_start><class_stmt>TestField(messages.MessageField)<block_start>fmt="i"<block_end><assert_stmt>TestField("").format.startswith("<")<block_end><def_stmt>test_explicit_endian self<block_start><for_stmt>fmt "!<>=@"<block_start>TestField=type("TestField"<if>six.PY3<else>b"TestField" (messages.MessageField ) {"fmt":fmt})<assert_stmt>TestField("").format.startswith(fmt)<block_end><block_end><def_stmt>test_validate self<block_start>validators=[Mock(side_effect=<lambda>x:x<eq>5) Mock(side_effect=<lambda>x:isinstance(x int))]<line_sep>field=messages.MessageField("" validators=validators)<line_sep>field.validate(5)<for_stmt>validator validators<block_start><assert_stmt>validator.called<block_end><with_stmt>pytest.raises(messages.BrokenMessageError)<block_start>field.validate("10")<block_end><block_end><def_stmt>test_validate_exception self<block_start>field=messages.MessageField("" validators=[Mock(side_effect=Exception)])<with_stmt>pytest.raises(messages.BrokenMessageError)<block_start>field.validate(5)<block_end><block_end><def_stmt>test_decode_empty self<block_start>field=messages.MessageField("")<with_stmt>pytest.raises(messages.BufferExhaustedError)<block_start>field.decode(b"")<block_end><block_end><def_stmt>test_decode_small_buffer self<block_start>field=messages.MessageField("")<line_sep>field.format=b"<d"# 8 bytes
<with_stmt>pytest.raises(messages.BufferExhaustedError)<block_start>field.decode(b"\x00\x00\x00\x00\x00\x00\x00")<block_end><block_end><def_stmt>test_decode self<block_start>field=messages.MessageField("")<line_sep>field.format=b"<B"# 1 byte
value,remnants=field.decode(b"\xFF\x01\x02\x03")<assert_stmt>value<eq>255<assert_stmt>isinstance(remnants bytes)<assert_stmt>remnants<eq>b"\x01\x02\x03"<block_end><def_stmt>test_decode_junk self monkeypatch<block_start>field=messages.MessageField("")<line_sep>field.format=b"B"<line_sep>unpack=Mock(side_effect=messages.struct.error)<line_sep>monkeypatch.setattr(messages.struct "unpack" unpack)<with_stmt>pytest.raises(messages.BrokenMessageError)<block_start>field.decode(b"\x01\x02\x03")<block_end><block_end>@pytest.mark.parametrize("field,value,expected" [(messages.ByteField 26 b"\x1A") (messages.ShortField 4056 b"\xD8\x0F") (messages.LongField 2394838 b"\xD6\x8A\x24\x00") (messages.FloatField 1.0 b"\x00\x00\x80\x3F") (messages.MSAddressEntryPortField 6969 b"\x1B\x39")])<def_stmt>test_encode self field value expected<block_start>encoded=field("").encode(value)<assert_stmt>isinstance(encoded bytes)<assert_stmt>encoded<eq>expected<block_end>@pytest.mark.parametrize("field,value" [(messages.ByteField -1) (messages.ByteField 256) (messages.ShortField -32769) (messages.ShortField 32768) (messages.LongField -2147483649) (messages.LongField 2147483648) (messages.MSAddressEntryPortField -1) (messages.MSAddressEntryPortField 65536)])<def_stmt>test_encode_out_of_range self field value<block_start><with_stmt>pytest.raises(messages.BrokenMessageError)<block_start>field("").encode(value)<block_end><block_end><block_end><class_stmt>TestStringField(object)<block_start><def_stmt>test_encode self<block_start>field=messages.StringField("")<line_sep>encoded=field.encode("Hello")<assert_stmt>isinstance(encoded bytes)<assert_stmt>encoded.endswith(b"\x00")<assert_stmt>encoded[:-1]<eq>b"\x48\x65\x6C\x6C\x6F"<block_end><def_stmt>test_decode self<block_start>field=messages.StringField("")<line_sep>encoded=b"\x48\x65\x6C\x6C\x6F\x00\x02\x01\x00"<line_sep>decoded,remnants=field.decode(encoded)<assert_stmt>isinstance(decoded 
six.text_type)<assert_stmt>decoded<eq>"Hello"<assert_stmt>isinstance(remnants bytes)<assert_stmt>remnants<eq>b"\x02\x01\x00"<block_end><def_stmt>test_decode_empty self<block_start>field=messages.StringField("")<with_stmt>pytest.raises(messages.BufferExhaustedError)<block_start>field.decode(b"")<block_end><block_end><def_stmt>test_no_null_terminator self<block_start>field=messages.StringField("")<with_stmt>pytest.raises(messages.BufferExhaustedError)<block_start>field.decode(b"\xFF\xFF\xFF")<block_end><block_end><block_end><class_stmt>TestMessageArrayField(object)<block_start>@pytest.fixture<def_stmt>Message self<block_start>"""Simple message with a byte field and short filed"""<class_stmt>Message(messages.Message)<block_start>fields=(messages.ByteField("byte") messages.ShortField("short"))<block_end><return>Message<block_end><def_stmt>test_constant_count self<block_start>array=messages.MessageArrayField("" <none> 5)<assert_stmt>array.count()<eq>5<assert_stmt>array.count.minimum<eq>5<block_end><def_stmt>test_callable_count self<block_start><def_stmt>function values={}<block_start><pass><block_end>array=messages.MessageArrayField("" <none> function)<assert_stmt>array.count<is>function<block_end><def_stmt>test_decode_constant self<block_start><class_stmt>Message(messages.Message)<block_start>fields=messages.ByteField("field") <block_end>array=messages.MessageArrayField("" Message 5)<line_sep>encoded=b"\x00\x01\x02\x03\x04\x00\x00\x00"<line_sep>values,remnants=array.decode(encoded)<for_stmt>sub_message,expected zip(values range(4))<block_start><assert_stmt>sub_message["field"]<eq>expected<block_end><assert_stmt>isinstance(remnants bytes)<assert_stmt>remnants<eq>b"\x00\x00\x00"<block_end><def_stmt>test_decode_insufficient_buffer self<block_start><class_stmt>Message(messages.Message)<block_start>fields=messages.ByteField("field") <block_end>array=messages.MessageArrayField("" Message 
5)<line_sep>encoded=b"\xFF\xFE\xFD"<with_stmt>pytest.raises(messages.BrokenMessageError)<block_start>array.decode(encoded)<block_end><block_end><def_stmt>test_decode_minimum self<block_start><class_stmt>Message(messages.Message)<block_start>fields=messages.ByteField("field") <block_end>array=messages.MessageArrayField("" Message 5)<line_sep>array.count.minimum=2<line_sep>encoded=b"\x00\x01"<line_sep>values,remnants=array.decode(encoded)# Minimum
<for_stmt>sub_message,expected zip(values range(1))<block_start><assert_stmt>sub_message["field"]<eq>expected<block_end><assert_stmt><not>remnants<line_sep>encoded<augadd>b"\x02\x03\x04"<line_sep>values,remnants=array.decode(encoded)# Maximum
<for_stmt>sub_message,expected zip(values range(4))<block_start><assert_stmt>sub_message["field"]<eq>expected<block_end><assert_stmt><not>remnants<block_end><def_stmt>test_decode_minimum_remnants self<block_start><class_stmt>Message(messages.Message)<block_start>fields=messages.ShortField("field") <block_end>array=messages.MessageArrayField("" Message 3)<line_sep>array.count.minimum=2<line_sep># Two shorts and a trailing byte
encoded=b"\x00\x00\x11\x11\x22"<line_sep>values,remnants=array.decode(encoded)<for_stmt>sub_message,expected zip(values [0 0x1111])<block_start><assert_stmt>sub_message["field"]<eq>expected<block_end><assert_stmt>isinstance(remnants bytes)<assert_stmt>remnants<eq>b"\x22"<block_end><def_stmt>test_deocde_value_of self<block_start><assert_stmt>messages.MessageArrayField.value_of("f")({"f":26})<eq>26<block_end><def_stmt>test_deocde_all self<block_start><class_stmt>Message(messages.Message)<block_start>fields=messages.ByteField("") <block_end>array=messages.MessageArrayField("" Message messages.MessageArrayField.all())<line_sep>values,remnants=array.decode(b"\x00"<times>128)<assert_stmt>len(values)<eq>128<assert_stmt><not>remnants<block_end><def_stmt>test_deocde_all_remnants self<block_start><class_stmt>Message(messages.Message)<block_start>fields=messages.ShortField("") <block_end>array=messages.MessageArrayField("" Message messages.MessageArrayField.all())<line_sep>values,remnants=array.decode((b"\x00\x00"<times>64)+b"\xFF")<assert_stmt>len(values)<eq>64<assert_stmt>isinstance(remnants bytes)<assert_stmt>remnants<eq>b"\xFF"<block_end><def_stmt>test_deocde_at_least_minimum self<block_start><class_stmt>Message(messages.Message)<block_start>fields=messages.ByteField("") <block_end>array=messages.MessageArrayField("" Message messages.MessageArrayField.at_least(5))<line_sep>values,remnants=array.decode(b"\x00"<times>5)<assert_stmt>len(values)<eq>5<assert_stmt><not>remnants<block_end><def_stmt>test_decode_at_least_more self<block_start><class_stmt>Message(messages.Message)<block_start>fields=messages.ByteField("") <block_end>array=messages.MessageArrayField("" Message messages.MessageArrayField.at_least(5))<line_sep>values,remnants=array.decode(b"\x00"<times>10)<assert_stmt>len(values)<eq>10<assert_stmt><not>remnants<block_end><def_stmt>test_deocde_at_least_too_few self<block_start><class_stmt>Message(messages.Message)<block_start>fields=messages.ByteField("") 
<block_end>array=messages.MessageArrayField("" Message messages.MessageArrayField.at_least(5))<with_stmt>pytest.raises(messages.BrokenMessageError)<block_start>array.decode(b"\x00"<times>4)<block_end><block_end><def_stmt>test_deocde_at_least_remnants self<block_start><class_stmt>Message(messages.Message)<block_start>fields=messages.ShortField("") <block_end>array=messages.MessageArrayField("" Message messages.MessageArrayField.at_least(5))<line_sep>values,remnants=array.decode((b"\x00\x00"<times>10)+b"\xFF")<assert_stmt>len(values)<eq>10<assert_stmt>isinstance(remnants bytes)<assert_stmt>remnants<eq>b"\xFF"<block_end><def_stmt>test_encode self Message<block_start>array=messages.MessageArrayField("" Message 3)<line_sep>elements=[Message(byte=255 short=0x11AA)]<times>3<line_sep>encoded=array.encode(elements)<assert_stmt>isinstance(encoded bytes)<assert_stmt>encoded<eq>elements[0].encode()<times>3<block_end><def_stmt>test_encode_invalid_element self<block_start><class_stmt>Element(messages.Message)<block_start>fields=()<block_end><class_stmt>Borked(messages.Message)<block_start>fields=()<block_end>array=messages.MessageArrayField("" Element 3)<with_stmt>pytest.raises(messages.BrokenMessageError)<block_start>array.encode([Borked()])<block_end><block_end><def_stmt>test_encode_too_many_elements self Message<block_start>array=messages.MessageArrayField("" Message 3)<line_sep>elements=[Message(byte=255 short=0x11AA)]<times>5<with_stmt>pytest.raises(messages.BrokenMessageError)<block_start>array.encode(elements)<block_end><block_end><def_stmt>test_encode_too_few_elements self Message<block_start>array=messages.MessageArrayField("" Message 5)<line_sep>elements=[Message(byte=255 short=0x11AA)]<times>3<with_stmt>pytest.raises(messages.BrokenMessageError)<block_start>array.encode(elements)<block_end><block_end><def_stmt>test_encode_all self Message<block_start>array=messages.MessageArrayField("" Message)<line_sep>elements=[Message(byte=255 
short=0x11AA)]<times>10<line_sep>encoded=array.encode(elements)<assert_stmt>isinstance(encoded bytes)<assert_stmt>encoded<eq>elements[0].encode()<times>10<block_end><def_stmt>test_encode_all_none self Message<block_start>array=messages.MessageArrayField("" Message)<line_sep>encoded=array.encode([])<assert_stmt>isinstance(encoded bytes)<assert_stmt>len(encoded)<eq>0<block_end><def_stmt>test_encode_value_of self Message<block_start>array=messages.MessageArrayField("" Message messages.MessageArrayField.value_of("life"))<line_sep>elements=[Message(byte=255 short=0x11AA)]<times>5<line_sep>encoded=array.encode(elements {"life":5})<assert_stmt>isinstance(encoded bytes)<assert_stmt>encoded<eq>elements[0].encode()<times>5<block_end><def_stmt>test_encode_at_least_minimum self Message<block_start>array=messages.MessageArrayField("" Message messages.MessageArrayField.at_least(3))<line_sep>elements=[Message(byte=255 short=0x11AA)]<times>3<line_sep>encoded=array.encode(elements)<assert_stmt>isinstance(encoded bytes)<assert_stmt>encoded<eq>elements[0].encode()<times>3<block_end><def_stmt>test_encode_at_least_more self Message<block_start>array=messages.MessageArrayField("" Message messages.MessageArrayField.at_least(3))<line_sep>elements=[Message(byte=255 short=0x11AA)]<times>5<line_sep>encoded=array.encode(elements)<assert_stmt>isinstance(encoded bytes)<assert_stmt>encoded<eq>elements[0].encode()<times>5<block_end><def_stmt>test_encode_at_least_too_few self Message<block_start>array=messages.MessageArrayField("" Message messages.MessageArrayField.at_least(5))<line_sep>elements=[Message(byte=255 short=0x11AA)]<times>4<with_stmt>pytest.raises(messages.BrokenMessageError)<block_start>encoded=array.encode(elements)<block_end><block_end><block_end><class_stmt>TestMessageDictField(object)<block_start><def_stmt>test_decode self<block_start>ddict=messages.MessageDictField("" messages.ByteField("key") messages.ByteField("value") 5)<line_sep>encoded=b""<for_stmt>key 
six.moves.range(5)<block_start>encoded<augadd>six.int2byte(key)+b"\xFF"<block_end>values,remnants=ddict.decode(encoded)<for_stmt>key values.keys()<block_start><assert_stmt>key<in>set(six.moves.range(5))<assert_stmt>values[key]<eq>255<block_end><block_end><block_end><class_stmt>TestMessage(object)<block_start><def_stmt>test_getitem self<block_start><assert_stmt>messages.Message(key=":)")["key"]<eq>":)"<block_end><def_stmt>test_setitem self<block_start>message=messages.Message()<line_sep>message["key"]=":)"<assert_stmt>message["key"]<eq>":)"<block_end><def_stmt>test_delitem self<block_start>message=messages.Message(key=":(")<del_stmt>message["key"]<with_stmt>pytest.raises(KeyError)<block_start>message["key"]<block_end><block_end><def_stmt>test_len self<block_start>message=messages.Message(key1=<none> key2=<none> key3=<none>)<assert_stmt>len(message)<eq>3<block_end><def_stmt>test_iter self<block_start>keys={"key1":<none> "key2":<none> "key3":<none>}<line_sep>message=messages.Message(**keys)<for_stmt>key message<block_start>keys.pop(key)<block_end><assert_stmt><not>keys<block_end><def_stmt>test_encode_simple self<block_start><class_stmt>Message(messages.Message)<block_start>fields=(messages.ByteField("first_field") messages.ByteField("last_field"))<block_end>encoded=Message(first_field=5).encode(last_field=10)<assert_stmt>isinstance(encoded bytes)<assert_stmt>encoded<eq>b"\x05\x0A"<block_end><def_stmt>test_encode_missing_nonoptional_field self<block_start><class_stmt>Message(messages.Message)<block_start>fields=(messages.ByteField("first_field") messages.ByteField("last_field"))<block_end><with_stmt>pytest.raises(ValueError)<block_start>Message(first_field=5).encode()<block_end><block_end><def_stmt>test_encode_missing_optional_field self<block_start><class_stmt>Message(messages.Message)<block_start>fields=(messages.ByteField("first_field") messages.ByteField("last_field" optional=<true> 
default_value=10))<block_end>encoded=Message(first_field=5).encode()<assert_stmt>isinstance(encoded bytes)<assert_stmt>encoded<eq>b"\x05\x0A"<block_end><def_stmt>test_encode_array self<block_start>count=Mock(return_value=1)<line_sep>count.minimum=1<class_stmt>Element(messages.Message)<block_start>fields=()<line_sep>encode=Mock(return_value=b"")<block_end><class_stmt>Message(messages.Message)<block_start>fields=(messages.ByteField("byte") messages.MessageArrayField("array" Element count))<block_end>message=Message(byte=26 array=[Element()])<line_sep>encoded=message.encode()<assert_stmt>isinstance(encoded bytes)<assert_stmt>Element.encode.called<assert_stmt>count.called<assert_stmt>count.call_args[0][0]<eq>message.values<block_end># TODO: more complex structures, e.g. ArrayField and DictFields
<block_end><class_stmt>TestFragment(object)<block_start><def_stmt>test_is_compressed self<block_start><assert_stmt>messages.Fragment(message_id=(1<lshift>31)-1).is_compressed<assert_stmt><not>messages.Fragment(message_id=1<lshift>30).is_compressed<block_end><block_end><class_stmt>TestMSAddressEntry(object)<block_start><def_stmt>test_decode_ip_insufficient_buffer self<block_start><with_stmt>pytest.raises(messages.BufferExhaustedError)<block_start>messages.MSAddressEntryIPField("").decode(b"\x00\x00")<block_end><block_end><def_stmt>test_decode_ip self<block_start>ip,remnants=messages.MSAddressEntryIPField("").decode(b"\x00\x01\x02\x03\xFF\xFF")<assert_stmt>isinstance(ip six.text_type)<assert_stmt>ip<eq>"0.1.2.3"<assert_stmt>isinstance(remnants bytes)<assert_stmt>remnants<eq>b"\xFF\xFF"<block_end><def_stmt>test_is_null self<block_start><assert_stmt>messages.MSAddressEntry.decode(b"\x00\x00\x00\x00\x00\x00").is_null<assert_stmt><not>messages.MSAddressEntry.decode(b"\x01\x02\x03\x04\x69\x87").is_null<block_end><block_end> |
"""
Testing what the fastest way is to create a 1D Array with 2 values
"""<import_stmt>os<import_stmt>sys<line_sep>sys.path.append(os.path.join(os.path.dirname(__file__) ".."))<import_stmt>random<import_stmt>numpy<as>np<line_sep>x,y=random.uniform(0 300) random.uniform(0 300)<def_stmt>numpy_array x y# Calculate distances between each of the points
<block_start><return>np.array((x y) dtype=np.float)<block_end><def_stmt>numpy_array_tuple my_tuple# Calculate distances between each of the points
<block_start><return>np.array(my_tuple dtype=np.float)<block_end><def_stmt>numpy_asarray x y# Calculate distances between each of the points
<block_start><return>np.asarray((x y) dtype=np.float)<block_end><def_stmt>numpy_asarray_tuple my_tuple# Calculate distances between each of the points
<block_start><return>np.asarray(my_tuple dtype=np.float)<block_end><def_stmt>numpy_asanyarray x y# Calculate distances between each of the points
<block_start><return>np.asanyarray((x y) dtype=np.float)<block_end><def_stmt>numpy_asanyarray_tuple my_tuple# Calculate distances between each of the points
<block_start><return>np.asanyarray(my_tuple dtype=np.float)<block_end><def_stmt>numpy_fromiter x y# Calculate distances between each of the points
<block_start><return>np.fromiter((x y) dtype=float count=2)<block_end><def_stmt>numpy_fromiter_tuple my_tuple# Calculate distances between each of the points
<block_start><return>np.fromiter(my_tuple dtype=float count=2)<block_end><def_stmt>numpy_fromiter_np_float x y# Calculate distances between each of the points
<block_start><return>np.fromiter((x y) dtype=np.float count=2)<block_end><def_stmt>numpy_fromiter_np_float_tuple my_tuple# Calculate distances between each of the points
<block_start><return>np.fromiter(my_tuple dtype=np.float count=2)<block_end><def_stmt>numpy_zeros x y# Calculate distances between each of the points
<block_start>a=np.zeros(2 dtype=np.float)<line_sep>a[0]=x<line_sep>a[1]=y<line_sep><return>a<block_end><def_stmt>numpy_ones x y# Calculate distances between each of the points
<block_start>a=np.ones(2 dtype=np.float)<line_sep>a[0]=x<line_sep>a[1]=y<line_sep><return>a<block_end>numpy_array(x y)<line_sep>correct_array=np.array([x y])<def_stmt>test_numpy_array benchmark<block_start>result=benchmark(numpy_array x y)<assert_stmt>np.array_equal(result correct_array)<block_end><def_stmt>test_numpy_array_tuple benchmark<block_start>result=benchmark(numpy_array_tuple (x y))<assert_stmt>np.array_equal(result correct_array)<block_end><def_stmt>test_numpy_asarray benchmark<block_start>result=benchmark(numpy_asarray x y)<assert_stmt>np.array_equal(result correct_array)<block_end><def_stmt>test_numpy_asarray_tuple benchmark<block_start>result=benchmark(numpy_asarray_tuple (x y))<assert_stmt>np.array_equal(result correct_array)<block_end><def_stmt>test_numpy_asanyarray benchmark<block_start>result=benchmark(numpy_asanyarray x y)<assert_stmt>np.array_equal(result correct_array)<block_end><def_stmt>test_numpy_asanyarray_tuple benchmark<block_start>result=benchmark(numpy_asanyarray_tuple (x y))<assert_stmt>np.array_equal(result correct_array)<block_end><def_stmt>test_numpy_fromiter benchmark<block_start>result=benchmark(numpy_fromiter x y)<assert_stmt>np.array_equal(result correct_array)<block_end><def_stmt>test_numpy_fromiter_tuple benchmark<block_start>result=benchmark(numpy_fromiter_tuple (x y))<assert_stmt>np.array_equal(result correct_array)<block_end><def_stmt>test_numpy_fromiter_np_float benchmark<block_start>result=benchmark(numpy_fromiter_np_float x y)<assert_stmt>np.array_equal(result correct_array)<block_end><def_stmt>test_numpy_fromiter_np_float_tuple benchmark<block_start>result=benchmark(numpy_fromiter_np_float_tuple (x y))<assert_stmt>np.array_equal(result correct_array)<block_end><def_stmt>test_numpy_zeros benchmark<block_start>result=benchmark(numpy_zeros x y)<assert_stmt>np.array_equal(result correct_array)<block_end><def_stmt>test_numpy_ones benchmark<block_start>result=benchmark(numpy_ones x y)<assert_stmt>np.array_equal(result 
correct_array)<block_end># Run this file using
# poetry run pytest test/test_benchmark_array_creation.py --benchmark-compare
|
<import_stmt>os<import_stmt>pytest<import_from_stmt>montreal_forced_aligner.command_line.classify_speakers run_classify_speakers<import_from_stmt>montreal_forced_aligner.command_line.mfa parser<def_stmt>test_classify basic_corpus_dir sick_dict_path english_ivector_model generated_dir temp_dir<block_start>output_path=os.path.join(generated_dir 'classify_test')<line_sep>command=['classify_speakers' basic_corpus_dir 'english_ivector' output_path '-t' temp_dir '-q' '--clean' '--debug' '-v' '--disable_mp' '-s' '1']<line_sep>args,unknown=parser.parse_known_args(command)<line_sep>run_classify_speakers(args)<block_end><def_stmt>test_cluster basic_corpus_dir sick_dict_path english_ivector_model generated_dir transcription_acoustic_model transcription_language_model temp_dir<block_start>output_path=os.path.join(generated_dir 'cluster_test')<line_sep>command=['classify_speakers' basic_corpus_dir 'english_ivector' output_path '-t' temp_dir '-q' '--clean' '--debug' '--cluster' '-s' '2' '--disable_mp']<line_sep>args,unknown=parser.parse_known_args(command)<line_sep>run_classify_speakers(args)<block_end> |
"""This problem was asked by Oracle.
Given a binary search tree, find the floor and ceiling of a given integer.
The floor is the highest element in the tree less than or equal to an integer,
while the ceiling is the lowest element in the tree greater than or equal to an integer.
If either value does not exist, return None.
"""<line_sep> |
# Copyright (c) Glow Contributors. See CONTRIBUTORS file.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<import_stmt>numpy<as>np<import_stmt>onnx<import_stmt>tensorflow<as>tf<import_from_stmt>onnx TensorProto helper<import_from_stmt>tensorflow.python.ops gen_audio_ops<as>audio_ops<line_sep># ONNX utility.
<def_stmt>make_init name dtype tensor<block_start><return>helper.make_tensor(name=name data_type=dtype dims=tensor.shape vals=tensor.reshape(tensor.size).tolist() )<block_end># Function to generate AudioSpectrogram ONNX test model.
<def_stmt>gen_spectrogram_onnx_test_model model_path window_count window_size stride magnitude_squared=<true># Tensor sizes.
<block_start>input_length=window_size+(window_count-1)<times>stride<line_sep>fft_length=int(2<power>np.ceil(np.log2(window_size)))<line_sep>input_shape=[1 input_length]<line_sep>spectrogram_length=int(fft_length/2+1)<line_sep>spectrogram_shape=[window_count spectrogram_length]<line_sep># Generate random input data.
np.random.seed(1)<line_sep>input_data=np.random.randn(*input_shape)<line_sep># ----------------------------------------- COMPUTE TensorFlow REFERENCE -------------------------------------------
# Define TensorFlow model.
tf_input=tf.constant(input_data.reshape([input_length 1]) name="input" dtype=tf.float32)<line_sep>tf_spectrogram=audio_ops.audio_spectrogram(tf_input window_size=window_size stride=stride magnitude_squared=magnitude_squared )<line_sep># Run TensorFlow model and get reference output.
<with_stmt>tf.Session()<as>sess<block_start>spectrogram_ref=sess.run(tf_spectrogram)<block_end>spectrogram_ref=np.reshape(spectrogram_ref spectrogram_shape)<line_sep># ---------------------------------------------- NODE DEFINITION --------------------------------------------------
# AudioSpectrogram node definition.
spectrogram_node_def=onnx.helper.make_node("AudioSpectrogram" name="audio_spectrogram" inputs=["input"] outputs=["spectrogram"] window_size=int(window_size) stride=int(stride) magnitude_squared=int(magnitude_squared) )<line_sep># Error node definition.
err_node_def=onnx.helper.make_node("Sub" name="error" inputs=["spectrogram" "spectrogram_ref"] outputs=["spectrogram_err"] )<line_sep># --------------------------------------------- GRAPH DEFINITION --------------------------------------------------
graph_input=list()<line_sep>graph_init=list()<line_sep>graph_output=list()<line_sep># Graph inputs.
graph_input.append(helper.make_tensor_value_info("input" TensorProto.FLOAT input_shape))<line_sep>graph_input.append(helper.make_tensor_value_info("spectrogram_ref" TensorProto.FLOAT spectrogram_shape))<line_sep># Graph initializers.
graph_init.append(make_init("input" TensorProto.FLOAT input_data))<line_sep>graph_init.append(make_init("spectrogram_ref" TensorProto.FLOAT spectrogram_ref))<line_sep># Graph outputs.
graph_output.append(helper.make_tensor_value_info("spectrogram_err" TensorProto.FLOAT spectrogram_shape))<line_sep># Graph name.
graph_name="audio_spectrogram_test"<line_sep># Define graph (GraphProto).
graph_def=helper.make_graph([spectrogram_node_def err_node_def] graph_name inputs=graph_input outputs=graph_output )<line_sep># Set initializers.
graph_def.initializer.extend(graph_init)<line_sep># --------------------------------------------- MODEL DEFINITION --------------------------------------------------
# Define model (ModelProto).
model_def=helper.make_model(graph_def producer_name="onnx-audio-spectrogram")<line_sep># Print model.
<with_stmt>open(model_path "w")<as>f<block_start>f.write(str(model_def))<block_end><block_end># One window spectrogram.
gen_spectrogram_onnx_test_model(model_path="audioSpectrogramOneWindow.onnxtxt" window_count=1 window_size=512 stride=256 magnitude_squared=<true> )<line_sep># Two window spectrogram.
gen_spectrogram_onnx_test_model(model_path="audioSpectrogramTwoWindow.onnxtxt" window_count=2 window_size=640 stride=320 magnitude_squared=<true> )<line_sep># Magnitude non-squared.
gen_spectrogram_onnx_test_model(model_path="audioSpectrogramNonSquared.onnxtxt" window_count=1 window_size=640 stride=320 magnitude_squared=<false> )<line_sep> |
#-----------------------------------------------------------------------------
# This file is part of the 'SLAC Firmware Standard Library'. It is subject to
# the license terms in the LICENSE.txt file found in the top-level directory
# of this distribution and at:
# https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html.
# No part of the 'SLAC Firmware Standard Library', including this file, may be
# copied, modified, propagated, or distributed except according to the terms
# contained in the LICENSE.txt file.
#-----------------------------------------------------------------------------
<import_stmt>pyrogue<as>pr<import_stmt>surf.protocols.i2c<class_stmt>UCD92xx(surf.protocols.i2c.PMBus)<block_start><def_stmt>__init__ self **kwargs<block_start>super().__init__(**kwargs)<line_sep>self.add(pr.LinkVariable(name='VIN' mode='RO' units='V' disp='{:1.3f}' linkedGet=self.getPMbusLinearDataFormat11Bit dependencies=[self.READ_VIN] ))<line_sep>self.add(pr.LinkVariable(name='IIN' mode='RO' units='A' disp='{:1.3f}' linkedGet=self.getPMbusLinearDataFormat11Bit dependencies=[self.READ_IIN] ))<line_sep>self.add(pr.LinkVariable(name='VOUT' mode='RO' units='V' disp='{:1.3f}' linkedGet=surf.protocols.i2c.getPMbusLinearDataFormat dependencies=[self.READ_VIN] ))<line_sep>self.add(pr.LinkVariable(name='IOUT' mode='RO' units='A' disp='{:1.3f}' linkedGet=self.getPMbusLinearDataFormat11Bit dependencies=[self.READ_IOUT] ))<line_sep>self.add(pr.LinkVariable(name='TEMPERATURE[1]' mode='RO' units='degC' disp='{:1.3f}' linkedGet=self.getPMbusLinearDataFormat11Bit dependencies=[self.READ_TEMPERATURE_1] ))<line_sep>self.add(pr.LinkVariable(name='TEMPERATURE[2]' mode='RO' units='degC' disp='{:1.3f}' linkedGet=self.getPMbusLinearDataFormat11Bit dependencies=[self.READ_TEMPERATURE_2] ))<line_sep>self.add(pr.LinkVariable(name='FAN_SPEED[1]' mode='RO' units='RPM' disp='{:1.3f}' linkedGet=self.getPMbusLinearDataFormat11Bit dependencies=[self.READ_FAN_SPEED_1] ))<line_sep>self.add(pr.LinkVariable(name='DUTY_CYCLE' mode='RO' units='%' disp='{:1.3f}' linkedGet=self.getPMbusLinearDataFormat11Bit dependencies=[self.READ_DUTY_CYCLE] ))<line_sep>self.add(pr.LinkVariable(name='POUT' mode='RO' units='W' disp='{:1.3f}' linkedGet=self.getPMbusLinearDataFormat11Bit dependencies=[self.READ_POUT] ))<line_sep>self.add(pr.LinkVariable(name='PIN' mode='RO' units='W' disp='{:1.3f}' linkedGet=self.getPMbusLinearDataFormat11Bit dependencies=[self.READ_PIN] ))<block_end>@staticmethod<def_stmt>getPMbusLinearDataFormat11Bit var# Get the 16-bt RAW value
<block_start>raw=var.dependencies[0].value()<line_sep># V is a 16-bit unsigned binary integer mantissa,
V=1.0<times>raw<line_sep># The exponent is reported in the bottom 5 bits of the VOUT_MODE parameter.
# In the UCD92xx, this exponent is a read-only parameter
# whose value is fixed at –12. This allows setting voltage-related variables
# over a range from 0 to 15.9997V, with a resolution of 0.244mV.
X=-12.0<line_sep><return>V<times>(2<power>X)<block_end><block_end> |
<import_stmt>unittest<import_stmt>numpy<as>np<import_from_stmt>numpy array<import_from_stmt>bruges.models reconcile interpolate panel<import_from_stmt>bruges.models wedge<class_stmt>ModelTest(unittest.TestCase)<block_start>"""
Tests models.
"""<def_stmt>test_reconcile self<block_start>a=np.array([2 6 7 7 3])<line_sep>b=np.array([3 7 3])<line_sep>A,B=reconcile(a b order=0)<line_sep>A_,B_=array([2 6 7 7 3]) array([3 7 7 3 3])<line_sep>self.assertTrue(np.array_equal(A A_))<line_sep>self.assertTrue(np.array_equal(B B_))<block_end><def_stmt>test_interpolate self<block_start>a=np.array([2 6 7 7 3])<line_sep>b=np.array([3 7 7 3 3])<line_sep>interp=interpolate(a b num=10)<line_sep>self.assertTrue(interp.shape<eq>(5 10))<block_end><def_stmt>test_panel self<block_start>a=np.array([2 6 7 7 3])<line_sep>b=np.array([3 7 3])<line_sep>dists=(10 )<line_sep>out=panel(a b num=15 dists=dists)<line_sep>sample=out[: 7]<line_sep>self.assertTrue(np.all(sample[:4]<eq>array([2.5 6.5 5. 3.])))<line_sep>self.assertTrue(np.isnan(sample[-1]))<block_end><def_stmt>test_wedge self<block_start>w,top,base,ref=wedge(depth=10 width=7 strat=(10 (20 30) 40))<line_sep>col=array([10 10 10 20 20 30 40 40 40 40])<line_sep>t=array([3. 3. 3. 3. 3. 3. 3.])<line_sep>b=array([3. 3. 3.6 4.2 4.8 5.4 6.])<line_sep>self.assertTrue(np.all(w[: -1]<eq>col))<line_sep>self.assertTrue(w.sum()<eq>1990)<line_sep>self.assertTrue(np.allclose(top t))<line_sep>self.assertTrue(np.allclose(base b))<line_sep>self.assertTrue(ref<eq>6)<block_end><def_stmt>test_netgross self<block_start>w,top,*_=wedge(depth=10 width=7 breadth=3 strat=(10 (20 30) 40))<line_sep>self.assertTrue(w.sum()<eq>6003)<line_sep>self.assertTrue(w.shape<eq>(10 7 3))<line_sep>self.assertTrue(top.sum()<eq>63.0)<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>suite=unittest.TestLoader().loadTestsFromTestCase(ModelTest)<line_sep>unittest.TextTestRunner(verbosity=2).run(suite)<block_end> |
import os

import numpy as np
import tensorflow as tf

from depth.self_supervised_sfm.utils import readlines

AUTOTUNE = tf.data.experimental.AUTOTUNE

########################
# Constants
#########################
# Normalized KITTI camera intrinsics: fx is a fraction of image width,
# fy a fraction of image height, principal point at the image center.
# NOTE: ``np.float`` was removed in NumPy 1.24 -- the builtin ``float``
# (i.e. float64) is the documented replacement.
KITTI_K = np.array([[0.58, 0, 0.5, 0],  # fx/width
                    [0, 1.92, 0.5, 0],
                    [0, 0, 1, 0],
                    [0, 0, 0, 1]], dtype=float)


class KittiSFMDataset:
    """tf.data pipeline over KITTI raw sequences for self-supervised SfM.

    Builds triplets of (current, previous, next) frame paths from a split
    file and exposes them as a batched ``tf.data.Dataset``.

    Args:
        dataset_dir (str): Root directory of the KITTI raw dataset.
        load_option (str): Which split file to read, e.g. ``"train"``.
        img_size (tuple): Target ``(height, width)`` for resizing.
        batch_size (int): Batch size (remainder batches are dropped).
        split (str): Split family; only ``'eigen_zhou'`` is implemented.
        frame_idx (tuple): Temporal offsets to load relative to each sample.
    """

    def __init__(self,
                 dataset_dir,
                 load_option,
                 img_size,
                 batch_size,
                 split='eigen_zhou',
                 frame_idx=(0, -1, 1)):
        self.h, self.w = img_size
        self.split = split
        self.batch_size = batch_size
        self.load_option = load_option
        self.dataset_dir = dataset_dir
        self.frame_idx = frame_idx
        self.side_map = {"2": 2, "3": 3, "l": 2, "r": 3}  # Correspond to image folder

        # Check that the folder exists
        assert os.path.exists(dataset_dir) and os.path.isdir(dataset_dir), \
            f"Dataset {dataset_dir} does not exist !"

        if self.split == 'eigen_zhou':
            filename = os.path.join('splits', f'eigen_zhou/{load_option}_files.txt')
        else:
            raise NotImplementedError

        # Fix: the f-string previously held a corrupted placeholder instead
        # of the actual split-file path.
        print(f'Loading from: {filename}')
        data_paths = readlines(filename)

        self.img_paths = []
        for i, line in enumerate(data_paths):
            # Image files: each split line is "<folder> <frame_index> <side>"
            folder, frame_idx, side = line.split()
            per_sample_imgs = []
            # Load sequence img: one path per temporal offset in self.frame_idx
            for t in self.frame_idx:
                f_str = f"{int(frame_idx) + t:010d}"
                image_path = os.path.join(
                    dataset_dir, folder,
                    f"image_0{self.side_map[side]}/data", f_str + '.png')
                per_sample_imgs.append(image_path)
            self.img_paths.append(per_sample_imgs)

        print(f'Total Images for {load_option}: {len(self.img_paths)}')
        self.num_samples = len(self.img_paths)

    def load_tfdataset(self):
        """Build the batched, shuffled, prefetched ``tf.data.Dataset``.

        Each element is a dict with keys ``'K'``, ``'K_inv'``, ``'img'``
        (current frame), ``'img-1'`` (previous) and ``'img1'`` (next).
        """
        inputs = {}
        # Intrinsics: scale the normalized matrix up to pixel units.
        intrinsic = KITTI_K.copy()
        intrinsic[0, :] *= self.w
        intrinsic[1, :] *= self.h
        inputs['K'] = tf.convert_to_tensor(intrinsic, tf.float32)
        inputs['K_inv'] = tf.linalg.inv(inputs['K'])

        dataset = tf.data.Dataset.from_tensor_slices(self.img_paths)
        dataset = dataset.shuffle(self.num_samples)

        # Load data
        def load_sample(img_paths):
            # load the raw data from the file as a string
            image_cur = tf.io.read_file(img_paths[0])
            image_prev = tf.io.read_file(img_paths[1])
            image_next = tf.io.read_file(img_paths[2])
            image_cur = tf.image.decode_png(image_cur)
            image_prev = tf.image.decode_png(image_prev)
            image_next = tf.image.decode_png(image_next)
            # Resize and normalize to [0, 1].
            image_cur = tf.cast(tf.image.resize(image_cur, [self.h, self.w]), tf.float32) / 255.
            image_prev = tf.cast(tf.image.resize(image_prev, [self.h, self.w]), tf.float32) / 255.
            image_next = tf.cast(tf.image.resize(image_next, [self.h, self.w]), tf.float32) / 255.
            # Random horizontal flip, applied consistently to the whole
            # triplet, as training-time augmentation only.
            if self.load_option == "train":
                if tf.random.uniform(()) > 0.5:
                    image_cur = tf.image.flip_left_right(image_cur)
                    image_prev = tf.image.flip_left_right(image_prev)
                    image_next = tf.image.flip_left_right(image_next)
            inputs['img'] = image_cur
            inputs['img-1'] = image_prev
            inputs['img1'] = image_next
            return inputs

        dataset = dataset.map(load_sample, num_parallel_calls=AUTOTUNE)
        dataset = dataset.batch(self.batch_size, drop_remainder=True)
        dataset = dataset.prefetch(buffer_size=AUTOTUNE)
        return dataset
<import_from_stmt>re A<import_stmt>boto3<import_stmt>botocore<import_stmt>click<import_stmt>configparser<import_from_stmt>csv DictWriter<import_stmt>io<import_stmt>itertools<import_stmt>json<import_stmt>mimetypes<import_stmt>os<import_stmt>re<import_stmt>sys<import_stmt>textwrap<import_from_stmt>. policies<def_stmt>bucket_exists s3 bucket<block_start><try_stmt><block_start>s3.head_bucket(Bucket=bucket)<line_sep><return><true><block_end><except_stmt>botocore.exceptions.ClientError<block_start><return><false><block_end><block_end><def_stmt>user_exists iam username<block_start><try_stmt><block_start>iam.get_user(UserName=username)<line_sep><return><true><block_end><except_stmt>iam.exceptions.NoSuchEntityException<block_start><return><false><block_end><block_end><def_stmt>common_boto3_options fn<block_start><for_stmt>decorator reversed((click.option("--access-key" help="AWS access key ID" ) click.option("--secret-key" help="AWS secret access key" ) click.option("--session-token" help="AWS session token" ) click.option("--endpoint-url" help="Custom endpoint URL" ) click.option("-a" "--auth" type=click.File("r") help="Path to JSON/INI file containing credentials" ) ))<block_start>fn=decorator(fn)<block_end><return>fn<block_end><def_stmt>common_output_options fn<block_start><for_stmt>decorator reversed((click.option("--nl" help="Output newline-delimited JSON" is_flag=<true>) click.option("--csv" help="Output CSV" is_flag=<true>) click.option("--tsv" help="Output TSV" is_flag=<true>) ))<block_start>fn=decorator(fn)<block_end><return>fn<block_end>@click.group()@click.version_option()<def_stmt>cli <block_start>"A tool for creating credentials for accessing S3 buckets"<block_end><class_stmt>PolicyParam(click.ParamType)<block_start>"Returns string of guaranteed well-formed JSON"<line_sep>name="policy"<def_stmt>convert self policy param ctx<block_start><if_stmt>policy.strip().startswith("{")# Verify policy string is valid JSON
<block_start><try_stmt><block_start>json.loads(policy)<block_end><except_stmt>ValueError<block_start>self.fail("Invalid JSON string")<block_end><return>policy<block_end><else_stmt># Assume policy is a file path or '-'
<block_start><try_stmt><block_start><with_stmt>click.open_file(policy)<as>f<block_start>contents=f.read()<try_stmt><block_start>json.loads(contents)<line_sep><return>contents<block_end><except_stmt>ValueError<block_start>self.fail("{} contained invalid JSON".format("Input"<if>policy<eq>"-"<else>"File"))<block_end><block_end><block_end><except_stmt>FileNotFoundError<block_start>self.fail("File not found")<block_end><block_end><block_end><block_end><class_stmt>DurationParam(click.ParamType)<block_start>name="duration"<line_sep>pattern=re.compile(r"^(\d+)(m|h|s)?$")<def_stmt>convert self value param ctx<block_start>match=self.pattern.match(value)<if_stmt>match<is><none><block_start>self.fail("Duration must be of form 3600s or 15m or 2h")<block_end>integer_string,suffix=match.groups()<line_sep>integer=int(integer_string)<if_stmt>suffix<eq>"m"<block_start>integer<augmul>60<block_end><elif_stmt>suffix<eq>"h"<block_start>integer<augmul>3600<block_end># Must be between 15 minutes and 12 hours
<if_stmt><not>(15<times>60<le>integer<le>12<times>60<times>60)<block_start>self.fail("Duration must be between 15 minutes and 12 hours")<block_end><return>integer<block_end><block_end>@cli.command()@click.argument("buckets" nargs=-1 required=<true> )@click.option("--read-only" help="Only allow reading from the bucket" is_flag=<true>)@click.option("--write-only" help="Only allow writing to the bucket" is_flag=<true>)@click.option("--prefix" help="Restrict to keys starting with this prefix" default="*")@click.option("--public-bucket" help="Bucket policy for allowing public access" is_flag=<true> )<def_stmt>policy buckets read_only write_only prefix public_bucket<block_start>"""
Output generated JSON policy for one or more buckets
Takes the same options as s3-credentials create
To output a read-only JSON policy for a bucket:
s3-credentials policy my-bucket --read-only
"""<line_sep>"Generate JSON policy for one or more buckets"<if_stmt>public_bucket<block_start><if_stmt>len(buckets)<ne>1<block_start><raise>click.ClickException("--public-bucket policy can only be generated for a single bucket")<block_end>click.echo(json.dumps(policies.bucket_policy_allow_all_get(buckets[0]) indent=4))<line_sep><return><block_end>permission="read-write"<if_stmt>read_only<block_start>permission="read-only"<block_end><if_stmt>write_only<block_start>permission="write-only"<block_end>statements=[]<if_stmt>permission<eq>"read-write"<block_start><for_stmt>bucket buckets<block_start>statements.extend(policies.read_write_statements(bucket prefix))<block_end><block_end><elif_stmt>permission<eq>"read-only"<block_start><for_stmt>bucket buckets<block_start>statements.extend(policies.read_only_statements(bucket prefix))<block_end><block_end><elif_stmt>permission<eq>"write-only"<block_start><for_stmt>bucket buckets<block_start>statements.extend(policies.write_only_statements(bucket prefix))<block_end><block_end><else_stmt><block_start><assert_stmt><false> "Unknown permission: {}".format(permission)<block_end>bucket_access_policy=policies.wrap_policy(statements)<line_sep>click.echo(json.dumps(bucket_access_policy indent=4))<block_end>@cli.command()@click.argument("buckets" nargs=-1 required=<true> )@click.option("format_" "-f" "--format" type=click.Choice(["ini" "json"]) default="json" help="Output format for credentials" )@click.option("-d" "--duration" type=DurationParam() help="How long should these credentials work for? 
Default is forever, use 3600 for 3600 seconds, 15m for 15 minutes, 1h for 1 hour" )@click.option("--username" help="Username to create or existing user to use")@click.option("-c" "--create-bucket" help="Create buckets if they do not already exist" is_flag=<true> )@click.option("--prefix" help="Restrict to keys starting with this prefix" default="*")@click.option("--public" help="Make the created bucket public: anyone will be able to download files if they know their name" is_flag=<true> )@click.option("--read-only" help="Only allow reading from the bucket" is_flag=<true>)@click.option("--write-only" help="Only allow writing to the bucket" is_flag=<true>)@click.option("--policy" type=PolicyParam() help="Path to a policy.json file, or literal JSON string - $!BUCKET_NAME!$ will be replaced with the name of the bucket" )@click.option("--bucket-region" help="Region in which to create buckets")@click.option("--silent" help="Don't show performed steps" is_flag=<true>)@click.option("--dry-run" help="Show steps without executing them" is_flag=<true>)@click.option("--user-permissions-boundary" help=("Custom permissions boundary to use for created users, or 'none' to "<concat>"create without. Defaults to limiting to S3 based on "<concat>"--read-only and --write-only options.") )@common_boto3_options<def_stmt>create buckets format_ duration username create_bucket prefix public read_only write_only policy bucket_region user_permissions_boundary silent dry_run **boto_options<block_start>"""
Create and return new AWS credentials for specified S3 buckets - optionally
also creating the bucket if it does not yet exist.
To create a new bucket and output read-write credentials:
s3-credentials create my-new-bucket -c
To create read-only credentials for an existing bucket:
s3-credentials create my-existing-bucket --read-only
To create write-only credentials that are only valid for 15 minutes:
s3-credentials create my-existing-bucket --write-only -d 15m
"""<if_stmt>read_only<and>write_only<block_start><raise>click.ClickException("Cannot use --read-only and --write-only at the same time")<block_end><def_stmt>log message<block_start><if_stmt><not>silent<block_start>click.echo(message err=<true>)<block_end><block_end>permission="read-write"<if_stmt>read_only<block_start>permission="read-only"<block_end><if_stmt>write_only<block_start>permission="write-only"<block_end>s3=<none><line_sep>iam=<none><line_sep>sts=<none><if_stmt><not>dry_run<block_start>s3=make_client("s3" **boto_options)<line_sep>iam=make_client("iam" **boto_options)<line_sep>sts=make_client("sts" **boto_options)<block_end># Verify buckets
<for_stmt>bucket buckets# Create bucket if it doesn't exist
<block_start><if_stmt>dry_run<or>(<not>bucket_exists(s3 bucket))<block_start><if_stmt>(<not>dry_run)<and>(<not>create_bucket)<block_start><raise>click.ClickException("Bucket does not exist: {} - try --create-bucket to create it".format(bucket))<block_end><if_stmt>dry_run<or>create_bucket<block_start>kwargs={}<if_stmt>bucket_region<block_start>kwargs={"CreateBucketConfiguration":{"LocationConstraint":bucket_region}}<block_end>bucket_policy={}<if_stmt>public<block_start>bucket_policy=policies.bucket_policy_allow_all_get(bucket)<block_end><if_stmt>dry_run<block_start>click.echo("Would create bucket: '{}'{}".format(bucket (" with args {}".format(json.dumps(kwargs indent=4))<if>kwargs<else>"") ))<if_stmt>bucket_policy<block_start>click.echo("... then attach the following bucket policy to it:")<line_sep>click.echo(json.dumps(bucket_policy indent=4))<block_end><block_end><else_stmt><block_start>s3.create_bucket(Bucket=bucket **kwargs)<line_sep>info="Created bucket: {}".format(bucket)<if_stmt>bucket_region<block_start>info<augadd>" in region: {}".format(bucket_region)<block_end>log(info)<if_stmt>bucket_policy<block_start>s3.put_bucket_policy(Bucket=bucket Policy=json.dumps(bucket_policy))<line_sep>log("Attached bucket policy allowing public access")<block_end><block_end><block_end><block_end><block_end># At this point the buckets definitely exist - create the inline policy for assume_role()
assume_role_policy={}<line_sep>bucket_access_policy={}<if_stmt>policy<block_start>assume_role_policy=json.loads(policy.replace("$!BUCKET_NAME!$" bucket))<block_end><else_stmt><block_start>statements=[]<if_stmt>permission<eq>"read-write"<block_start><for_stmt>bucket buckets<block_start>statements.extend(policies.read_write_statements(bucket prefix))<block_end><block_end><elif_stmt>permission<eq>"read-only"<block_start><for_stmt>bucket buckets<block_start>statements.extend(policies.read_only_statements(bucket prefix))<block_end><block_end><elif_stmt>permission<eq>"write-only"<block_start><for_stmt>bucket buckets<block_start>statements.extend(policies.write_only_statements(bucket prefix))<block_end><block_end><else_stmt><block_start><assert_stmt><false> "Unknown permission: {}".format(permission)<block_end>assume_role_policy=policies.wrap_policy(statements)<block_end><if_stmt>duration# We're going to use sts.assume_role() rather than creating a user
<block_start><if_stmt>dry_run<block_start>click.echo("Would ensure role: 's3-credentials.AmazonS3FullAccess'")<line_sep>click.echo("Would assume role using following policy for {} seconds:".format(duration))<line_sep>click.echo(json.dumps(assume_role_policy indent=4))<block_end><else_stmt><block_start>s3_role_arn=ensure_s3_role_exists(iam sts)<line_sep>log("Assume role against {} for {}s".format(s3_role_arn duration))<line_sep>credentials_response=sts.assume_role(RoleArn=s3_role_arn RoleSessionName="s3.{permission}.{buckets}".format(permission="custom"<if>policy<else>permission buckets=",".join(buckets) ) Policy=json.dumps(assume_role_policy) DurationSeconds=duration )<if_stmt>format_<eq>"ini"<block_start>click.echo(("[default]\naws_access_key_id={}\n"<concat>"aws_secret_access_key={}\naws_session_token={}").format(credentials_response["Credentials"]["AccessKeyId"] credentials_response["Credentials"]["SecretAccessKey"] credentials_response["Credentials"]["SessionToken"] ))<block_end><else_stmt><block_start>click.echo(json.dumps(credentials_response["Credentials"] indent=4 default=str))<block_end><block_end><return><block_end># No duration, so wo create a new user so we can issue non-expiring credentials
<if_stmt><not>username# Default username is "s3.read-write.bucket1,bucket2"
<block_start>username="s3.{permission}.{buckets}".format(permission="custom"<if>policy<else>permission buckets=",".join(buckets))<block_end><if_stmt>dry_run<or>(<not>user_exists(iam username))<block_start>kwargs={"UserName":username}<if_stmt>user_permissions_boundary<ne>"none"# This is a user-account level limitation, it does not grant
# permissions on its own but is a useful extra level of defense
# https://github.com/simonw/s3-credentials/issues/1#issuecomment-958201717
<block_start><if_stmt><not>user_permissions_boundary# Pick one based on --read-only/--write-only
<block_start><if_stmt>read_only<block_start>user_permissions_boundary=("arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess")<block_end><else_stmt># Need full access in order to be able to write
<block_start>user_permissions_boundary=("arn:aws:iam::aws:policy/AmazonS3FullAccess")<block_end><block_end>kwargs["PermissionsBoundary"]=user_permissions_boundary<block_end>info=" user: '{}'".format(username)<if_stmt>user_permissions_boundary<ne>"none"<block_start>info<augadd>" with permissions boundary: '{}'".format(user_permissions_boundary)<block_end><if_stmt>dry_run<block_start>click.echo("Would create{}".format(info))<block_end><else_stmt><block_start>iam.create_user(**kwargs)<line_sep>log("Created {}".format(info))<block_end><block_end># Add inline policies to the user so they can access the buckets
user_policy={}<for_stmt>bucket buckets<block_start>policy_name="s3.{permission}.{bucket}".format(permission="custom"<if>policy<else>permission bucket=bucket )<if_stmt>policy<block_start>user_policy=json.loads(policy.replace("$!BUCKET_NAME!$" bucket))<block_end><else_stmt><block_start><if_stmt>permission<eq>"read-write"<block_start>user_policy=policies.read_write(bucket prefix)<block_end><elif_stmt>permission<eq>"read-only"<block_start>user_policy=policies.read_only(bucket prefix)<block_end><elif_stmt>permission<eq>"write-only"<block_start>user_policy=policies.write_only(bucket prefix)<block_end><else_stmt><block_start><assert_stmt><false> "Unknown permission: {}".format(permission)<block_end><block_end><if_stmt>dry_run<block_start>click.echo("Would attach policy called '{}' to user '{}', details:\n{}".format(policy_name username json.dumps(user_policy indent=4) ))<block_end><else_stmt><block_start>iam.put_user_policy(PolicyDocument=json.dumps(user_policy) PolicyName=policy_name UserName=username )<line_sep>log("Attached policy {} to user {}".format(policy_name username))<block_end><block_end># Retrieve and print out the credentials
<if_stmt>dry_run<block_start>click.echo("Would call create access key for user '{}'".format(username))<block_end><else_stmt><block_start>response=iam.create_access_key(UserName=username )<line_sep>log("Created access key for user: {}".format(username))<if_stmt>format_<eq>"ini"<block_start>click.echo(("[default]\naws_access_key_id={}\n"<concat>"aws_secret_access_key={}").format(response["AccessKey"]["AccessKeyId"] response["AccessKey"]["SecretAccessKey"] ))<block_end><elif_stmt>format_<eq>"json"<block_start>click.echo(json.dumps(response["AccessKey"] indent=4 default=str))<block_end><block_end><block_end>@cli.command()@common_boto3_options<def_stmt>whoami **boto_options<block_start>"Identify currently authenticated user"<line_sep>sts=make_client("sts" **boto_options)<line_sep>identity=sts.get_caller_identity()<line_sep>identity.pop("ResponseMetadata")<line_sep>click.echo(json.dumps(identity indent=4 default=str))<block_end>@cli.command()@common_output_options@common_boto3_options<def_stmt>list_users nl csv tsv **boto_options<block_start>"""
List all users for this account
s3-credentials list-users
Add --csv or --csv for CSV or TSV format:
s3-credentials list-users --csv
"""<line_sep>iam=make_client("iam" **boto_options)<line_sep>output(paginate(iam "list_users" "Users") ("UserName" "UserId" "Arn" "Path" "CreateDate" "PasswordLast<PASSWORD>" "PermissionsBoundary" "Tags" ) nl csv tsv )<block_end>@cli.command()@click.argument("role_names" nargs=-1)@click.option("--details" help="Include attached policies (slower)" is_flag=<true>)@common_output_options@common_boto3_options<def_stmt>list_roles role_names details nl csv tsv **boto_options<block_start>"""
List roles
To list all roles for this AWS account:
s3-credentials list-roles
Add --csv or --csv for CSV or TSV format:
s3-credentials list-roles --csv
For extra details per role (much slower) add --details
s3-credentials list-roles --details
"""<line_sep>iam=make_client("iam" **boto_options)<line_sep>headers=("Path" "RoleName" "RoleId" "Arn" "CreateDate" "AssumeRolePolicyDocument" "Description" "MaxSessionDuration" "PermissionsBoundary" "Tags" "RoleLastUsed" )<if_stmt>details<block_start>headers<augadd>("inline_policies" "attached_policies")<block_end><def_stmt>iterate <block_start><for_stmt>role paginate(iam "list_roles" "Roles")<block_start><if_stmt>role_names<and>role["RoleName"]<not><in>role_names<block_start><continue><block_end><if_stmt>details<block_start>role_name=role["RoleName"]<line_sep>role["inline_policies"]=[]<line_sep># Get inline policy names, then policy for each one
<for_stmt>policy_name paginate(iam "list_role_policies" "PolicyNames" RoleName=role_name)<block_start>role_policy_response=iam.get_role_policy(RoleName=role_name PolicyName=policy_name )<line_sep>role_policy_response.pop("ResponseMetadata" <none>)<line_sep>role["inline_policies"].append(role_policy_response)<block_end># Get attached managed policies
role["attached_policies"]=[]<for_stmt>attached paginate(iam "list_attached_role_policies" "AttachedPolicies" RoleName=role_name )<block_start>policy_arn=attached["PolicyArn"]<line_sep>attached_policy_response=iam.get_policy(PolicyArn=policy_arn )<line_sep>policy_details=attached_policy_response["Policy"]<line_sep># Also need to fetch the policy JSON
version_id=policy_details["DefaultVersionId"]<line_sep>policy_version_response=iam.get_policy_version(PolicyArn=policy_arn VersionId=version_id )<line_sep>policy_details["PolicyVersion"]=policy_version_response["PolicyVersion"]<line_sep>role["attached_policies"].append(policy_details)<block_end><block_end><yield>role<block_end><block_end>output(iterate() headers nl csv tsv)<block_end>@cli.command()@click.argument("usernames" nargs=-1)@common_boto3_options<def_stmt>list_user_policies usernames **boto_options<block_start>"""
List inline policies for specified users
s3-credentials list-user-policies username
Returns policies for all users if no usernames are provided.
"""<line_sep>iam=make_client("iam" **boto_options)<if_stmt><not>usernames<block_start>usernames=[user["UserName"]<for>user paginate(iam "list_users" "Users")]<block_end><for_stmt>username usernames<block_start>click.echo("User: {}".format(username))<for_stmt>policy_name paginate(iam "list_user_policies" "PolicyNames" UserName=username)<block_start>click.echo("PolicyName: {}".format(policy_name))<line_sep>policy_response=iam.get_user_policy(UserName=username PolicyName=policy_name)<line_sep>click.echo(json.dumps(policy_response["PolicyDocument"] indent=4 default=str))<block_end><block_end><block_end>@cli.command()@click.argument("buckets" nargs=-1)@click.option("--details" help="Include extra bucket details (slower)" is_flag=<true>)@common_output_options@common_boto3_options<def_stmt>list_buckets buckets details nl csv tsv **boto_options<block_start>"""
List buckets
To list all buckets and their creation time as JSON:
s3-credentials list-buckets
Add --csv or --csv for CSV or TSV format:
s3-credentials list-buckets --csv
For extra details per bucket (much slower) add --details
s3-credentials list-buckets --details
"""<line_sep>s3=make_client("s3" **boto_options)<line_sep>headers=["Name" "CreationDate"]<if_stmt>details<block_start>headers<augadd>["bucket_acl" "public_access_block" "bucket_website"]<block_end><def_stmt>iterator <block_start><for_stmt>bucket s3.list_buckets()["Buckets"]<block_start><if_stmt>buckets<and>(bucket["Name"]<not><in>buckets)<block_start><continue><block_end><if_stmt>details<block_start>bucket_acl=dict((key value)<for>key,value s3.get_bucket_acl(Bucket=bucket["Name"] ).items()<if>key<ne>"ResponseMetadata")<try_stmt><block_start>pab=s3.get_public_access_block(Bucket=bucket["Name"] )["PublicAccessBlockConfiguration"]<block_end><except_stmt>s3.exceptions.ClientError<block_start>pab=<none><block_end><try_stmt><block_start>bucket_website=dict((key value)<for>key,value s3.get_bucket_website(Bucket=bucket["Name"] ).items()<if>key<ne>"ResponseMetadata")<block_end><except_stmt>s3.exceptions.ClientError<block_start>bucket_website=<none><block_end>bucket["bucket_acl"]=bucket_acl<line_sep>bucket["public_access_block"]=pab<line_sep>bucket["bucket_website"]=bucket_website<block_end><yield>bucket<block_end><block_end>output(iterator() headers nl csv tsv)<block_end>@cli.command()@click.argument("usernames" nargs=-1 required=<true>)@common_boto3_options<def_stmt>delete_user usernames **boto_options<block_start>"""
Delete specified users, their access keys and their inline policies
s3-credentials delete-user username1 username2
"""<line_sep>iam=make_client("iam" **boto_options)<for_stmt>username usernames<block_start>click.echo("User: {}".format(username))<line_sep># Fetch and delete their policies
policy_names_to_delete=list(paginate(iam "list_user_policies" "PolicyNames" UserName=username))<for_stmt>policy_name policy_names_to_delete<block_start>iam.delete_user_policy(UserName=username PolicyName=policy_name )<line_sep>click.echo(" Deleted policy: {}".format(policy_name))<block_end># Fetch and delete their access keys
access_key_ids_to_delete=[access_key["AccessKeyId"]<for>access_key paginate(iam "list_access_keys" "AccessKeyMetadata" UserName=username)]<for_stmt>access_key_id access_key_ids_to_delete<block_start>iam.delete_access_key(UserName=username AccessKeyId=access_key_id )<line_sep>click.echo(" Deleted access key: {}".format(access_key_id))<block_end>iam.delete_user(UserName=username)<line_sep>click.echo(" Deleted user")<block_end><block_end><def_stmt>make_client service access_key secret_key session_token endpoint_url auth<block_start><if_stmt>auth<block_start><if_stmt>access_key<or>secret_key<or>session_token<block_start><raise>click.ClickException("--auth cannot be used with --access-key, --secret-key or --session-token")<block_end>auth_content=auth.read().strip()<if_stmt>auth_content.startswith("{")# Treat as JSON
<block_start>decoded=json.loads(auth_content)<line_sep>access_key=decoded.get("AccessKeyId")<line_sep>secret_key=decoded.get("SecretAccessKey")<line_sep>session_token=decoded.get("SessionToken")<block_end><else_stmt># Treat as INI
<block_start>config=configparser.ConfigParser()<line_sep>config.read_string(auth_content)<line_sep># Use the first section that has an aws_access_key_id
<for_stmt>section config.sections()<block_start><if_stmt>"aws_access_key_id"<in>config[section]<block_start>access_key=config[section].get("aws_access_key_id")<line_sep>secret_key=config[section].get("aws_secret_access_key")<line_sep>session_token=config[section].get("aws_session_token")<line_sep><break><block_end><block_end><block_end><block_end>kwargs={}<if_stmt>access_key<block_start>kwargs["aws_access_key_id"]=access_key<block_end><if_stmt>secret_key<block_start>kwargs["aws_secret_access_key"]=secret_key<block_end><if_stmt>session_token<block_start>kwargs["aws_session_token"]=session_token<block_end><if_stmt>endpoint_url<block_start>kwargs["endpoint_url"]=endpoint_url<block_end><return>boto3.client(service **kwargs)<block_end><def_stmt>ensure_s3_role_exists iam sts<block_start>"Create s3-credentials.AmazonS3FullAccess role if not exists, return ARN"<line_sep>role_name="s3-credentials.AmazonS3FullAccess"<line_sep>account_id=sts.get_caller_identity()["Account"]<try_stmt><block_start>role=iam.get_role(RoleName=role_name)<line_sep><return>role["Role"]["Arn"]<block_end><except_stmt>iam.exceptions.NoSuchEntityException<block_start>create_role_response=iam.create_role(Description=("Role used by the s3-credentials tool to create time-limited "<concat>"credentials that are restricted to specific buckets") RoleName=role_name AssumeRolePolicyDocument=json.dumps({"Version":"2012-10-17" "Statement":[{"Effect":"Allow" "Principal":{"AWS":"arn:aws:iam::{}:root".format(account_id)} "Action":"sts:AssumeRole" }] }) )<line_sep># Attach AmazonS3FullAccess to it - note that even though we use full access
# on the role itself any time we call sts.assume_role() we attach an additional
# policy to ensure reduced access for the temporary credentials
iam.attach_role_policy(RoleName="s3-credentials.AmazonS3FullAccess" PolicyArn="arn:aws:iam::aws:policy/AmazonS3FullAccess" )<line_sep><return>create_role_response["Role"]["Arn"]<block_end><block_end>@cli.command()@click.argument("bucket")@click.option("--prefix" help="List keys starting with this prefix")@common_output_options@common_boto3_options<def_stmt>list_bucket bucket prefix nl csv tsv **boto_options<block_start>"""
List contents of bucket
To list the contents of a bucket as JSON:
s3-credentials list-bucket my-bucket
Add --csv or --csv for CSV or TSV format:
s3-credentials list-bucket my-bucket --csv
"""<line_sep>s3=make_client("s3" **boto_options)<line_sep>kwargs={"Bucket":bucket}<if_stmt>prefix<block_start>kwargs["Prefix"]=prefix<block_end><try_stmt><block_start>output(paginate(s3 "list_objects_v2" "Contents" **kwargs) ("Key" "LastModified" "ETag" "Size" "StorageClass" "Owner") nl csv tsv )<block_end><except_stmt>botocore.exceptions.ClientError<as>e<block_start><raise>click.ClickException(e)<block_end><block_end>@cli.command()@click.argument("bucket")@click.argument("key")@click.argument("path" type=click.Path(exists=<true> file_okay=<true> dir_okay=<false> readable=<true> allow_dash=<true>) )@click.option("--content-type" help="Content-Type to use (default is auto-detected based on file extension)" )@click.option("silent" "-s" "--silent" is_flag=<true> help="Don't show progress bar")@common_boto3_options<def_stmt>put_object bucket key path content_type silent **boto_options<block_start>"""
Upload an object to an S3 bucket
To upload a file to /my-key.txt in the my-bucket bucket:
s3-credentials put-object my-bucket my-key.txt /path/to/file.txt
Use - to upload content from standard input:
echo "Hello" | s3-credentials put-object my-bucket hello.txt -
"""<line_sep>s3=make_client("s3" **boto_options)<line_sep>size=<none><line_sep>extra_args={}<if_stmt>path<eq>"-"# boto needs to be able to seek
<block_start>fp=io.BytesIO(sys.stdin.buffer.read())<if_stmt><not>silent<block_start>size=fp.getbuffer().nbytes<block_end><block_end><else_stmt><block_start><if_stmt><not>content_type<block_start>content_type=mimetypes.guess_type(path)[0]<block_end>fp=click.open_file(path "rb")<if_stmt><not>silent<block_start>size=os.path.getsize(path)<block_end><block_end><if_stmt>content_type<is><not><none><block_start>extra_args["ContentType"]=content_type<block_end><if_stmt><not>silent# Show progress bar
<block_start><with_stmt>click.progressbar(length=size label="Uploading")<as>bar<block_start>s3.upload_fileobj(fp bucket key Callback=bar.update ExtraArgs=extra_args)<block_end><block_end><else_stmt><block_start>s3.upload_fileobj(fp bucket key ExtraArgs=extra_args)<block_end><block_end>@cli.command()@click.argument("bucket")@click.argument("key")@click.option("output" "-o" "--output" type=click.Path(file_okay=<true> dir_okay=<false> writable=<true> allow_dash=<false>) help="Write to this file instead of stdout" )@common_boto3_options<def_stmt>get_object bucket key output **boto_options<block_start>"""
Download an object from an S3 bucket
To see the contents of the bucket on standard output:
s3-credentials get-object my-bucket hello.txt
To save to a file:
s3-credentials get-object my-bucket hello.txt -o hello.txt
"""<line_sep>s3=make_client("s3" **boto_options)<if_stmt><not>output<block_start>fp=sys.stdout.buffer<block_end><else_stmt><block_start>fp=click.open_file(output "wb")<block_end>s3.download_fileobj(bucket key fp)<block_end>@cli.command()@click.argument("bucket")@click.option("allowed_methods" "-m" "--allowed-method" multiple=<true> help="Allowed method e.g. GET" )@click.option("allowed_headers" "-h" "--allowed-header" multiple=<true> help="Allowed header e.g. Authorization" )@click.option("allowed_origins" "-o" "--allowed-origin" multiple=<true> help="Allowed origin e.g. https://www.example.com/" )@click.option("expose_headers" "-e" "--expose-header" multiple=<true> help="Header to expose e.g. ETag" )@click.option("max_age_seconds" "--max-age-seconds" type=int help="How long to cache preflight requests" )@common_boto3_options<def_stmt>set_cors_policy bucket allowed_methods allowed_headers allowed_origins expose_headers max_age_seconds **boto_options<block_start>"""
Set CORS policy for a bucket
To allow GET requests from any origin:
s3-credentials set-cors-policy my-bucket
To allow GET and PUT from a specific origin and expose ETag headers:
\b
s3-credentials set-cors-policy my-bucket \\
--allowed-method GET \\
--allowed-method PUT \\
--allowed-origin https://www.example.com/ \\
--expose-header ETag
"""<line_sep>s3=make_client("s3" **boto_options)<if_stmt><not>bucket_exists(s3 bucket)<block_start><raise>click.ClickException("Bucket {} does not exists".format(bucket))<block_end>cors_rule={"ID":"set-by-s3-credentials" "AllowedOrigins":allowed_origins<or>["*"] "AllowedHeaders":allowed_headers "AllowedMethods":allowed_methods<or>["GET"] "ExposeHeaders":expose_headers }<if_stmt>max_age_seconds<block_start>cors_rule["MaxAgeSeconds"]=max_age_seconds<block_end><try_stmt><block_start>s3.put_bucket_cors(Bucket=bucket CORSConfiguration={"CORSRules":[cors_rule]})<block_end><except_stmt>botocore.exceptions.ClientError<as>e<block_start><raise>click.ClickException(e)<block_end><block_end>@cli.command()@click.argument("bucket")@common_boto3_options<def_stmt>get_cors_policy bucket **boto_options<block_start>"""
Get CORS policy for a bucket
s3-credentials get-cors-policy my-bucket
Returns the CORS policy for this bucket, if set, as JSON
"""<line_sep>s3=make_client("s3" **boto_options)<try_stmt><block_start>response=s3.get_bucket_cors(Bucket=bucket)<block_end><except_stmt>botocore.exceptions.ClientError<as>e<block_start><raise>click.ClickException(e)<block_end>click.echo(json.dumps(response["CORSRules"] indent=4 default=str))<block_end><def_stmt>output iterator headers nl csv tsv<block_start><if_stmt>nl<block_start><for_stmt>item iterator<block_start>click.echo(json.dumps(item default=str))<block_end><block_end><elif_stmt>csv<or>tsv<block_start>writer=DictWriter(sys.stdout headers dialect="excel-tab"<if>tsv<else>"excel")<line_sep>writer.writeheader()<line_sep>writer.writerows(fix_json(row)<for>row iterator)<block_end><else_stmt><block_start><for_stmt>line stream_indented_json(iterator)<block_start>click.echo(line)<block_end><block_end><block_end><def_stmt>stream_indented_json iterator indent=2# We have to iterate two-at-a-time so we can know if we
# should output a trailing comma or if we have reached
# the last item.
<block_start>current_iter,next_iter=itertools.tee(iterator 2)<line_sep>next(next_iter <none>)<line_sep>first=<true><for_stmt>item,next_item itertools.zip_longest(current_iter next_iter)<block_start>is_last=next_item<is><none><line_sep>data=item<line_sep>line="{first}{serialized}{separator}{last}".format(first="[\n"<if>first<else>"" serialized=textwrap.indent(json.dumps(data indent=indent default=str) " "<times>indent) separator=","<if><not>is_last<else>"" last="\n]"<if>is_last<else>"" )<line_sep><yield>line<line_sep>first=<false><block_end><if_stmt>first# We didn't output anything, so yield the empty list
<block_start><yield>"[]"<block_end><block_end><def_stmt>paginate service method list_key **kwargs<block_start>paginator=service.get_paginator(method)<for_stmt>response paginator.paginate(**kwargs)<block_start><yield><from>response[list_key]<block_end><block_end><def_stmt>fix_json row# If a key value is list or dict, json encode it
<block_start><return>dict([(key json.dumps(value indent=2 default=str)<if>isinstance(value (dict list tuple))<else>value )<for>key,value row.items()])<block_end> |
<import_stmt>unittest<import_stmt>unittest.mock<import_from_stmt>programy.extensions.base Extension<class_stmt>MockExtension(Extension)<block_start><def_stmt>execute self context data<block_start><raise>NotImplementedError()<block_end><block_end><class_stmt>ExtensionTests(unittest.TestCase)<block_start><def_stmt>test_ensure_not_implemented self<block_start>bot=unittest.mock.Mock()<line_sep>extension=MockExtension()<line_sep>self.assertIsNotNone(extension)<with_stmt>self.assertRaises(Exception)<block_start>extension.execute(bot "testid" "Some Data")<block_end><block_end><block_end> |
<import_stmt>argparse<import_stmt>os<import_stmt>subprocess<def_stmt>split_scenes raw_data_path out_data_path<block_start>out_data_path=os.path.join(out_data_path 'orig' 'scenes')<if_stmt><not>os.path.isdir(os.path.join(out_data_path 'train'))<block_start>os.makedirs(os.path.join(out_data_path 'train'))<block_end><if_stmt><not>os.path.isdir(os.path.join(out_data_path 'val'))<block_start>os.makedirs(os.path.join(out_data_path 'val'))<block_end>start="00:00:00.0"<with_stmt>open("./data/timestamps")<as>f<block_start><for_stmt>i,line enumerate(f.readlines())<block_start>m,s=divmod(float(line) 60)<line_sep>h,m=divmod(m 60)<line_sep>end="%02d:%02d:%02d"%(h m s)<if_stmt>i<l>53<block_start>subset='train'<block_end><else_stmt><block_start>subset='val'<block_end>filepath=os.path.join(out_data_path subset)<line_sep>filename=os.path.join(filepath 'scene_'+str(i)+'.mp4')<line_sep>cmd=["ffmpeg" "-i" raw_data_path "-ss" start "-to" end "-c:v" "copy" "-an" filename]<line_sep>print("Running: " ' '.join(cmd))<line_sep>subprocess.run(cmd)<line_sep>start=end<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>parser=argparse.ArgumentParser()<line_sep>parser.add_argument('--raw_data' type=str default=<none>)<line_sep>parser.add_argument('--out_data' type=str default=<none>)<line_sep>args=parser.parse_args()<assert_stmt>args.raw_data<is><not><none> 'Provide --raw_data path to Myanmar 4K mp4'<assert_stmt>args.out_data<is><not><none> 'Provide --raw_data path to Myanmar 4K mp4'<line_sep>split_scenes(args.raw_data args.out_data)<block_end> |
<import_from_stmt>sklearn.svm OneClassSVM<import_from_stmt>uq360.utils.transformers.feature_transformer FeatureTransformer<class_stmt>OneClassSVMTransformer(FeatureTransformer)<block_start>"""One-class SVM outlier-classifier based derived feature.
This transformer fits an SVM decision boundary enclosing the
full training set. This is then the decision boundary to identify
outliers in production data at inference time. """<def_stmt>__init__ self<block_start>super(OneClassSVMTransformer self).__init__()<line_sep>self.one_class_classifier=OneClassSVM(nu=0.1 kernel="rbf" gamma='auto')<line_sep>self.fit_status=<false><block_end>@classmethod<def_stmt>name cls<block_start><return>('one_class_svm')<block_end><def_stmt>fit self x y<block_start>self.one_class_classifier.fit(x)<line_sep>self.fit_status=<true><block_end><def_stmt>transform self x predictions<block_start><return>self.one_class_classifier.decision_function(x)<block_end><def_stmt>save self output_location=<none><block_start>self.register_pkl_object(self.one_class_classifier 'one_class_classifier')<line_sep>super(OneClassSVMTransformer self)._save(output_location)<block_end><def_stmt>load self input_location=<none><block_start>self._load(input_location)<line_sep>self.one_class_classifier=self.pkl_registry[0][0]<assert_stmt>type(self.one_class_classifier)<eq>OneClassSVM<line_sep>self.fit_status=<true><block_end><block_end> |
<import_from_stmt>django.http HttpResponse<import_from_stmt>django.views.decorators.csrf csrf_exempt<def_stmt>index request<block_start><return>HttpResponse(status=200)<block_end><def_stmt>get_user request id<block_start><return>HttpResponse(id)<block_end>@csrf_exempt<def_stmt>create_user request<block_start><return>HttpResponse(status=200)<block_end> |
"""Define inputters reading from TFRecord files."""<import_stmt>tensorflow<as>tf<import_from_stmt>opennmt.inputters.inputter Inputter<import_from_stmt>opennmt.utils compat<import_stmt>numpy<as>np<import_from_stmt>collections defaultdict<import_stmt>yaml<class_stmt>Feature<block_start><def_stmt>__init__ self name shape where<block_start>self.name=name<line_sep>self.shape=shape<line_sep>self.where=where<block_end><block_end><class_stmt>RecordInputter(Inputter)<block_start>"""Inputter that reads a header file that discribes the tensors and shapes
"""<def_stmt>__init__ self dtype=tf.float32<block_start>"""Initializes the parameters of the record inputter.
Args:
dtype: The values type.
"""<line_sep>super(RecordInputter self).__init__(dtype=dtype)<block_end><def_stmt>initialize self metadata asset_dir=<none> asset_prefix=""<block_start>config_file=metadata['config_file']<line_sep># read config file
self.input_features=[]<line_sep>self.has=defaultdict(bool)<with_stmt>open(config_file "r")<as>f<block_start>config=yaml.safe_load(f)<for_stmt>fi config["features"]<block_start>f=Feature(fi[0] fi[1] fi[2])<line_sep>self.has[f.where]=<true><line_sep>self.input_features.append(f)<block_end><block_end>print(self.input_features)<block_end><def_stmt>make_dataset self data_file training=<none><block_start><return>tf.data.TFRecordDataset(data_file)<block_end><def_stmt>get_dataset_size self data_file<block_start><return>sum(1<for>_ compat.tf_compat(v1="python_io.tf_record_iterator")(data_file))<block_end><def_stmt>get_receiver_tensors self<block_start>ret={}<if_stmt>self.has_word()<block_start>ret["numWords"]=tf.placeholder(tf.int32 shape=(<none> ) name="numWords")<block_end><for_stmt>feature self.input_features<block_start>shape=list(map(<lambda>x:<none><if>x<l>0<else>x list(feature.shape)))<line_sep>shape.insert(0 <none>)# batch size
ret[feature.name]=tf.placeholder(tf.float32 shape=tuple(shape) name=feature.name)<block_end><return>ret<block_end><def_stmt>make_features self element=<none> features=<none> training=<none><block_start><if_stmt>features<is><none><block_start>features={}<block_end><if_stmt>self.input_features[0].name<in>features<block_start><return>features<block_end><if_stmt>element<is><none><block_start><raise>RuntimeError("make_features was called with None element")<block_end>tf_parse_example=compat.tf_compat(v2="io.parse_single_example" v1="parse_single_example")<line_sep>tf_var_len_feature=compat.tf_compat(v2="io.VarLenFeature" v1="VarLenFeature")<line_sep>featuresDict={}<if_stmt>self.has_word()<block_start>featuresDict["numWords"]=tf_var_len_feature(tf.int64)<block_end><for_stmt>feature self.input_features<block_start>featuresDict[feature.name]=tf_var_len_feature(tf.float32)<block_end>example=tf_parse_example(element features=featuresDict)<if_stmt>self.has_word()<block_start>features["numWords"]=tf.cast(example["numWords"].values tf.int32)[0]<block_end><for_stmt>feature self.input_features<block_start>print(feature.name feature.shape)<line_sep>features[feature.name]=tf.reshape(example[feature.name].values feature.shape)<block_end>print("features" features)<line_sep><return>features<block_end><def_stmt>get_word self features training=<none><block_start>to_concat=[]<for_stmt>feature self.input_features<block_start><if_stmt>feature.where<eq>"word"<block_start>to_concat.append(features[feature.name])<block_end><block_end><return>tf.concat(to_concat axis=-1)<block_end><def_stmt>get_global self features training=<none><block_start>to_concat=[]<for_stmt>feature self.input_features<block_start><if_stmt>feature.where<eq>"global"<block_start>to_concat.append(features[feature.name])<block_end><block_end><if_stmt>len(to_concat)<eq>1<block_start><return>to_concat[0]<block_end><return>tf.concat(to_concat axis=-1)<block_end><def_stmt>get_lm self features 
training=<none><block_start>to_concat=[]<for_stmt>feature self.input_features<block_start><if_stmt>feature.where<eq>"lm"<block_start>to_concat.append(features[feature.name])<block_end><block_end><return>tf.concat(to_concat axis=-2)<block_end><def_stmt>has_word self<block_start><return>self.has["word"]<block_end><def_stmt>has_lm self<block_start><return>self.has["lm"]<block_end><def_stmt>has_global self<block_start><return>self.has["global"]<block_end><def_stmt>get_length self features training=<none><block_start><return>features["numWords"]<if>"numWords"<in>features<else>1<block_end><block_end> |
<import_stmt>numpy<as>np<line_sep>n=int(input().strip())<line_sep>array=np.array([[float(x)<for>x input().strip().split()]<for>_ range(n)] dtype=float)<line_sep>print(np.linalg.det(array))<line_sep> |
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{'targets':[{'target_name':'libflac' 'product_name':'flac' 'type':'static_library' 'sources':['include/FLAC/all.h' 'include/FLAC/assert.h' 'include/FLAC/callback.h' 'include/FLAC/export.h' 'include/FLAC/format.h' 'include/FLAC/metadata.h' 'include/FLAC/ordinals.h' 'include/FLAC/stream_decoder.h' 'include/FLAC/stream_encoder.h' 'include/share/alloc.h' 'include/share/compat.h' 'include/share/endswap.h' 'include/share/private.h' 'src/libFLAC/alloc.c' 'src/libFLAC/bitmath.c' 'src/libFLAC/bitreader.c' 'src/libFLAC/bitwriter.c' 'src/libFLAC/cpu.c' 'src/libFLAC/crc.c' 'src/libFLAC/fixed.c' 'src/libFLAC/float.c' 'src/libFLAC/format.c' 'src/libFLAC/lpc.c' 'src/libFLAC/md5.c' 'src/libFLAC/memory.c' 'src/libFLAC/stream_decoder.c' 'src/libFLAC/stream_encoder.c' 'src/libFLAC/stream_encoder_framing.c' 'src/libFLAC/window.c' 'src/libFLAC/include/private/all.h' 'src/libFLAC/include/private/bitmath.h' 'src/libFLAC/include/private/bitreader.h' 'src/libFLAC/include/private/bitwriter.h' 'src/libFLAC/include/private/cpu.h' 'src/libFLAC/include/private/crc.h' 'src/libFLAC/include/private/fixed.h' 'src/libFLAC/include/private/float.h' 'src/libFLAC/include/private/format.h' 'src/libFLAC/include/private/lpc.h' 'src/libFLAC/include/private/macros.h' 'src/libFLAC/include/private/md5.h' 'src/libFLAC/include/private/memory.h' 'src/libFLAC/include/private/metadata.h' 'src/libFLAC/include/private/stream_encoder.h' 'src/libFLAC/include/private/stream_encoder_framing.h' 'src/libFLAC/include/private/window.h' 'src/libFLAC/include/protected/all.h' 'src/libFLAC/include/protected/stream_decoder.h' 'src/libFLAC/include/protected/stream_encoder.h' ] 'defines':['FLAC__NO_DLL' 'FLAC__OVERFLOW_DETECT' 'VERSION="1.3.1"' 'HAVE_LROUND' ] 'conditions':[['OS=="win"' {'sources':['include/share/win_utf8_io.h' 'src/share/win_utf8_io/win_utf8_io.c' ] 'defines!':['WIN32_LEAN_AND_MEAN' # win_utf8_io.c defines this itself.
] 'msvs_settings':{'VCCLCompilerTool':{'AdditionalOptions':['/wd4334' # 32-bit shift converted to 64 bits.
'/wd4267' # Converting from size_t to unsigned on 64-bit.
] } } } {'defines':['HAVE_INTTYPES_H' ] }] ] 'include_dirs':['include' 'src/libFLAC/include' ] 'direct_dependent_settings':{'defines':['FLAC__NO_DLL' ] } 'variables':{'clang_warning_flags':[# libflac converts between FLAC__StreamDecoderState and
# FLAC__StreamDecoderInitStatus a lot in stream_decoder.c.
'-Wno-conversion' # libflac contains constants that are only used in certain
# compile-time cases, which triggers unused-const-variable warnings in
# other cases.
'-Wno-unused-const-variable' ] } } ] }<line_sep># Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
|
<import_from_stmt>matrix.server MatrixServer<import_from_stmt>matrix._weechat MockConfig<import_stmt>matrix.globals<as>G<line_sep>G.CONFIG=MockConfig()<class_stmt>TestClass(object)<block_start><def_stmt>test_address_parsing self<block_start>homeserver=MatrixServer._parse_url("example.org" 8080)<assert_stmt>homeserver.hostname<eq>"example.org"<assert_stmt>homeserver.geturl()<eq>"https://example.org:8080"<line_sep>homeserver=MatrixServer._parse_url("example.org/_matrix" 80)<assert_stmt>homeserver.hostname<eq>"example.org"<assert_stmt>homeserver.geturl()<eq>"https://example.org:80/_matrix"<line_sep>homeserver=MatrixServer._parse_url("https://example.org/_matrix" 80)<assert_stmt>homeserver.hostname<eq>"example.org"<assert_stmt>homeserver.geturl()<eq>"https://example.org:80/_matrix"<block_end><block_end> |
<import_stmt>esphome.codegen<as>cg<import_stmt>esphome.config_validation<as>cv<import_from_stmt>esphome.components i2c sensor<import_from_stmt>esphome.const CONF_ID CONF_PM_1_0 CONF_PM_2_5 CONF_PM_10_0 CONF_PMC_0_5 CONF_PMC_1_0 CONF_PMC_2_5 CONF_PMC_10_0 UNIT_MICROGRAMS_PER_CUBIC_METER ICON_CHEMICAL_WEAPON ICON_COUNTER DEVICE_CLASS_PM1 DEVICE_CLASS_PM10 DEVICE_CLASS_PM25 STATE_CLASS_MEASUREMENT <line_sep>CODEOWNERS=["@sjtrny"]<line_sep>DEPENDENCIES=["i2c"]<line_sep>pmsa003i_ns=cg.esphome_ns.namespace("pmsa003i")<line_sep>PMSA003IComponent=pmsa003i_ns.class_("PMSA003IComponent" cg.PollingComponent i2c.I2CDevice)<line_sep>CONF_STANDARD_UNITS="standard_units"<line_sep>UNIT_COUNTS_PER_100ML="#/0.1L"<line_sep>CONF_PMC_0_3="pmc_0_3"<line_sep>CONF_PMC_5_0="pmc_5_0"<line_sep>CONFIG_SCHEMA=(cv.Schema({cv.GenerateID():cv.declare_id(PMSA003IComponent) cv.Optional(CONF_STANDARD_UNITS default=<true>):cv.boolean cv.Optional(CONF_PM_1_0):sensor.sensor_schema(unit_of_measurement=UNIT_MICROGRAMS_PER_CUBIC_METER icon=ICON_CHEMICAL_WEAPON accuracy_decimals=2 device_class=DEVICE_CLASS_PM1 state_class=STATE_CLASS_MEASUREMENT ) cv.Optional(CONF_PM_2_5):sensor.sensor_schema(unit_of_measurement=UNIT_MICROGRAMS_PER_CUBIC_METER icon=ICON_CHEMICAL_WEAPON accuracy_decimals=2 device_class=DEVICE_CLASS_PM25 state_class=STATE_CLASS_MEASUREMENT ) cv.Optional(CONF_PM_10_0):sensor.sensor_schema(unit_of_measurement=UNIT_MICROGRAMS_PER_CUBIC_METER icon=ICON_CHEMICAL_WEAPON accuracy_decimals=2 device_class=DEVICE_CLASS_PM10 state_class=STATE_CLASS_MEASUREMENT ) cv.Optional(CONF_PMC_0_3):sensor.sensor_schema(unit_of_measurement=UNIT_COUNTS_PER_100ML icon=ICON_COUNTER accuracy_decimals=0 state_class=STATE_CLASS_MEASUREMENT ) cv.Optional(CONF_PMC_0_5):sensor.sensor_schema(unit_of_measurement=UNIT_COUNTS_PER_100ML icon=ICON_COUNTER accuracy_decimals=0 state_class=STATE_CLASS_MEASUREMENT ) cv.Optional(CONF_PMC_1_0):sensor.sensor_schema(unit_of_measurement=UNIT_COUNTS_PER_100ML icon=ICON_COUNTER 
accuracy_decimals=0 state_class=STATE_CLASS_MEASUREMENT ) cv.Optional(CONF_PMC_2_5):sensor.sensor_schema(unit_of_measurement=UNIT_COUNTS_PER_100ML icon=ICON_COUNTER accuracy_decimals=0 state_class=STATE_CLASS_MEASUREMENT ) cv.Optional(CONF_PMC_5_0):sensor.sensor_schema(unit_of_measurement=UNIT_COUNTS_PER_100ML icon=ICON_COUNTER accuracy_decimals=0 state_class=STATE_CLASS_MEASUREMENT ) cv.Optional(CONF_PMC_10_0):sensor.sensor_schema(unit_of_measurement=UNIT_COUNTS_PER_100ML icon=ICON_COUNTER accuracy_decimals=0 state_class=STATE_CLASS_MEASUREMENT ) }).extend(cv.polling_component_schema("60s")).extend(i2c.i2c_device_schema(0x12)))<line_sep>TYPES={CONF_PM_1_0:"set_pm_1_0_sensor" CONF_PM_2_5:"set_pm_2_5_sensor" CONF_PM_10_0:"set_pm_10_0_sensor" CONF_PMC_0_3:"set_pmc_0_3_sensor" CONF_PMC_0_5:"set_pmc_0_5_sensor" CONF_PMC_1_0:"set_pmc_1_0_sensor" CONF_PMC_2_5:"set_pmc_2_5_sensor" CONF_PMC_5_0:"set_pmc_5_0_sensor" CONF_PMC_10_0:"set_pmc_10_0_sensor" }<async_keyword><def_stmt>to_code config<block_start>var=cg.new_Pvariable(config[CONF_ID])<line_sep><await>cg.register_component(var config)<line_sep><await>i2c.register_i2c_device(var config)<line_sep>cg.add(var.set_standard_units(config[CONF_STANDARD_UNITS]))<for_stmt>key,funcName TYPES.items()<block_start><if_stmt>key<in>config<block_start>sens=<await>sensor.new_sensor(config[key])<line_sep>cg.add(getattr(var funcName)(sens))<block_end><block_end><block_end> |
<import_stmt>itertools<import_stmt>operator<import_stmt>re<import_stmt>bs4<import_from_stmt>pudzu.utils *<line_sep># Various utilities for BeautifulSoup
# helper functions since: (a) bs4 tags need to be compared with is, not eq; (b) they're iterable
<def_stmt>remove_duplicate_tags l<block_start>"""Remove duplicate tags from a list (using object identity rather than equality)"""<line_sep><return>remove_duplicates(l key=id)<block_end><def_stmt>non_bs4_iterable v<block_start>"""Whether an object is a non-bs4 iterable."""<line_sep><return>non_string_iterable(v)<and><not>hasattr(v "find_all")<block_end><def_stmt>make_bs4_iterable v<block_start>"""Return a non-bs4 iterable from an object, wrapping it in a tuple if needed."""<line_sep><return>v<if>non_bs4_iterable(v)<else>(v )<block_end># pretty-printing tags
<def_stmt>print_tags tags attr=<none><block_start>"""Print one or more tags, excluding any nested content."""<for_stmt>tag make_bs4_iterable(tags)<block_start><if_stmt>attr<is><not><none><block_start>print(tag.attrs.get(attr ""))<block_end><elif_stmt>hasattr(tag "attr")<block_start>attrs=" ".join('{}="{}"'.format(k " ".join(v)<if>non_string_iterable(v)<else>v)<for>k,v sorted(tag.attrs.items()))<line_sep>print("<{}{}{}>".format(tag.name ""<if>len(tag.attrs)<eq>0<else>" " attrs))<block_end><else_stmt><block_start>print(tag)<block_end><block_end><block_end><def_stmt>print_path tag<block_start>"""Print the path from the root down to a tag."""<line_sep>print_tags(list(itertools.chain([tag] tag.parents))[-2::-1])<block_end>pt=print_tags<line_sep>pp=print_path<line_sep># filtering
<def_stmt>re_exclude pattern<block_start>"""Negated regular expression filter."""<line_sep>pattern=re.compile(pattern)<line_sep><return><lambda>v:v<and><not>re.search(pattern v)<block_end><def_stmt>is_parent t s<block_start>"""Whether t is s's parent."""<line_sep><return>t<is>s.parent<block_end><def_stmt>is_child t s<block_start>"""Whether t is s's child."""<line_sep><return>s<is>t.parent<block_end><def_stmt>is_ancestor t s<block_start>"""Whether t is an ancestor of s."""<line_sep><return>is_in(t s.parents)<block_end><def_stmt>is_descendent t s<block_start>"""Whether t is a descendent of s."""<line_sep><return>is_in(s t.parents)<block_end><def_stmt>is_after t s<block_start>"""Whether t occurs after s."""<line_sep><return>is_in(t s.next_elements)<block_end><def_stmt>is_before t s<block_start>"""Whether t occurs before s."""<line_sep><return>is_in(s t.next_elements)<block_end><def_stmt>exclude_tags tags excluded relation=operator.is_<block_start>"""Filter out tags that are related to at least one of the excluded set."""<line_sep><return>[t<for>t make_bs4_iterable(tags)<if><not>any(relation(t s)<for>s make_bs4_iterable(excluded))]<block_end><def_stmt>restrict_tags tags included relation=operator.is_<block_start>"""Restrict to tags that are related to at least one of the included set."""<line_sep><return>[t<for>t make_bs4_iterable(tags)<if>any(relation(t s)<for>s make_bs4_iterable(included))]<block_end># finding tags by chaining
<def_stmt>curry_method method<block_start><def_stmt>fn *args **kwargs<block_start><return><lambda>o:method(o *args **kwargs)<block_end><return>fn<block_end>all_=curry_method(bs4.element.Tag.find_all)<line_sep>next_=curry_method(bs4.element.Tag.find_all_next)<line_sep>prev_=curry_method(bs4.element.Tag.find_all_previous)<line_sep>parents_=curry_method(bs4.element.Tag.find_parents)<line_sep>next_siblings_=curry_method(bs4.element.Tag.find_next_siblings)<line_sep>prev_siblings_=curry_method(bs4.element.Tag.find_previous_siblings)<line_sep>select_=curry_method(bs4.element.Tag.select)<line_sep>exclude_=curry_method(exclude_tags)<line_sep>restrict_=curry_method(restrict_tags)<def_stmt>find_tags tags *fns<block_start>"""Apply a chain sequence of find methods to a collection of tags. Result may contain duplicates."""<line_sep>ts=make_bs4_iterable(tags)<for_stmt>fn fns<block_start><if_stmt><not>callable(fn)<block_start>fn=all_(fn)<block_end>ts=[s<for>t ts<for>s make_bs4_iterable(fn(t))]<block_end><return>ts<block_end><def_stmt>find_tag tags *fns<block_start>"""Same as find_tags but returns the first result only (or None if there are none)."""<line_sep><return>first(find_tags(tags *fns))<block_end> |
# Given an array of integers, every element appears twice except for one. Find that single one.
#
# Note:
# Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
#
# Python, Python3 all accepted.
<class_stmt>SingleNumber<block_start><def_stmt>singleNumber self nums<block_start>"""
:type nums: List[int]
:rtype: int
"""<line_sep>s=0<if_stmt>nums<is><none><block_start><return>s<block_end><for_stmt>i nums<block_start>s<augxor>i<block_end><return>s<block_end><block_end> |
<import_stmt>torch<as>t<import_from_stmt>torch nn<import_from_stmt>torchvision.models vgg16<import_from_stmt>src.region_proposal_network RegionProposalNetwork<import_from_stmt>src.head_detector Head_Detector<import_from_stmt>src.config opt<def_stmt>decom_vgg16 <block_start>""" Load the default PyTorch model or the pre-trained caffe model.
Freeze the weights of some layers of the network and train the rest
of the features.
"""<if_stmt>opt.caffe_pretrain# Load the caffe model
<block_start>model=vgg16(pretrained=<false>)<line_sep>model.load_state_dict(t.load(opt.caffe_pretrain_path))<block_end><else_stmt># Load the default model in PyTorch
<block_start>model=vgg16(pretrained=<true>)<block_end>features=list(model.features)[:30]<line_sep># Freeze some of the layers.
# for layer in features[:10]:
# for p in layer.parameters():
# p.requires_grad = False
<return>nn.Sequential(*features)<block_end><class_stmt>Head_Detector_VGG16(Head_Detector)<block_start>""" Head detector based on VGG16 model.
Have two components:
1) Fixed feature extractor from the conv_5 layer of the VGG16
2) A region proposal network on the top of the extractor.
"""<line_sep>feat_stride=16<def_stmt>__init__ self ratios=[0.5 1 2] anchor_scales=[8 16 32]<block_start>extractor=decom_vgg16()<line_sep>rpn=RegionProposalNetwork(512 512 ratios=ratios anchor_scales=anchor_scales feat_stride=self.feat_stride)<line_sep>super(Head_Detector_VGG16 self).__init__(extractor rpn)<block_end><pass><block_end> |
<import_from_stmt>.rman_translator RmanTranslator<import_from_stmt>..rman_sg_nodes.rman_sg_alembic RmanSgAlembic<import_from_stmt>..rfb_utils transform_utils<import_from_stmt>..rfb_utils string_utils<import_from_stmt>..rfb_logger rfb_log<class_stmt>RmanAlembicTranslator(RmanTranslator)<block_start><def_stmt>__init__ self rman_scene<block_start>super().__init__(rman_scene)<line_sep>self.bl_type='ALEMBIC'<block_end><def_stmt>export self ob db_name<block_start>sg_node=self.rman_scene.sg_scene.CreateProcedural(db_name)<line_sep>sg_node.Define("DynamicLoad" <none>)<line_sep>rman_sg_alembic=RmanSgAlembic(self.rman_scene sg_node db_name)<line_sep><return>rman_sg_alembic<block_end><def_stmt>export_deform_sample self rman_sg_alembic ob time_sample<block_start><pass><block_end><def_stmt>update self ob rman_sg_alembic<block_start>rm=ob.renderman<line_sep>abc_filepath=string_utils.expand_string(rm.abc_filepath)<line_sep>bounds=(-100000 100000 -100000 100000 -100000 100000)<line_sep>primvar=rman_sg_alembic.sg_node.GetPrimVars()<line_sep>primvar.SetString(self.rman_scene.rman.Tokens.Rix.k_dsoname 'AlembicProcPrim')<line_sep>primvar.SetFloatArray(self.rman_scene.rman.Tokens.Rix.k_bound bounds 6)<line_sep>shutter_interval=self.rman_scene.bl_scene.renderman.shutter_angle/360.0<line_sep>shutter_open,shutter_close=0 shutter_interval<line_sep>abc_frame=rm.abc_frame<if_stmt>rm.abc_use_scene_frame<block_start>rman_sg_alembic.is_frame_sensitive=<true><line_sep>abc_frame=float(self.rman_scene.bl_frame_current)<block_end><else_stmt><block_start>rman_sg_alembic.is_frame_sensitive=<false><block_end>abc_args="-filename %s"%abc_filepath<line_sep>abc_args<augadd>" -frame %f"%abc_frame<line_sep>abc_args<augadd>" -fps %f"%rm.abc_fps<line_sep>abc_args<augadd>" -shutteropen %f"%shutter_open<line_sep>abc_args<augadd>" -shutterclose %f"%shutter_close<line_sep>abc_args<augadd>" -velocityscale %f"%rm.abc_velocityScale<line_sep>abc_args<augadd>" 
-ccw"<line_sep>primvar.SetString(self.rman_scene.rman.Tokens.Rix.k_data abc_args)<line_sep>rman_sg_alembic.sg_node.SetPrimVars(primvar)<block_end><block_end> |
<import_stmt>torch<import_stmt>torch.nn<as>nn<class_stmt>SeparableConv2d(nn.Module)<block_start><def_stmt>__init__ self in_channels out_channels kernel_size=1 stride=1 padding=0 dilation=1 bias=<false><block_start>super(SeparableConv2d self).__init__()<line_sep>self.conv1=nn.Conv2d(in_channels in_channels kernel_size stride padding dilation groups=in_channels bias=bias)<line_sep>self.pointwise=nn.Conv2d(in_channels out_channels 1 1 0 1 1 bias=bias)<block_end><def_stmt>forward self x<block_start>x=self.conv1(x)<line_sep>x=self.pointwise(x)<line_sep><return>x<block_end><block_end> |
#
# Autogenerated by Thrift
#
# DO NOT EDIT
# @generated
#
<import_stmt>typing<as>_typing<import_stmt>folly.iobuf<as>_fbthrift_iobuf<import_from_stmt>thrift.py3lite.client AsyncClient<as>_fbthrift_py3lite_AsyncClient SyncClient<as>_fbthrift_py3lite_SyncClient Client<as>_fbthrift_py3lite_Client <import_stmt>thrift.py3lite.exceptions<as>_fbthrift_py3lite_exceptions<import_stmt>thrift.py3lite.types<as>_fbthrift_py3lite_types<import_stmt>py3lite_module_root.my.namespacing.extend.test.extend.lite_types<import_stmt>py3lite_module_root.my.namespacing.test.hsmodule.lite_types<import_stmt>py3lite_module_root.my.namespacing.test.hsmodule.lite_clients<class_stmt>ExtendTestService(_fbthrift_py3lite_Client["ExtendTestService.Async" "ExtendTestService.Sync"])<block_start><class_stmt>Async(py3lite_module_root.my.namespacing.test.hsmodule.lite_clients.HsTestService.Async)<block_start><async_keyword><def_stmt>check self struct1:py3lite_module_root.my.namespacing.test.hsmodule.lite_types.HsFoo<arrow>bool<block_start>resp=<await>self._send_request("ExtendTestService" "check" py3lite_module_root.my.namespacing.extend.test.extend.lite_types._fbthrift_ExtendTestService_check_args(struct1=struct1 ) py3lite_module_root.my.namespacing.extend.test.extend.lite_types._fbthrift_ExtendTestService_check_result )<line_sep># shortcut to success path for non-void returns
<if_stmt>resp.success<is><not><none><block_start><return>resp.success<block_end><raise>_fbthrift_py3lite_exceptions.ApplicationError(_fbthrift_py3lite_exceptions.ApplicationErrorType.MISSING_RESULT "Empty Response" )<block_end><block_end><class_stmt>Sync(py3lite_module_root.my.namespacing.test.hsmodule.lite_clients.HsTestService.Sync)<block_start><def_stmt>check self struct1:py3lite_module_root.my.namespacing.test.hsmodule.lite_types.HsFoo<arrow>bool<block_start>resp=self._send_request("ExtendTestService" "check" py3lite_module_root.my.namespacing.extend.test.extend.lite_types._fbthrift_ExtendTestService_check_args(struct1=struct1 ) py3lite_module_root.my.namespacing.extend.test.extend.lite_types._fbthrift_ExtendTestService_check_result )<line_sep># shortcut to success path for non-void returns
<if_stmt>resp.success<is><not><none><block_start><return>resp.success<block_end><raise>_fbthrift_py3lite_exceptions.ApplicationError(_fbthrift_py3lite_exceptions.ApplicationErrorType.MISSING_RESULT "Empty Response" )<block_end><block_end><block_end> |
<import_from_stmt>resotolib.args get_arg_parser ArgumentParser<import_from_stmt>resoto_plugin_k8s KubernetesCollectorPlugin<def_stmt>test_args <block_start>arg_parser=get_arg_parser()<line_sep>KubernetesCollectorPlugin.add_args(arg_parser)<line_sep>arg_parser.parse_args()<assert_stmt>len(ArgumentParser.args.k8s_context)<eq>0<assert_stmt>ArgumentParser.args.k8s_config<is><none><assert_stmt>len(ArgumentParser.args.k8s_cluster)<eq>0<assert_stmt>len(ArgumentParser.args.k8s_apiserver)<eq>0<assert_stmt>len(ArgumentParser.args.k8s_token)<eq>0<assert_stmt>len(ArgumentParser.args.k8s_cacert)<eq>0<assert_stmt>len(ArgumentParser.args.k8s_collect)<eq>0<assert_stmt>len(ArgumentParser.args.k8s_no_collect)<eq>0<assert_stmt>ArgumentParser.args.k8s_pool_size<eq>5<assert_stmt>ArgumentParser.args.k8s_fork<is><false><block_end> |
# -*- coding=utf-8 -*-
<import_from_future_stmt> absolute_import unicode_literals<import_stmt>argparse<import_stmt>os<import_stmt>sys<import_from_stmt>.options project<class_stmt>BaseCommand(object)<block_start>"""A CLI command.
"""<line_sep>name=<none><line_sep>description=<none><line_sep>default_arguments=[project]<line_sep>arguments=[]<def_stmt>__init__ self parser=<none><block_start><if_stmt><not>parser<block_start>parser=argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]) description="Base argument parser for passa")<block_end>self.parser=parser<line_sep>self.add_arguments()<block_end>@classmethod<def_stmt>build_parser cls<block_start>parser=argparse.ArgumentParser(prog="passa {}".format(cls.name) description=cls.description )<line_sep><return>cls(parser)<block_end>@classmethod<def_stmt>run_parser cls<block_start>parser=cls.build_parser()<line_sep>parser()<block_end><def_stmt>__call__ self argv=<none><block_start>options=self.parser.parse_args(argv)<line_sep>result=self.main(options)<if_stmt>result<is><not><none><block_start>sys.exit(result)<block_end><block_end><def_stmt>add_default_arguments self<block_start><for_stmt>arg self.default_arguments<block_start>arg.add_to_parser(self.parser)<block_end><block_end><def_stmt>add_arguments self<block_start>self.add_default_arguments()<for_stmt>arg self.arguments<block_start>arg.add_to_parser(self.parser)<block_end><block_end><def_stmt>main self options<block_start><return>self.run(options)<block_end><def_stmt>run self options<block_start><raise>NotImplementedError<block_end><block_end> |
# Copyright 2019-2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
<import_from_stmt>torch nn<import_stmt>torch<import_stmt>threading<import_stmt>hashlib<import_stmt>pickle<import_stmt>os<class_stmt>cache<block_start><def_stmt>__init__ self function<block_start>self.function=function<line_sep>self.pickle_name=self.function.__name__<block_end><def_stmt>__call__ self *args **kwargs<block_start>m=hashlib.sha256()<line_sep>m.update(pickle.dumps((self.function.__name__ args frozenset(kwargs.items()))))<line_sep>output_path=os.path.join('.cache' "%s_%s"%(m.hexdigest() self.pickle_name))<try_stmt><block_start><with_stmt>open(output_path 'rb')<as>f<block_start>data=pickle.load(f)<block_end><block_end><except_stmt>(FileNotFoundError pickle.PickleError)<block_start>data=self.function(*args **kwargs)<line_sep>os.makedirs(os.path.dirname(output_path) exist_ok=<true>)<with_stmt>open(output_path 'wb')<as>f<block_start>pickle.dump(data f)<block_end><block_end><return>data<block_end><block_end><def_stmt>save_model x name<block_start><if_stmt>isinstance(x nn.DataParallel)<block_start>torch.save(x.module.state_dict() name)<block_end><else_stmt><block_start>torch.save(x.state_dict() name)<block_end><block_end><class_stmt>AsyncCall(object)<block_start><def_stmt>__init__ self fnc callback=<none><block_start>self.Callable=fnc<line_sep>self.Callback=callback<line_sep>self.result=<none><block_end><def_stmt>__call__ self *args **kwargs<block_start>self.Thread=threading.Thread(target=self.run name=self.Callable.__name__ args=args kwargs=kwargs)<line_sep>self.Thread.start()<line_sep><return>self<block_end><def_stmt>wait self timeout=<none><block_start>self.Thread.join(timeout)<if_stmt>self.Thread.isAlive()<block_start><raise>TimeoutError<block_end><else_stmt><block_start><return>self.result<block_end><block_end><def_stmt>run self *args **kwargs<block_start>self.result=self.Callable(*args **kwargs)<if_stmt>self.Callback<block_start>self.Callback(self.result)<block_end><block_end><block_end><class_stmt>AsyncMethod(object)<block_start><def_stmt>__init__ self 
fnc callback=<none><block_start>self.Callable=fnc<line_sep>self.Callback=callback<block_end><def_stmt>__call__ self *args **kwargs<block_start><return>AsyncCall(self.Callable self.Callback)(*args **kwargs)<block_end><block_end><def_stmt>async_func fnc=<none> callback=<none><block_start><if_stmt>fnc<is><none><block_start><def_stmt>add_async_callback f<block_start><return>AsyncMethod(f callback)<block_end><return>add_async_callback<block_end><else_stmt><block_start><return>AsyncMethod(fnc callback)<block_end><block_end><class_stmt>Registry(dict)<block_start><def_stmt>__init__ self *args **kwargs<block_start>super(Registry self).__init__(*args **kwargs)<block_end><def_stmt>register self module_name<block_start><def_stmt>register_fn module<block_start><assert_stmt>module_name<not><in>self<line_sep>self[module_name]=module<line_sep><return>module<block_end><return>register_fn<block_end><block_end> |
"""Axes behaviour."""<def_stmt>lines * base_width=0.5 line_base_ratio=2.0 tick_major_base_ratio=1.0 tick_minor_base_ratio=0.5 tick_size_width_ratio=3.0 tick_major_size_min=3.0 tick_minor_size_min=2.0 axisbelow=<true> <block_start>"""Adjust linewidth(s) according to a base width."""<line_sep>tick_major_width=tick_major_base_ratio<times>base_width<line_sep>tick_minor_width=tick_minor_base_ratio<times>base_width<line_sep>tick_major_size=max(tick_major_size_min tick_size_width_ratio<times>tick_major_width)<line_sep>tick_minor_size=max(tick_minor_size_min tick_size_width_ratio<times>tick_minor_width)<line_sep><return>{# Set the line-widths appropriately (including the grid)
"axes.linewidth":base_width "lines.linewidth":line_base_ratio<times>base_width "xtick.major.width":tick_major_width "ytick.major.width":tick_major_width "xtick.minor.width":tick_minor_width "ytick.minor.width":tick_minor_width "xtick.major.size":tick_major_size "ytick.major.size":tick_major_size "xtick.minor.size":tick_minor_size "ytick.minor.size":tick_minor_size "grid.linewidth":base_width # Legend frame linewidth
"patch.linewidth":base_width "legend.edgecolor":"inherit" # inherit color from axes. passing 'color' leads to awkward future warnings.
# Control the zorder of the ticks and gridlines
# This is somewhat out of place in this function, but creating a new function
# seems a bit unnecessary here... suggestions welcome!
"axes.axisbelow":axisbelow }<block_end><def_stmt>grid * grid_alpha=0.2 grid_linestyle="solid"<block_start>"""Adjust the grid-style."""<line_sep><return>{# Update the linestyle of the grid
# (it shares a color with the frame, and needs to be distinguishable)
"grid.linestyle":grid_linestyle "grid.alpha":grid_alpha }<block_end><def_stmt>legend * shadow=<false> frameon=<true> fancybox=<false><block_start>"""Adjust the legend-style."""<line_sep><return>{"legend.shadow":shadow "legend.frameon":frameon "legend.fancybox":fancybox }<block_end><def_stmt>color * base="black" face="none"<block_start>"""Adjust the axes' color."""<line_sep><return>{"text.color":base "axes.edgecolor":base "axes.labelcolor":base "xtick.color":base "ytick.color":base "grid.color":base "axes.facecolor":face }<block_end><def_stmt>spines * left=<true> right=<true> top=<true> bottom=<true><block_start>"""Adjust the visibility of the axes' spines."""<line_sep><return>{"axes.spines.left":left "axes.spines.right":right "axes.spines.top":top "axes.spines.bottom":bottom }<block_end><def_stmt>tick_direction * x="inout" y="inout"<block_start>"""Adjust the tick direction."""<line_sep><return>{"xtick.direction":x "ytick.direction":y }<block_end> |
# Lint as: python3
# Copyright 2020 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tracks the order of alchemy events and resulting stones and potions."""<import_stmt>abc<import_stmt>collections<import_stmt>copy<import_stmt>itertools<import_stmt>random<import_from_stmt>typing Any Dict List Mapping Optional Sequence Set Tuple Union<import_from_stmt>dm_alchemy.types graphs<import_from_stmt>dm_alchemy.types stones_and_potions<import_from_stmt>dm_alchemy.types utils<import_stmt>numpy<as>np<line_sep>Stone=stones_and_potions.Stone<line_sep>Potion=stones_and_potions.Potion<line_sep>LatentStone=stones_and_potions.LatentStone<line_sep>LatentPotion=stones_and_potions.LatentPotion<line_sep>AlignedStone=stones_and_potions.AlignedStone<line_sep>PerceivedPotion=stones_and_potions.PerceivedPotion<line_sep>StoneMap=stones_and_potions.StoneMap<line_sep>PotionMap=stones_and_potions.PotionMap<line_sep>CAULDRON=stones_and_potions.CAULDRON<line_sep>RewardWeights=stones_and_potions.RewardWeights<line_sep>Graph=graphs.Graph<line_sep>NEVER_USED=-1<line_sep>NO_OUTCOME=-1<line_sep>UNKNOWN_TYPE=-3<class_stmt>EventTracker(abc.ABC)<block_start>"""Base class for things that track alchemy events."""<def_stmt>__init__ self name<block_start>self.name=name<block_end>@abc.abstractmethod<def_stmt>potion_used self stone_ind:int potion_ind:int val:int start_stone:graphs.Node stone_inst:int potion:Potion end_stone:graphs.Node<arrow><none><block_start><pass><block_end><def_stmt>failed_potion_use self stone_ind:int start_stone:graphs.Node stone_inst:int<arrow><none><block_start>"""Optional callback when a potion use is attempted but fails."""<line_sep><pass><block_end><block_end><class_stmt>GameState<block_start>"""Keeps track of the symbolic state of an alchemy game."""<def_stmt>__init__ self graph:graphs.Graph trial_items:utils.TrialItems event_trackers:Optional[Sequence[EventTracker]]=<none><block_start>self._stones=copy.deepcopy(trial_items.stones)<line_sep>self._stone_idx_to_ind={p.idx:i<for>i,p enumerate(self._stones)}<line_sep>self._stone_ind_to_idx={i:p.idx<for>i,p 
enumerate(self._stones)}<line_sep>self._potions=copy.deepcopy(trial_items.potions)<line_sep>self._potion_idx_to_ind={p.idx:i<for>i,p enumerate(self._potions)}<line_sep>self._graph=graph<line_sep>num_stones=len(self._stones)<line_sep>num_potions=len(self._potions)<line_sep>self._existing_stones=set(range(num_stones))<line_sep>self._existing_potions=set(range(num_potions))<line_sep>trackers=event_trackers<if>event_trackers<is><not><none><else>[]<line_sep>self.trackers={tracker.name:tracker<for>tracker trackers}<line_sep>self._count=0<block_end><def_stmt>add_event_trackers self event_trackers:Sequence[EventTracker]<arrow><none><block_start>"""Adds event trackers if they are not already there."""<line_sep>self.trackers.update({tracker.name:tracker<for>tracker event_trackers})<block_end><def_stmt>get_stone_ind self stone_inst:Optional[int]=<none> stone:Optional[Union[graphs.Node LatentStone]]=<none><arrow>Optional[int]<block_start>"""Gets a stone referred to through a variety of methods.
The caller must pass exactly one of stone_inst and stone.
Args:
stone_inst: The instance id of the stone used in the potion.
stone: The stone used.
Returns:
The index (into the list of stones originally passed to the EventTracker
in construction) for the stone used in the potion or None if no match can
be found.
"""<if_stmt>len([e<for>e [stone_inst stone]<if>e<is><not><none>])<ne>1<block_start><raise>ValueError('Exactly one of stone inst and stone must be given.')<block_end><if_stmt>stone_inst<is><not><none><block_start><return>self._stone_idx_to_ind[stone_inst]<block_end><if_stmt>isinstance(stone LatentStone)<block_start>stone_node=graphs.Node(-1 stone.latent_coords)<block_end><else_stmt><block_start>stone_node=stone<block_end>matches=self._matching_stones(stone_node)<if_stmt><not>matches<block_start><return><none><block_end><return>matches[0]<block_end><def_stmt>get_potion_ind self potion_inst:Optional[int]=<none> potion:Optional[Union[Potion LatentPotion]]=<none><arrow>Optional[int]<block_start>"""Gets a potion referred to through a variety of methods.
The caller must pass exactly one of potion_inst and potion.
Args:
potion_inst: The instance id of the potion used.
potion: The potion used.
Returns:
The index (into the list of potions originally passed to the EventTracker
in construction) for the potion used or None if no match can be found.
-1 refers to the cauldron.
"""<if_stmt>len([e<for>e [potion_inst potion]<if>e<is><not><none>])<ne>1<block_start><raise>ValueError('Exactly one of potion inst and potion must be given.')<block_end><if_stmt>potion_inst<is><not><none><block_start><return>self._potion_idx_to_ind[potion_inst]<block_end><if_stmt>isinstance(potion LatentPotion)<block_start>potion=Potion(-1 potion.latent_dim potion.latent_dir)<block_end>matches=self._matching_potions(potion)<if_stmt><not>matches<block_start><return><none><block_end><return>matches[0]<block_end><def_stmt>_stone_node self ind:int<arrow>graphs.Node<block_start>node_=self._graph.node_list.get_node_by_coords(list(self._stones[ind].latent))<assert_stmt>node_<is><not><none><line_sep>node:graphs.Node=node_<line_sep><return>node<block_end><def_stmt>_matching_potions self potion:Potion<arrow>List[int]<block_start><return>[p<for>p self._existing_potions<if>self._potions[p].as_index<eq>potion.as_index]<block_end><def_stmt>_matching_stones self stone_node:graphs.Node<arrow>List[int]<block_start><return>[i<for>i self._existing_stones<if>tuple(self._stone_node(i).coords)<eq>tuple(stone_node.coords)]<block_end><def_stmt>has_stone_ind self stone_ind:int<arrow>bool<block_start><return>stone_ind<in>self._existing_stones<block_end><def_stmt>has_potion_ind self potion_ind:int<arrow>bool<block_start><return>potion_ind<in>self._existing_potions<block_end><def_stmt>_remove_potion self potion_ind:int<arrow><none><block_start>self._existing_potions.remove(potion_ind)<block_end><def_stmt>_remove_stone self stone_ind:int<arrow><none><block_start>self._existing_stones.remove(stone_ind)<block_end><def_stmt>potion_used self stone_ind:int potion_ind:int val:Optional[int]=<none><arrow>int<block_start>"""Records that a potion has been used.
The caller must pass exactly one of stone_ind, stone_inst and stone, and
exactly one of potion_ind, potion_inst and potion.
Args:
stone_ind: The index (into the list of stones originally passed to the
EventTracker in construction) for the stone used in the potion.
potion_ind: The index (into the list of potions originally passed to the
EventTracker in construction) for the potion used. -1 refers to the
cauldron.
val: The value to record in this event (typically the frame number that
this event occurs). If this is not set then the value set will be
arbitrary but will preserve the order in which the potion_used and
stone_used functions are called.
Returns:
The index (into the list of stones originally passed to the EventTracker
in construction) for the stone used in the potion. This may not have been
passed into the function (if stone_inst or stone was passed instead).
"""<line_sep># -1 corresponds to the cauldron and so there is no potion to remove and the
# stone does not change
old_node=self._stone_node(stone_ind)<line_sep>outcome_stone=<none><line_sep>potion=<none><if_stmt>potion_ind<ne>CAULDRON<block_start>outcome_stone=copy.deepcopy(old_node)<line_sep>potion=self._potions[potion_ind]<line_sep># Change the stone in _stones
<if_stmt>old_node<in>self._graph.edge_list.edges<block_start>outcome_stone=[end_node<for>end_node,v self._graph.edge_list.edges[old_node].items()<if>potion.same_effect(v[1])]<if_stmt>outcome_stone<block_start><assert_stmt>len(outcome_stone)<eq>1<line_sep>outcome_stone=outcome_stone[0]<line_sep>self._stones[stone_ind].latent=np.array(list(outcome_stone.coords))<block_end><else_stmt><block_start>outcome_stone=old_node<block_end><block_end>self._remove_potion(potion_ind)<block_end><if_stmt>self.trackers<block_start><if_stmt>val<is><none><block_start>val=self._count<line_sep>self._count<augadd>1<block_end><for_stmt>event_tracker self.trackers.values()<block_start>event_tracker.potion_used(stone_ind potion_ind val old_node self._stone_ind_to_idx[stone_ind] potion outcome_stone)<block_end><block_end><return>stone_ind<block_end><def_stmt>stone_used self stone_ind:int val:Optional[int]=<none><arrow><none><block_start>"""Records that a stone has been used (placed in the cauldron).
The caller must pass exactly one of stone_ind, stone_inst and stone.
Args:
stone_ind: The index (into the list of stones originally passed to the
EventTracker in construction) for the stone used in the potion.
val: The value to record in this event (typically the frame number that
this event occurs). If this is not set then the value set will be
arbitrary but will preserve the order in which the potion_used and
stone_used functions are called.
"""<line_sep>self.potion_used(stone_ind=stone_ind potion_ind=CAULDRON val=val)<line_sep>self._remove_stone(stone_ind)<block_end><def_stmt>failed_potion_use self stone_ind:int<arrow><none><block_start>old_node=self._stone_node(stone_ind)<for_stmt>event_tracker self.trackers.values()<block_start>event_tracker.failed_potion_use(stone_ind old_node self._stone_ind_to_idx[stone_ind])<block_end><block_end><def_stmt>has_stones self<arrow>bool<block_start><return>bool(self._existing_stones)<block_end><def_stmt>has_potions self<arrow>bool<block_start><return>bool(self._existing_potions)<block_end><def_stmt>has_stones_and_potions self<arrow>bool<block_start><return>self.has_stones()<and>self.has_potions()<block_end><def_stmt>rand_stone_ind self<arrow>int<block_start><return>random.sample(self._existing_stones 1)[0]<block_end><def_stmt>rand_potion_ind self<arrow>int<block_start><return>random.sample(self._existing_potions 1)[0]<block_end><def_stmt>use_rand_stone_potion_pair self<arrow>Tuple[Stone int]<block_start>"""Uses a random stone with a random potion.
Returns:
The new value of the stone and the index of that stone.
"""<line_sep>stone_index=self.rand_stone_ind()<line_sep><return>self.use_rand_potion(stone_index)<block_end><def_stmt>use_rand_potion self stone_ind:int<arrow>Tuple[Stone int]<block_start>"""Uses the stone passed with a random potion.
Args:
stone_ind: The index (into the list of stones originally passed to the
EventTracker in construction) for the stone to use in a random potion.
Returns:
The new value of the stone and the index of that stone.
"""<line_sep>potion_index=self.rand_potion_ind()<line_sep>self.potion_used(stone_ind potion_index)<line_sep><return>self._stones[stone_ind] stone_ind<block_end><def_stmt>existing_stone_nodes self<arrow>List[graphs.Node]<block_start>"""Returns a list of nodes for the remaining existing stones."""<line_sep><return>[self._stone_node(i)<for>i self._existing_stones]<block_end><def_stmt>existing_stones self<arrow>List[Stone]<block_start>"""Returns a list of the remaining existing stones."""<line_sep><return>[self._stones[i]<for>i self._existing_stones]<block_end><def_stmt>existing_potions self<arrow>List[Potion]<block_start>"""Returns a list of the remaining existing potions."""<line_sep><return>[self._potions[i]<for>i self._existing_potions]<block_end><def_stmt>existing_items self<arrow>utils.TrialItems<block_start><return>utils.TrialItems(stones=self.existing_stones() potions=self.existing_potions())<block_end>@property<def_stmt>num_stones self<arrow>int<block_start><return>len(self._existing_stones)<block_end>@property<def_stmt>num_potions self<arrow>int<block_start><return>len(self._existing_potions)<block_end><def_stmt>check_have_potions self needed_potions:Sequence[Potion]<arrow>bool<block_start>"""Checks that we have all the potions we need."""<line_sep>need=collections.Counter([p.as_index<for>p needed_potions])<line_sep>have=collections.Counter([self._potions[p].as_index<for>p self._existing_potions])<for_stmt>k need.keys()<block_start><if_stmt>k<not><in>have.keys()<block_start><return><false><block_end><else_stmt><block_start><if_stmt>have[k]<l>need[k]<block_start><return><false><block_end><block_end><block_end><return><true><block_end><def_stmt>get_stones_above_thresh self reward_weights:RewardWeights threshold:int<arrow>List[int]<block_start>"""Gets all the stones whose value exceeds the threshold passed in."""<line_sep>current_vals={i:reward_weights(self._stones[i].latent)<for>i self._existing_stones}<line_sep><return>[i<for>i,current_val 
current_vals.items()<if>current_val<g>threshold]<block_end><def_stmt>use_stones_above_thresh self reward_weights:RewardWeights threshold:int<arrow><none><block_start>"""Uses all the stones whose value exceeds the threshold passed in."""<for_stmt>i self.get_stones_above_thresh(reward_weights threshold)<block_start>self.stone_used(i)<block_end><block_end><def_stmt>get_stone self ind:int<arrow>Stone<block_start><return>self._stones[ind]<block_end><def_stmt>get_potion self ind:int<arrow>Potion<block_start><return>self._potions[ind]<block_end>@property<def_stmt>node_list self<arrow>graphs.NodeList<block_start><return>self._graph.node_list<block_end>@property<def_stmt>edge_list self<arrow>graphs.EdgeList<block_start><return>self._graph.edge_list<block_end>@property<def_stmt>stone_ind_to_idx self<arrow>Dict[int int]<block_start><return>self._stone_ind_to_idx<block_end>@property<def_stmt>stone_idx_to_ind self<arrow>Dict[int int]<block_start><return>self._stone_idx_to_ind<block_end>@property<def_stmt>potion_idx_to_ind self<arrow>Dict[int int]<block_start><return>self._potion_idx_to_ind<block_end><block_end><class_stmt>TrialTracker(EventTracker)<block_start>"""Type which tracks all events in a trial."""<line_sep>@abc.abstractmethod<def_stmt>events_list self<arrow>List[Tuple[int int int]]<block_start>"""Returns a list of stone index, potion index, val for the trial events."""<line_sep><pass><block_end><block_end><class_stmt>MatrixEventTracker(TrialTracker)<block_start>"""Tracks the order of potion used and stone used events in matrix."""<def_stmt>__init__ self num_stones:int num_potions:int<block_start>self.events=np.full(shape=(num_stones num_potions+1) fill_value=-1 dtype=np.int)<line_sep>super().__init__(name='matrix_event')<block_end><def_stmt>potion_used self stone_ind:int potion_ind:int val:int start_stone:graphs.Node stone_inst:int potion:Potion end_stone:graphs.Node<arrow><none><block_start>"""Records that a potion has been used.
Args:
stone_ind: The index (into the list of stones originally passed to the
EventTracker in construction) for the stone used in the potion.
potion_ind: The index (into the list of potions originally passed to the
EventTracker in construction) for the potion used. -1 refers to the
cauldron.
val: The value to record in this event (typically the frame number that
this event occurs). If this is not set then the value set will be
arbitrary but will preserve the order in which the potion_used and
stone_used functions are called.
start_stone: The stone node before the potion is used.
stone_inst: The instance id for the stone we are using.
potion: The potion used.
end_stone: The stone node after the potion is used.
"""<line_sep>self.events[stone_ind potion_ind]=val<block_end><def_stmt>events_list self<arrow>List[Tuple[int int int]]<block_start>stone_used,potion_used=np.where(self.events<ne>-1)<line_sep>frame=[self.events[x y]<for>(x y) zip(stone_used potion_used)]<line_sep>num_potions=self.events.shape[1]-1<line_sep>events=sorted(zip(stone_used potion_used frame) key=<lambda>x:x[2])<line_sep><return>[(stone_ind CAULDRON<if>potion_ind<eq>num_potions<else>potion_ind frame)<for>stone_ind,potion_ind,frame events]<block_end><block_end>ActionSequenceElement=Tuple[int Mapping[str Any] int int]<class_stmt>ActionSequenceTracker(TrialTracker)<block_start>"""Tracks the order of potion used and stone used events in matrix."""<def_stmt>__init__ self<block_start>self._action_sequence=[]<line_sep>super().__init__(name='action_sequence')<block_end><def_stmt>potion_used self stone_ind:int potion_ind:int val:int start_stone:graphs.Node stone_inst:int potion:Potion end_stone:graphs.Node<arrow><none><block_start>"""Records that a potion has been used.
Args:
stone_ind: The index (into the list of stones originally passed to the
EventTracker in construction) for the stone used in the potion.
potion_ind: The index (into the list of potions originally passed to the
EventTracker in construction) for the potion used. -1 refers to the
cauldron.
val: The value to record in this event (typically the frame number that
this event occurs). If this is not set then the value set will be
arbitrary but will preserve the order in which the potion_used and
stone_used functions are called.
start_stone: The stone node before the potion is used.
stone_inst: The instance id for the stone we are using.
potion: The potion used.
end_stone: The stone node after the potion is used.
"""<line_sep># add to action sequence
action_dict={'node':(start_stone.idx start_stone.coords) 'stone_idx':stone_inst}<line_sep># -1 corresponds to the cauldron and so there is no potion to remove and the
# stone does not change
<if_stmt>potion_ind<eq>CAULDRON<block_start>action_dict['action']='cauldron'<block_end><else_stmt># Change the stone in _stones
<block_start>action_dict['action']=(potion.as_index (potion.dimension potion.direction))<line_sep>action_dict['potion_idx']=potion.idx<line_sep>action_dict['outcome_node']=(end_stone.idx end_stone.coords)<block_end>self._action_sequence.append((val action_dict stone_ind potion_ind))<block_end>@property<def_stmt>action_sequence self<arrow>List[Tuple[int Dict[str Any] int int]]<block_start>self._action_sequence.sort(key=<lambda>x:x[0])<line_sep><return>self._action_sequence<block_end><def_stmt>events_list self<arrow>List[Tuple[int int int]]<block_start><return>[(stone_ind potion_ind val)<for>val,_,stone_ind,potion_ind self.action_sequence]<block_end><block_end><class_stmt>LatestOutcomeTracker(EventTracker)<block_start>"""Tracks the most recent outcome of using a potion."""<def_stmt>__init__ self potion_map:PotionMap stone_map:StoneMap rotation:np.ndarray# -1 represents no change and is the default value for outcome.
<block_start>self.outcome=<none><line_sep>self.type_based_action=<none><line_sep>self._potion_map,self._stone_map=potion_map stone_map<line_sep>self._rotation=rotation<line_sep>super().__init__(name='latest_outcome')<block_end><def_stmt>reset self<arrow><none><block_start>self.outcome=<none><line_sep>self.type_based_action=<none><block_end><def_stmt>_perceived_stone self stone:graphs.Node<block_start>aligned_stone=self._stone_map.apply_inverse(LatentStone(np.array(stone.coords)))<line_sep><return>stones_and_potions.unalign(aligned_stone self._rotation)<block_end><def_stmt>potion_used self stone_ind:int potion_ind:int val:int start_stone:graphs.Node stone_inst:int potion:Potion end_stone:Optional[graphs.Node]<arrow><none><block_start><if_stmt>end_stone<is><not><none><block_start>aligned_stone=self._stone_map.apply_inverse(LatentStone(np.array(end_stone.coords)))<line_sep>self.outcome=stones_and_potions.unalign(aligned_stone self._rotation)<block_end>perceived_stone=self._perceived_stone(start_stone)<if_stmt>potion_ind<eq>CAULDRON<block_start>self.type_based_action=utils.TypeBasedAction(stone=perceived_stone cauldron=<true>)<block_end><else_stmt><block_start>perceived_potion=self._potion_map.apply_inverse(LatentPotion(potion.dimension potion.direction))<line_sep>self.type_based_action=utils.TypeBasedAction(stone=perceived_stone potion=perceived_potion)<block_end><block_end><def_stmt>failed_potion_use self stone_ind:int start_stone:graphs.Node stone_inst:int<block_start>"""Optional callback when a potion use is attempted but fails."""<line_sep>self.outcome=<none><line_sep>perceived_stone=self._perceived_stone(start_stone)<line_sep># This is an invalid action but the stone type can be used for
# visualization.
self.type_based_action=utils.TypeBasedAction(stone=perceived_stone)<block_end><block_end><class_stmt>RewardTracker(EventTracker)<block_start>"""Tracks the reward obtained."""<def_stmt>__init__ self reward_weights:RewardWeights<block_start>self._reward=0<line_sep>self._reward_weights=reward_weights<line_sep>super().__init__(name='reward')<block_end><def_stmt>potion_used self stone_ind:int potion_ind:int val:int start_stone:graphs.Node stone_inst:int potion:Potion end_stone:graphs.Node<arrow><none><block_start>"""Adds reward when a potion has been used.
Args:
stone_ind: The index (into the list of stones originally passed to the
EventTracker in construction) for the stone used in the potion.
potion_ind: The index (into the list of potions originally passed to the
EventTracker in construction) for the potion used. -1 refers to the
cauldron.
val: The value to record in this event (typically the frame number that
this event occurs). If this is not set then the value set will be
arbitrary but will preserve the order in which the potion_used and
stone_used functions are called.
start_stone: The stone node before the potion is used.
stone_inst: The instance id for the stone we are using.
potion: The potion used.
end_stone: The stone node after the potion is used.
"""<if_stmt>potion_ind<eq>CAULDRON<block_start>self._reward<augadd>self._reward_weights(start_stone.coords)<block_end><block_end>@property<def_stmt>reward self<arrow>int<block_start><return>self._reward<block_end><block_end><class_stmt>ItemsUsedTracker(EventTracker)<block_start>"""Tracks the stones and potions used."""<def_stmt>__init__ self<block_start>self.potions_used=[]<line_sep>self.stones_used=[]<line_sep>super().__init__(name='items_used')<block_end><def_stmt>potion_used self stone_ind:int potion_ind:int val:int start_stone:graphs.Node stone_inst:int potion:Potion end_stone:graphs.Node<arrow><none><block_start>"""Keeps lists of potions and stones which have been used.
Args:
stone_ind: The index (into the list of stones originally passed to the
EventTracker in construction) for the stone used in the potion.
potion_ind: The index (into the list of potions originally passed to the
EventTracker in construction) for the potion used. -1 refers to the
cauldron.
val: The value to record in this event (typically the frame number that
this event occurs). This is not relevant for this tracker.
start_stone: The stone node before the potion is used.
stone_inst: The instance id for the stone we are using.
potion: The potion used.
end_stone: The stone node after the potion is used.
"""<if_stmt>potion_ind<eq>CAULDRON<block_start>self.stones_used.append(stone_ind)<block_end><else_stmt><block_start>self.potions_used.append(potion_ind)<block_end><block_end>@property<def_stmt>num_potions_used self<arrow>int<block_start><return>len(self.potions_used)<block_end>@property<def_stmt>num_stones_used self<arrow>int<block_start><return>len(self.stones_used)<block_end><block_end><class_stmt>Event(abc.ABC)<block_start>"""Abstract base class for events we want to check in the event tracker."""<line_sep>@abc.abstractmethod<def_stmt>next_occurrence self events:np.ndarray<arrow>Tuple[int int Optional[Set[int]]]<block_start><pass><block_end><def_stmt>occurs self events:np.ndarray<arrow>bool<block_start>event_start,_,_=self.next_occurrence(events)<line_sep>not_occurred=event_start<eq>NEVER_USED<line_sep><return><not>not_occurred<block_end><block_end><class_stmt>SingleEvent(Event)<block_start>"""A single event where a stone is used with one of a set of potions."""<def_stmt>__init__ self stone_ind:int potion_inds:Set[int]<block_start>self.stone_ind=stone_ind<line_sep>self.potion_inds=potion_inds<block_end><def_stmt>next_occurrence self events:np.ndarray<arrow>Tuple[int int Optional[Set[int]]]<block_start>"""Gets the next occurrence of this event.
Args:
events: numpy array of stones against potions with the last entry
corresponding to the cauldron with a -1 in places where that stone was
never used with that potion and the time of usage otherwise.
Returns:
When event starts, when event ends, which potions were used by event.
"""<line_sep>frames_potions=[(events[self.stone_ind p] p)<for>p self.potion_inds<if>events[self.stone_ind p]<ge>0]<if_stmt><not>frames_potions<block_start><return>NEVER_USED NEVER_USED <none><block_end>frame,potion_used=min(frames_potions key=<lambda>v:v[0])<line_sep><return>frame frame {potion_used}<block_end><block_end><class_stmt>AnyOrderEvents(Event)<block_start>"""A set of events which can happen in any order."""<def_stmt>__init__ self set_events:Set[Event]<block_start>self.set_events=set_events<block_end><def_stmt>next_occurrence self events:np.ndarray<arrow>Tuple[int int Optional[Set[int]]]<block_start>"""Gets the next occurrence of this event.
Args:
events: numpy array of stones against potions with the last entry
corresponding to the cauldron with a -1 in places where that stone was
never used with that potion and the time of usage otherwise.
Returns:
When event starts, when event ends, which potions were used by event.
"""<line_sep>results=[e.next_occurrence(events)<for>e self.set_events]<if_stmt>any(v[0]<eq>NEVER_USED<for>v results)<block_start><return>NEVER_USED NEVER_USED <none><block_end><return>(min(v[0]<for>v results) max(v[1]<for>v results) set(itertools.chain.from_iterable([v[2]<for>v results])))<block_end><block_end><class_stmt>OrderedEvents(Event)<block_start>"""A list of events which must happen in the order passed in."""<def_stmt>__init__ self iter_events:Sequence[Event]<block_start>self.iter_events=iter_events<block_end><def_stmt>next_occurrence self events:np.ndarray<arrow>Tuple[int int Optional[Set[int]]]<block_start>"""Gets the next occurrence of this event.
Args:
events: numpy array of stones against potions with the last entry
corresponding to the cauldron with a -1 in places where that stone was
never used with that potion and the time of usage otherwise.
Returns:
When event starts, when event ends, which potions were used by event.
"""<line_sep>results=[e.next_occurrence(events)<for>e self.iter_events]<if_stmt>any(v[0]<eq>NEVER_USED<for>v results)<block_start><return>NEVER_USED NEVER_USED <none><block_end><for_stmt>end_first,start_next zip([v[1]<for>v results[:-1]] [v[0]<for>v results[1:]])# If the events happen on the same step this is allowed.
<block_start><if_stmt>end_first<g>start_next<block_start><return>NEVER_USED NEVER_USED <none><block_end><block_end><return>(results[0][0] results[-1][1] set(itertools.chain.from_iterable([v[2]<for>v results])))<block_end><block_end><def_stmt>replay_events game_state:GameState event_tracker:TrialTracker<arrow><none><block_start><for_stmt>stone_ind,potion_ind,val event_tracker.events_list()<block_start><if_stmt>potion_ind<eq>CAULDRON<block_start>game_state.stone_used(stone_ind=stone_ind val=val)<block_end><else_stmt><block_start>game_state.potion_used(stone_ind=stone_ind potion_ind=potion_ind val=val)<block_end><block_end><block_end><def_stmt>matrix_events_to_action_sequence graph:Graph items:utils.TrialItems matrix_events:MatrixEventTracker<arrow>List[ActionSequenceElement]<block_start>"""Takes events/output of evaluation analysis and creates an event tracker."""<line_sep>action_sequence_tracker=ActionSequenceTracker()<line_sep>game_state=GameState(trial_items=items graph=graph event_trackers=[action_sequence_tracker])<if_stmt>matrix_events.events.shape<ne>(items.num_stones items.num_potions+1)<block_start><raise>ValueError('Matrix of events shape does not match the number of stones and '<concat>'potions present.')<block_end>replay_events(game_state matrix_events)<line_sep><return>action_sequence_tracker.action_sequence<block_end> |
<import_from_stmt>collections deque<import_from_stmt>.types is_seqcont<line_sep>__all__=['tree_leaves' 'ltree_leaves' 'tree_nodes' 'ltree_nodes']<def_stmt>tree_leaves root follow=is_seqcont children=iter<block_start>"""Iterates over tree leaves."""<line_sep>q=deque([[root]])<while_stmt>q<block_start>node_iter=iter(q.pop())<for_stmt>sub node_iter<block_start><if_stmt>follow(sub)<block_start>q.append(node_iter)<line_sep>q.append(children(sub))<line_sep><break><block_end><else_stmt><block_start><yield>sub<block_end><block_end><block_end><block_end><def_stmt>ltree_leaves root follow=is_seqcont children=iter<block_start>"""Lists tree leaves."""<line_sep><return>list(tree_leaves(root follow children))<block_end><def_stmt>tree_nodes root follow=is_seqcont children=iter<block_start>"""Iterates over all tree nodes."""<line_sep>q=deque([[root]])<while_stmt>q<block_start>node_iter=iter(q.pop())<for_stmt>sub node_iter<block_start><yield>sub<if_stmt>follow(sub)<block_start>q.append(node_iter)<line_sep>q.append(children(sub))<line_sep><break><block_end><block_end><block_end><block_end><def_stmt>ltree_nodes root follow=is_seqcont children=iter<block_start>"""Lists all tree nodes."""<line_sep><return>list(tree_nodes(root follow children))<block_end> |
#
# Copyright 2015 <NAME>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
<import_stmt>unittest<import_stmt>testbase<import_stmt>util<import_stmt>redis_sock<import_stmt>config<import_stmt>default_cluster<line_sep># NOTE
# oom state persistence test is done by hand (hard to automate in this test framework)
#
<class_stmt>TestDenyOOM(unittest.TestCase)<block_start>cluster=config.clusters[0]<line_sep>leader_cm=config.clusters[0]['servers'][0]<line_sep>@classmethod<def_stmt>setUpClass cls<block_start>cls.conf_checker=default_cluster.initialize_starting_up_smr_before_redis(cls.cluster)<assert_stmt>cls.conf_checker<ne><none> 'failed to initialize cluster'<block_end>@classmethod<def_stmt>tearDownClass cls<block_start>testbase.defaultTearDown(cls)<block_end><def_stmt>setUp self<block_start>util.set_process_logfile_prefix('TestDenyOOM_%s'%self._testMethodName)<line_sep>server=self.cluster['servers'][0]<line_sep>self.redis=redis_sock.RedisClient(server['ip'] server['redis_port'])<block_end><def_stmt>tearDown self<block_start><if_stmt>self.redis<ne><none><block_start>self.redis.close()<block_end><return>0<block_end><def_stmt>check_oom self is_oom=<false><block_start>m,s1,s2=util.get_mss(self.cluster)<line_sep>mr=redis_sock.RedisClient(m['ip'] m['redis_port'])<line_sep>sr=redis_sock.RedisClient(s1['ip'] s1['redis_port'])<try_stmt><block_start>ok,data=mr.do_request("get nosuchkey_check_oom\r\n")<assert_stmt>(ok) ok<line_sep>ok,data=sr.do_request("get nosuchkey_check_oom\r\n")<assert_stmt>(ok) ok<line_sep>expected_ok=<not>is_oom<line_sep>ok,data=mr.do_request("set nosuchkey_check_oom 100\r\n")<assert_stmt>(ok<eq>expected_ok) (ok data)<line_sep>ok,data=sr.do_request("set nosuchkey_check_oom 100\r\n")<assert_stmt>(ok<eq>expected_ok) (ok data)<block_end><finally_stmt><block_start><if_stmt>mr<ne><none><block_start>mr.close()<block_end><if_stmt>sr<ne><none><block_start>sr.close()<block_end><block_end><block_end><def_stmt>test_basic self<block_start>util.print_frame()<line_sep>redis=self.redis<line_sep># set oom
ok,resp=redis.do_request('deny-oom 1\r\n')<assert_stmt>(resp<eq>'OK') resp<line_sep>self.check_oom(<true>)<line_sep># reset oom
ok,resp=redis.do_request('deny-oom 0\r\n')<assert_stmt>(resp<eq>'OK') resp<line_sep>self.check_oom(<false>)<block_end><block_end> |
'''
Copyright 2016, EMC, Inc.
Author(s):
<NAME>
'''<import_stmt>fit_path# NOQA: unused import
<import_stmt>os<import_stmt>sys<import_stmt>subprocess<import_stmt>fit_common<line_sep># Select test group here using @attr
<import_from_stmt>nose.plugins.attrib attr<line_sep>@attr(all=<true> regression=<true> smoke=<true>)<class_stmt>rackhd20_api_schemas(fit_common.unittest.TestCase)<block_start><def_stmt>test_api_20_schemas self<block_start>api_data=fit_common.rackhdapi("/api/2.0/schemas")<line_sep>self.assertEqual(api_data['status'] 200 'Incorrect HTTP return code, expected 200, got:'+str(api_data['status']))<for_stmt>item api_data['json']<block_start>self.assertEqual(fit_common.rackhdapi(item)['status'] 200 'Incorrect HTTP return code, expected 200, got:'+str(api_data['status']))<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>fit_common.unittest.main()<block_end> |
<import_stmt>unittest<import_from_stmt>checkov.common.models.enums CheckResult<import_from_stmt>checkov.terraform.checks.resource.aws.MSKClusterEncryption check<class_stmt>TestMSKClusterEncryption(unittest.TestCase)<block_start><def_stmt>test_failure self<block_start>resource_conf={"name":"test-project" }<line_sep>scan_result=check.scan_resource_conf(conf=resource_conf)<line_sep>self.assertEqual(CheckResult.FAILED scan_result)<block_end><def_stmt>test_failure_non_tls self<block_start>resource_conf={"name":"test-project" "encryption_info":[{"encryption_at_rest_kms_key_arn":"aws_kms_key.kms.arn" "encryption_in_transit":[{"client_broker":["PLAINTEXT"] "in_cluster":["true"] }] }] }<line_sep>scan_result=check.scan_resource_conf(conf=resource_conf)<line_sep>self.assertEqual(CheckResult.FAILED scan_result)<block_end><def_stmt>test_failure_in_cluster self<block_start>resource_conf={"name":"test-project" "encryption_info":[{"encryption_at_rest_kms_key_arn":["aws_kms_key.kms.arn"] "encryption_in_transit":[{"client_broker":["TLS"] "in_cluster":[<false>] }] }] }<line_sep>scan_result=check.scan_resource_conf(conf=resource_conf)<line_sep>self.assertEqual(CheckResult.FAILED scan_result)<block_end><def_stmt>test_success self<block_start>resource_conf={"name":"test-project" "encryption_info":[{"encryption_at_rest_kms_key_arn":["aws_kms_key.kms.arn"] "encryption_in_transit":[{"client_broker":["TLS"] "in_cluster":["true"] }] }] }<line_sep>scan_result=check.scan_resource_conf(conf=resource_conf)<line_sep>self.assertEqual(CheckResult.PASSED scan_result)<block_end><def_stmt>test_success_no_encrypt_block self<block_start>resource_conf={"name":"test-project" "encryption_info":[{"encryption_at_rest_kms_key_arn":["aws_kms_key.kms.arn"] }] }<line_sep>scan_result=check.scan_resource_conf(conf=resource_conf)<line_sep>self.assertEqual(CheckResult.PASSED scan_result)<block_end># Regression test for https://github.com/bridgecrewio/checkov/issues/747
<def_stmt>test_success_no_encryption_at_rest_kms_key_arn_specified self<block_start>resource_conf={"name":"test-project" "encryption_info":[{}] }<line_sep>scan_result=check.scan_resource_conf(conf=resource_conf)<line_sep>self.assertEqual(CheckResult.PASSED scan_result)<block_end># Regression test for https://github.com/bridgecrewio/checkov/issues/747
<def_stmt>test_success_encryption_in_transit_and_no_encryption_at_rest_kms_key_arn_specified self<block_start>resource_conf={"name":"test-project" "encryption_info":[{"encryption_in_transit":[{"client_broker":["TLS"] "in_cluster":[<true>] }] }] }<line_sep>scan_result=check.scan_resource_conf(conf=resource_conf)<line_sep>self.assertEqual(CheckResult.PASSED scan_result)<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end> |
# edsk_double_step.py
#
# Create a double-step EDSK image by doubling up cylinders.
#
# Written & released by <NAME> <<EMAIL>>
#
# This is free and unencumbered software released into the public domain.
# See the file COPYING for more details, or visit <http://unlicense.org>.
<import_stmt>struct sys random<def_stmt>main argv<block_start><if_stmt>len(argv)<ne>3<block_start>print("%s <input_file> <output_file>"%argv[0])<line_sep><return><block_end>in_f=open(argv[1] "rb")<line_sep>in_dat=in_f.read()<line_sep>out=bytearray(in_dat[:256])<line_sep># Check image size
<if_stmt>len(in_dat)<l>2048<block_start>print("Not a valid EDSK image - Too short")<line_sep><return><block_end># Check image header
sig,_,tracks,sides,tsz=struct.unpack("<34s14sBBH" in_dat[:52])<line_sep>out[48]=tracks<times>2# double up on number of cyls
tszs=in_dat[52:256]<line_sep>in_dat=in_dat[256:]<if_stmt>sig.startswith(b"MV - CPCEMU")<block_start><for_stmt>i range(tracks)<block_start>out<augadd>in_dat[:tsz<times>sides]<for_stmt>j range(sides)<block_start>out[16-tsz<times>(j+1)]=i<times>2# fix cyl#
<block_end>out<augadd>in_dat[:tsz<times>sides]<for_stmt>j range(sides)<block_start>out[16-tsz<times>(j+1)]=i<times>2+1# fix cyl#
<block_end>in_dat=in_dat[tsz<times>sides:]<block_end><block_end><elif_stmt>sig.startswith(b"EXTENDED CPC DSK File\r\nDisk-Info\r\n")<block_start><for_stmt>i range(tracks)<block_start><for_stmt>j range(2)<block_start>off=0<for_stmt>k range(sides)<block_start>tsz=tszs[k]<times>256<line_sep>out<augadd>in_dat[off:off+tsz]<line_sep>out[16-tsz]=i<times>2+j# fix cyl#
out[52+(i<times>2+j)<times>sides+k]=tszs[k]# fix track size
off<augadd>tsz<block_end><block_end>tszs=tszs[sides:]<line_sep>in_dat=in_dat[off:]<block_end><block_end><else_stmt><block_start>print("Not a valid EDSK image")<line_sep><return><block_end><with_stmt>open(argv[2] "wb")<as>f<block_start>f.write(out)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>main(sys.argv)<block_end> |
###########################################################################
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
<import_from_stmt>starthinker.util.bigquery query_to_view<import_from_stmt>starthinker.util.data get_rows<import_from_stmt>starthinker.util.data put_rows<import_from_stmt>starthinker.util.google_api API_DV360<import_from_stmt>starthinker.util.sheets sheets_clear<import_from_stmt>starthinker.task.cm_to_dv.log log_write<def_stmt>preview_li_clear config task<block_start>sheets_clear(task['auth_sheets'] task['sheet'] 'LI Preview' 'A2:AJ')<block_end><def_stmt>preview_li_load config task<block_start>preview_li_clear(config task)<line_sep># download LI Rules
put_rows(task["auth_bigquery"] {"bigquery":{"dataset":task["dataset"] "table":"SHEET_LI_Rules" "schema":[{"name":"CM_Campaign" "type":"STRING"} {"name":"DV_Campaign" "type":"STRING"} {"name":"Type" "type":"STRING"} {"name":"Budget_Allocation" "type":"STRING"} {"name":"Pacing_Type" "type":"STRING"} {"name":"Pacing_Period" "type":"STRING"} {"name":"Pacing_Period_Max_Spend" "type":"INTEGER"} {"name":"Pacing_Period_Max_Impressions" "type":"INTEGER"} {"name":"Frequency_Cap_Unlimited" "type":"BOOLEAN"} {"name":"Frequency_Cap_Time_Unit" "type":"STRING"} {"name":"Frequency_Cap_Time_Unit_Count" "type":"INTEGER"} {"name":"Frequency_Cap_Max_Impressions" "type":"INTEGER"} {"name":"Post_View_Count_Percent" "type":"INTEGER"} {"name":"Performance_Goal_Type" "type":"STRING"} {"name":"Performance_Goal_Amount" "type":"INTEGER"} {"name":"Max_Average_CPM_Amount" "type":"INTEGER"} {"name":"Custom_Bidding_Algorithm" "type":"STRING"} ] "format":"CSV"}} get_rows(task["auth_sheets"] {"sheets":{"sheet":task["sheet"] "tab":"LI Rules" "header":<false> "range":"A2:AQ"}}))<line_sep># create LI preview ( main logic )
query_to_view(task["auth_bigquery"] config.project task["dataset"] "PREVIEW_LI" """WITH
cm AS (
SELECT
CM_P.name,
CM_P.advertiserId,
CM_C.id AS campaignId,
CM_C.name AS campaignName,
CM_P.compatibility,
CM_PG.pricingSchedule.startDate AS budgetSegmentStartDate,
CM_PG.pricingSchedule.endDate AS budgetSegmentEndDate,
NULLIF(CAST(CM_PP.rateOrCostNanos / 1000000000 AS INT64), 0) AS bidAmount,
CM_PG.name AS ioDisplayName,
CM_P.name AS liDisplayName
FROM `{dataset}.CM_PlacementGroups` AS CM_PG, UNNEST(childPlacementIds) AS childPlacementId, UNNEST(CM_PG.pricingSchedule.pricingPeriods) AS CM_PP
JOIN `{dataset}.CM_Placements` CM_P
ON childPlacementId = CM_P.id
JOIN `{dataset}.CM_Campaigns` AS CM_C
ON CM_P.campaignId = CM_C.id
JOIN `{dataset}.CM_Sites` AS CM_S
ON CM_PG.siteId = CM_S.id AND CM_S.name = 'Google DBM'
WHERE
pg_ProductCode IS NOT NULL
AND p_ProductCode IS NOT NULL
),
sheet AS (
SELECT
CONCAT(dv_a.displayName, ' - ', dv_a.advertiserid) AS DV_Advertiser,
sheet.*
FROM `{dataset}.SHEET_LI_Rules` as sheet
LEFT JOIN `{dataset}.DV_Campaigns` AS dv_c
ON CAST(REGEXP_EXTRACT(sheet.DV_Campaign, r' - (\d+)$') AS INT64) = dv_c.campaignId
LEFT JOIN `{dataset}.DV_Advertisers` AS dv_a
ON dv_a.advertiserid=dv_c.advertiserId
),
li_flattened AS (
SELECT
lineItemId,
displayName,
MAX(postViewLookbackWindowDays) AS postViewLookbackWindowDays,
MAX(postClickLookbackWindowDays) AS postClickLookbackWindowDays,
ARRAY_TO_STRING(ARRAY_AGG(CAST(floodlightActivityConfig.floodlightActivityId AS STRING) IGNORE NULLS), ",") AS floodlightActivityIds,
ARRAY_TO_STRING(ARRAY_AGG(CAST(inventorySourceId AS STRING) IGNORE NULLS), ",") AS inventorySourceIds
FROM `{dataset}.DV_LineItems`
LEFT JOIN UNNEST(conversionCounting.floodlightActivityConfigs) AS floodlightActivityConfig
LEFT JOIN UNNEST(inventorySourceIds) AS inventorySourceId
GROUP BY 1,2
),
io_flattened AS (
SELECT
insertionOrderId,
displayName,
MIN(DATE(segments.dateRange.startDate.year, segments.dateRange.startDate.month, segments.dateRange.startDate.day)) AS budgetSegmentStartDate,
MAX(DATE(segments.dateRange.endDate.year, segments.dateRange.endDate.month, segments.dateRange.endDate.day)) AS budgetSegmentEndtDate,
FROM `{dataset}.DV_InsertionOrders`
LEFT JOIN UNNEST(budget.budgetSegments) AS segments
GROUP BY 1,2
)
SELECT
'PREVIEW' AS action,
sheet.DV_Advertiser,
sheet.DV_Campaign,
CONCAT(dv_io.displayName, ' - ', dv_io.insertionOrderId) as DV_InsertionOrder,
cm.liDisplayName AS displayName,
sheet.Type AS lineItemType,
'ENTITY_STATUS_DRAFT' AS entityStatus,
CAST(NULL AS INT64) AS bidAmount,
dv_io.budgetSegmentStartDate,
dv_io.budgetSegmentEndtDate,
sheet.Budget_Allocation AS lineItemBudgetAllocationType,
sheet.Pacing_Period AS pacingPeriod,
sheet.Pacing_Type AS pacingType,
sheet.Pacing_Period_Max_Spend AS dailyMaxMicros,
sheet.Pacing_Period_Max_Impressions AS dailyMaxImpressions,
sheet.Frequency_Cap_Unlimited AS frequencyCapUnlimited,
sheet.Frequency_Cap_Time_Unit AS frequencyCapTimeUnit,
sheet.Frequency_Cap_Time_Unit_Count AS frequencyCapTimeUnitCount,
sheet.Frequency_Cap_Max_Impressions AS frequencyCapMaxImpressions,
sheet.Post_View_Count_Percent AS postViewCountPercentageMillis,
90 AS postViewLookbackWindowDays,
90 AS postClickLookbackWindowDays,
sheet.Performance_Goal_Type AS biddingStrategyPerformanceGoalType,
sheet.Performance_Goal_Amount AS performanceGoalAmountMicros,
sheet.Max_Average_CPM_Amount AS maxAverageCpmBidAmountMicros,
sheet.Custom_Bidding_Algorithm,
dv_li.floodlightActivityIds,
dv_li.inventorySourceIds,
CAST(NULL AS STRING) AS Partner_Cost_CPM_Fee_Cost_Type,
CAST(NULL AS STRING) AS Partner_Cost_CPM_Fee_Invoice_Type,
CAST(NULL AS STRING) AS Partner_Cost_CPM_Fee_Amount,
CAST(NULL AS STRING) AS Partner_Cost_Media_Fee_Cost_Type,
CAST(NULL AS STRING) AS Partner_Cost_Media_Fee_Invoice_Type,
CAST(NULL AS STRING) AS Partner_Cost_Media_Fee_Percent
FROM sheet
LEFT JOIN cm
ON CAST(REGEXP_EXTRACT(sheet.CM_Campaign, r' - (\d+)$') AS INT64) = cm.campaignId
AND (
(SPLIT(cm.name,'_')[OFFSET(0)] = 'VID' AND LOWER(SPLIT(sheet.Type , '_')[SAFE_OFFSET(3)]) = 'video')
OR (NOT SPLIT(cm.name, '_')[OFFSET(0)] = 'VID' AND LOWER(SPLIT(sheet.Type, '_')[SAFE_OFFSET(3)]) = 'display')
)
LEFT JOIN io_flattened dv_io
ON dv_io.displayName = cm.ioDisplayName
LEFT JOIN li_flattened dv_li
ON dv_li.displayName = cm.liDisplayName
""".format(**task) legacy=<false>)<line_sep># create audit view
query_to_view(task["auth_bigquery"] config.project task["dataset"] "AUDIT_LI" """WITH
/* Check if sheet values are set */
INPUT_ERRORS AS (
SELECT
'LI Rules' AS Operation,
'Missing Sheet input value.' AS Error,
'ERROR' AS Severity,
CAST(NULL AS STRING) AS DV_Advertiser,
DV_Campaign,
CAST(NULL AS STRING) AS DV_InsertionOrder,
CM_Campaign AS DV_LineItem
FROM `{dataset}.SHEET_LI_Rules`
WHERE
CM_Campaign IS NULL
OR DV_Campaign IS NULL
OR Type IS NULL
OR Budget_Allocation IS NULL
OR Pacing_Period IS NULL
OR Pacing_Type IS NULL
OR Pacing_Period_Max_Spend IS NULL
OR Pacing_Period_Max_Impressions IS NULL
OR Frequency_Cap_Unlimited IS NULL
OR Frequency_Cap_Time_Unit IS NULL
OR Frequency_Cap_Time_Unit_Count IS NULL
OR Frequency_Cap_Max_Impressions IS NULL
OR Post_View_Count_Percent IS NULL
OR Performance_Goal_Type IS NULL
OR Performance_Goal_Amount IS NULL
OR Max_Average_CPM_Amount IS NULL
OR Custom_Bidding_Algorithm IS NULL
),
/* Check if duplicate LI */
DUPLICATE_ERRORS AS (
SELECT
'LI Rules' AS Operation,
'Duplicate Line Item.' AS Error,
'WARNING' AS Severity,
DV_Advertiser,
DV_Campaign,
DV_InsertionOrder,
DV_R.displayName AS DV_LineItem
FROM
`{dataset}.PREVIEW_LI` AS DV_R
LEFT JOIN (
SELECT
advertiserId,
campaignId,
insertionOrderId,
displayName
FROM `{dataset}.DV_LineItems`
GROUP BY 1,2,3,4
) AS DV_LI
ON DV_R.displayName = DV_LI.displayName
AND CAST(REGEXP_EXTRACT(DV_R.DV_Campaign, r' - (\d+)$') AS INT64) = DV_LI.campaignId
AND CAST(REGEXP_EXTRACT(DV_R.DV_InsertionOrder, r' - (\d+)$') AS INT64) = DV_LI.insertionOrderId
)
SELECT * FROM INPUT_ERRORS
UNION ALL
SELECT * FROM DUPLICATE_ERRORS
""".format(**task) legacy=<false>)<line_sep># write io preview to sheet
put_rows(task['auth_sheets'] {'sheets':{'sheet':task['sheet'] 'tab':'LI Preview' 'header':<false> 'range':'A2'}} get_rows(task['auth_bigquery'] {'bigquery':{'dataset':task['dataset'] 'query':"""SELECT
A.Severity,
A.Error,
P.*
FROM `{dataset}.PREVIEW_LI` AS P
LEFT JOIN (
SELECT
DV_Advertiser,
DV_Campaign,
DV_InsertionOrder,
DV_LineItem,
CASE
WHEN 'ERROR' IN UNNEST(ARRAY_AGG(Severity)) THEN 'ERROR'
WHEN 'WARNING' IN UNNEST(ARRAY_AGG(Severity)) THEN 'WARNING'
ELSE 'OK'
END AS Severity,
ARRAY_TO_STRING(ARRAY_AGG(CONCAT(Severity, ': ', Error)), '\\n') AS Error,
FROM `{dataset}.AUDIT_LI`
GROUP BY 1,2,3,4
) AS A
ON P.DV_Advertiser=A.DV_Advertiser
AND P.DV_Campaign=A.DV_Campaign
AND P.DV_InsertionOrder=A.DV_InsertionOrder
AND P.displayName=A.DV_LineItem
""".format(**task) }}))<block_end><def_stmt>preview_li_insert config task# download IO Inserts
<block_start>put_rows(task["auth_bigquery"] {"bigquery":{"dataset":task["dataset"] "table":"SHEET_LI_Inserts" "schema":[{"name":"status" "type":"STRING" "mode":"NULLABLE"} {"name":"error" "type":"STRING" "mode":"NULLABLE"} {"name":"action" "type":"STRING" "mode":"NULLABLE"} {"name":"advertiser" "type":"STRING" "mode":"NULLABLE"} {"name":"campaign" "type":"STRING" "mode":"NULLABLE"} {"name":"insertionOrder" "type":"STRING" "mode":"NULLABLE"} {"name":"displayName" "type":"STRING" "mode":"NULLABLE"} {"name":"lineItemType" "type":"STRING" "mode":"NULLABLE"} {"name":"entityStatus" "type":"STRING" "mode":"NULLABLE"} {"name":"bidAmount" "type":"INTEGER" "mode":"NULLABLE"} {"name":"budgetSegmentStartDate" "type":"DATE" "mode":"NULLABLE"} {"name":"budgetSegmentEndDate" "type":"DATE" "mode":"NULLABLE"} {"name":"lineItemBudgetAllocationType" "type":"STRING" "mode":"NULLABLE"} {"name":"pacingPeriod" "type":"STRING" "mode":"NULLABLE"} {"name":"pacingType" "type":"STRING" "mode":"NULLABLE"} {"name":"dailyMaxMicros" "type":"INTEGER" "mode":"NULLABLE"} {"name":"dailyMaxImpressions" "type":"INTEGER" "mode":"NULLABLE"} {"name":"frequencyCapUnlimited" "type":"BOOLEAN" "mode":"NULLABLE"} {"name":"frequencyCapTimeUnit" "type":"STRING" "mode":"NULLABLE"} {"name":"frequencyCapTimeUnitCount" "type":"INTEGER" "mode":"NULLABLE"} {"name":"frequencyCapMaxImpressions" "type":"INTEGER" "mode":"NULLABLE"} {"name":"postViewCountPercentageMillis" "type":"INTEGER" "mode":"NULLABLE"} {"name":"postViewLookbackWindowDays" "type":"INTEGER" "mode":"NULLABLE"} {"name":"postClickLookbackWindowDays" "type":"INTEGER" "mode":"NULLABLE"} {"name":"biddingStrategyPerformanceGoalType" "type":"STRING" "mode":"NULLABLE"} {"name":"performanceGoalAmountMicros" "type":"INTEGER" "mode":"NULLABLE"} {"name":"maxAverageCpmBidAmountMicros" "type":"INTEGER" "mode":"NULLABLE"} {"name":"customBiddingAlgorithm" "type":"STRING" "mode":"NULLABLE"} {"name":"floodlightActivityIds" "type":"STRING" "mode":"NULLABLE"} 
{"name":"inventorySourceIds" "type":"STRING" "mode":"NULLABLE"} {"name":"partnerCPMFeeCostType" "type":"STRING" "mode":"NULLABLE"} {"name":"partnerCPMFeeInvoiceType" "type":"STRING" "mode":"NULLABLE"} {"name":"partnerCPMFeeAmount" "type":"FLOAT" "mode":"NULLABLE"} {"name":"partnerMediaFeeCostType" "type":"STRING" "mode":"NULLABLE"} {"name":"partnerMediaFeeInvoiceType" "type":"STRING" "mode":"NULLABLE"} {"name":"partnerMediaFeePercent" "type":"FLOAT" "mode":"NULLABLE"} ] "format":"CSV"}} get_rows(task["auth_sheets"] {"sheets":{"sheet":task["sheet"] "tab":"LI Preview" "header":<false> "range":"A2:AJ"}}))<line_sep># create insert view
query_to_view(task["auth_bigquery"] config.project task["dataset"] "INSERT_LI" """
SELECT
REGEXP_EXTRACT(advertiser, r' - (\d+)$') AS advertiserId,
STRUCT(
REGEXP_EXTRACT(advertiser, r' - (\d+)$') AS advertiserId,
REGEXP_EXTRACT(campaign, r' - (\d+)$') AS campaignId,
REGEXP_EXTRACT(insertionOrder, r' - (\d+)$') AS insertionOrderId,
displayName,
lineItemType,
entityStatus,
ARRAY((
SELECT partnerCost FROM (
SELECT
IF(partnerCPMFeeAmount IS NOT NULL,
STRUCT(
'PARTNER_COST_FEE_TYPE_CPM_FEE' AS feeType,
partnerCPMFeeCostType AS costType,
partnerCPMFeeInvoiceType AS invoiceType,
COALESCE(partnerCPMFeeAmount, 0) * 1000000 AS feeAmount
), NULL) AS partnerCost
UNION ALL
SELECT
IF(partnerMediaFeePercent IS NOT NULL,
STRUCT(
'PARTNER_COST_FEE_TYPE_MEDIA_FEE' AS feeType,
partnerMediaFeeCostType AS costType,
partnerMediaFeeInvoiceType AS invoiceType,
COALESCE(partnerMediaFeePercent, 0) * 1000 AS feePercentageMillis
), NULL) AS partnerCost
) WHERE partnerCost IS NOT NULL)
) AS partnerCosts,
STRUCT( 'LINE_ITEM_FLIGHT_DATE_TYPE_INHERITED' AS flightDateType ) AS flight,
STRUCT ( lineItemBudgetAllocationType AS budgetAllocationType ) AS budget,
STRUCT (
pacingPeriod,
pacingType,
IF(dailyMaxMicros IS NOT NULL, dailyMaxMicros * 1000000, NULL) AS dailyMaxMicros,
IF(dailyMaxMicros IS NULL, dailyMaxImpressions, NULL) AS dailyMaxImpressions
) AS pacing,
STRUCT ( CAST(frequencyCapUnlimited AS BOOL) AS unlimited,
frequencyCapTimeUnit AS timeUnit,
CAST(frequencyCapTimeUnitCount AS INT64) AS timeUnitCount,
CAST(frequencyCapMaxImpressions AS INT64) AS maxImpressions
) AS frequencyCap,
STRUCT ( 'PARTNER_REVENUE_MODEL_MARKUP_TYPE_TOTAL_MEDIA_COST_MARKUP' AS markupType ) AS partnerRevenueModel,
STRUCT ( STRUCT ( CAST(bidAmount * 1000000 AS INT64) AS bidAmountMicros ) AS fixedBid ) AS bidStrategy,
STRUCT(
postViewCountPercentageMillis AS postViewCountPercentageMillis,
ARRAY(
SELECT
STRUCT(
floodlightActivityId,
postClickLookbackWindowDays,
postViewLookbackWindowDays
)
FROM UNNEST(SPLIT(floodlightActivityIds)) AS floodlightActivityId
) AS floodlightActivityConfigs
) AS conversionCounting
) AS body
FROM `{dataset}.SHEET_LI_Inserts`
WHERE action = 'INSERT'
""".format(**task) legacy=<false>)<line_sep># write LIs to API
<for_stmt>row get_rows(task["auth_bigquery"] {"bigquery":{"dataset":task["dataset"] "table":"INSERT_LI" }} as_object=<true>)<block_start><try_stmt><block_start>response=API_DV360(task['auth_dv']).advertisers().lineItems().create(**row).execute()<line_sep>log_write('LI' row response['lineItemId'] <none>)<block_end><except_stmt>Exception<as>e<block_start>log_write('LI' row <none> str(e))<block_end><block_end>log_write(config)<block_end> |
LOREM_IPSUM="""
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna
aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Sed ut perspiciatis
unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab
illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia
voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi
nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non
numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam,
quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem
vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum
fugiat quo voluptas nulla pariatur?
"""<line_sep> |
# -*- coding: utf-8 -*-
"""
Implementation of http://elementalselenium.com/tips/25-tables
"""<import_stmt>unittest<import_from_stmt>selenium webdriver<class_stmt>Tables(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>self.driver=webdriver.Firefox()<block_end><def_stmt>tearDown self<block_start>self.driver.quit()<block_end><def_stmt>test_sort_number_column_in_ascending_order_with_limited_locators self<block_start>driver=self.driver<line_sep>driver.get('http://the-internet.herokuapp.com/tables')<line_sep>driver.find_element_by_css_selector('#table1 thead tr th:nth-of-type(4)').click()<line_sep>due_column=driver.find_elements_by_css_selector('#table1 tbody tr td:nth-of-type(4)')<line_sep>dues=[float(due.text.replace('$' ''))<for>due due_column]<assert_stmt>dues<eq>sorted(dues)<block_end><def_stmt>test_sort_number_column_in_descending_order_with_limited_locators self<block_start>driver=self.driver<line_sep>driver.get('http://the-internet.herokuapp.com/tables')<line_sep>driver.find_element_by_css_selector('#table1 thead tr th:nth-of-type(4)').click()<line_sep>driver.find_element_by_css_selector('#table1 thead tr th:nth-of-type(4)').click()<line_sep>due_column=driver.find_elements_by_css_selector('#table1 tbody tr td:nth-of-type(4)')<line_sep>dues=[float(due.text.replace('$' ''))<for>due due_column]<assert_stmt>dues<eq>sorted(dues reverse=<true>)<block_end><def_stmt>test_sort_text_column_in_ascending_order_with_limited_locators self<block_start>driver=self.driver<line_sep>driver.get('http://the-internet.herokuapp.com/tables')<line_sep>driver.find_element_by_css_selector('#table1 thead tr th:nth-of-type(3)').click()<line_sep>email_column=driver.find_elements_by_css_selector('#table1 tbody tr td:nth-of-type(3)')<line_sep>emails=[email.text<for>email email_column]<assert_stmt>emails<eq>sorted(emails)<block_end><def_stmt>test_sort_number_column_in_ascending_order_with_helpful_locators self<block_start>driver=self.driver<line_sep>driver.get('http://the-internet.herokuapp.com/tables')<line_sep>driver.find_element_by_css_selector('#table2 thead 
.dues').click()<line_sep>due_column=driver.find_elements_by_css_selector('#table2 tbody .dues')<line_sep>dues=[float(due.text.replace('$' ''))<for>due due_column]<assert_stmt>dues<eq>sorted(dues)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>unittest.main()<block_end> |
<class_stmt>Solution<block_start><def_stmt>climbStairs self n:int<arrow>int<block_start><if_stmt>n<eq>1<or>n<eq>0<block_start><return>1<block_end>prev,curr=1 1<for_stmt>i range(2 n+1)<block_start>temp=curr<line_sep>curr<augadd>prev<line_sep>prev=temp<block_end><return>curr<block_end><block_end> |
# Generated by Django 2.1.5 on 2019-01-09 14:01
<import_from_stmt>django.db migrations<import_from_stmt>django.contrib.postgres.operations HStoreExtension<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[]<line_sep>operations=[HStoreExtension() ]<block_end> |
"""Analytics command line interface for DeepHyper.
It can be used with:
.. code-block:: console
$ deephyper-analytics --help
Command line to analysis the outputs produced by DeepHyper.
positional arguments:
{notebook,quickplot,topk}
Kind of analytics.
notebook Generate a notebook with different types of analysis
quickplot Tool to generate a quick 2D plot from file.
topk Print the top-k configurations.
optional arguments:
-h, --help show this help message and exit
"""<import_stmt>argparse<import_stmt>sys<import_from_stmt>deephyper.core.analytics _topk _quick_plot<def_stmt>create_parser <block_start>"""
:meta private:
"""<line_sep>parser=argparse.ArgumentParser(description="Command line to analysis the outputs produced by DeepHyper.")<line_sep>subparsers=parser.add_subparsers(help="Kind of analytics.")<line_sep>mapping=dict()<line_sep>modules=[_quick_plot # output quick plots
_topk]<for_stmt>module modules<block_start>name,func=module.add_subparser(subparsers)<line_sep>mapping[name]=func<block_end><return>parser mapping<block_end><def_stmt>main <block_start>"""
:meta private:
"""<line_sep>parser,mapping=create_parser()<line_sep>args=parser.parse_args()<line_sep>mapping[sys.argv[1]](**vars(args))<block_end> |
<def_stmt>extractBinhjamin item<block_start>"""
# Binhjamin
"""<line_sep>vol,chp,frag,postfix=extractVolChapterFragmentPostfix(item['title'])<if_stmt><not>(vol<or>chp<or>frag<or>postfix)<block_start><return><false><block_end><if_stmt>('SRKJ'<in>item['title']<or>'SRKJ-Sayonara Ryuu'<in>item['tags'])<and>(chp<or>vol)<block_start><return>buildReleaseMessageWithType(item '<NAME>' vol chp frag=frag postfix=postfix)<block_end><if_stmt>'Unborn'<in>item['title']<block_start><return>buildReleaseMessageWithType(item 'Unborn' vol chp frag=frag postfix=postfix)<block_end><if_stmt>'Bu ni Mi'<in>item['title']<or>'100 Years Of Martial Arts'<in>item['title']<block_start><return>buildReleaseMessageWithType(item '100 Years Of Martial Arts' vol chp frag=frag postfix=postfix)<block_end><return><false><block_end> |
from nalaf.features.relations import EdgeFeatureGenerator
from nltk.stem import PorterStemmer


class LinearDistanceFeatureGenerator(EdgeFeatureGenerator):
    """
    The absolute token distance between the two entities in the edge.

    Adds a thresholded feature stating whether the distance is greater or
    lesser than ``distance``, plus the raw distance itself as a weighted
    feature.

    :param distance: the number of tokens between the two entities, default 5
    :type distance: int
    """

    def __init__(self, distance=5,
                 prefix_entity_linear_distance_greater_than=None,
                 prefix_entity_linear_distance_lesser_than=None,
                 prefix_entity_linear_distance=None):
        self.distance = distance
        self.prefix_entity_linear_distance_greater_than = prefix_entity_linear_distance_greater_than
        self.prefix_entity_linear_distance_lesser_than = prefix_entity_linear_distance_lesser_than
        self.prefix_entity_linear_distance = prefix_entity_linear_distance

    def generate(self, dataset, feature_set, is_training_mode):
        for edge in dataset.edges():
            entity1_number = edge.entity1.head_token.features['id']
            entity2_number = edge.entity2.head_token.features['id']
            distance = abs(entity1_number - entity2_number)

            # BUG FIX: the feature names previously embedded a hard-coded
            # threshold of 5 even when a different ``distance`` was
            # configured; use the configured threshold so the feature name
            # matches the actual comparison (identical for the default 5).
            if distance > self.distance:
                feature_name = self.gen_prefix_feat_name("prefix_entity_linear_distance_greater_than", self.distance)
            else:
                feature_name = self.gen_prefix_feat_name("prefix_entity_linear_distance_lesser_than", self.distance)
            self.add_to_feature_set(feature_set, is_training_mode, edge, feature_name)

            feature_name = self.gen_prefix_feat_name("prefix_entity_linear_distance")
            self.add_to_feature_set(feature_set, is_training_mode, edge, feature_name, value=distance)


class EntityOrderFeatureGenerator(EdgeFeatureGenerator):
    """
    The order of the entities in the sentence: whether entity1 occurs
    before entity2 or vice versa.
    """

    def __init__(self, prefix_order_entity1_entity2, prefix_order_entity2_entity1):
        self.prefix_order_entity1_entity2 = prefix_order_entity1_entity2
        self.prefix_order_entity2_entity1 = prefix_order_entity2_entity1

    def generate(self, dataset, feature_set, is_training_mode):
        for edge in dataset.edges():
            if edge.entity1.offset < edge.entity2.offset:
                feature_name = self.gen_prefix_feat_name("prefix_order_entity1_entity2")
            else:
                feature_name = self.gen_prefix_feat_name("prefix_order_entity2_entity1")
            self.add_to_feature_set(feature_set, is_training_mode, edge, feature_name)


class IntermediateTokensFeatureGenerator(EdgeFeatureGenerator):
    """
    Generate the bag of words representation, masked text, stemmed text and
    parts of speech tag for each of the tokens present between two entities
    in an edge, both direction-specific (fwd/bkd) and direction-agnostic.
    """

    def __init__(
            self,
            prefix_fwd_bow_intermediate=None,
            prefix_fwd_bow_intermediate_masked=None,
            prefix_fwd_stem_intermediate=None,
            prefix_fwd_pos_intermediate=None,

            prefix_bkd_bow_intermediate=None,
            prefix_bkd_bow_intermediate_masked=None,
            prefix_bkd_stem_intermediate=None,
            prefix_bkd_pos_intermediate=None,

            prefix_bow_intermediate=None,
            prefix_bow_intermediate_masked=None,
            prefix_stem_intermediate=None,
            prefix_pos_intermediate=None,
    ):
        self.stemmer = PorterStemmer()
        """an instance of PorterStemmer"""

        self.prefix_fwd_bow_intermediate = prefix_fwd_bow_intermediate
        self.prefix_fwd_bow_intermediate_masked = prefix_fwd_bow_intermediate_masked
        self.prefix_fwd_stem_intermediate = prefix_fwd_stem_intermediate
        self.prefix_fwd_pos_intermediate = prefix_fwd_pos_intermediate
        self.prefix_bkd_bow_intermediate = prefix_bkd_bow_intermediate
        self.prefix_bkd_bow_intermediate_masked = prefix_bkd_bow_intermediate_masked
        self.prefix_bkd_stem_intermediate = prefix_bkd_stem_intermediate
        self.prefix_bkd_pos_intermediate = prefix_bkd_pos_intermediate
        self.prefix_bow_intermediate = prefix_bow_intermediate
        self.prefix_bow_intermediate_masked = prefix_bow_intermediate_masked
        self.prefix_stem_intermediate = prefix_stem_intermediate
        self.prefix_pos_intermediate = prefix_pos_intermediate

    def _add_token_features(self, feature_set, is_training_mode, edge, token, names):
        """Add the bow / masked-bow / stem / POS features for one token.

        ``names`` is a 4-tuple of the prefix-attribute names to use.
        """
        bow_name, masked_name, stem_name, pos_name = names

        feature_name = self.gen_prefix_feat_name(bow_name, token.word)
        self.add_to_feature_set(feature_set, is_training_mode, edge, feature_name)

        feature_name = self.gen_prefix_feat_name(masked_name, token.masked_text(edge.same_part))
        self.add_to_feature_set(feature_set, is_training_mode, edge, feature_name)

        feature_name = self.gen_prefix_feat_name(stem_name, self.stemmer.stem(token.word))
        self.add_to_feature_set(feature_set, is_training_mode, edge, feature_name)

        feature_name = self.gen_prefix_feat_name(pos_name, token.features['pos'])
        self.add_to_feature_set(feature_set, is_training_mode, edge, feature_name)

    def generate(self, dataset, feature_set, is_training_mode):
        for edge in dataset.edges():
            sentence = edge.same_part.sentences[edge.same_sentence_id]
            id1 = edge.entity1.head_token.features['id']
            id2 = edge.entity2.head_token.features['id']

            # Direction of the edge decides which directional family to use.
            if id1 < id2:
                first, second = id1, id2
                directional = ('prefix_fwd_bow_intermediate',
                               'prefix_fwd_bow_intermediate_masked',
                               'prefix_fwd_stem_intermediate',
                               'prefix_fwd_pos_intermediate')
            else:
                first, second = id2, id1
                directional = ('prefix_bkd_bow_intermediate',
                               'prefix_bkd_bow_intermediate_masked',
                               'prefix_bkd_stem_intermediate',
                               'prefix_bkd_pos_intermediate')

            undirected = ('prefix_bow_intermediate',
                          'prefix_bow_intermediate_masked',
                          'prefix_stem_intermediate',
                          'prefix_pos_intermediate')

            for i in range(first + 1, second):
                self._add_token_features(feature_set, is_training_mode, edge, sentence[i], directional)

            for i in range(first + 1, second):
                self._add_token_features(feature_set, is_training_mode, edge, sentence[i], undirected)
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, <NAME>PORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch BERT model."""<import_stmt>logging<import_stmt>numpy<as>np<import_stmt>torch<import_from_stmt>torch nn<import_stmt>torch.nn.functional<as>F<import_stmt>math<import_from_stmt>modules.until_config PretrainedConfig<line_sep>logger=logging.getLogger(__name__)<def_stmt>gelu x<block_start>"""Implementation of the gelu activation function.
For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
"""<line_sep><return>x<times>0.5<times>(1.0+torch.erf(x/math.sqrt(2.0)))<block_end><def_stmt>swish x<block_start><return>x<times>torch.sigmoid(x)<block_end>ACT2FN={"gelu":gelu "relu":torch.nn.functional.relu "swish":swish}<class_stmt>LayerNorm(nn.Module)<block_start><def_stmt>__init__ self hidden_size eps=1e-12<block_start>"""Construct a layernorm module in the TF style (epsilon inside the square root).
"""<line_sep>super(LayerNorm self).__init__()<line_sep>self.weight=nn.Parameter(torch.ones(hidden_size))<line_sep>self.bias=nn.Parameter(torch.zeros(hidden_size))<line_sep>self.variance_epsilon=eps<block_end><def_stmt>forward self x<block_start>u=x.mean(-1 keepdim=<true>)<line_sep>s=(x-u).pow(2).mean(-1 keepdim=<true>)<line_sep>x=(x-u)/torch.sqrt(s+self.variance_epsilon)<line_sep><return>self.weight<times>x+self.bias<block_end><block_end><class_stmt>PreTrainedModel(nn.Module)<block_start>""" An abstract class to handle weights initialization and
from modules.until_config import PretrainedConfig


class PreTrainedModel(nn.Module):
    """ An abstract class to handle weights initialization and
    a simple interface for downloading and loading pretrained models.
    """

    def __init__(self, config, *inputs, **kwargs):
        super(PreTrainedModel, self).__init__()
        if not isinstance(config, PretrainedConfig):
            raise ValueError(
                "Parameter config in `{}(config)` should be an instance of class `PretrainedConfig`. "
                "To create a model from a Google pretrained model use "
                "`model = {}.from_pretrained(PRETRAINED_MODEL_NAME)`".format(
                    self.__class__.__name__, self.__class__.__name__
                ))
        self.config = config

    def init_weights(self, module):
        """ Initialize the weights of one submodule (meant for ``Module.apply``). """
        if isinstance(module, (nn.Linear, nn.Embedding)):
            # Slightly different from the TF version which uses truncated_normal for initialization
            # cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
        elif isinstance(module, LayerNorm):
            # Legacy checkpoints may expose gamma/beta instead of weight/bias.
            if 'beta' in dir(module) and 'gamma' in dir(module):
                module.beta.data.zero_()
                module.gamma.data.fill_(1.0)
            else:
                module.bias.data.zero_()
                module.weight.data.fill_(1.0)
        if isinstance(module, nn.Linear) and module.bias is not None:
            module.bias.data.zero_()

    def resize_token_embeddings(self, new_num_tokens=None):
        raise NotImplementedError

    @classmethod
    def init_preweight(cls, model, state_dict, prefix=None, task_config=None):
        """Load ``state_dict`` into ``model``, renaming legacy TF-style
        gamma/beta keys, optionally prepending ``prefix`` to every key,
        and logging any missing/unexpected parameters."""
        # Rename legacy TF-style parameter names.
        old_keys, new_keys = [], []
        for key in state_dict.keys():
            new_key = None
            if 'gamma' in key:
                new_key = key.replace('gamma', 'weight')
            if 'beta' in key:
                new_key = key.replace('beta', 'bias')
            if new_key:
                old_keys.append(key)
                new_keys.append(new_key)
        for old_key, new_key in zip(old_keys, new_keys):
            state_dict[new_key] = state_dict.pop(old_key)

        if prefix is not None:
            for key in list(state_dict.keys()):
                state_dict[prefix + key] = state_dict.pop(key)

        missing_keys = []
        unexpected_keys = []
        error_msgs = []
        # copy state_dict so _load_from_state_dict can modify it
        metadata = getattr(state_dict, '_metadata', None)
        state_dict = state_dict.copy()
        if metadata is not None:
            state_dict._metadata = metadata

        def load(module, prefix=''):
            # Recursively push matching entries of state_dict into each submodule.
            local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
            module._load_from_state_dict(
                state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs)
            for name, child in module._modules.items():
                if child is not None:
                    load(child, prefix + name + '.')

        load(model, prefix='')

        if prefix is None and (task_config is None or task_config.local_rank == 0):
            logger.info("-" * 20)
            if len(missing_keys) > 0:
                logger.info("Weights of {} not initialized from pretrained model: {}"
                            .format(model.__class__.__name__, "\n " + "\n ".join(missing_keys)))
            if len(unexpected_keys) > 0:
                logger.info("Weights from pretrained model not used in {}: {}"
                            .format(model.__class__.__name__, "\n " + "\n ".join(unexpected_keys)))
            if len(error_msgs) > 0:
                logger.error("Weights from pretrained model cause errors in {}: {}"
                             .format(model.__class__.__name__, "\n " + "\n ".join(error_msgs)))

        return model

    @property
    def dtype(self):
        """
        :obj:`torch.dtype`: The dtype of the module (assuming that all the module parameters have the same dtype).
        """
        try:
            return next(self.parameters()).dtype
        except StopIteration:
            # For nn.DataParallel compatibility in PyTorch 1.5
            def find_tensor_attributes(module: nn.Module):
                tuples = [(k, v) for k, v in module.__dict__.items() if torch.is_tensor(v)]
                return tuples

            gen = self._named_members(get_members_fn=find_tensor_attributes)
            first_tuple = next(gen)
            return first_tuple[1].dtype

    @classmethod
    def from_pretrained(cls, config, state_dict=None, *inputs, **kwargs):
        """
        Instantiate a PreTrainedModel from a pre-trained model file or a pytorch state dict.
        Download and cache the pre-trained model file if needed.
        """
        # Instantiate model.
        model = cls(config, *inputs, **kwargs)
        if state_dict is None:
            return model
        model = cls.init_preweight(model, state_dict)
        return model
###### LOSS FUNCTION #############
##################################


class CrossEn(nn.Module):
    """Cross-entropy over a similarity matrix: the diagonal entries are the
    positive pairs, every row is softmax-normalised."""

    def __init__(self,):
        super(CrossEn, self).__init__()

    def forward(self, sim_matrix):
        log_probs = F.log_softmax(sim_matrix, dim=-1)
        positive_log_probs = torch.diag(log_probs)
        return (-positive_log_probs).mean()


class MILNCELoss(nn.Module):
    """Multiple-Instance-Learning NCE loss over a (batch*n_pair) similarity matrix."""

    def __init__(self, batch_size=1, n_pair=1):
        super(MILNCELoss, self).__init__()
        self.batch_size = batch_size
        self.n_pair = n_pair
        # torch.bool only exists from torch >= 1.3; fall back to uint8 masks.
        torch_major_minor = float(".".join(torch.__version__.split(".")[:2]))
        self.bool_dtype = torch.bool if torch_major_minor >= 1.3 else torch.uint8

    def forward(self, sim_matrix):
        # Block-diagonal mask marking same-group (positive) pairs.
        positive_mask = np.kron(np.eye(self.batch_size), np.ones((self.n_pair, self.n_pair)))
        positive_mask = torch.tensor(positive_mask).float().to(sim_matrix.device)

        from_text_matrix = sim_matrix + positive_mask * -1e12
        from_video_matrix = sim_matrix.transpose(1, 0)

        joint_matrix = torch.cat([from_video_matrix, from_text_matrix], dim=-1)
        log_probs = F.log_softmax(joint_matrix, dim=-1)

        positive_log_mask = torch.cat([positive_mask, torch.zeros_like(positive_mask)], dim=-1)
        masked_log_probs = log_probs + (torch.ones_like(positive_log_mask) - positive_log_mask) * -1e12
        per_row_loss = -torch.logsumexp(masked_log_probs, dim=-1)

        # Keep only the middle pair of each group for the final average.
        selector = torch.zeros_like(per_row_loss)
        middle_indices = torch.arange(self.batch_size).to(sim_matrix.device) * self.n_pair + (self.n_pair // 2)
        selector[middle_indices] = 1
        return per_row_loss.masked_select(selector.to(dtype=self.bool_dtype)).mean()


class MaxMarginRankingLoss(nn.Module):
    """Bidirectional max-margin ranking loss with optional negative re-weighting."""

    def __init__(self, margin=1.0, negative_weighting=False, batch_size=1, n_pair=1, hard_negative_rate=0.5):
        super(MaxMarginRankingLoss, self).__init__()
        self.margin = margin
        self.n_pair = n_pair
        self.batch_size = batch_size
        easy_negative_rate = 1 - hard_negative_rate
        self.easy_negative_rate = easy_negative_rate
        self.negative_weighting = negative_weighting
        if n_pair > 1 and batch_size > 1:
            # Precompute the negative-weighting mask.
            alpha = easy_negative_rate / ((batch_size - 1) * (1 - easy_negative_rate))
            mm_mask = (1 - alpha) * np.eye(self.batch_size) + alpha
            mm_mask = np.kron(mm_mask, np.ones((n_pair, n_pair)))
            mm_mask = torch.tensor(mm_mask) * (batch_size * (1 - easy_negative_rate))
            self.mm_mask = mm_mask.float()

    def forward(self, x):
        positives = torch.diag(x)
        hinge = F.relu(self.margin + x - positives.view(-1, 1)) \
            + F.relu(self.margin + x - positives.view(1, -1))
        if self.negative_weighting and self.n_pair > 1 and self.batch_size > 1:
            hinge = hinge * self.mm_mask.to(hinge.device)
        return hinge.mean()
"""This problem was asked by Dropbox.
Given a list of words, determine whether the words can be chained to form a circle.
A word X can be placed in front of another word Y in a circle if the last character
of X is same as the first character of Y.
For example, the words ['chair', 'height', 'racket', touch', 'tunic'] can form the
following circle: chair --> racket --> touch --> height --> tunic --> chair."""<line_sep> |
def readFlat(filename, delimiter):
    """Read a delimiter-separated numeric text file into a list of rows.

    :param filename: path of the file to read
    :param delimiter: field separator passed to ``str.split``
    :return: list of rows, each a list of floats

    Fixes over the original:
      * the file handle is closed via ``with`` (it previously leaked);
      * rows are materialised as lists of floats — under Python 3 the old
        code stored lazy one-shot ``map`` objects instead of numbers;
      * whitespace-only fields (e.g. a lone trailing newline) are skipped
        instead of crashing ``float``.
    """
    rows = []
    with open(filename) as source:
        for line in source:
            rows.append([float(field) for field in line.split(delimiter) if field.strip()])
    return rows
import os
from typing import Optional, Dict, Any, List

from platypush.message.response.printer.cups import PrinterResponse, PrintersResponse, PrinterJobAddedResponse
from platypush.plugins import Plugin, action


class PrinterCupsPlugin(Plugin):
    """
    A plugin to interact with a CUPS printer server.

    Requires:

        - **pycups** (``pip install pycups``)

    """

    def __init__(self, host: str = 'localhost', printer: Optional[str] = None, **kwargs):
        """
        :param host: CUPS host IP/name (default: localhost).
        :param printer: Default printer name that should be used.
        """
        super().__init__(**kwargs)
        self.host = host
        self.printer = printer

    def _get_connection(self, host: Optional[str] = None):
        """Open a connection to the CUPS server (pycups is imported lazily)."""
        # noinspection PyPackageRequirements
        import cups
        connection = cups.Connection(host=host or self.host)
        return connection

    def _get_printer(self, printer: Optional[str] = None):
        """Resolve the printer to use, falling back to the configured default."""
        printer = printer or self.printer
        assert printer, 'No printer specified nor default printer configured'
        return printer

    @action
    def get_printers(self, host: Optional[str] = None) -> PrintersResponse:
        """
        Get the list of printers registered on a CUPS server.

        :param host: CUPS server host IP/name (default: default configured ``host``).
        :return: :class:`platypush.message.response.printer.cups.PrintersResponse`,
            wrapping one :class:`PrinterResponse` per registered printer.
        """
        conn = self._get_connection(host)
        return PrintersResponse(printers=[
            PrinterResponse(
                name=name,
                printer_type=printer.get('printer-type'),
                info=printer.get('printer-info'),
                uri=printer.get('device-uri'),
                state=printer.get('printer-state'),
                is_shared=printer.get('printer-is-shared'),
                state_message=printer.get('printer-state-message'),
                state_reasons=printer.get('printer-state-reasons', []),
                location=printer.get('printer-location'),
                uri_supported=printer.get('printer-uri-supported'),
                make_and_model=printer.get('printer-make-and-model'),
            )
            for name, printer in conn.getPrinters().items()
        ])

    @action
    def print_test_page(self, printer: Optional[str] = None, host: Optional[str] = None) -> PrinterJobAddedResponse:
        """
        Print the CUPS test page.

        :param printer: Printer name (default: default configured ``printer``).
        :param host: CUPS server IP/name (default: default configured ``host``).
        """
        conn = self._get_connection(host)
        printer = self._get_printer(printer)
        job_id = conn.printTestPage(printer)
        return PrinterJobAddedResponse(printer=printer, job_id=job_id)

    @action
    def print_file(self,
                   filename: str,
                   printer: Optional[str] = None,
                   host: Optional[str] = None,
                   title: Optional[str] = None,
                   options: Optional[Dict[str, Any]] = None) -> PrinterJobAddedResponse:
        """
        Print a file.

        :param filename: Path to the file to print.
        :param printer: Printer name (default: default configured ``printer``).
        :param host: CUPS server IP/name (default: default configured ``host``).
        :param title: Print title.
        :param options: Extra CUPS name->value options.
        """
        filename = os.path.abspath(os.path.expanduser(filename))
        conn = self._get_connection(host)
        printer = self._get_printer(printer)
        job_id = conn.printFile(printer, filename=filename, title=title or '', options=options or {})
        return PrinterJobAddedResponse(printer=printer, job_id=job_id)

    @action
    def print_files(self,
                    filenames: List[str],
                    printer: Optional[str] = None,
                    host: Optional[str] = None,
                    title: Optional[str] = None,
                    options: Optional[Dict[str, Any]] = None) -> PrinterJobAddedResponse:
        """
        Print a list of files.

        :param filenames: Paths to the files to print.
        :param printer: Printer name (default: default configured ``printer``).
        :param host: CUPS server IP/name (default: default configured ``host``).
        :param title: Print title.
        :param options: Extra CUPS name->value options.
        """
        filenames = [os.path.abspath(os.path.expanduser(f)) for f in filenames]
        conn = self._get_connection(host)
        printer = self._get_printer(printer)
        job_id = conn.printFiles(printer, filenames=filenames, title=title or '', options=options or {})
        return PrinterJobAddedResponse(printer=printer, job_id=job_id)

    @action
    def add_printer(self, name: str, ppd_file: str, info: str, location: Optional[str] = None,
                    host: Optional[str] = None):
        """
        Add a printer.

        :param name: Printer name - alphanumeric + underscore characters only.
        :param ppd_file: Path to the PPD file with the printer information and configuration.
        :param host: CUPS server IP/name (default: default configured ``host``).
        :param info: Human-readable information about the printer.
        :param location: Human-readable printer location info.
        """
        conn = self._get_connection(host)
        ppd_file = os.path.abspath(os.path.expanduser(ppd_file))
        # noinspection PyArgumentList
        conn.addPrinter(name=name, filename=ppd_file, info=info, location=location)

    @action
    def delete_printer(self, printer: str, host: Optional[str] = None):
        """
        Delete a printer from a CUPS server.

        :param printer: Printer name.
        :param host: CUPS server IP/name (default: default configured ``host``).
        """
        conn = self._get_connection(host)
        conn.deletePrinter(printer)

    @action
    def enable_printer(self, printer: Optional[str] = None, host: Optional[str] = None):
        """
        Enable a printer on a CUPS server.

        :param printer: Printer name (default: default configured ``printer``).
        :param host: CUPS server IP/name (default: default configured ``host``).
        """
        # BUG FIX: ``printer`` previously had no default value, so calling this
        # action without an explicit printer raised a TypeError instead of
        # falling back to the configured default like every sibling action.
        conn = self._get_connection(host)
        printer = self._get_printer(printer)
        conn.enablePrinter(printer)

    @action
    def disable_printer(self, printer: Optional[str] = None, host: Optional[str] = None):
        """
        Disable a printer on a CUPS server.

        :param printer: Printer name.
        :param host: CUPS server IP/name (default: default configured ``host``).
        """
        conn = self._get_connection(host)
        printer = self._get_printer(printer)
        conn.disablePrinter(printer)

    @action
    def get_jobs(self, host: Optional[str] = None) -> Dict[int, Dict[str, Any]]:
        """
        Get the list of active jobs.

        :param host: CUPS server IP/name (default: default configured ``host``).
        :return: A job_id -> job_info dict.
        """
        conn = self._get_connection(host)
        return conn.getJobs()

    @action
    def accept_jobs(self, printer: Optional[str] = None, host: Optional[str] = None):
        """
        Start accepting jobs on a printer.

        :param printer: Printer name.
        :param host: CUPS server IP/name (default: default configured ``host``).
        """
        conn = self._get_connection(host)
        printer = self._get_printer(printer)
        conn.acceptJobs(printer)

    @action
    def reject_jobs(self, printer: Optional[str] = None, host: Optional[str] = None):
        """
        Start rejecting jobs on a printer.

        :param printer: Printer name.
        :param host: CUPS server IP/name (default: default configured ``host``).
        """
        conn = self._get_connection(host)
        printer = self._get_printer(printer)
        conn.rejectJobs(printer)

    @action
    def cancel_job(self, job_id: int, purge_job: bool = False, host: Optional[str] = None):
        """
        Cancel a printer job.

        :param job_id: Job ID to cancel.
        :param purge_job: Also remove the job from the server (default: False).
        :param host: CUPS server IP/name (default: default configured ``host``).
        """
        conn = self._get_connection(host)
        conn.cancelJob(job_id, purge_job=purge_job)

    @action
    def move_job(self,
                 job_id: int,
                 source_printer_uri: str,
                 target_printer_uri: str,
                 host: Optional[str] = None):
        """
        Move a job to another printer/URI.

        :param job_id: Job ID to move.
        :param source_printer_uri: Source printer URI.
        :param target_printer_uri: Target printer URI.
        :param host: CUPS server IP/name (default: default configured ``host``).
        """
        conn = self._get_connection(host)
        conn.moveJob(printer_uri=source_printer_uri, job_id=job_id, job_printer_uri=target_printer_uri)

    @action
    def finish_document(self, printer: Optional[str] = None, host: Optional[str] = None):
        """
        Finish sending a document to a printer.

        :param printer: Printer name (default: default configured ``printer``).
        :param host: CUPS server IP/name (default: default configured ``host``).
        """
        conn = self._get_connection(host)
        printer = self._get_printer(printer)
        conn.finishDocument(printer)

    @action
    def add_printer_to_class(self, printer_class: str, printer: Optional[str] = None, host: Optional[str] = None):
        """
        Add a printer to a class.

        :param printer_class: Class name.
        :param printer: Printer name.
        :param host: CUPS server IP/name (default: default configured ``host``).
        """
        conn = self._get_connection(host)
        printer = self._get_printer(printer)
        conn.addPrinterToClass(printer, printer_class)

    @action
    def delete_printer_from_class(self, printer_class: str, printer: Optional[str] = None,
                                  host: Optional[str] = None):
        """
        Delete a printer from a class.

        :param printer_class: Class name.
        :param printer: Printer name.
        :param host: CUPS server IP/name (default: default configured ``host``).
        """
        conn = self._get_connection(host)
        printer = self._get_printer(printer)
        conn.deletePrinterFromClass(printer, printer_class)

    @action
    def get_classes(self, host: Optional[str] = None) -> Dict[str, Dict[str, Any]]:
        """
        Get the list of classes on a CUPS server.

        :param host: CUPS server IP/name (default: default configured ``host``).
        :return: dict - class_name -> class_info.
        """
        conn = self._get_connection(host)
        return conn.getClasses()


# vim:sw=4:ts=4:et:
<import_from_stmt>.resnet_vlbert_for_pretraining ResNetVLBERTForPretraining<import_from_stmt>.resnet_vlbert_for_pretraining_multitask ResNetVLBERTForPretrainingMultitask<import_from_stmt>.resnet_vlbert_for_attention_vis ResNetVLBERTForAttentionVis<line_sep> |
# -*- coding: utf-8 -*-
<import_stmt>markdown<import_from_stmt>django.utils timezone<import_from_stmt>flask request<import_from_stmt>api.caching.tasks ban_url<import_from_stmt>osf.models Guid<import_from_stmt>framework.postcommit_tasks.handlers enqueue_postcommit_task<import_from_stmt>website settings<import_from_stmt>addons.base.signals file_updated<import_from_stmt>osf.models BaseFileNode TrashedFileNode<import_from_stmt>osf.models Comment<import_from_stmt>website.notifications.constants PROVIDERS<import_from_stmt>website.notifications.emails notify notify_mentions<import_from_stmt>website.project.decorators must_be_contributor_or_public<import_from_stmt>osf.models Node<import_from_stmt>website.project.signals comment_added mention_added<line_sep>@file_updated.connect<def_stmt>update_file_guid_referent self target event_type payload user=<none><block_start><if_stmt>event_type<not><in>('addon_file_moved' 'addon_file_renamed')<block_start><return><block_end># Nothing to do
source,destination=payload['source'] payload['destination']<line_sep>source_node,destination_node=Node.load(source['node']['_id']) Node.load(destination['node']['_id'])<if_stmt>source['provider']<in>settings.ADDONS_BASED_ON_IDS<block_start><if_stmt>event_type<eq>'addon_file_renamed'<block_start><return><block_end># Node has not changed and provider has not changed
# Must be a move
<if_stmt>source['provider']<eq>destination['provider']<and>source_node<eq>destination_node<block_start><return><block_end><block_end># Node has not changed and provider has not changed
file_guids=BaseFileNode.resolve_class(source['provider'] BaseFileNode.ANY).get_file_guids(materialized_path=source['materialized']<if>source['provider']<ne>'osfstorage'<else>source['path'] provider=source['provider'] target=source_node)<for_stmt>guid file_guids<block_start>obj=Guid.load(guid)<if_stmt>source_node<ne>destination_node<and>Comment.objects.filter(root_target___id=guid).count()<ne>0<block_start>update_comment_node(guid source_node destination_node)<block_end><if_stmt>source['provider']<ne>destination['provider']<or>source['provider']<ne>'osfstorage'<block_start>old_file=BaseFileNode.load(obj.referent._id)<line_sep>obj.referent=create_new_file(obj source destination destination_node)<line_sep>obj.save()<if_stmt>old_file<and><not>TrashedFileNode.load(old_file._id)<block_start>old_file.delete()<block_end><block_end><block_end><block_end><def_stmt>create_new_file obj source destination destination_node# TODO: Remove when materialized paths are fixed in the payload returned from waterbutler
<block_start><if_stmt><not>source['materialized'].startswith('/')<block_start>source['materialized']='/'+source['materialized']<block_end><if_stmt><not>destination['materialized'].startswith('/')<block_start>destination['materialized']='/'+destination['materialized']<block_end><if_stmt><not>source['path'].endswith('/')<block_start>data=dict(destination)<line_sep>new_file=BaseFileNode.resolve_class(destination['provider'] BaseFileNode.FILE).get_or_create(destination_node destination['path'])<if_stmt>destination['provider']<ne>'osfstorage'<block_start>new_file.update(revision=<none> data=data)<block_end><block_end><else_stmt><block_start>new_file=find_and_create_file_from_metadata(destination.get('children' []) source destination destination_node obj)<if_stmt><not>new_file<block_start><if_stmt>source['provider']<eq>'box'<block_start>new_path=obj.referent.path<block_end><else_stmt><block_start>new_path=obj.referent.materialized_path.replace(source['materialized'] destination['materialized'])<block_end>new_file=BaseFileNode.resolve_class(destination['provider'] BaseFileNode.FILE).get_or_create(destination_node new_path)<line_sep>new_file.name=new_path.split('/')[-1]<line_sep>new_file.materialized_path=new_path<block_end><block_end>new_file.save()<line_sep><return>new_file<block_end><def_stmt>find_and_create_file_from_metadata children source destination destination_node obj<block_start>""" Given a Guid obj, recursively search for the metadata of its referent (a file obj)
def find_and_create_file_from_metadata(children, source, destination, destination_node, obj):
    """Given a Guid obj, recursively search for the metadata of its referent (a file obj)
    in the waterbutler response. If found, create a new addon BaseFileNode with that metadata
    and return the new file. Returns None when no matching file metadata exists.
    """
    for item in children:
        # TODO: Remove when materialized paths are fixed in the payload returned from waterbutler
        if not item['materialized'].startswith('/'):
            item['materialized'] = '/' + item['materialized']
        if item['kind'] == 'folder':
            # BUGFIX: the original returned the recursive result unconditionally,
            # so a miss inside the first folder child aborted the whole search and
            # every later sibling was silently skipped. Only return on a real find.
            found = find_and_create_file_from_metadata(
                item.get('children', []), source, destination, destination_node, obj)
            if found:
                return found
        elif item['kind'] == 'file' and item['materialized'].replace(destination['materialized'], source['materialized']) == obj.referent.materialized_path:
            data = dict(item)
            new_file = BaseFileNode.resolve_class(destination['provider'], BaseFileNode.FILE).get_or_create(destination_node, item['path'])
            if destination['provider'] != 'osfstorage':
                new_file.update(revision=None, data=data)
            return new_file


def update_comment_node(root_target_id, source_node, destination_node):
    """Re-home every comment rooted at *root_target_id* onto *destination_node*."""
    Comment.objects.filter(root_target___id=root_target_id).update(node=destination_node)
    source_node.save()
    destination_node.save()


def render_email_markdown(content):
    """Render comment markdown for e-mail bodies (del/ins, tables, fenced code)."""
    return markdown.markdown(content, ['del_ins', 'markdown.extensions.tables', 'markdown.extensions.fenced_code'])
@comment_added.connect
def send_comment_added_notification(comment, auth, new_mentions=None):
    """Notify subscribers (and, for replies, the parent comment's author) of a new comment."""
    new_mentions = new_mentions or []
    node = comment.node
    target = comment.target
    reply = is_reply(target)
    email_context = {
        'profile_image_url': auth.user.profile_image_url(),
        'content': render_email_markdown(comment.content),
        'page_type': comment.get_comment_page_type(),
        'page_title': comment.get_comment_page_title(),
        'provider': PROVIDERS[comment.root_target.referent.provider] if comment.page == Comment.FILES else '',
        'target_user': target.referent.user if reply else None,
        'parent_comment': target.referent.content if reply else '',
        'url': comment.get_comment_page_url(),
        'exclude': new_mentions,
    }
    now = timezone.now()
    subscribed = notify(event='comments', user=auth.user, node=node, timestamp=now, **email_context)
    # The replied-to author gets a dedicated notification unless already reached above.
    if reply and target.referent.user and target.referent.user._id not in subscribed:
        notify(event='global_comment_replies', user=auth.user, node=node, timestamp=now, **email_context)


@mention_added.connect
def send_mention_added_notification(comment, new_mentions, auth):
    """Notify newly mentioned users about the comment that mentions them."""
    node = comment.node
    target = comment.target
    on_file = comment.page == Comment.FILES
    reply = is_reply(target)
    email_context = {
        'profile_image_url': auth.user.profile_image_url(),
        'content': render_email_markdown(comment.content),
        'page_type': 'file' if on_file else node.project_or_component,
        'page_title': comment.root_target.referent.name if on_file else '',
        'provider': PROVIDERS[comment.root_target.referent.provider] if on_file else '',
        'target_user': target.referent.user if reply else None,
        'parent_comment': target.referent.content if reply else '',
        'new_mentions': new_mentions,
        'url': comment.get_comment_page_url(),
    }
    notify_mentions(event='global_mentions', user=auth.user, node=node, timestamp=timezone.now(), **email_context)


def is_reply(target):
    """Return True when *target*'s referent is itself a Comment, i.e. this is a reply."""
    return isinstance(target.referent, Comment)


def _update_comments_timestamp(auth, node, page=Comment.OVERVIEW, root_id=None):
    """Record when *auth.user* last viewed the comment pane rooted at *root_id*.

    Returns ``{root_id: iso_timestamp}`` for contributors/group members, else ``{}``.
    """
    if not node.is_contributor_or_group_member(auth.user):
        return {}
    enqueue_postcommit_task(ban_url, (node,), {}, celery=False, once_per_request=True)
    if root_id is not None:
        guid_obj = Guid.load(root_id)
        if guid_obj is not None:
            # FIXME: Doesn't work because we're not using Vanish anymore
            # enqueue_postcommit_task(ban_url, (self.get_node(),), {}, celery=False, once_per_request=True)
            pass
    # update node timestamp
    if page == Comment.OVERVIEW:
        root_id = node._id
    viewed_at = timezone.now()
    auth.user.comments_viewed_timestamp[root_id] = viewed_at
    auth.user.save()
    return {root_id: viewed_at.isoformat()}


@must_be_contributor_or_public
def update_comments_timestamp(auth, node, **kwargs):
    """View endpoint: mark comments as read using page/rootId from the request body."""
    payload = request.get_json()
    return _update_comments_timestamp(auth, node, payload.get('page'), payload.get('rootId'))
<import_from_stmt>RePoE.parser.util call_with_default_args write_json<import_from_stmt>RePoE.parser Parser_Module<class_stmt>cluster_jewels(Parser_Module)<block_start>@staticmethod<def_stmt>write file_system data_path relational_reader translation_file_cache ot_file_cache<block_start>skills={}<for_stmt>row relational_reader["PassiveTreeExpansionSkills.dat"]<block_start>size=row["PassiveTreeExpansionJewelSizesKey"]["Name"]<if_stmt>size<not><in>skills<block_start>skills[size]=[]<block_end>skills[size].append({"id":row["PassiveSkillsKey"]["Id"] "name":row["PassiveSkillsKey"]["Name"] "stats":{stat["Id"]:value<for>stat,value row["PassiveSkillsKey"]["StatsZip"]} "tag":row["TagsKey"]["Id"] })<block_end>data={}<for_stmt>row relational_reader["PassiveTreeExpansionJewels.dat"]<block_start>size=row["PassiveTreeExpansionJewelSizesKey"]["Name"]<line_sep>data[row["BaseItemTypesKey"]["Id"]]={"name":row["BaseItemTypesKey"]["Name"] "size":size "min_skills":row["MinNodes"] "max_skills":row["MaxNodes"] "small_indices":row["SmallIndices"] "notable_indices":row["NotableIndices"] "socket_indices":row["SocketIndices"] "total_indices":row["TotalIndices"] "passive_skills":skills[size] }<block_end>write_json(data data_path "cluster_jewels")<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>call_with_default_args(cluster_jewels.write)<block_end> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.