index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
10,600 | 5fa2e7d4ff00b4e614f89284bd1fc10e1486b677 | from django.apps import AppConfig
class PersoAppConfig(AppConfig):
    """Django application configuration for the ``perso_app`` app."""
    name = 'perso_app'  # dotted module path registered in INSTALLED_APPS
|
10,601 | 52c9a916cbf699ca1058e16f1cdb8b46ddee7a95 | from django.apps import AppConfig
class BbcConfig(AppConfig):
    """Django application configuration for the ``social_simulator.bbc`` app."""
    name = 'social_simulator.bbc'  # dotted module path registered in INSTALLED_APPS
|
10,602 | 337c33dd8415f0ff3294d6bfcfdbbc090638e118 | import task
# Sprint tasks for ticket MPI-1717 as (description, story points) pairs.
# Points are strings because task.Task expects them that way.
_TASK_SPECS = [
    ('Create /filters endpoint and update postman collection', '6'),
    ('Create business logic to handle user request', '8'),
    ('Performance testing', '8'),
    ('Performance testing', '8'),
    ('UI testing', '4'),
    ('Technical debt', '2'),
    ('Backend improvements', '8'),
    ('Coffee', '1'),
]

# Build one task object and one rectangle image per spec, merge the eight
# rectangles into a single board image, then display it.
tasks = [task.Task('MPI-1717', description, points)
         for description, points in _TASK_SPECS]
images = [task.create_task_rectangle(t) for t in tasks]
image_with_8_tasks = task.mergeImages(images)
# image_with_8_tasks.save('test.png')
image_with_8_tasks.show()
print('success')
|
10,603 | 9771414251ba083612d9280a7c9879d03e3b0b54 | class Solution:
def backspaceCompare(self, S: str, T: str) -> bool:
stack=[]
length=len(S)
res=""
for i in range(length):
if S[i] == "#" and len(stack)!=0:
stack.pop()
elif S[i]=="#" and len(stack)==0:
continue
else:
stack.append(S[i])
while len(stack)!=0:
res+=stack.pop()
length=len(T)
res1=""
for i in range(length):
if T[i] == "#" and len(stack)!=0:
stack.pop()
elif T[i]=="#" and len(stack)==0:
continue
else:
stack.append(T[i])
while len(stack)!=0:
res1+=stack.pop()
print(res1,res)
return res1==res
|
10,604 | ff3885413e65865b3c509532ed0e665549c1792d | import baostock as bs
import tushare as ts
import pandas as pd
import datetime
import os
class stock_data:
    def __init__(self, ticker = "000001"):
        """Store an exchange-prefixed ticker and today's date string.

        Bare A-share codes starting with '6' trade on Shanghai ('sh.');
        everything else is treated as Shenzhen ('sz.').
        """
        if ticker[0] == '6':
            ticker = 'sh.' + ticker
        else:
            ticker = 'sz.' + ticker
        self.ticker = ticker
        # Date formatted as baostock expects, e.g. '2024-01-31'.
        self.today = datetime.datetime.today().strftime("%Y-%m-%d")
    def get_ticker(self):
        """Return the exchange-prefixed ticker, e.g. 'sz.000001'."""
        return self.ticker
def get_stock_price(self, folder = "../cn_intraday"):
#get intraday data online
lg = bs.login(user_id="anonymous", password="123456")
print('login respond error_code:'+lg.error_code)
print('login respond error_msg:'+lg.error_msg)
ticker = self.ticker
dateToday = self.today
rs = bs.query_history_k_data_plus(ticker,
"date,code,open,close,high,low,volume",
start_date='2017-01-01', end_date= dateToday,
frequency="d", adjustflag="3")
print('query_history_k_data_plus respond error_code:'+rs.error_code)
print('query_history_k_data_plus respond error_msg:'+rs.error_msg)
# convert to dataframe
data_list = []
while (rs.error_code == '0') & rs.next():
# append results
data_list.append(rs.get_row_data())
intraday = pd.DataFrame(data_list, columns=rs.fields)
#if history, append at end
file = folder + "/" + ticker + ".csv"
if os.path.exists(file):
history = pd.read_csv(file, index_col = 0)
intraday.append(history)
#save
intraday.to_csv(file)
print("intraday for [" + ticker + "] saved.")
bs.logout()
    def get_ticker_list(self, folder = "/Users/zhouzijian/myproject/Backtest-Platform"):
        """Fetch the full A-share ticker index via tushare as a DataFrame."""
        #save to a local file
        # NOTE(review): 'folder' and 'dateToday' are currently unused here —
        # looks like the save-to-file step was never implemented; confirm.
        dateToday = self.today
        tickersRawData = ts.get_stock_basics()
        tickers = tickersRawData.index.to_list()
return pd.DataFrame(tickers) |
10,605 | a13dbe2c9770da51096c44ff5fc639ce89c249b6 | """ handlers """
from .editor_handler import EditorHandler
from .site_handler import SiteHandler
from .dir_handler import DirHandler
from .mark_handler import MarkHandler
from .s3_browser import S3Browser
from .static_files import StaticFiles
from .access_control import LoginHandler, LogoutHandler
|
10,606 | a5283dae232a2c6a5ecf631295f2b3272865de2b | import sys
import requests
import pprint
import ast
from nltk.sentiment.vader import SentimentIntensityAnalyzer
def classify_all_comments_sentiment(input_file, output_file):
    '''
    Calculates VADER sentiment for each JSON comment in input_file.

    Each line of input_file must be a JSON object with a 'body' key; the
    object is written to output_file (one JSON object per line) with an
    added 'sentiment' key holding the polarity scores.
    '''
    # BUG FIX: 'json' was used below but never imported anywhere in this
    # module; import it locally so the function is self-contained.
    import json

    sia = SentimentIntensityAnalyzer()
    total_obj = 0
    written_obj = 0
    # 'with' closes both handles (the original never closed the input file).
    with open(input_file, 'r') as src, open(output_file, 'w') as dst:
        for line in src:
            total_obj += 1
            data = json.loads(line)
            data['sentiment'] = sia.polarity_scores(data['body'])
            json.dump(data, dst)
            dst.write('\n')
            written_obj += 1
    print('Total objects: {}'.format(total_obj))
    print('Written objects: {}'.format(written_obj))
class EmotionAPI:
    '''
    Thin client for ParallelDots emotional text analysis
    (https://www.paralleldots.com).
    '''
    url = 'https://apis.paralleldots.com/v3/'
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    # NOTE(review): api_key is empty — it must be set before requests succeed.
    api_key = ''

    def __init__(self, apiExtension):
        # Append the endpoint name (e.g. 'emotion') to the base URL.
        self.url = '{}{}'.format(self.url, apiExtension)

    # BUG FIX: these three methods were defined without 'self', so calling
    # them on an instance raised TypeError. @staticmethod makes the intended
    # calling convention (EmotionAPI.method(...)) explicit and keeps
    # class-level calls working unchanged.
    @staticmethod
    def make_emotion_request(text):
        '''
        POST *text* to the /emotion endpoint and return the raw response.

        Example response body:
        {
            'emotion': {
                'probabilities': {'angry': 0.376, 'indifferent': 0.189,
                                  'sad': 0.381, 'excited': 0.014, 'happy': 0.04},
                'emotion': 'sad'
            },
            'usage': '...'
        }
        '''
        api_object = EmotionAPI('emotion')
        payload = 'text={}&api_key={}&lang_code=en'.format(text, api_object.api_key)
        response = requests.request('POST', api_object.url, data=payload, headers=api_object.headers)
        return response

    @staticmethod
    def get_emotion_from_response(response):
        '''Return the dominant emotion label from an API response.'''
        dictionary = ast.literal_eval(response.text)
        return dictionary['emotion']['emotion']

    @staticmethod
    def get_probabilities_from_response(response):
        '''Return the per-emotion probability dict from an API response.'''
        dictionary = ast.literal_eval(response.text)
        return dictionary['emotion']['probabilities']
|
10,607 | 8b47d49fa3357c8c29479100287a6b9ce91cccad | class APIException(Exception):
""" Base exception for all API errors """
code = 0
message = 'API unknown error'
def __init__(self, text=None):
if text:
Exception.__init__(self, text)
else:
Exception.__init__(self, self.message)
class APIWrongMethod(APIException):
    """Raised when an endpoint is invoked with an unsupported request method."""
    code = 1
    message = 'Wrong method "{}" for method'

    def __init__(self, method):
        super().__init__(self.message.format(method))
class APITokenError(APIException):
    """ Access token invalid """
    code = 2  # stable numeric error code exposed to API clients
    message = 'Bad access token'
class APIMissingParameter(APIException):
    """Raised when a required request parameter is absent."""
    code = 3
    message = 'Attribute "{}" is required'

    def __init__(self, param):
        super().__init__(self.message.format(param))
|
10,608 | 2da49c0ef434087c82c8b639fd21d6e10698073a |
# Reads triples "num_cows num_cars num_show" until EOF and prints, for each,
# a probability to five decimal places (appears to be a Monty-Hall-style
# cows/cars judge problem — TODO confirm against the problem statement).
while True:
    try:
        num_cows, num_cars, num_show = list(map(int, input().split()))
        total = num_cars + num_cows
        if num_show >= num_cows:
            # With at least as many reveals as cows, a car is guaranteed.
            p = 1
        else:
            # NOTE(review): derivation not shown here — verify the
            # (total - 1 - num_show) denominators against the intended formula.
            p = (num_cars / total) * ( (num_cars - 1) / (total - 1 - num_show ) )
            p += (num_cows / total) * (num_cars / (total - 1 - num_show))
        print("{0:.5f}".format(p))
    except EOFError:
        # No more test cases on stdin.
        break
|
10,609 | f08d831233553f4e33a9dbf7bd4acadd68fe7dad |
#pathName = '/Volumes/Transcend/cattle/828/WA03_2016_1_1_16_23_37_348162' #For test
#826 H 105 120
#pathName = '/Volumes/WA03-1/WA03 2016 1/826/WA03_2016_1_19_8_44_35_907763'
#pathName = '/Volumes/WA03-1/WA03 2016 1/826/WA03_2016_1_19_8_48_3_445332'
#pathName = '/Volumes/WA03-1/WA03 2016 1/826/WA03_2016_1_19_8_49_5_333387'
#pathName = '/Volumes/WA03-1/WA03 2016 1/826/WA03_2016_1_19_10_53_15_211213'
#pathName = '/Volumes/WA03-1/WA03 2016 1/826/WA03_2016_1_19_15_1_32_339822'
#pathName = '/Volumes/WA03-1/WA03 2016 1/826/WA03_2016_1_19_16_55_28_154441'
#pathName = '/Volumes/WA03-1/WA03 2016 1/826/WA03_2016_1_19_16_56_29_696131'
#pathName = '/Volumes/WA03-1/WA03 2016 1/826/WA03_2016_1_19_16_56_46_807649'
#827 H 110 140
#827 8 44 35 -> 16 56 46
#pathName = '/Volumes/WA03-1/WA03 2016 1/827/WA03_2016_1_19_11_6_26_590721'
#pathName = '/Volumes/WA03-1/WA03 2016 1/827/WA03_2016_1_19_11_6_49_298738'
#pathName = '/Volumes/WA03-1/WA03 2016 1/827/WA03_2016_1_19_11_7_29_538677'
#pathName = '/Volumes/WA03-1/WA03 2016 1/827/WA03_2016_1_19_11_7_58_581651'
#pathName = '/Volumes/WA03-1/WA03 2016 1/827/WA03_2016_1_19_15_22_40_617169'
#pathName = '/Volumes/WA03-1/WA03 2016 1/827/WA03_2016_1_19_15_23_5_905612'
#pathName = '/Volumes/WA03-1/WA03 2016 1/827/WA03_2016_1_19_15_43_54_987778'
#pathName = '/Volumes/WA03-1/WA03 2016 1/827/WA03_2016_1_19_15_44_32_519449'
#pathName = '/Volumes/WA03-1/WA03 2016 1/827/WA03_2016_1_19_15_45_44_791940'
#pathName = '/Volumes/WA03-1/WA03 2016 1/827/WA03_2016_1_19_15_45_59_421875'
#pathName = '/Volumes/WA03-1/WA03 2016 1/827/WA03_2016_1_19_15_46_20_893043'
#828 19 10 53 -> 19 16 56
#H 102 125
#pathName = '/Volumes/WA03-1/WA03 2016 1/828/WA03_2016_1_19_15_26_59_345248'
#pathName = '/Volumes/WA03-1/WA03 2016 1/828/WA03_2016_1_19_16_44_24_275438'
#829 10 18 21 -> 16 43 34
#pathNameGroup = getPath('/Volumes/WA03-1/WA03 2016 1/829', 19, [10, 18, 21], [16, 43, 34])
#print(pathName) |
10,610 | b72ae3b7912650bebc8d01dc976a0d9eee25b514 | from turtle import Turtle
class Score(Turtle):
    def __init__(self):
        """Create a hidden white turtle used only for writing on-screen text."""
        super().__init__()
        self.penup()  # never draw lines while repositioning
        self.points = 0  # current score displayed by score_board()
        self.color("white")
        self.hideturtle()  # only the written text should be visible
    def score_board(self):
        """Redraw the current score in the top-left corner of the screen."""
        self.clear()  # erase the previously written score first
        self.setpos(-350,350)
        self.write(f"Score: {self.points}", align="left", font=("Open Sans", 25, "normal"))
    def game_over(self):
        """Erase the scoreboard and display the losing banner mid-screen."""
        self.clear()
        self.setpos(0,-100)
        self.color("white")
        self.write(f"GAME OVER", align="center", font=("Open Sans", 60, "normal"))
    def win(self):
        """Erase the scoreboard and prepare to display the victory banner."""
        self.clear()
        self.setpos(0,-100)
        self.color("white")
self.write(f"YOU STOPPED THE PANDEMIC!", align="center", font=("Open Sans", 60, "normal")) |
10,611 | 3f0d04205a659fbc8f06b521f3c4b892331b18e6 | # -*- coding: utf-8 -*-
"""
Created on Wed Nov 8 12:15:25 2017
@author: IACJ
"""
import matplotlib.pyplot as plt
import numpy as np
from dtw import dtw
# Demo: align two short 1-D sequences with Dynamic Time Warping (dtw package).
np.set_printoptions(threshold=999)  # print arrays in full, without truncation
x = np.array([0, 0, 1, 1, 2, 4, 2, 1, 2, 0]).reshape(-1, 1)
y = np.array([1, 1, 1, 2, 2, 2, 2, 3, 2, 0]).reshape(-1, 1)
# Show the two raw sequences on one plot.
plt.plot(x)
plt.plot(y)
plt.show()
# DTW under the L1 (Manhattan) distance between points.
dist, cost, acc, path = dtw(x, y, dist=lambda x, y: np.linalg.norm(x - y, ord=1))
print ('Minimum distance found:', dist)
print('cost:\n',cost)
print('acc:\n',acc)
print('path:\n',path)
# Visualize the accumulated-cost matrix with the optimal warping path in white.
plt.imshow(acc.T, origin='lower', cmap=plt.cm.gray, interpolation='nearest')
plt.plot(path[0], path[1], 'w')
plt.xlim((-0.5, acc.shape[0]-0.5))
plt.ylim((-0.5, acc.shape[1]-0.5))
|
10,612 | 13a4e4359707f7773dbca4ff38ec178dadcee912 | # Generated by Django 2.0.5 on 2018-05-14 05:58
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds machine.tier, a nullable
    # recipe.required_research FK (SET_NULL so recipes survive research
    # deletion), and recipeexecutor.machine_tier_required.
    dependencies = [
        ('crafting', '0002_auto_20180513_2301'),
    ]
    operations = [
        migrations.AddField(
            model_name='machine',
            name='tier',
            field=models.IntegerField(default=1),
        ),
        migrations.AddField(
            model_name='recipe',
            name='required_research',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='unlocked_recipes', to='crafting.Research'),
        ),
        migrations.AddField(
            model_name='recipeexecutor',
            name='machine_tier_required',
            field=models.IntegerField(default=1),
        ),
    ]
|
10,613 | a435bf611eb04f1c22abce52e5831dd46350a9eb | import json
import jsonpath
import ast
import re
from utils.requests_utils import RequestsUtils
#
# str1 = '{"grant_type":"client_credential","appid":"wx55614004f367f8ca","secret":"65515b46dd758dfdb09420bb7db2c67f"}'
# step_list = [{'测试用例编号': 'api_case_02', '测试用例名称': '创建标签接口测试', '用例执行': '是', '用例步骤': 'step_01', '接口名称': '获取access_token接口',
# '请求方式': 'get', '请求头部信息': '', '请求地址': '/cgi-bin/token',
# '请求参数(get)': '{"grant_type":"client_credential","appid":"wx55614004f367f8ca","secret":"65515b46dd758dfdb09420bb7db2c67f"}',
# '请求参数(post)': '', '取值方式': 'jsonpath取值', '取值代码': '$.access_token', '取值变量': 'token'},
# {'测试用例编号': 'api_case_02', '测试用例名称': '创建标签接口测试', '用例执行': '是', '用例步骤': 'step_02', '接口名称': '创建标签接口',
# '请求方式': 'post', '请求头部信息': '', '请求地址': '/cgi-bin/tags/create',
# '请求参数(get)': '{"access_token":"40_CeejgCpZfvXfhAL1u01zvH8wDNeQSPliNhdpwbrmK91jyU5rZzWJ4IPOMFB2y6yBqQNmkkpFHuMenEf5WN-dnQUdsUlR3LcnrJA9q7K7pfzEhsZVAf3WRPl5thLSGKgiY-X-vBASzm76MmwtDWDcABAYRY"}',
# '请求参数(post)': '{ "tag" : { "name" : "p3p4hehehe123" } } ', '取值方式': '无', '取值代码': '', '取值变量': ''}]
#
# str2 = json.loads(str1)
# dict1 = ast.literal_eval(str1)
# print(type(dict1))
# print(str2)
# rquestsUtils = RequestsUtils()
# rquestsUtils.request_by_step(step_list)
# str1 = '{"access_token":${token}}'
# variables_list = re.findall('\${\w+}', str1)
# print(variables_list)
#
#
# str1 = '123'
# str1.replace()
list3 = [{'测试用例编号': 'api_case_02', '测试用例名称': '创建标签接口测试', '用例执行': '否', '用例步骤': 'step_01', '接口名称': '获取access_token接口', '请求方式': 'get', '请求头部信息': '', '请求地址': '/cgi-bin/token', '请求参数(get)': '{"grant_type":"client_credential","appid":"wx55614004f367f8ca","secret":"65515b46dd758dfdb09420bb7db2c67f"}', '请求参数(post)': '', '取值方式': 'jsonpath取值', '取值代码': '$.access_token', '取值变量': 'token', '断言类型': 'json_key_value', '期望结果': '{"expires_in":7200}'}, {'测试用例编号': 'api_case_02', '测试用例名称': '创建标签接口测试', '用例执行': '否', '用例步骤': 'step_02', '接口名称': '创建标签接口', '请求方式': 'post', '请求头部信息': '', '请求地址': '/cgi-bin/tags/create', '请求参数(get)': '{"access_token":${token}}', '请求参数(post)': '{ "tag" : { "name" : "广东" } } ', '取值方式': '无', '取值代码': '', '取值变量': '', '断言类型': 'json_key', '期望结果': 'tag'}]
# $.tag.id
list4 = [{'测试用例编号': 'api_case_03', '测试用例名称': '删除标签接口测试', '用例执行': '否', '用例步骤': 'step_01', '接口名称': '获取access_token接口', '请求方式': 'get', '请求头部信息': '', '请求地址': '/cgi-bin/token', '请求参数(get)': '{"grant_type":"client_credential","appid":"wx55614004f367f8ca","secret":"65515b46dd758dfdb09420bb7db2c67f"}', '请求参数(post)': '', '取值方式': 'jsonpath取值', '取值代码': '$.access_token', '取值变量': 'token', '断言类型': 'json_key', '期望结果': 'access_token'}, {'测试用例编号': 'api_case_03', '测试用例名称': '删除标签接口测试', '用例执行': '否', '用例步骤': 'step_02', '接口名称': '创建标签接口', '请求方式': 'post', '请求头部信息': '', '请求地址': '/cgi-bin/tags/create', '请求参数(get)': '{"access_token":${token}}', '请求参数(post)': '{ "tag" : { "name" : "p3p4testddd" } } ', '取值方式': 'jsonpath取值', '取值代码': '$.tag.id', '取值变量': 'tag_id', '断言类型': 'json_key', '期望结果': ''}, {'测试用例编号': 'api_case_03', '测试用例名称': '删除标签接口测试', '用例执行': '否', '用例步骤': 'step_03', '接口名称': '删除标签接口', '请求方式': 'post', '请求头部信息': '', '请求地址': '/cgi-bin/tags/delete', '请求参数(get)': '{"access_token":${token}}', '请求参数(post)': '{ "tag":{ "id" : ${tag_id} } }', '取值方式': '无', '取值代码': '', '取值变量': '', '断言类型': 'json_key_value', '期望结果': '{"errcode":0}'}]
list5 = [{'测试用例编号': 'api_case_03', '测试用例名称': '删除标签接口测试', '用例执行': '否', '用例步骤': 'step_01', '接口名称': '获取access_token接口', '请求方式': 'get', '请求头部信息': '', '请求地址': '/cgi-bin/token', '请求参数(get)': '{"grant_type":"client_credential","appid":"wx55614004f367f8ca","secret":"65515b46dd758dfdb09420bb7db2c67f"}', '请求参数(post)': '', '取值方式': 'jsonpath取值', '取值代码': '$.access_token', '取值变量': 'token'}, {'测试用例编号': 'api_case_03', '测试用例名称': '删除标签接口测试', '用例执行': '是', '用例步骤': 'step_02', '接口名称': '创建标签接口', '请求方式': 'post', '请求头部信息': '', '请求地址': '/cgi-bin/tags/create', '请求参数(get)': '{"access_token":${token}}', '请求参数(post)': '{ "tag" : { "name" : "p3p46789" } } ', '取值方式': 'jsonpath取值', '取值代码': '$.tag.id', '取值变量': 'tag_id'}, {'测试用例编号': 'api_case_03', '测试用例名称': '删除标签接口测试', '用例执行': '否', '用例步骤': 'step_03', '接口名称': '删除标签接口', '请求方式': 'post', '请求头部信息': '', '请求地址': '/cgi-bin/tags/delete', '请求参数(get)': '{"access_token":${token}}', '请求参数(post)': '{ "tag":{ "id" : ${tag_id} } }', '取值方式': '无', '取值代码': '', '取值变量': ''}]
list6 = [{'测试用例编号': 'api_case_03', '测试用例名称': '删除标签接口测试', '用例执行': '是', '用例步骤': 'step_01', '接口名称': '获取access_token接口', '请求方式': 'get', '请求头部信息': '', '请求地址': '/cgi-bin/token', '请求参数(get)': '{"grant_type":"client_credential","appid":"wx55614004f367f8ca","secret":"65515b46dd758dfdb09420bb7db2c67f"}', '请求参数(post)': '', '取值方式': 'jsonpath取值', '取值代码': '$.access_token', '取值变量': 'token'}, {'测试用例编号': 'api_case_03', '测试用例名称': '删除标签接口测试', '用例执行': '是', '用例步骤': 'step_02', '接口名称': '创建标签接口', '请求方式': 'post', '请求头部信息': '', '请求地址': '/cgi-bin/tags/create', '请求参数(get)': '{"access_token":${token}}', '请求参数(post)': '{ "tag" : { "name" : "p3p4pppp2" } } ', '取值方式': 'jsonpath取值', '取值代码': '$.tag.id', '取值变量': 'tag_id'}, {'测试用例编号': 'api_case_03', '测试用例名称': '删除标签接口测试', '用例执行': '是', '用例步骤': 'step_03', '接口名称': '删除标签接口', '请求方式': 'post', '请求头部信息': '', '请求地址': '/cgi-bin/tags/delete', '请求参数(get)': '{"access_token":${token}}', '请求参数(post)': '{"tag":{"id":${tag_id}}}', '取值方式': '无', '取值代码': '', '取值变量': ''}]
rquestsUtils = RequestsUtils()
rquestsUtils.request_by_step(list5) |
10,614 | c0723c96c3b404df7e5038a98626d5e5e1d07644 | import time
# Time how long a handful of print statements take.
start = time.time()
for word in ("Hellooo", "India", "Corona"):
    print(word)
for number in range(11):
    print(number)
end = time.time()
print(end - start)
|
10,615 | 47332c2ef2ed37c79fb319768763c58fc210a8d9 | import json
from pathlib import Path
import pandas as pd
from PIL import Image
from torch.utils.data import Dataset
from torchvision import transforms
from .augmentation import *
NB_CLASSES = 128
def default_loader(path):
    """Open the image at *path* and force-convert it to 3-channel RGB."""
    return Image.open(path).convert('RGB')
class MyDataset(Dataset):
    """Image-classification dataset backed by a plain-text index file.

    Each line of *txt* is "<image-path> <integer-label>". Images are loaded
    lazily in __getitem__ via *loader* and optionally transformed.
    """

    def __init__(self, txt, transform=None, target_transform=None, loader=default_loader):
        # BUG FIX: the original opened the index file and never closed it;
        # 'with' guarantees the handle is released even on a parse error.
        imgs = []
        with open(txt, 'r') as fh:
            for line in fh:
                words = line.strip('\n').rstrip().split()
                imgs.append((words[0], int(words[1])))
        self.imgs = imgs
        self.transform = transform
        # NOTE(review): target_transform is stored but never applied to the
        # label in __getitem__ — confirm whether that is intentional.
        self.target_transform = target_transform
        self.loader = loader

    def __getitem__(self, index):
        """Return the (image, label) pair for the sample at *index*."""
        fn, label = self.imgs[index]
        img = self.loader(fn)
        if self.transform is not None:
            img = self.transform(img)
        return img, label

    def __len__(self):
        """Return the number of indexed samples."""
        return len(self.imgs)
# Channel-wise normalization with the standard torchvision ImageNet statistics.
normalize_torch = transforms.Normalize(
    mean=[0.485, 0.456, 0.406],
    std=[0.229, 0.224, 0.225]
)
# Symmetric normalization mapping pixel values from [0, 1] to [-1, 1].
normalize_05 = transforms.Normalize(
    mean=[0.5, 0.5, 0.5],
    std=[0.5, 0.5, 0.5]
)
# IMAGE_SIZE = 224
# IMAGE_SIZE = 299
def preprocess(normalize, image_size):
    """Basic eval pipeline: resize to a square, convert to tensor, normalize."""
    return transforms.Compose([
        transforms.Resize((image_size, image_size)),
        transforms.ToTensor(),
        normalize
    ])
def preprocess_hflip(normalize, image_size):
    """Test-time-augmentation pipeline: resize + horizontal flip."""
    return transforms.Compose([
        transforms.Resize((image_size, image_size)),
        HorizontalFlip(),
        transforms.ToTensor(),
        normalize
    ])
def preprocess_vflip(normalize, image_size):
    """Test-time-augmentation pipeline: resize + vertical flip."""
    return transforms.Compose([
        transforms.Resize((image_size, image_size)),
        VerticalFlip(),
        transforms.ToTensor(),
        normalize
    ])
def preprocess_rotation(normalize, image_size,angle):
    """Test-time-augmentation pipeline: resize + fixed rotation by *angle*."""
    return transforms.Compose([
        transforms.Resize((image_size, image_size)),
        Rotate(angle),
        transforms.ToTensor(),
        normalize
    ])
def preprocess_hflip_rotation(normalize, image_size,angle):
    """TTA pipeline: resize + horizontal flip + fixed rotation by *angle*."""
    return transforms.Compose([
        transforms.Resize((image_size, image_size)),
        HorizontalFlip(),
        Rotate(angle),
        transforms.ToTensor(),
        normalize
    ])
def preprocess_vflip_rotation(normalize, image_size,angle):
    """TTA pipeline: resize + vertical flip + fixed rotation by *angle*."""
    return transforms.Compose([
        transforms.Resize((image_size, image_size)),
        VerticalFlip(),
        Rotate(angle),
        transforms.ToTensor(),
        normalize
    ])
def preprocess_with_augmentation(normalize, image_size):
    """Training pipeline: pad-resize, random rotate/crop/flip, color jitter.

    Resizes 20px larger than the target so the random rotation and crop have
    slack to work with before the final (image_size, image_size) crop.
    """
    return transforms.Compose([
        transforms.Resize((image_size + 20, image_size + 20)),
        transforms.RandomRotation(15, expand=True),
        transforms.RandomCrop((image_size, image_size)),
        transforms.RandomHorizontalFlip(),
        transforms.ColorJitter(brightness=0.4,
                               contrast=0.4,
                               saturation=0.4,
                               hue=0.2),
        transforms.ToTensor(),
        normalize
    ])
|
10,616 | 70013751ff1a8a6ed5dcb81a9546ec2abd9a011c | # Adds a csv file to the data directory of a specified dataset
# with relevance tags using that data directory's movie IDs
import pandas as pd
import numpy as np
from collections import defaultdict
import sys
# CLI arguments: argv[1] = factorisation rank K (default 10),
#                argv[2] = directory holding the factor matrices.
if len(sys.argv) > 1:
    dim = int(sys.argv[1])
else:
    dim = 10
if len(sys.argv) > 2:
    factor_dir = sys.argv[2]
else:
    factor_dir = "./output/factorisations/FF_dense/"
base_dir = "./data/FF/"
# Get a list of product descriptions in the training
# set, ordered by ProductId
ratings = pd.read_csv("%sdense_10pc_partition_purchases.csv" % base_dir)
# NOTE(review): error_bad_lines/warn_bad_lines are deprecated since pandas
# 1.3 (removed in 2.0) in favour of on_bad_lines — confirm the pinned version.
base_products = pd.read_csv("%sproductinfo.csv" % base_dir,
                            sep=",",
                            error_bad_lines=False,
                            warn_bad_lines=True)
base_products.rename(columns={'ProductID': 'ProductId'}, inplace=True)
# Keep only products that actually received at least one rating.
rated_products = ratings['ProductId'].unique().tolist()
base_products = base_products[base_products['ProductId'].isin(rated_products)]
base_products = base_products[["ProductId", "desc"]]
base_products = base_products.sort_values("ProductId")
# Get a list of the most common 1000 tags, ordered lexically
tags = pd.read_csv("%staginfo.csv" % base_dir)
tags.rename(columns={'ProductID': 'ProductId'}, inplace=True)
# Count how many products carry each tag, take the 1000 most frequent,
# then sort those tag names lexically.
tag_counts = tags[["Tag", "ProductId"]].groupby("Tag").count()
tag_counts = tag_counts.reset_index().sort_values(by="ProductId", ascending=False)
tags = tag_counts["Tag"].head(1000).sort_values()
# Load the relevance matrix and the product matrix
rel_matrix = np.loadtxt("%sdimrelK%d.csv" % (factor_dir, dim), delimiter=" ")
product_matrix = np.loadtxt("%sdimproductsK%d.csv" % (factor_dir, dim), delimiter=" ")
(K, N) = rel_matrix.shape
basis_tags = []
product_examples = []
print(len(base_products))
# BUG FIX: this line was the Python 2 statement `print product_matrix.shape`,
# a SyntaxError under Python 3 (which the rest of the script targets).
print(product_matrix.shape)
# Get examples from movies and examples from tags for each basis
for i in range(0, K):
    # Top 10 tags by relevance for basis i, most relevant first.
    row_array = np.asarray(rel_matrix[i, :])
    toptwenty_tags = row_array.argsort()[-10:][::-1]
    basis_tags.append(toptwenty_tags)
    # Top 20 products by weight on basis i, highest first.
    col_array = np.asarray(product_matrix[:, i])
    topten_products = col_array.argsort()[-20:][::-1]
    product_examples.append(topten_products)
movie_titles = []
for m in product_examples:
    try:
        # Map matrix row indices back to ProductIds.
        movie_titles.append([rated_products[n] for n in m])
    except IndexError as e:
        print(m)
# Get a list of lists of tags for each basis
tag_counts = defaultdict(int)
tag_words = [[] for i in range(0, K)]
count = 1
for i in basis_tags:
    count += 1
    for j in i:
        word = tags.iloc[j]
        # 'count - 2' maps the (pre-incremented, 1-based) counter back to a
        # 0-based basis index.
        tag_words[count - 2].append(word)
        tag_counts[word] += 1
best_tag_words = []
for t in tag_words:
    # Score each of this basis's tags by how many bases it appears in, then
    # keep the rarest ones (most distinctive for this basis).
    scoredict = dict((k, tag_counts[k]) for k in t if k in tag_counts)
    # find 5 lowest values in dictionary
    # NOTE(review): the comment says 5 but the loop extracts the 10 rarest.
    best_words = []
    for j in range(0, 10):
        w = min(scoredict, key=scoredict.get)
        best_words.append(w)
        del scoredict[w]
    best_tag_words.append(best_words)
for i in range(0, K):
    print("\nBasis %d" % (i + 1))
    print(best_tag_words[i])
    print(movie_titles[i])
|
10,617 | e5ecf5c8bcbe654d1ec03b811d8718f0ca8fb635 | #!/usr/bin/env python
import sys
import os
from os import path
def chase_link(file_path, indent=2):
    """Print *file_path*, then recursively follow and print symlink targets.

    Each level of indirection is printed with two extra spaces of indent.
    """
    print("{0}{1}".format(" " * indent, file_path))
    if not path.islink(file_path):
        return
    target = os.readlink(file_path)
    if not path.isabs(target):
        # Relative targets resolve against the directory containing the link.
        target = path.normpath(path.join(path.dirname(file_path), target))
    chase_link(target, indent + 2)
def main(*args):
    """Chase every path in the first argument (called as main(sys.argv[1:]))."""
    for arg in args[0]:
        chase_link(arg)
if __name__ == "__main__":
main(sys.argv[1:]) |
10,618 | 55d9da172c02490df4d5f374b62da39ab9fd620f | import utils
from pylab import *
from random import shuffle
import matplotlib.pyplot as plt
# Simulate a differentially-private counting query: 3000 zero counts and
# 1000 one counts, shuffled, each perturbed with Laplace noise.
zero_list = [0]*3000
one_list = [1]*1000
counts = zero_list + one_list
shuffle(counts)
noises = []
epsilon = 0.3  # privacy budget; Laplace noise scale is 1/epsilon
for i in range(4000):
    noises.append(counts[i]+utils.laplace(1/epsilon))
# NOTE(review): hist(..., normed=True) was removed in matplotlib 3.x — confirm
# the pinned matplotlib version or switch to density=True.
myHist = plt.hist(noises, 10000, normed=True)
plt.show()
|
10,619 | c5bb0320c55bc5eb1a6c178feda3a40985a457b6 | # coding: utf-8
"""
Bitbucket API
Code against the Bitbucket API to automate simple tasks, embed Bitbucket data into your own site, build mobile or desktop apps, or even add custom UI add-ons into Bitbucket itself using the Connect framework. # noqa: E501
The version of the OpenAPI document: 2.0
Contact: support@bitbucket.org
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class Branchrestriction(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'branch_match_kind': 'str',
'branch_type': 'str',
'groups': 'list[Group]',
'id': 'int',
'kind': 'str',
'links': 'BranchingModelSettingsAllOfLinks',
'pattern': 'str',
'users': 'list[Account]',
'value': 'int'
}
attribute_map = {
'branch_match_kind': 'branch_match_kind',
'branch_type': 'branch_type',
'groups': 'groups',
'id': 'id',
'kind': 'kind',
'links': 'links',
'pattern': 'pattern',
'users': 'users',
'value': 'value'
}
def __init__(self, branch_match_kind=None, branch_type=None, groups=None, id=None, kind=None, links=None, pattern=None, users=None, value=None): # noqa: E501
"""Branchrestriction - a model defined in OpenAPI""" # noqa: E501
self._branch_match_kind = None
self._branch_type = None
self._groups = None
self._id = None
self._kind = None
self._links = None
self._pattern = None
self._users = None
self._value = None
self.discriminator = None
self.branch_match_kind = branch_match_kind
if branch_type is not None:
self.branch_type = branch_type
if groups is not None:
self.groups = groups
if id is not None:
self.id = id
self.kind = kind
if links is not None:
self.links = links
self.pattern = pattern
if users is not None:
self.users = users
if value is not None:
self.value = value
@property
def branch_match_kind(self):
"""Gets the branch_match_kind of this Branchrestriction. # noqa: E501
Indicates how the restriction is matched against a branch. The default is `glob`. # noqa: E501
:return: The branch_match_kind of this Branchrestriction. # noqa: E501
:rtype: str
"""
return self._branch_match_kind
@branch_match_kind.setter
def branch_match_kind(self, branch_match_kind):
"""Sets the branch_match_kind of this Branchrestriction.
Indicates how the restriction is matched against a branch. The default is `glob`. # noqa: E501
:param branch_match_kind: The branch_match_kind of this Branchrestriction. # noqa: E501
:type: str
"""
if branch_match_kind is None:
raise ValueError("Invalid value for `branch_match_kind`, must not be `None`") # noqa: E501
allowed_values = ["branching_model", "glob"] # noqa: E501
if branch_match_kind not in allowed_values:
raise ValueError(
"Invalid value for `branch_match_kind` ({0}), must be one of {1}" # noqa: E501
.format(branch_match_kind, allowed_values)
)
self._branch_match_kind = branch_match_kind
@property
def branch_type(self):
"""Gets the branch_type of this Branchrestriction. # noqa: E501
Apply the restriction to branches of this type. Active when `branch_match_kind` is `branching_model`. The branch type will be calculated using the branching model configured for the repository. # noqa: E501
:return: The branch_type of this Branchrestriction. # noqa: E501
:rtype: str
"""
return self._branch_type
@branch_type.setter
def branch_type(self, branch_type):
"""Sets the branch_type of this Branchrestriction.
Apply the restriction to branches of this type. Active when `branch_match_kind` is `branching_model`. The branch type will be calculated using the branching model configured for the repository. # noqa: E501
:param branch_type: The branch_type of this Branchrestriction. # noqa: E501
:type: str
"""
allowed_values = ["feature", "bugfix", "release", "hotfix", "development", "production"] # noqa: E501
if branch_type not in allowed_values:
raise ValueError(
"Invalid value for `branch_type` ({0}), must be one of {1}" # noqa: E501
.format(branch_type, allowed_values)
)
self._branch_type = branch_type
@property
def groups(self):
"""Gets the groups of this Branchrestriction. # noqa: E501
:return: The groups of this Branchrestriction. # noqa: E501
:rtype: list[Group]
"""
return self._groups
@groups.setter
def groups(self, groups):
"""Sets the groups of this Branchrestriction.
:param groups: The groups of this Branchrestriction. # noqa: E501
:type: list[Group]
"""
self._groups = groups
@property
def id(self):
"""Gets the id of this Branchrestriction. # noqa: E501
The branch restriction status' id. # noqa: E501
:return: The id of this Branchrestriction. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this Branchrestriction.
The branch restriction status' id. # noqa: E501
:param id: The id of this Branchrestriction. # noqa: E501
:type: int
"""
self._id = id
@property
def kind(self):
"""Gets the kind of this Branchrestriction. # noqa: E501
The type of restriction that is being applied. # noqa: E501
:return: The kind of this Branchrestriction. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this Branchrestriction.
The type of restriction that is being applied. # noqa: E501
:param kind: The kind of this Branchrestriction. # noqa: E501
:type: str
"""
if kind is None:
raise ValueError("Invalid value for `kind`, must not be `None`") # noqa: E501
allowed_values = ["require_tasks_to_be_completed", "require_passing_builds_to_merge", "force", "require_all_dependencies_merged", "push", "require_approvals_to_merge", "enforce_merge_checks", "restrict_merges", "reset_pullrequest_approvals_on_change", "delete"] # noqa: E501
if kind not in allowed_values:
raise ValueError(
"Invalid value for `kind` ({0}), must be one of {1}" # noqa: E501
.format(kind, allowed_values)
)
self._kind = kind
@property
def links(self):
"""Gets the links of this Branchrestriction. # noqa: E501
:return: The links of this Branchrestriction. # noqa: E501
:rtype: BranchingModelSettingsAllOfLinks
"""
return self._links
@links.setter
def links(self, links):
"""Sets the links of this Branchrestriction.
:param links: The links of this Branchrestriction. # noqa: E501
:type: BranchingModelSettingsAllOfLinks
"""
self._links = links
@property
def pattern(self):
"""Gets the pattern of this Branchrestriction. # noqa: E501
Apply the restriction to branches that match this pattern. Active when `branch_match_kind` is `glob`. Will be empty when `branch_match_kind` is `branching_model`. # noqa: E501
:return: The pattern of this Branchrestriction. # noqa: E501
:rtype: str
"""
return self._pattern
@pattern.setter
def pattern(self, pattern):
"""Sets the pattern of this Branchrestriction.
Apply the restriction to branches that match this pattern. Active when `branch_match_kind` is `glob`. Will be empty when `branch_match_kind` is `branching_model`. # noqa: E501
:param pattern: The pattern of this Branchrestriction. # noqa: E501
:type: str
"""
if pattern is None:
raise ValueError("Invalid value for `pattern`, must not be `None`") # noqa: E501
self._pattern = pattern
@property
def users(self):
"""Gets the users of this Branchrestriction. # noqa: E501
:return: The users of this Branchrestriction. # noqa: E501
:rtype: list[Account]
"""
return self._users
@users.setter
def users(self, users):
"""Sets the users of this Branchrestriction.
:param users: The users of this Branchrestriction. # noqa: E501
:type: list[Account]
"""
self._users = users
@property
def value(self):
"""Gets the value of this Branchrestriction. # noqa: E501
Value with kind-specific semantics: \"require_approvals_to_merge\" uses it to require a minimum number of approvals on a PR; \"require_passing_builds_to_merge\" uses it to require a minimum number of passing builds. # noqa: E501
:return: The value of this Branchrestriction. # noqa: E501
:rtype: int
"""
return self._value
@value.setter
def value(self, value):
"""Sets the value of this Branchrestriction.
Value with kind-specific semantics: \"require_approvals_to_merge\" uses it to require a minimum number of approvals on a PR; \"require_passing_builds_to_merge\" uses it to require a minimum number of passing builds. # noqa: E501
:param value: The value of this Branchrestriction. # noqa: E501
:type: int
"""
self._value = value
def to_dict(self):
    """Returns the model properties as a dict"""
    def _serialize(value):
        # Lists first: serialize each element that knows how to; then
        # whole objects with to_dict(); then one level of dict values.
        # (Intentionally not recursive beyond one level, matching the
        # generated swagger-codegen behaviour.)
        if isinstance(value, list):
            return [item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value]
        if hasattr(value, "to_dict"):
            return value.to_dict()
        if isinstance(value, dict):
            return {key: (val.to_dict() if hasattr(val, "to_dict") else val)
                    for key, val in value.items()}
        return value

    result = {}
    # openapi_types enumerates every declared model property.
    for attr, _ in six.iteritems(self.openapi_types):
        result[attr] = _serialize(getattr(self, attr))
    return result
def to_str(self):
    """Returns the string representation of the model"""
    # Pretty-print the dict form so nested models render readably.
    return pprint.pformat(self.to_dict())
def __repr__(self):
    """For `print` and `pprint`"""
    # Delegates to to_str(), i.e. the pretty-printed dict form.
    return self.to_str()
def __eq__(self, other):
    """Returns true if both objects are equal"""
    # Equal only when the other object is also a Branchrestriction and
    # every stored attribute matches.
    return (isinstance(other, Branchrestriction)
            and self.__dict__ == other.__dict__)
def __ne__(self, other):
    """Returns true if both objects are not equal"""
    # Negation of __eq__ via the == operator.
    return not self == other
|
10,620 | 6abd95931715d393333fa3d341f5a342e13eb248 | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 27 10:40:20 2016
@author: Swan
"""
# Builds a table of BEA GDP estimate revisions (advance/second/third) from
# local 'url.csv' plus 'histData<N>.xls' vintage files, and pickles the
# pivoted result.  NOTE(review): written against an old pandas — uses
# `.ix`, `pd.datetime` and ExcelFile.parse(sheetname=...), all removed in
# modern pandas; confirm the targeted pandas version before upgrading.
import pandas as pd
import datetime
###
### Step 1.0 Create URLS dataframe
###
# clean up urls: strip the fixed BEA prefix so only the
# year/quarter/estimate/date/section/xls components remain, '/'-separated
urls = pd.read_csv('url.csv', index_col = 0)
urls.x = urls.x.str.upper()
urls.x = urls.x.str.replace('HTTP://WWW.BEA.GOV/HISTDATA/RELEASES/GDP_AND_PI/','')
urls.x = urls.x.str.replace('_','/')
#change to pandas dataframe
urls = pd.DataFrame(urls.x.str.split('/').tolist())
#rename columns
urls.columns = ['year', 'quarter', 'est', 'date', 'section', 'xls']
#Change date to datetime.date() object
urls['date_pub'] = urls['date'].apply(lambda x: datetime.datetime.strptime(x,'%B-%d-%Y').date())
#Change Estimate names (BEA renamed PRELIMINARY->SECOND, FINAL->THIRD)
urls.est.replace({'PRELIMINARY': 'SECOND', 'FINAL': 'THIRD'}, regex=True, inplace=True)
#sort by date
urls.sort_values('date_pub', inplace=True)
#create date variable year_Q
urls['date'] = urls['year'] + '_' + urls['quarter']
#keep relevent columns
urls = urls[['date','est', 'date_pub']]
#keep only files after 2004_Q1
urls = urls[urls['date_pub'] >= pd.datetime(2004, 4, 1).date()]
#output urls to csv
#urls.to_csv('urls.csv')
###
#2.0 Read in 164 excel files
###
x=0
hist_file_all = pd.DataFrame()
long_file = pd.DataFrame()
# NOTE(review): comment says 1..150 but the loop reads histData1..histData148.
#Range should be 1 to 150. using 10 just to test.
for x in range(1, 149):
    #read in xls files
    xls_file = pd.ExcelFile('histData' + str(x) + '.xls')
    if '10105 Qtr' in xls_file.sheet_names:
        #This section is simply to get the date_pub variable to match with date_pub from urls
        hist_date = xls_file.parse(sheetname = '10105 Qtr', header=None)
        my_list = hist_date[0].astype(str)
        matching = [s for s in my_list if "Data published" in s]
        # strips the "Data published" prefix; the comprehension variable
        # deliberately shadows (and replaces) the list name
        matching = [matching.replace("Data published","") for matching in matching]
        #change date into datime.date() format
        date_pub = datetime.datetime.strptime(matching[0].strip(' '), '%B %d, %Y').date()
        #Get the actual data values by parsing each xls file
        hist_file = xls_file.parse(sheetname = '10105 Qtr', skiprows=7, header=None)
        #Change rows into column names: build "YYYY_Qn" labels from rows 0-1
        hist_col = hist_file[:2].transpose()
        hist_col["period"] = hist_col[0].apply(str).str[:4] + '_Q' + hist_col[1].apply(str).str[:1]
        col_names = hist_col['period'].tolist()
        col_names[0] = 'line'
        col_names[1] = 'description'
        col_names[2] = 'code'
        col_names[-1] = 'value'
        hist_file.columns = col_names
        #drop NAs
        hist_file.dropna(inplace=True)
        # change codes for consistency (older vintages used different series codes)
        hist_file.ix[hist_file['code']=='A002RC1', 'code'] = 'DPCERC1'
        hist_file.ix[hist_file['code']=='B003RC1', 'code'] = 'DDURRC1'
        hist_file.ix[hist_file['code']=='B004RC1', 'code'] = 'DNDGRC1'
        hist_file.ix[hist_file['code']=='B005RC1', 'code'] = 'DSERRC1'
        #add date_pub to the files
        hist_file['date_pub'] = date_pub
        #test = hist_file[list(hist_file.columns[:2]) + list(hist_file.columns[:-2])].copy()
        # keep columns 1-3 and the last 2
        hist_file = hist_file.ix[:,[0,1,2,-1,-2]]
        #Save these files to show what I want:
        #hist_file.to_csv('mock'+str(x)+'.csv')
        #create a large file with all the data together
        hist_file_all = pd.concat([hist_file_all, hist_file])
        #if reading in the most recent vintage, create a long_file with the current GDP codes
        if x==1:
            codes = hist_file['code']
            # NOTE(review): urls_all aliases urls (no copy), so urls also
            # ends up with a 'code' column — presumably harmless; confirm.
            urls_all = urls
            for item in codes:
                urls_all['code'] = item
                long_file = pd.concat([long_file, urls_all])
#sort the file
hist_file_all.sort_values(by=['date_pub', 'line'], inplace=True)
#change two release dates so they match up
hist_file_all.ix[hist_file_all['date_pub']==pd.datetime(2007, 1, 31).date(), 'date_pub'] = pd.datetime(2007, 1, 27).date()
hist_file_all.ix[hist_file_all['date_pub']==pd.datetime(2007, 3, 29).date(), 'date_pub'] = pd.datetime(2007, 3, 30).date()
#create final_data: one row per (vintage, series), then pivot by estimate
final_data = pd.merge(long_file, hist_file_all, how='left', on=['date_pub', 'code'])
pivot = final_data.pivot_table('value', ['code', 'description', 'date'], 'est')
# Revision columns: signed and absolute gaps between successive estimates.
pivot['adv_less_second'] = pivot['ADVANCE'] - pivot['SECOND']
pivot['adv_less_third'] = pivot['ADVANCE'] - pivot['THIRD']
pivot['second_less_third'] = pivot['SECOND'] - pivot['THIRD']
pivot['abs_adv_less_second'] = abs(pivot['ADVANCE'] - pivot['SECOND'])
pivot['abs_adv_less_third'] = abs(pivot['ADVANCE'] - pivot['THIRD'])
pivot['abs_second_less_third'] = abs(pivot['SECOND'] - pivot['THIRD'])
pivot.to_pickle('GDP_level')
'''
#Use this when you get the code going
elif '101 Qtr' in xls_file.sheet_names:
    xls_file = xls_file.parse(sheetname = '101 Qtr', header=None)
    hist_file = xls_file.parse(sheetname = '101 Qtr', skiprows=7, skip_footer=0)
'''
|
10,621 | faf4ce22e90c5f69cf647ef2506775b904faa04c | #!/usr/bin/env python3
import math
# stack overflow for getting smallest multiple of all cycles
# https://stackoverflow.com/questions/51716916/built-in-module-to-calculate-least-common-multiple
def lcm(a, b):
    """Least common multiple of a and b, via |a*b| // gcd(a, b)."""
    product = abs(a * b)
    return product // math.gcd(a, b)
with open('input.txt') as fp:
    lines = fp.readlines()
# Parse lines like "<x=-1, y=0, z=2>" into [x, y, z] integer triples.
moonpos = []
for line in lines:
    split = line.split(' ')
    moonpos.append([int(split[0].strip()[3:-1]),
                    int(split[1].strip()[2:-1]),
                    int(split[2].strip()[2:-1])])
totals = []
# One velocity triple per moon; input is assumed to hold exactly 4 moons.
moonvel = [[0, 0, 0],
           [0, 0, 0],
           [0, 0, 0],
           [0, 0, 0]]
# Part 1: simulate 1000 steps; each step applies pairwise gravity (+/-1 per
# axis toward the other moon) and then integrates velocity into position.
for i in range(1000):
    for j, moon in enumerate(moonpos):
        for k, moon2 in enumerate(moonpos):
            if j == k:
                continue
            for coord in range(3):
                if moon[coord] < moon2[coord]:
                    moonvel[j][coord] += 1
                elif moon[coord] > moon2[coord]:
                    moonvel[j][coord] -= 1
    for j, moon in enumerate(moonpos):
        for coord in range(3):
            moon[coord] += moonvel[j][coord]
# Total energy = potential (sum of |pos|) * kinetic (sum of |vel|) per moon.
totals = []
for j, moon in enumerate(moonpos):
    pot = abs(moon[0]) + abs(moon[1]) + abs(moon[2])
    kin = abs(moonvel[j][0]) + abs(moonvel[j][1]) + abs(moonvel[j][2])
    totals.append(pot * kin)
print('Part 1: ' + str(sum(totals)))
# Part 2: re-parse the starting positions and find when the full system
# repeats.  The three axes evolve independently, so the answer is the lcm
# of the per-axis cycle lengths.
moonpos = []
origs = []
for line in lines:
    split = line.split(' ')
    moonpos.append([int(split[0].strip()[3:-1]),
                    int(split[1].strip()[2:-1]),
                    int(split[2].strip()[2:-1])])
moonvel = [[0, 0, 0],
           [0, 0, 0],
           [0, 0, 0],
           [0, 0, 0]]
seen = [set(), set(), set()]     # per-axis set of serialized (pos, vel) states
cycles = [-1, -1, -1]            # step at which each axis last produced a new state
last_cycles = [-1, -1, -1]
for i in range(10000000):
    last_cycles = cycles[:]
    for j, moon in enumerate(moonpos):
        for k, moon2 in enumerate(moonpos):
            if j == k:
                continue
            for coord in [0, 1, 2]:
                if moon[coord] < moon2[coord]:
                    moonvel[j][coord] += 1
                elif moon[coord] > moon2[coord]:
                    moonvel[j][coord] -= 1
    for coord in range(3):
        for j, moon in enumerate(moonpos):
            moon[coord] += moonvel[j][coord]
        # Serialize this axis' positions+velocities; once a state repeats,
        # cycles[coord] stops advancing and holds the axis cycle length.
        entry = ''
        for j, moon in enumerate(moonpos):
            entry += str(moon[coord]) + ' ' + str(moonvel[j][coord]) + ' '
        if entry not in seen[coord]:
            cycles[coord] = i + 1
        seen[coord].add(entry)
    # No axis produced a new state this step -> all cycle lengths are final.
    if cycles == last_cycles:
        break
print('Part 2: ' + str(lcm(lcm(cycles[0], cycles[1]), cycles[2])))
|
10,622 | f8d28146dd6c4c8950cc81c46eb00d43b12ac51a | my_list = [1 , 2.5, 'a']
# Teaching demo: list length/iteration, list() over a string, and
# split/join round-tripping of a sentence.
print(my_list)
length = len(my_list)
print("The length of the list is: {}".format(length))
print("The contents of the list is:")
for item in my_list:
    print(item)
# list() over a string yields the list of its characters.
hello_list = list('hello')
print(hello_list)
my_sentence = "I am a lumberjack and I am OK. I sleep all night and I work all !"
print(my_sentence)
split = my_sentence.split()
print(split)
# split = '_'.join(split)
# print(split)
# Re-join with spaces: reconstructs the sentence (modulo repeated whitespace).
split = ' '.join(split)
print(split)
10,623 | 8e6f3cace93d0a62bb51387ba97435bfaf0d7f1a | import numpy as np
from alexnet import alexnet2, alexnet, customnet
# Training script: fits the custom network on pre-recorded frames from
# training_data_v2.npy (each entry is [image, label]).
WIDTH = 80
HEIGHT = 60
LR = 1e-3
# NOTE(review): EPOCHS only feeds the model name; model.fit below runs
# n_epoch=1 — confirm which is intended.
EPOCHS = 8
MODELNAME = 'surfrider-{}-{}-{}-epochs'.format(LR, 'customnet', EPOCHS)
model = customnet(WIDTH, HEIGHT, LR)
trainData = np.load('training_data_v2.npy', allow_pickle=True)
# Last 500 samples held out for validation.
train = trainData[:-500]
test = trainData[-500:]
X = np.array([i[0] for i in train]).reshape(-1,WIDTH,HEIGHT,1)
Y = [i[1] for i in train]
test_x = np.array([i[0] for i in test]).reshape(-1,WIDTH,HEIGHT,1)
test_y = [i[1] for i in test]
model.fit({'input': X}, {'targets': Y}, n_epoch=1, validation_set=({'input': test_x}, {'targets': test_y}),
    snapshot_step=2500, show_metric=True, run_id=MODELNAME)
# tensorboard --logdir=foo:C:/Users/Ruben/OneDrive/Desktop/Github/SurfRidersAI/log
model.save(MODELNAME)
10,624 | 74fefd8d5cadc42ee96dcdc22b0e512a81d630a8 | import math
def sumaDivisoresPropios(numero):
    """Return the sum of the proper divisors of ``numero``.

    Uses the multiplicative sigma formula: for n = p1^a1 * ... * pk^ak,
    sigma(n) = prod((p^(a+1) - 1) / (p - 1)), and the proper-divisor sum
    is sigma(n) - n.  Trial division runs over 2 then odd candidates.

    Fix vs. original: '/' and '/=' relied on Python 2 integer division;
    floor division ('//') keeps the arithmetic exact on both Python 2
    and 3.  Also renames the accumulator so it no longer shadows the
    builtin ``sum``.
    """
    n = numero
    p = 2
    total = 1  # running sigma(numero)
    while p * p <= n and n > 1:
        if n % p == 0:
            # Strip every factor p from n; j ends up holding p^(a+1).
            j = p * p
            n //= p
            while n % p == 0:
                j *= p
                n //= p
            # (p^(a+1) - 1) is always divisible by (p - 1), so // is exact.
            total *= (j - 1)
            total //= (p - 1)
        # Candidate sequence: 2, 3, 5, 7, ... (2 then odd numbers).
        if p == 2:
            p = 3
        else:
            p += 2
    if n > 1:
        # Leftover n is a prime factor with exponent 1: sigma factor (n + 1).
        total *= (n + 1)
    return total - numero
# Project Euler 23 (Python 2 syntax: xrange / print statements): find the
# sum of all positive integers not expressible as the sum of two abundant
# numbers.  28123 is the known upper bound; 12 is the smallest abundant.
abundantes = []
for a in xrange(12, 28123):
    if sumaDivisoresPropios(a) > a:
        abundantes.append(a)
print abundantes[0:5]
#print abundantes, len(abundantes)
# All values < 28123 expressible as a sum of two abundants.
dosAbundante = set([])
for a in abundantes:
    for b in abundantes:
        valor = a+b
        if valor < 28123:
            dosAbundante.add(a+b)
print len(dosAbundante)
total = 0
for n in dosAbundante:
    total += n
superior = 0
for n in xrange(1, 28123):
    superior += n
print superior
print total
# Answer: sum of 1..28122 minus the sum of two-abundant-expressible numbers.
print (superior-total)
10,625 | bd12c3d9b3f3bc391bc17d386826ec8e38cb7d0d | # /////////////////////////////////////////////////////////////////////
#
# ocpdemo.py :
# In an endless loop, repeating every 5(?) seconds...
# For each port,
# Show fixed identifying keys, then scrolling other keys
#
# Just to show that we can, and give an idea of how much data is available
#
# Copyright 2015 Finisar Inc.
#
# Author: Don Bollinger don@thebollingers.or/
#
# ////////////////////////////////////////////////////////////////////
from oom import * # the published OOM Northbound API
from oom.oomlib import type_to_str
from oom.decode import get_hexstr # helper function from the decode pack
from time import sleep
import sys
"""
tweak numlines and linelen to tune demo visual
numlines should be the number of lines in the display window,
or the number of modules in the switch, whichever is smaller
linelen should be the number of chars wide the window is, wider is better!
these can be passed as parameters on the command line:
eg: py ocpdemo.py 5 80 < 5 lines, 80 characters per line
"""
# Display tuning; both values may be overridden from the command line:
#   py ocpdemo.py <numlines> <linelen>
numlines = 3
linelen = 80
parms = sys.argv
if (len(parms) > 1):
    numlines = int(parms[1])
if (len(parms) > 2):
    linelen = int(parms[2])
portlist = oom_get_portlist()
numports = len(portlist)
print(numports)
pcycle = 0  # scroll offset into each port's key list; advances every screen
pcount = 0  # rotating port cursor so successive screens continue the walk
try:
    while 1:
        lines = 0
        while lines < numlines:
            pnum = (pcount) % numports
            pcount += 1
            port = portlist[pnum]
            # Fixed identifying prefix: port name plus decoded module type.
            outstr = str.format("{:6} {:10}",
                                port.port_name, type_to_str(port.port_type))
            keylist = port.fmap
            if keylist == {}:
                continue
            keylist = port.fmap["SERIAL_ID"]
            keyoff = 0
            # Append scrolling "key: value;" pairs until the line is full.
            while len(outstr) < linelen:
                temp = (pcycle + keyoff) % len(keylist)
                keyoff += 1
                key = keylist[temp % len(keylist)]
                if len(port.mmap[key]) >= 6:
                    # Raw byte values are hex-dumped; everything else str()'d.
                    if port.mmap[key][1] == 'get_bytes':
                        val = oom_get_keyvalue(port, key)
                        outstr += key + ': ' + get_hexstr(val) + '; '
                    else:
                        outstr += key + ': ' \
                            + str(oom_get_keyvalue(port, key)) + "; "
                else:
                    outstr += ' '
            print(outstr[0:linelen])
            lines += 1
        pcycle += 1  # it will take a LONG time to roll over
        sleep(2)
except KeyboardInterrupt:
    print("Thanks for running the OOM OCP Demo!")
|
10,626 | 8fe9a6f9f410b90c67e68240eec529556f76365b |
# coding: utf-8
# In[1]:
# Notebook export: loads a European football match dataset, derives
# season/result columns, and groups feature columns for later analysis.
# import required Libraries
import numpy as np
import pandas as pd
#import seaborn as sns
import matplotlib.pyplot as plt
#sns.set(context="paper", font="monospace")
get_ipython().magic('matplotlib inline')
# In[2]:
#Data load
# NOTE(review): reads from a local Jupyter file server — only works while
# that notebook server is running on port 8888.
DF = pd.read_csv('http://localhost:8888/files/OneNote%20Notebooks/Personal/Europe%20Football_EDA/FootballEurope.csv')
# In[3]:
#removing unwanted column and converting column names to a list
DF = DF.drop(['Unnamed: 0','id'],axis=1)
DF['date'] = pd.to_datetime(DF['date'])
DF_cols = DF.columns.tolist()
#counting number of column data types
DF.dtypes.value_counts()
# In[4]:
#Set date column as index and sort by index to group by EPL seasons
DF = DF.set_index('date').sort_index(axis=0)
# In[5]:
#Populate Season column with appropriate values (July-June season windows)
DF['Season'] = np.where(((DF.index > '2012-07-01') & (DF.index < '2013-06-30')), '1213',
    np.where(((DF.index > '2013-07-01') & (DF.index < '2014-06-30')),'1314',
    np.where(((DF.index > '2014-07-01') & (DF.index < '2015-06-30')),'1415',
    np.where(((DF.index > '2015-07-01') & (DF.index < '2016-06-30')),'1516',
    np.where(((DF.index > '2016-07-01') & (DF.index < '2017-06-30')),'1617','NAN')))))
# In[6]:
#Create awayResult and homeResult columns and populate values (W/L/D)
DF['awayResult'] = np.where((DF.awayGoalFT > DF.homeGoalFT),'W',
    np.where((DF.awayGoalFT < DF.homeGoalFT),'L','D'))
DF['homeResult'] = np.where((DF.homeGoalFT > DF.awayGoalFT),'W',
    np.where((DF.homeGoalFT < DF.awayGoalFT),'L','D'))
# In[7]:
#segregating columns based on text only types, half time and full time based
DF_cols_text = DF.select_dtypes(include=['object']).columns.tolist()
DF_cols_HT = [col for col in DF.columns if col.endswith('HT')]
DF_cols_FT = [col for col in DF.columns if col.endswith('FT')]
#segregating away team and home team features for numerical columns
DF_cols_FT_away = [col for col in DF_cols_FT if col.startswith('away')]
DF_cols_FT_home = [col for col in DF_cols_FT if col.startswith('home')]
#segregating away columns by position (attack / defence / neutral)
DF_cols_FT_away_attack = ['awayPassSuccessFT','awayDribbleSuccessFT','awayShotsOnTargetFT','awayPassesKeyFT', 'awayDribbledPastFT','awayDribblesAttemptedFT','awayPossessionFT','awayShotsTotalFT', 'awayGoalFT','homeGoalFT']
DF_cols_FT_away_defence = ['awayDispossessedFT','awayShotsBlockedFT','awayDribblesWonFT','awayInterceptionsFT', 'awayTackleSuccessFT','awayTacklesTotalFT','awayGoalFT','homeGoalFT']
DF_cols_FT_away_neutral = ['awayGoalFT','homeGoalFT','awayOffsidesCaughtFT','awayFoulsCommitedFT','awayCornersTotalFT', 'awayAerialsTotalFT']
DF_cols_DIMS = ['awayTeam','homeTeam','awayResult','homeResult','Season']
#segregating home columns by position
DF_cols_FT_home_attack = ['homePassSuccessFT','homeDribbleSuccessFT','homeShotsOnTargetFT','homePassesKeyFT', 'homeDribbledPastFT','homeDribblesAttemptedFT','homePossessionFT','homeShotsTotalFT', 'homeGoalFT','awayGoalFT']
# In[8]:
#funtion to create correlation matix of individual EPL team based on features supplied
def corr_matrix(DF, corr_cols):
    """Pairwise |correlation| table for corr_cols of DF, upper triangle only.

    Returns a DataFrame with columns level_0, level_1 and the correlation
    value, sorted descending; the diagonal and lower-triangle duplicates
    are removed via get_dup_cols().
    """
    subset = DF[corr_cols]
    stacked = subset.corr().abs().unstack()
    redundant = get_dup_cols(subset)
    stacked = stacked.drop(labels=redundant).sort_values(ascending=False)
    return stacked.reset_index()
#Create a function to parse corelation matrix and keep only top absolute unique values
#remove diagonals and lower triangle values from corelation matrix
#NOTE: use only features with numerical values
#df_data = DF[DF_cols_HT]
def get_dup_cols(df_data):
    """Column pairs on or below the diagonal of df_data's correlation matrix.

    Returns a set of (col_i, col_j) tuples with j <= i, used to drop the
    redundant half (plus the diagonal) of an unstacked correlation table.
    """
    names = df_data.columns
    return {
        (names[row], names[col])
        for row in range(df_data.shape[1])
        for col in range(row + 1)
    }
# In[9]:
#Function to create Dataframe of individual EPL team with "away" and "home" performance details
def EPL_individual_team_DF(DF, team_name):
    """All EPL fixtures in DF where team_name played, home or away."""
    involved = (DF.awayTeam == team_name) | (DF.homeTeam == team_name)
    return DF.loc[(DF.division == 'EPL') & involved]
#Function to create Dataframe of individual EPL team with "away" performance details
def EPL_individual_team_away_DF(DF, team_name):
    """EPL fixtures in DF where team_name was the away side."""
    mask = (DF.division == 'EPL') & (DF.awayTeam == team_name)
    return DF.loc[mask]
#Function to create Dataframe of individual EPL team with "home" performance details
def EPL_individual_team_home_DF(DF, team_name):
    """EPL fixtures in DF where team_name was the home side."""
    mask = (DF.division == 'EPL') & (DF.homeTeam == team_name)
    return DF.loc[mask]
# In[10]:
#Get Man City away performance data
DF_ManCity_away = EPL_individual_team_away_DF(DF,'Man City')
#DF_ManCity.head()
# In[11]:
#Get Arsenal away performance data
DF_Arsenal_away = EPL_individual_team_away_DF(DF,'Arsenal')
#Create Arsenal away attack DF with supporting columns
DF_Arsenal_away_attack = pd.concat((DF_Arsenal_away[DF_cols_FT_away_attack],DF_Arsenal_away[DF_cols_DIMS]),axis=1)
#DF_Arsenal_away_attack = DF_Arsenal_away_attack.assign(awayTeam=DF_Arsenal_away['awayTeam'],homeTeam = DF_Arsenal_away['homeTeam'],Season=DF_Arsenal_away['Season'])
# In[12]:
#Get Arsenal home performance data
DF_Arsenal_home = EPL_individual_team_home_DF(DF,'Arsenal')
#Create Arsenal home attack DF with supporting columns
DF_Arsenal_home_attack = pd.concat((DF_Arsenal_home[DF_cols_FT_home_attack],DF_Arsenal_home[DF_cols_DIMS]),axis=1)
# In[13]:
#Arsenal data with points for time series analysis
DF_Arsenal = EPL_individual_team_DF(DF,'Arsenal')
DF_Arsenal_1213 = DF_Arsenal.loc[DF_Arsenal.Season=='1213']
# In[30]:
PTS_1213 = 0
# In[31]:
# NOTE(review): PTS_1213 is never incremented, so 'PTS' holds the per-match
# points (3/1/0) rather than a running total — looks like an unfinished
# cumulative-points computation; confirm intent.
for idx, row in DF_Arsenal.iterrows():
    if (((DF_Arsenal.loc[idx,'homeTeam'] == 'Arsenal') & (DF_Arsenal.loc[idx,'homeResult'] == 'W')) | ((DF_Arsenal.loc[idx,'awayTeam'] == 'Arsenal') & (DF_Arsenal.loc[idx,'awayResult'] == 'W'))):
        DF_Arsenal.loc[idx,'PTS'] = PTS_1213 + 3
    elif (((DF_Arsenal.loc[idx,'homeTeam'] == 'Arsenal') & (DF_Arsenal.loc[idx,'homeResult'] == 'D')) | ((DF_Arsenal.loc[idx,'awayTeam'] == 'Arsenal') & (DF_Arsenal.loc[idx,'awayResult'] == 'D'))):
        DF_Arsenal.loc[idx,'PTS'] = PTS_1213 + 1
    else:
        DF_Arsenal.loc[idx,'PTS'] = PTS_1213
DF_Arsenal.PTS
# In[14]:
DF_Arsenal_1213
# In[15]:
#PLOT1
#Arsenal away performance plot with attacking features
DF_Arsenal_away_attack.pivot_table(index=['homeTeam'], values=['awayPossessionFT','awayShotsOnTargetFT','awayGoalFT','homeGoalFT'], aggfunc=np.mean).sort_values(by ='awayPossessionFT', ascending = False). plot(kind='bar',rot=90,figsize=(10,8))
# In[16]:
#PLOT2
#Arsenal away performance plot on losing games with attacking features
DF_Arsenal_away_attack.loc[DF_Arsenal_away_attack.awayResult == 'L']. pivot_table(index=['homeTeam'], values=['awayPossessionFT','awayShotsOnTargetFT','awayGoalFT','homeGoalFT'], aggfunc=np.mean).sort_values(by ='awayPossessionFT', ascending = False). plot(kind='bar',rot=90,figsize=(10,8))
# In[17]:
#Arsenal home performance on losing games with attacking features
DF_Arsenal_home_attack.loc[(DF_Arsenal_home_attack.homeResult == 'L')]. pivot_table(index=['awayTeam'], values=['homePossessionFT','homeShotsOnTargetFT','homeGoalFT','awayGoalFT'], aggfunc=np.mean).sort_values(by ='homePossessionFT', ascending = False). plot(kind='bar',rot=90,figsize=(10,8))
# In[18]:
#correlation matix of Arsenal attacking features at away
DF_Arsenal_away_attack_Matrix = corr_matrix(DF_Arsenal_away,DF_cols_FT_away_attack)
DF_Arsenal_away_attack_Matrix = DF_Arsenal_away_attack_Matrix.sort_values(['level_0','level_1'])
# In[19]:
#correlation matix of Man City attacking at away
DF_ManCity_away_attack_Matrix = corr_matrix(DF_ManCity_away,DF_cols_FT_away_attack)
#DF_ManCity_attack_Matrix = DF_ManCity_attack_Matrix.rename(index=str, columns = {'level_0':'ManCity_0','level_1':'ManCity_1',0:'ManCity'})
DF_ManCity_away_attack_Matrix = DF_ManCity_away_attack_Matrix.sort_values(['level_0','level_1'])
# In[20]:
# Both matrices were sorted by the same pair labels above, so the Man City
# values can be attached column-wise to the Arsenal table.
#DF_Arsenal_attack_Matrix['ManCity_0'] = DF_ManCity_attack_Matrix['level_0'].values
#DF_Arsenal_attack_Matrix['ManCity_1'] = DF_ManCity_attack_Matrix['level_1'].values
DF_Arsenal_away_attack_Matrix['ManCity'] = DF_ManCity_away_attack_Matrix[0].values
# In[21]:
DF_Arsenal_away_attack_Matrix = DF_Arsenal_away_attack_Matrix.sort_values(0,ascending=False).rename(index=str, columns = {0:'Arsenal'})
# In[ ]:
|
10,627 | f62d0f648716e6fa814711a54825fb3562e41bb5 | # Generated by Django 3.2.5 on 2021-08-09 14:43
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: creates the `reg2` registration table.

    dependencies = [
        ('sub_part', '0012_add_categories_database_status'),
    ]

    operations = [
        migrations.CreateModel(
            name='reg2',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user_name', models.CharField(max_length=100)),
                # NOTE(review): password/c_password are plain CharFields —
                # confirm hashing happens at the application layer before save.
                ('password', models.CharField(max_length=100)),
                ('c_password', models.CharField(max_length=100)),
                ('email_id', models.EmailField(max_length=254)),
            ],
        ),
    ]
|
10,628 | 9f00cb67df731c5ea21e39f7a030c55dd2c4b831 | class Solution:
def lengthOfLastWord(self, s):
    """
    :type s: str
    :rtype: int

    Length of the last space-separated word in s; 0 if there is none.
    """
    # Split on single spaces only (other whitespace counts as word
    # characters, matching a manual character scan), then discard the
    # empty fragments produced by repeated or trailing spaces.
    words = [chunk for chunk in s.split(' ') if chunk]
    return len(words[-1]) if words else 0
10,629 | f41743d82727b23bc4d8a7faa27a7e7687286ca2 | # eventpy library
# Copyright (C) 2020 Wang Qi (wqking)
# Github: https://github.com/wqking/eventpy
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import eventpy.eventqueue as eventqueue
import eventpy.policy as eventPolicy
import threading
import time
def test_tutorial_1_basic() :
    """Basic EventQueue usage: register listeners, enqueue, then process."""
    print("EventQueue tutorial 1, basic")
    # create an EventQueue
    queue = eventqueue.EventQueue()
    # Two event types (3 and 5); event 5 has two listeners and both fire.
    queue.appendListener(3, lambda s, n : print("Got event 3, s is %s n is %d" % (s, n)))
    queue.appendListener(5, lambda s, n : print("Got event 5, s is %s n is %d" % (s, n)))
    queue.appendListener(5, lambda s, n : print("Got another event 5, s is %s n is %d" % (s, n)))
    # Enqueue the events, the first argument is always the event type.
    # The listeners are not triggered during enqueue.
    queue.enqueue(3, "Hello", 38)
    queue.enqueue(5, "World", 58)
    # Process the event queue, dispatch all queued events.
    queue.process();
def test_tutorial_2_multipleThreading() :
    """Process an EventQueue on a worker thread while the main thread enqueues.

    Demonstrates wait()/process() from another thread and how
    DisableQueueNotify batches wake-ups until it is released.
    """
    print("EventQueue tutorial 2, multiple threading")
    # create an EventQueue
    queue = eventqueue.EventQueue()
    # Event type ids used below.
    stopEvent = 1
    otherEvent = 2
    def threadFunc() :
        shouldStop = False
        # stopEvent flips the flag so the worker loop exits cleanly.
        def stopCallback(index) :
            nonlocal shouldStop
            shouldStop = True
        queue.appendListener(stopEvent, stopCallback)
        queue.appendListener(otherEvent, lambda index : print("Got event, index is %d" % (index)))
        while not shouldStop :
            queue.wait()
            queue.process()
    # Start a thread to process the event queue.
    # All listeners are invoked in that thread.
    thread = threading.Thread(target = threadFunc)
    thread.start()
    # Enqueue an event from the main thread. After sleeping for 10 milliseconds,
    # the event should have be processed by the other thread.
    queue.enqueue(otherEvent, 1)
    time.sleep(0.01)
    print("Should have triggered event with index = 1")
    queue.enqueue(otherEvent, 2)
    time.sleep(0.01)
    print("Should have triggered event with index = 2")
    # eventqueue.DisableQueueNotify is a resource management class that
    # disables waking up any waiting threads.
    # So no events should be triggered in this code block.
    # DisableQueueNotify is useful when adding lots of events at the same time
    # and only want to wake up the waiting threads after all events are added.
    with eventqueue.DisableQueueNotify(queue) :
        queue.enqueue(otherEvent, 10)
        time.sleep(0.01)
        print("Should NOT trigger event with index = 10")
        queue.enqueue(otherEvent, 11)
        time.sleep(0.01)
        print("Should NOT trigger event with index = 11")
    # The DisableQueueNotify object is destroyed here, and has resumed
    # waking up waiting threads. So the events should be triggered.
    time.sleep(0.01)
    print("Should have triggered events with index = 10 and 11")
    queue.enqueue(stopEvent, 1)
    thread.join()
|
10,630 | 49a944b672a7210267bc6480a9f5e3f6466d0f4f | #!/usr/bin/env python
# coding: utf-8
# ___
#
# <a href='http://www.pieriandata.com'><img src='../Pierian_Data_Logo.png'/></a>
# ___
# <center><em>Copyright Pierian Data</em></center>
# <center><em>For more information, visit us at <a href='http://www.pieriandata.com'>www.pieriandata.com</a></em></center>
# # NumPy Exercises
#
# Now that we've learned about NumPy let's test your knowledge. We'll start off with a few simple tasks and then you'll be asked some more complicated questions.
#
# <div class="alert alert-danger" style="margin: 10px"><strong>IMPORTANT NOTE!</strong> Make sure you don't run the cells directly above the example output shown, <br>otherwise you will end up writing over the example output!</div>
# #### 1. Import NumPy as np
# In[2]:
import numpy as np
# #### 2. Create an array of 10 zeros
# In[4]:
arr = np.zeros(10)
arr
# In[2]:
# DON'T WRITE HERE
# #### 3. Create an array of 10 ones
# In[5]:
arr2 = np.ones(10)
arr2
# In[3]:
# DON'T WRITE HERE
# #### 4. Create an array of 10 fives
# In[15]:
arr3 = np.ones(10)*5
arr3
# In[4]:
# DON'T WRITE HERE
# #### 5. Create an array of the integers from 10 to 50
# In[13]:
# NOTE(review): linspace yields floats (10.0, 11.0, ...); the arange
# version below is the integer answer.
arr4 = np.linspace(10,50,41)
arr4
# In[14]:
arr5 = np.arange(10,51)
arr5
# In[5]:
# DON'T WRITE HERE
# #### 6. Create an array of all the even integers from 10 to 50
# In[19]:
arr6 = np.arange(10,52,2)
arr6
# In[22]:
# NOTE(review): linspace(10,50,21) gives floats again, same caveat as #5.
arr7 = np.linspace(10,50,21)
arr7
# In[6]:
# DON'T WRITE HERE
# #### 7. Create a 3x3 matrix with values ranging from 0 to 8
# In[23]:
arr8 = np.arange(0,9).reshape(3,3)
arr8
# In[7]:
# DON'T WRITE HERE
# #### 8. Create a 3x3 identity matrix
# In[24]:
np.eye(3)
# In[8]:
# DON'T WRITE HERE
# #### 9. Use NumPy to generate a random number between 0 and 1<br><br> NOTE: Your result's value should be different from the one shown below.
# In[26]:
np.random.rand(1)
# In[9]:
# DON'T WRITE HERE
# #### 10. Use NumPy to generate an array of 25 random numbers sampled from a standard normal distribution<br><br>  NOTE: Your result's values should be different from the ones shown below.
# In[31]:
np.random.randn(25)
# In[10]:
# DON'T WRITE HERE
# #### 11. Create the following matrix:
# In[42]:
np.arange(0.01,1.01,0.01).reshape(10,10)
# In[11]:
# DON'T WRITE HERE
# #### 12. Create an array of 20 linearly spaced points between 0 and 1:
# In[43]:
np.linspace(0,1,20)
# In[12]:
# DON'T WRITE HERE
# ## Numpy Indexing and Selection
#
# Now you will be given a starting matrix (be sure to run the cell below!), and be asked to replicate the resulting matrix outputs:
# In[44]:
# RUN THIS CELL - THIS IS OUR STARTING MATRIX
mat = np.arange(1,26).reshape(5,5)
mat
# #### 13. Write code that reproduces the output shown below.<br><br>  Be careful not to run the cell immediately above the output, otherwise you won't be able to see the output any more.
# In[51]:
# NOTE(review): hard-codes the values instead of slicing; mat[2:,1:] would
# reproduce the same submatrix (and the trailing reshape(3,4) is a no-op
# on an already 3x4 array).
mat_2d = np.array([[12,13,14,15],[17,18,19,20],[22,23,24,25]]).reshape(3,4)
mat_2d
# In[14]:
# DON'T WRITE HERE
# #### 14. Write code that reproduces the output shown below.
# In[55]:
mat[3][4]
# In[15]:
# DON'T WRITE HERE
# #### 15. Write code that reproduces the output shown below.
# In[64]:
mat[0:3,1].reshape(3,1)
# In[16]:
# DON'T WRITE HERE
# #### 16. Write code that reproduces the output shown below.
# In[65]:
mat[4]
# In[17]:
# DON'T WRITE HERE
# #### 17. Write code that reproduces the output shown below.
# In[69]:
mat[3:5]
# In[18]:
# DON'T WRITE HERE
# ## NumPy Operations
# #### 18. Get the sum of all the values in mat
# In[70]:
mat.sum()
# In[19]:
# DON'T WRITE HERE
# #### 19. Get the standard deviation of the values in mat
# In[71]:
mat.std()
# In[20]:
# DON'T WRITE HERE
# #### 20. Get the sum of all the columns in mat
# In[72]:
mat.sum(axis=0)
# In[21]:
# DON'T WRITE HERE
# ## Bonus Question
#
# We worked a lot with random data with numpy, but is there a way we can insure that we always get the same random numbers? [Click Here for a Hint](https://www.google.com/search?q=numpy+random+seed&rlz=1C1CHBF_enUS747US747&oq=numpy+random+seed&aqs=chrome..69i57j69i60j0l4.2087j0j7&sourceid=chrome&ie=UTF-8)
# In[ ]:
# # Great Job!
|
10,631 | 2c174c100656e0fe78a7ee9f61bdd032da221cde | n,k = map(int, input().split())
# Brute force kept for reference (nested loops over right/left pairs) —
# too slow for the contest limits.
# ans = 0
# for right in range(k+1, n+1):
#     for left in range(max(1,k), right):
#         while left <= n:
#             ans+=1
#             left+=right
# print(ans, flush=True)
# TLE
# Closed-form replacement: for each modulus b, count values up to n whose
# residue mod b is >= k — each full block of b consecutive values
# contributes max(0, b-k) hits, plus the partial tail block; the k==0
# adjustment removes one over-counted case.  (Derivation from the original
# brute force above — confirm against the problem statement.)
ans = 0
for b in range(1,n+1):
    ans += (n//b)*max(0,b-k) + max(n%b+1-k, 0)
if k==0:
    ans -= 1
print(ans, flush=True)
10,632 | f1ef92521dd62e7ce56efecab0ba6662a11d069a | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import datetime
from django import template
from django.utils.safestring import mark_safe
from django.utils.html import conditional_escape as esc
from django.utils.timesince import timesince
from bob.forms.dependency import DependencyForm
register = template.Library()
@register.simple_tag
def bob_icon(name, is_white=False):
    """
    Display a bootstrap icon.

    :param name: The name of the icon to display.
    :param is_white: Whether the icon should be white (for dark background).
    """
    white = ' icon-white' if is_white else ''
    # Escape only the user-supplied icon name; ``white`` is a trusted
    # literal.  Bug fix: the original wrote ``% esc(name, white)``, which
    # passed two arguments to conditional_escape (it takes one) and left
    # the two %s placeholders with a single value — a guaranteed TypeError.
    return mark_safe('<i class="icon-%s%s"></i>' % (esc(name), white))
@register.inclusion_tag('bob/main_menu.html')
def main_menu(items, selected, title=None, search=None, white=False,
              position='', title_url="/"):
    """
    Show main menu bar.

    :param items: The list of :class:`bob.menu.MenuItem` instances to show.
    :param selected: The :data:`name` of the currently selected item.
    :param title: The title to show in the menu bar.
    :param search: The URL for the search form.
    :param white: If True, the menu bar will be white.
    :param position: Empty, or one of ``'fixed'``, ``'static'``, ``'bottom'``.
    :param title_url: The URL the menu title links to.
    """
    # Map the symbolic position onto the matching Bootstrap navbar class;
    # unknown values fall back to an empty class.
    position_classes = {
        'static': 'navbar-static-top',
        'fixed': 'navbar-fixed-top',
        'bottom': 'navbar-fixed-bottom',
    }
    css_classes = ['navbar', position_classes.get(position, '')]
    if not white:
        css_classes.append('navbar-inverse')
    return {
        'items': items,
        'selected': selected,
        'title': title,
        'search': search,
        'position': position,
        'white': bool(white),
        'title_url': title_url,
        'class': ' '.join(css_classes),
    }
@register.inclusion_tag('bob/dropdown_items.html')
def dropdown_items(items, white=False):
    """
    Render dropdown items.

    :param items: A :class:`bob.menu.MenuItem` whose ``subitems`` are shown.
    :param white: If True, the menu bar will be white.
    """
    # Note: only the subitems of the passed item are handed to the template.
    return {
        'items': items.subitems,
        'white': bool(white),
    }
@register.simple_tag
def render_cell(column, row):
    """Render the cell for a given column and row."""
    # Delegates rendering entirely to the column object.
    return column.render_cell(row)
@register.inclusion_tag('bob/tab_menu.html')
def tab_menu(items, selected, side=None):
    """
    Show a menu in form of tabs.

    :param items: The list of :class:`bob.menu.MenuItem` instances to show.
    :param selected: The :data:`name` of the currently selected item.
    :param side: The direction of tabs, may be on of ``"left"``, ``"right"``,
        ``"top"`` or ``"bottom"``. Defaults to ``"top"``.
    """
    return {
        'items': items,
        'selected': selected,
        'side': side,
    }
@register.inclusion_tag('bob/sidebar_menu.html')
def sidebar_menu(items, selected):
    """
    Show menu in a sidebar.

    :param items: The list of :class:`bob.menu.MenuItem` instances to show.
    :param selected: The :data:`name` of the currently selected item.
    """
    return {
        'items': items,
        'selected': selected,
    }
@register.inclusion_tag('bob/sidebar_menu_subitems.html')
def sidebar_menu_subitems(item, selected):
    """
    Show subitems of a menu in a sidebar.

    :param item: The :class:`bob.menu.MenuItem` whose subitems are shown.
    :param selected: The :data:`name` of the currently selected item.
    """
    return {
        'item': item,
        'selected': selected,
    }
@register.inclusion_tag('bob/pagination.html')
def pagination(page, show_all=False, show_csv=False,
               fugue_icons=False, url_query=None, neighbors=1,
               query_variable_name='page', export_variable_name='export'):
    """
    Display pagination for a list of items.

    :param page: Django's paginator page to display.
    :param show_all: Whether to show a link for disabling pagination.
    :param show_csv: Whether to show a link to CSV download.
    :param fugue_icons: Whether to use Fugue icons or Bootstrap icons.
    :param url_query: The query parameters to add to all page links.
    :param neighbors: How many neighboring pages to show in paginator.
    :param query_variable_name: Query parameter carrying the page number.
    :param export_variable_name: Query parameter used for the CSV export link.
    """
    # No page object at all: render only the optional show-all/CSV links.
    if not page:
        return {
            'show_all': show_all,
            'show_csv': show_csv,
            'fugue_icons': fugue_icons,
            'url_query': url_query,
            'export_variable_name': export_variable_name,
        }
    paginator = page.paginator
    page_no = page.number
    # Window of pages around the current one, +/- `neighbors`.
    # NOTE(review): slicing+insert/append assumes page_range is a list
    # (Django < 1.9); newer Django returns a range object, which has
    # neither insert nor append — confirm the targeted Django version.
    pages = paginator.page_range[
        max(0, page_no - 1 - neighbors):
        min(paginator.num_pages, page_no + neighbors)
    ]
    # Always show the first and last page, with '...' gap markers.
    if 1 not in pages:
        pages.insert(0, 1)
        pages.insert(1, '...')
    if paginator.num_pages not in pages:
        pages.append('...')
        pages.append(paginator.num_pages)
    urls = []
    for item in pages:
        if item == '...':
            # Gap markers link back to the current page.
            urls.append(changed_url(url_query, query_variable_name, page_no))
        else:
            urls.append(changed_url(url_query, query_variable_name, item))
    url_pages = zip(pages, urls)
    return {
        'paginator': paginator,
        'page_no': page_no,
        'page': page,
        'pages': pages,
        'show_all': show_all,
        'show_csv': show_csv,
        'fugue_icons': fugue_icons,
        'url_query': url_query,
        'url_previous_page': changed_url(
            url_query,
            query_variable_name,
            page_no - 1
        ),
        'url_next_page': changed_url(
            url_query,
            query_variable_name,
            page_no + 1
        ),
        'url_pages': url_pages,
        # page number 0 conventionally disables pagination ("show all").
        'url_all': changed_url(url_query, query_variable_name, 0),
        'export_variable_name': export_variable_name,
    }
def changed_url(query, name, value):
    """Return *query* re-encoded with parameter *name* set to *value*.

    A value of ``None``, ``1`` or ``'1'`` removes the parameter instead
    (page 1 is the default and needs no explicit parameter). When *query*
    is empty/None, a bare ``name=value`` string is returned.
    """
    if not query:
        return '{}={}'.format(name, value)
    updated = query.copy()
    if value in (None, 1, '1'):
        try:
            del updated[name]
        except KeyError:
            pass
    else:
        updated[name] = value
    return updated.urlencode()
@register.filter
def bob_export(query, export):
    """Modify the query string of an URL to change the ``export`` argument.

    A falsy *export* removes the parameter; an empty *query* yields a bare
    ``export=...`` string.
    """
    if not query:
        return 'export=%s' % export
    updated = query.copy()
    if not export:
        try:
            del updated['export']
        except KeyError:
            pass
    else:
        updated['export'] = export
    return updated.urlencode()
@register.filter
def timesince_limited(d):
    """
    Display time between given date and now in a human-readable form if the
    time span is less than a day, otherwise display the date normally.

    :param d: The date to display.
    """
    now = datetime.datetime.now()
    # Same calendar day: show a relative ("... ago") or clock-time form.
    if now.strftime('%Y-%m-%d') == d.strftime('%Y-%m-%d'):
        if now - d < datetime.timedelta(hours=1):
            return timesince(d) + ' ago '
        return d.strftime('%H:%M')
    return d
@register.inclusion_tag('bob/form.html')
def form(form, action="", method="POST", fugue_icons=False,
         css_class="form-horizontal", title="", submit_label='Save'):
    """
    Render a form.

    :param form: The form to render.
    :param action: The submit URL.
    :param method: The submit method, either ``"GET"`` or ``"POST"``.
    :param fugue_icons: Whether to use Fugue or Bootstrap icon.
    :param css_class: The CSS class to use for the ``<form>`` tag.
    :param title: Form title.
    :param submit_label: Submit button label.
    """
    context = dict(
        form=form,
        action=action,
        title=title,
        method=method,
        fugue_icons=fugue_icons,
        css_class=css_class,
        submit_label=submit_label,
    )
    return context
@register.inclusion_tag('bob/form_as_fieldsets.html')
def form_as_fieldsets(form_instance, *args, **kwargs):
    """Render a form grouped into fieldsets; requires ``Meta.fieldset``."""
    fieldset = getattr(form_instance.Meta, 'fieldset', None)
    if not fieldset:
        raise Exception(
            "{}.Meta.fieldset attribute is UNDEFINED or EMPTY".format(
                repr(form_instance)
            )
        )
    return form(form_instance, *args, **kwargs)
@register.inclusion_tag('bob/form.html')
def form_horizontal(*args, **kwargs):
    # Thin alias for `form` (whose css_class already defaults to
    # "form-horizontal"); kept as a distinct tag name for templates.
    return form(*args, **kwargs)
@register.inclusion_tag('bob/table_header.html')
def table_header(columns=None, url_query=None, sort=None, fugue_icons=False,
                 sort_variable_name='sort'):
    """
    Render a table header with sorted column options.

    :param columns: a list of objects of
        type :py:class:`bob.data_table.DataTableColumn`
    :param url_query: The query parameters to add to all page links
    :param sort: means that the column is now sorted
    :param fugue_icons: Whether to use Fugue icons or Bootstrap icons.

    A column's ``show_conditions`` may be a ``(func, arg)`` tuple deciding
    whether that column is displayed; any other value means "always show".
    """
    def _is_visible(column):
        # (func, arg) tuples gate visibility; everything else is shown.
        if isinstance(column.show_conditions, tuple):
            predicate, argument = column.show_conditions
            return predicate(argument)
        return True

    new_columns = [column for column in columns if _is_visible(column)]
    return {
        'columns': new_columns,
        'sort': sort,
        'url_query': url_query,
        'fugue_icons': fugue_icons,
        'sort_variable_name': sort_variable_name,
    }
@register.simple_tag
def bob_sort_url(query, field, sort_variable_name, type):
    """Modify the query string of an URL to change the ``sort_variable_name``
    argument. ``type`` is ``'asc'`` or ``'desc'`` (descending gets a ``-``
    prefix); any other value leaves the query unchanged.
    """
    updated = query.copy()
    prefixes = {'desc': '-', 'asc': ''}
    if type in prefixes:
        updated[sort_variable_name] = prefixes[type] + field
    return updated.urlencode()
@register.simple_tag
def bob_export_url(query, value, export_variable_name='export'):
    """Modify the query string of an URL to change the ``export_variable_name``
    argument. A falsy *value* removes the parameter instead.
    """
    if not query:
        return '%s=%s' % (export_variable_name, value)
    updated = query.copy()
    if not value:
        try:
            del updated[export_variable_name]
        except KeyError:
            pass
    else:
        updated[export_variable_name] = value
    return updated.urlencode()
@register.simple_tag
def dependency_data(form):
    """Render the data-bob-dependencies tag if this is a DependencyForm;
    otherwise emit nothing."""
    if isinstance(form, DependencyForm):
        payload = esc(json.dumps(form.get_dependencies_for_js()))
        return 'data-bob-dependencies="{0}"'.format(payload)
    return ''
@register.inclusion_tag('bob/field_wrapper.html')
def field_wrapper(field):
    """Render the full control-group tag of a field.

    :param field: The bound form field to wrap.
    """
    return {'field': field}
@register.filter
def get_item(obj, key):
    """Template filter: subscript *obj* with *key*, i.e. ``obj[key]``.

    Useful in templates, where ``obj.key`` cannot take a variable key.
    """
    return obj[key]
|
10,633 | a048718fc0da7a252e894eec2c93b2ed3ef7a343 | from django.contrib import admin
from .models import Employee, EmployeeAdmin
# Expose Employee in the Django admin with the EmployeeAdmin options class
# (unconventionally imported from .models rather than defined in admin.py).
admin.site.register(Employee, EmployeeAdmin)
|
10,634 | 07691c5c28b381f1c0ff800089a6be048cf7ef8a | from django.test import TestCase
from per_ac.accounts_department.models import *
class CategoryTest(TestCase):
    """Smoke test for the Categoty model created by a staff user."""

    def setUp(self):
        # `is_staff` (not the original's `is_staf`) is the real
        # django.contrib.auth User flag; the typo silently set a
        # nonexistent attribute.
        user = User.objects.create_user("test", "test", "123")
        user.is_staff = True
        user.save()
        prof = UserProfile(user=user)
        prof.save()
        self.cat = Categoty.objects.create(
            Title="first", periodicity="sd", FK_User=user)

    def test_periodicity(self):
        # The original put its assertion inside setUp, where the runner
        # never reports it as a test, and queried via `self.cat.objects`,
        # which Django forbids on instances. Query through the manager.
        cat = Categoty.objects.get(Title="first")
        # NOTE(review): the original compared periodicity to 10000 even
        # though setUp stores "sd" -- confirm the intended expected value.
        self.assertEqual(cat.periodicity, "sd")
|
10,635 | 485e6df6d9e3795662613eb1882a9666cdb91a4e | #!/usr/bin/env python
from load import ROOT as R
import gna.constructors as C
# Sentinel value fed through every source; check() verifies it round-trips.
constant = 1.2345
def make(nsources, ntargets):
    """Build *nsources* single-point sources followed by *ntargets*
    DebugTransformation nodes, returned as one flat list."""
    sources = [C.Points([constant]) for _ in range(nsources)]
    targets = [R.DebugTransformation('debug_%02d' % idx)
               for idx in range(ntargets)]
    return sources + targets
def check(*objs):
    """Assert that each object's first output holds ``constant``."""
    for obj in objs:
        value = obj.transformations[0].outputs[0].data()[0]
        print(' ', value, 'should be', constant)
        assert value == constant
# --- single()/single_input() shortcut accessors --------------------------
def test_single_01():
    # With exactly one transformation/output/input, the shortcuts must
    # resolve to that unique output/input at both object and
    # transformation level.
    dbg = R.DebugTransformation('debug')
    assert dbg.single()==dbg.transformations[0].outputs[0]
    assert dbg.transformations[0].single()==dbg.transformations[0].outputs[0]
    assert dbg.single_input()==dbg.transformations[0].inputs[0]
    assert dbg.transformations[0].single_input()==dbg.transformations[0].inputs[0]
def test_single_02():
    # single() must refuse to choose once a second input exists.
    points, dbg = make(1, 1)
    points.points.points >> dbg.debug.source
    dbg.add_input()
    try:
        ret = dbg.single()
    except Exception:
        pass
    else:
        print(ret)
        assert False
def test_single_03():
    # single_input() must refuse to choose between two inputs.
    points, dbg = make(1, 1)
    points.points.points >> dbg.debug.source
    dbg.add_input()
    try:
        ret = dbg.single_input()
    except Exception:
        pass
    else:
        print(ret)
        assert False
def test_single_04():
    # single() must refuse to choose between two transformations.
    dbg = R.DebugTransformation('debug')
    dbg.add_transformation()
    try:
        ret = dbg.single()
    except Exception:
        pass
    else:
        print(ret)
        assert False
def test_single_05():
    # single_input() must refuse to choose between two transformations.
    dbg = R.DebugTransformation('debug')
    dbg.add_transformation()
    try:
        ret = dbg.single_input()
    except Exception:
        pass
    else:
        print(ret)
        assert False
# --- binding operators: `output >> input` and `input << output` ----------
# The "n->m" comments describe operand specificity: 3 = fully qualified
# (obj.trans.output), 2 = transformation level, 1 = bare object. Every
# combination must resolve to the same single output/input.
def test_binding_01():
    # 3->3
    points, dbg = make(1, 1)
    points.points.points >> dbg.debug.source
    check(dbg)
def test_binding_02():
    # 3->3
    points, dbg = make(1, 1)
    dbg.debug.source << points.points.points
    check(dbg)
def test_binding_03():
    # 3->2
    points, dbg1, dbg2 = make(1, 2)
    points.points.points >> dbg1.debug
    dbg2.debug << points.points.points
    check(dbg1, dbg2)
def test_binding_04():
    # 3->1
    points, dbg1, dbg2 = make(1, 2)
    points.points.points >> dbg1
    dbg2 << points.points.points
    check(dbg1, dbg2)
def test_binding_05():
    # 2->2
    points, dbg1, dbg2 = make(1, 2)
    points.points >> dbg1.debug
    dbg2.debug << points.points
    check(dbg1, dbg2)
def test_binding_06():
    # 2->1
    points, dbg1, dbg2 = make(1, 2)
    points.points >> dbg1
    dbg2 << points.points
    check(dbg1, dbg2)
def test_binding_07():
    # 1->1
    points, dbg1, dbg2 = make(1, 2)
    points >> dbg1
    dbg2 << points
    check(dbg1, dbg2)
def test_binding_08():
    # 1->2
    points, dbg1, dbg2 = make(1, 2)
    points >> dbg1.debug
    dbg2.debug << points
    check(dbg1, dbg2)
def test_binding_9():
    # 1->3
    points, dbg1, dbg2 = make(1, 2)
    points >> dbg1.debug.source
    dbg2.debug.source << points
    check(dbg1, dbg2)
def test_binding_10():
    # 2->3
    points, dbg1, dbg2 = make(1, 2)
    points.points >> dbg1.debug.source
    dbg2.debug.source << points.points
    check(dbg1, dbg2)
def test_binding_11():
    # One output fanned out to a tuple of targets via >>.
    points, dbg1, dbg2, dbg3 = make(1, 3)
    points.points.points >> (dbg1, dbg2, dbg3)
    check(dbg1, dbg2, dbg3)
def test_binding_12():
    # Tuple of targets bound from one output via <<.
    points, dbg1, dbg2, dbg3 = make(1, 3)
    (dbg1, dbg2, dbg3) << points.points.points
    check(dbg1, dbg2, dbg3)
def test_binding_13():
    # Fan-out with bare objects on both sides.
    points, dbg1, dbg2, dbg3 = make(1, 3)
    points >> (dbg1, dbg2, dbg3)
    check(dbg1, dbg2, dbg3)
def test_binding_14():
    points, dbg1, dbg2, dbg3 = make(1, 3)
    (dbg1, dbg2, dbg3) << points
    check(dbg1, dbg2, dbg3)
def test_binding_15():
    # A target with two transformations is ambiguous, so tuple binding
    # must raise rather than pick one.
    points, dbg1, dbg2, dbg3 = make(1, 3)
    dbg1.add_transformation()
    try:
        (dbg1, dbg2, dbg3) << points
    except Exception:
        pass
    else:
        assert False
def test_binding_16():
    """Binding a source to a target that already has two inputs must raise."""
    points1, points2, dbg1, dbg2, dbg3 = make(2, 3)
    points1 >> dbg1
    dbg1.add_input()
    try:
        # The original referenced the undefined name `points`; the
        # resulting NameError was swallowed by the except clause below, so
        # the test passed for the wrong reason. Bind the second source,
        # which is what the two-source make(2, 3) call prepares for.
        (dbg1, dbg2, dbg3) << points2
    except Exception:
        pass
    else:
        assert False
if __name__ == "__main__":
glb = globals()
for fcn in sorted([name for name in list(glb.keys()) if name.startswith('test_')]):
print('call ', fcn)
glb[fcn]()
print()
print('All tests are OK!')
|
10,636 | 10fdd97484c748b2fc95130fdcb87ec8eacdcc33 | from testit_pytest.plugin_manager import TestITPluginManager
from pluggy import HookimplMarker
# Marker decorator plugins use to register implementations of "testit" hooks.
hookimpl = HookimplMarker("testit")

# Public package API.
__all__ = [
    'TestITPluginManager',
    'hookimpl'
]
|
10,637 | 24a54eaa290bd870541e9db587ce66e382f946cb | from selenium import webdriver
import unittest
import time
class TESTS(unittest.TestCase):
    """Smoke tests for the suninjuly registration pages."""

    def _register_and_get_heading(self, link):
        """Fill the mandatory fields at *link*, submit, return the <h1> text."""
        browser = webdriver.Chrome()
        # The original tests never closed the driver, leaking one Chrome
        # process per test; addCleanup guarantees quit() even on failure.
        self.addCleanup(browser.quit)
        browser.get(link)
        input1 = browser.find_element_by_css_selector('div.first_block > div.first_class > .first')
        input1.send_keys("Ivan")
        input2 = browser.find_element_by_css_selector('div.first_block > div.second_class > .second')
        input2.send_keys("Petrov")
        input3 = browser.find_element_by_css_selector('div.first_block > div.third_class > .third')
        input3.send_keys("mail@mail.ru")
        button = browser.find_element_by_css_selector("button.btn")
        button.click()
        time.sleep(1)  # wait for the success page to load
        return browser.find_element_by_tag_name("h1").text

    def test_1(self):
        welcome_text = self._register_and_get_heading(
            "http://suninjuly.github.io/registration1.html")
        self.assertEqual(welcome_text,
                         "Поздравляем! Вы успешно зарегистировались!",
                         "Тест 1 - не дошли до приветствия")

    def test_2(self):
        welcome_text = self._register_and_get_heading(
            "http://suninjuly.github.io/registration2.html")
        self.assertEqual(welcome_text,
                         "Поздравляем! Вы успешно зарегистировались!",
                         "Тест 2 - не дошли до приветствия")
if __name__ == "__main__":
unittest.main() |
10,638 | 9196e0e29f5626b7af2e5870408d76f930e19aec | import torch
import pandas as pd
if __name__ == '__main__':
    # Scratch experiment: boolean row-masking of a similarity matrix.
    # Earlier pandas attempt, kept for reference:
    # tem = pd.DataFrame({'source': [0, 1, 2, 3], 'target': [1, 2, 3, 0]})
    # node_dataframe = pd.DataFrame({'node': [0, 1, 2]})
    # tem = tem[tem.apply(lambda x: (x['source'] in node_dataframe['node']) and (x['target'] in node_dataframe['node']),
    # axis=1)]
    # a1 = torch.FloatTensor([[1, 2],
    # [1, 3]])
    # b1 = torch.FloatTensor([[1, 1],
    # [1, 1]])
    sim = torch.FloatTensor([[1, 0, 0], [1, 1, 0], [0, 0, 0]])
    a = torch.tensor([[1, 0, 1], [1, 2, 3], [1, 2, 3]])
    # Mask of rows whose similarity sum is positive (a and index are
    # currently unused beyond this point -- leftover exploration).
    index = sim.sum(1) > 0
    # a = a[sim.sum(1) > 0]
    # a = a[:, sim.sum(1) > 0]
    # index = torch.nonzero(a, as_tuple=True)[0]
    # sim = sim[index]
    # sim = sim[:, index]
    print('1')
10,639 | 6e799e03a075b7f186be80d21baa56083d7a558c | import webapp2
import jinja2
from google.appengine.api import users
from google.appengine.ext import ndb
import os
from myuser import MyUser
from ReviewModel import ReviewModel
# Jinja2 environment rooted at this file's directory, with autoescaping on.
JINJA_ENVIRONMENT= jinja2.Environment(
    loader = jinja2.FileSystemLoader(os.path.dirname(__file__)),
    extensions=['jinja2.ext.autoescape'],
    autoescape=True
)
class EVDATA(ndb.Model):
    # Datastore record describing one electric vehicle. (The original
    # comment read "email address of this User", which matched neither this
    # model nor its fields.) All values are stored as raw strings exactly
    # as submitted by the form, including numeric-looking ones.
    name = ndb.StringProperty()
    manufacturer = ndb.StringProperty()
    year = ndb.StringProperty()
    batterysize = ndb.StringProperty()
    wltprange = ndb.StringProperty()
    cost = ndb.StringProperty()
    power = ndb.StringProperty()
#************class to add EV data**************************************************
class ADDEV(webapp2.RequestHandler):
    """Handler that stores a new EV record if one does not already exist."""

    def get(self):
        """Create an EVDATA entity from the request's query parameters.

        NOTE(review): this performs a datastore write from a GET request;
        consider moving it to post() -- confirm how the form submits.
        """
        name = self.request.get('name')
        manufacturer = self.request.get('manufacturer')
        year = self.request.get('year')
        # Uniqueness is defined by the (name, manufacturer, year) triple,
        # which is exactly how the entity id below is built. The original
        # ran three independent queries and rejected the record if *any*
        # single field value already existed anywhere, which was stricter
        # than its own error message promised. (It also fetched and
        # printed the full vehicle list as leftover debug output.)
        entity_id = name + "" + manufacturer + "" + year
        if EVDATA.get_by_id(entity_id) is None:
            rv = EVDATA(id=entity_id)
            rv.name = name
            rv.manufacturer = manufacturer
            rv.year = year
            rv.batterysize = self.request.get('batterysize')
            rv.wltprange = self.request.get('wltprange')
            rv.cost = self.request.get('cost')
            rv.power = self.request.get('power')
            rv.put()
            self.response.write("Data has been added successfully.")
        else:
            self.response.write("Data Already exists. Name, Manufacturer, year should be unique." )
|
10,640 | c30accf76df68a24e7331739218c78ce34b29be3 | # Generated by Django 2.2.1 on 2019-11-01 15:47
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 2.2.1): renames Student.class_of to `year` and
    # restricts Student.major to a fixed set of programme codes.

    dependencies = [
        ('book_issuer', '0009_auto_20191101_2032'),
    ]

    operations = [
        migrations.RenameField(
            model_name='student',
            old_name='class_of',
            new_name='year',
        ),
        migrations.AlterField(
            model_name='student',
            name='major',
            field=models.CharField(choices=[('CS', 'CS'), ('SDP', 'SDP'), ('CND', 'CND'), ('EE', 'EE'), ('CLS', 'CLS')], default='CS', max_length=4),
        ),
    ]
|
10,641 | 33efa2ac8fc3f12bef9b0c188f4889bcc3b38383 | from multipoll.models.approvalpoll import ApprovalPoll, FullApprovalVote, PartialApprovalVote
from multipoll.models.multipoll import FullMultiVote, MultiPoll, PartialMultiVote
from multipoll.models.pollbase import FullVoteBase, PartialVoteBase, PollBase
from multipoll.models.user import User
__all__ = ["ApprovalPoll", "FullApprovalVote", "PartialApprovalVote",
"FullMultiVote", "MultiPoll", "PartialMultiVote",
"FullVoteBase", "PartialVoteBase", "PollBase",
"User"] |
10,642 | 94013cec03b960c3698f8f994dc6523a99b00ad7 | __package__ = "translate.learning.models.cnn"
__all__ = ["cnntranslate"]
__author__ = "Hassan S. Shavarani"
__copyright__ = "Copyright 2018, SFUTranslate Project"
__credits__ = ["Hassan S. Shavarani"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Hassan S. Shavarani"
__email__ = "sshavara@sfu.ca"
__status__ = "Production"
|
10,643 | 90aebf98ea1e46dbd5f22bf7c1c53eb4ddf72a3d | import tkinter
from collections import namedtuple
from tkinter import *
from tkinter.constants import *
from tkinter import ttk
from tkinter.ttk import *
import requests
import time
import datetime
import gui
import concurrent.futures
import logging
import json
import asyncio
# Module logger: DEBUG records go to the console, ERROR and above are also
# persisted in RemoteKu_mainLog.log.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s:%(name)s:%(message)s")
file_handler = logging.FileHandler("RemoteKu_mainLog.log")
file_handler.setLevel(logging.ERROR)
file_handler.setFormatter(formatter)
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.addHandler(stream_handler)
def logger_func(orig_func):
    """Decorator: DEBUG-log every call to *orig_func* with its arguments.

    Call records are also routed to ``RemoteKu.log``.
    """
    import functools
    import logging
    # Attach the RemoteKu.log handler only once: the original added a fresh
    # FileHandler to the shared module logger on *every* decoration, so each
    # record was duplicated once per decorated function.
    already_attached = any(
        isinstance(h, logging.FileHandler)
        and getattr(h, 'baseFilename', '').endswith('RemoteKu.log')
        for h in logger.handlers
    )
    if not already_attached:
        formatter2 = logging.Formatter("%(asctime)s:%(name)s:%(message)s")
        file_handler2 = logging.FileHandler("RemoteKu.log")
        file_handler2.setFormatter(formatter2)
        logger.addHandler(file_handler2)

    @functools.wraps(orig_func)  # keep the wrapped function's name/docs
    def wrapper(*args, **kwargs):
        logger.debug("DEBUG log for Func {} with args:{} and kwargs:{}".format(orig_func, args, kwargs))
        return orig_func(*args, **kwargs)
    return wrapper
### Module-level state: counters, flags and the root Tk window.
# `global` at module scope is a no-op, kept from the original.
global cur_hdmi
stats_counter = 30
counter = 0
running = False
timing = 0
result = "NULL"
msg_box_text = ""
api_port = ":8060"
cur_hdmi = 1  # currently selected HDMI input (1-4)
devices_listing = []  # (name, attrs) pairs loaded from devices.json
root = tkinter.Tk()
root.wm_iconbitmap(default='wicon.ico')
#root.tk_setPalette(background='purple', activeBackground='white', foreground='green')
def toplevel_loading(devices_listing):
    """Show a transient "loading" window while devices.json is read and each
    listed device is probed; return the identifiers of reachable devices.

    *devices_listing* is mutated in place: every (name, attrs) pair from
    the JSON file is appended to it.
    """
    loading_win = tkinter.Toplevel(root)
    loading_win.title('Loading Devices...')
    frame = ttk.LabelFrame(loading_win)
    message = ttk.Label(frame, text='loading...')
    frame.place()
    message.place()
    with open('devices.json', mode='r') as handle:
        config = json.load(handle)
    devices_listing.extend(config.get('devices').items())
    reachable = generate_devs(devices_listing)
    loading_win.destroy()
    return reachable
def generate_devs(dev_in):
    """Probe every (name, attrs) pair in *dev_in* and return the URLs of
    the devices that answered (see vals())."""
    states = [
        pwr_status('http://{}'.format(attrs.get('ip_address')))
        for _name, attrs in dev_in
    ]
    return vals(states)
# Known Roku devices: friendly name -> base URL (ECP listens on port 8060).
dev_list = {
    "dadL": "http://192.168.0.111",
    "dadR": "http://192.168.0.203",
    "lrTV": "http://192.168.1.155",
    "sisTV": "http://192.168.1.199",
    "parkTV": "http://192.168.1.198"
}
# Selectable TV inputs. NOTE(review): this is a list, but inputs() below
# calls .values() on its argument -- confirm which shape is intended.
input_list = ['InputTuner', 'InputHDMI1','InputHDMI2', 'InputHDMI3', 'InputHDMI4']
# Named device groups addressed together.
dev_grps = {
    "dadBOTH": [dev_list.get("dadL"), dev_list.get("dadR")]
}
# ECP paths keyed by action name.
api_calls = {
    "device_info": "/query/device-info",
    "get_apps": "/query/apps",
    "power_cycle": "/keypress/power",
    "active_app": "/query/active-app",
    "vol_up": "/keypress/volumeup",
    "vol_down": "/keypress/volumedown",
    "vol_mute": "/keypress/volumemute",
    "select": "/keypress/select",
    "home": "/keypress/home",
    "up": "/keypress/up",
    "down": "/keypress/down",
    "right": "/keypress/right",
    "left": "/keypress/left",
    "info": "/keypress/info",
    # NOTE(review): cur_hdmi is interpolated ONCE at import time, so this
    # entry is frozen at "inputhdmi1" regardless of later cycling.
    "input": "/keypress/inputhdmi{}".format(cur_hdmi)
}
def inputs(input_list):
    """Return the available input options as a list.

    Accepts either a mapping (whose values are returned) or an already-flat
    sequence such as the module-level ``input_list``. The original called
    ``.values()`` unconditionally and raised AttributeError for the list it
    is actually given.
    """
    if hasattr(input_list, 'values'):
        return list(input_list.values())
    return list(input_list)
def dev_check(dev_list):
    """Return identifiers of devices whose power state could be determined.

    NOTE(review): the *dev_list* argument is unused; dev_status() reads the
    module-level dev_list directly.
    """
    return vals(dev_status())
def vals(dev_states):
    """Extract the device identifier from each reachable status triple.

    Each entry is ``(device, power_status, color)``; "red" marks a device
    that could not be reached, and those are filtered out.
    """
    return [device for device, _status, color in dev_states if color != 'red']
@logger_func
def api_post(dev, api_call):
    """
    POST *api_call* to the Roku device whose base URL is *dev*.

    Returns "ERR" on any request failure; otherwise pushes the HTTP status
    into the message box. Non-200 responses are now reported too -- the
    original fell off the end and returned None for them.
    """
    try:
        r = requests.post(dev + ':8060' + api_call, timeout=10)
    except Exception:
        # One handler is enough: the original listed `except Exception`
        # first, which made its later ConnectionError/TimeoutError clauses
        # unreachable dead code. (Unused xmltodict/pdb imports dropped.)
        return "ERR"
    r_code = r.status_code
    if r_code == 200:
        # NOTE(review): `n` is the module-level StringVar naming the current
        # device -- confirm api_post only runs after the GUI has created it.
        print("REQUEST WAS A SUCCESS. DEVICE {} RETURNED: {} ".format(n.get(), str(r)))
        return msg_box(f'{r_code} - OK')
    return msg_box(f'{r_code} - ERR')
@logger_func
def api_req(dev, api_call):
    """
    GET *api_call* from the Roku device at base URL *dev*.

    Returns the XML response body parsed into a dict on HTTP 200, "ERR" on
    any request failure, and the message-box update for other statuses.
    """
    import xmltodict
    try:
        r = requests.get(dev + ':8060' + api_call, timeout=5)
    except Exception:
        # Specific handlers placed after `except Exception` can never run,
        # so the original's ConnectionError/TimeoutError clauses were dead
        # code; a single handler covers them all.
        return "ERR"
    if r.status_code == 200:
        print("REQUEST WAS A SUCCESS. DEVICE RETURNED: {} ".format(str(r)))
        return xmltodict.parse(r.text, xml_attribs=False)
    # The original error branch also called dev.state(DISABLED), but `dev`
    # is a URL string here, so that line could only raise AttributeError.
    return msg_box("UnknownERR")
def active_app(dev):
    """Return the active-app element reported by the device at *dev*."""
    # api_req takes (dev, api_call); the original passed an extra "get"
    # argument, which raised TypeError on every call.
    response = api_req(dev, api_calls.get("active_app"))
    return response.get("active-app").get("app")
def dev_status():
    """Poll every device in the module-level dev_list sequentially and
    return one (device, status, color) triple per device."""
    return [pwr_status(url) for url in dev_list.values()]
def dev_status_exec():
    """Poll all devices concurrently and return their status triples."""
    # Map the worker over the *list of URLs*: the original mapped
    # pwr_status over each URL string individually, i.e. over its
    # characters, probing nonsense one-letter "devices".
    urls = list(dev_list.values())
    dev_states = []
    with concurrent.futures.ProcessPoolExecutor() as executor:
        for status in executor.map(pwr_status, urls):
            print(status)
            dev_states.append(status)
    return dev_states
def pwr_status(dev):
    """Query device-info for *dev* and classify its power state.

    Returns a ``(dev, status, color)`` triple where color encodes
    reachability: green=On, orange=Sleep, red=unreachable/unknown.
    """
    api_call = "/query/device-info"
    try:
        response = api_req(dev, api_call)
    except TimeoutError:
        response = 'ERR'
    if response == 'ERR':
        return dev, "Unknown", "red"
    power_mode = response.get("device-info").get("power-mode")
    if power_mode == "Ready":
        return dev, "Sleep", "orange"
    if power_mode == "PowerOn":
        return dev, "On", "green"
    return dev, "Unknown", "red"
@logger_func
def input_hdmi_cycle(dev, cur_hdmi):
    """Advance from *cur_hdmi* to the next HDMI input (1->2->3->4->1) and
    post the corresponding keypress to the device at *dev*."""
    hdmi_range = [1, 2, 3, 4]
    # Continue from the *current* input: the original rebuilt
    # itertools.cycle() on every call and therefore always produced 1.
    try:
        position = hdmi_range.index(cur_hdmi)
    except ValueError:
        position = -1  # unknown current input: start from HDMI 1
    next_hdmi = hdmi_range[(position + 1) % len(hdmi_range)]
    # Build the keypress path here: api_post takes (dev, api_call) -- the
    # original passed a third argument (TypeError) and api_calls["input"]
    # was frozen at import time anyway.
    return api_post(dev, "/keypress/inputhdmi{}".format(next_hdmi))
def select_dev(eventObject):
    """Selection callback: record the chosen device and acknowledge it.

    NOTE(review): this is bound to the frame via
    bind('<<ComboboxSelected>>', ...), so Tk passes an Event object -- it
    is expected to expose .get(); confirm the combobox, not the Event,
    actually arrives here.
    """
    device = eventObject.get()
    # label1 here refers to the module-level message-box Label.
    label1["text"] = "OK"
    return device
## Toplevel window for sending api calls
# Shared Tk variables backing the "Send API Call" dialog widgets.
apiPath_var = StringVar()
apiMethod_var = StringVar()
apiCall_var = StringVar()
@logger_func
def toplevel_apiCall():
    """Open the dialog that lets the user send a raw API path to the
    currently selected device via GET or POST."""
    toplevel1 = Toplevel(root)
    toplevel1.title('RemoteKu-Send API Call')
    # NOTE(review): every *_label/*_btn name below holds None because
    # pack() returns None -- confirm the widgets never need to be
    # referenced later.
    toplevel_label = Label(toplevel1, text="This window allows user to send API calls ").pack()
    ## "to the current device. Provide only the path below, the URL " \
    ## "and port auto-populate and the click the button to choose the " \
    ## "method for the call (GET or POST). ex. http://2.2.3.2:8060/query/device-info")
    path_label = Label(toplevel1, text="API Path:").pack()
    path_entry = Entry(toplevel1, textvariable=apiPath_var).pack()
    get_btn = Button(toplevel1, text="GET", command=lambda:build_apiCall("GET", apiPath_var)).pack()
    post_btn = Button(toplevel1, text="POST", command=lambda:build_apiCall("POST", apiPath_var)).pack()
    close_btn = Button(toplevel1, text="Close", command=toplevel1.destroy).pack()
    ## return build_apiCall(apiPath_var)
    ##command=lambda:api_post(n.get(),api_calls.get("vol_mute"))
##command=lambda:api_post(n.get(),api_calls.get("vol_mute"))
@logger_func
def build_apiCall(apiMethod, apiPath_var):
    """Dispatch the user-entered API path to the current device.

    *apiMethod* selects GET or POST; anything else reports "ERROR".
    """
    dev = n.get()
    path = apiPath_var.get()
    handlers = {"POST": api_post, "GET": api_req}
    handler = handlers.get(apiMethod)
    if handler is None:
        return msg_box("ERROR")
    response = handler(dev, path)
    print(response)
    return msg_box(response)
#### end toplevel
2##def toplevel_input():
## ii = tkinter.StringVar()
## toplevel1 = tkinter.Toplevel(root)
## toplevel1.title('RemoteKu-Input Selector')
## input_combobox = ttk.Combobox(toplevel1, textvariable=ii)
## input_combobox['values'] = inputs(input_list)
## input_combobox.grid()
## toplevel1.bind('<<ComboboxSelected>>', select_input)
## return toplevel1
##
##def select_input(eventObject):
#### ii = eventObject.get()
## toplevel1.destroy()
## return ii
def donothing():
    """Placeholder callback for menu entries not implemented yet."""
    pass
def menu_close():
    """File->Close handler: destroy the main window, ending mainloop."""
    root.destroy()
############## Below is GUI definitions
##root = Tk()
root.title("RemoteKu C5dev--..")
root.minsize(width=100, height=70)
# NOTE(review): font1 is bound to the Separator class itself, not an
# instance, and is never used afterwards.
font1 = ttk.Separator
# Menu bar with a File menu (New is a stub; Close destroys the window).
menubar = Menu(root)
filemenu = Menu(menubar, tearoff = 0)
filemenu.add_command(label="New", command = donothing)
##filemenu.add_command(label = "Open", command = open_file)
filemenu.add_separator()
filemenu.add_command(label = "Close", command = menu_close)
menubar.add_cascade(label = "File", menu = filemenu)
# Custom button style used by the power button.
style1 = ttk.Style()
style1.map("C.TButton", foreground=[('pressed', 'red'), ('active', 'blue')],
    background=[('pressed', '!disabled', 'black'), ('active', 'purple')]
    )
# Device-selection strip: label + combobox populated while a loading
# window is shown. NOTE(review): widget.grid() returns None, so label1,
# sep1 and index below all hold None; the later ttk.Button(index, ...)
# calls therefore fall back to the default root master -- confirm intended.
top = ttk.Frame(root)
top.grid(columnspan=2, rowspan=2)
label1 = ttk.Label(top, text='Current Device').grid(column=0, row=1, pady=2)
n = tkinter.StringVar()
current_dev = ttk.Combobox(top, textvariable=n)
current_dev['values'] = toplevel_loading(devices_listing)#generate_devs(devices_listing)
current_dev.current()
current_dev.grid()
top.bind('<<ComboboxSelected>>', select_dev)
device = n.get()
sep1 = ttk.Separator(root, orient='horizontal').grid(row=2)
index = ttk.Frame(root).grid(columnspan=3, padx=0, pady=0)
##########Current Tab Config Btns etc
# Button icons (kept as module-level references so Tk does not GC them).
btn1Img = PhotoImage(file='images/pwr.png')
btn2Img = PhotoImage(file='images/nav_up.png')
btn3Img = PhotoImage(file='images/api.png')
btn4Img = PhotoImage(file='images/nav_left.png')
btn5Img = PhotoImage(file='images/nav_ok.png')
btn6Img = PhotoImage(file='images/nav_right.png')
btn7Img = PhotoImage(file='images/mute.png')
btn8Img = PhotoImage(file='images/nav_down.png')
btn9Img = PhotoImage(file='images/vol_up.png')
btn10Img = PhotoImage(file='images/home.png')
btn11Img = PhotoImage(file='images/info.png')
btn12Img = PhotoImage(file='images/vol_down.png')
# Remote-control grid: each button posts one keypress to the current device.
btn1 = ttk.Button(index, style='C.TButton', text="Pwr", image=btn1Img, command=lambda:api_post(n.get(),api_calls.get("power_cycle"))).grid(row=3, column=0)
btn2 = ttk.Button(index, text=" ^ ", image=btn2Img, command=lambda:api_post(n.get(),api_calls.get("up"))).grid(row=3, column=1)
btn3 = ttk.Button(index, text="API Call", image=btn3Img, command=toplevel_apiCall).grid(row=6, column=0)
btn4 = ttk.Button(index, text=" < ", image=btn4Img, command=lambda:api_post(n.get(),api_calls.get("left"))).grid(row=4, column=0)
btn5 = ttk.Button(index, text="Enter", image=btn5Img, command=lambda:api_post(n.get(),api_calls.get("select"))).grid(row=4, column=1)
btn6 = ttk.Button(index, text=" > ", image=btn6Img, command=lambda:api_post(n.get(),api_calls.get("right"))).grid(row=4, column=2)
btn7 = ttk.Button(index, text="Mute", image=btn7Img, command=lambda:api_post(n.get(),api_calls.get("vol_mute"))).grid(row=5, column=0)
btn8 = ttk.Button(index, text="\/", image=btn8Img, command=lambda:api_post(n.get(),api_calls.get("down"))).grid(row=5, column=1)
btn9 = ttk.Button(index, text="Vol Up", image=btn9Img, command=lambda:api_post(n.get(),api_calls.get("vol_up"))).grid(row=5, column=2)
btn10 = ttk.Button(index, text="Home", image=btn10Img, command=lambda:api_post(n.get(),api_calls.get("home"))).grid(row=3, column=2)
btn11= ttk.Button(index, text="Info", image=btn11Img, command=lambda:api_post(n.get(),api_calls.get("info"))).grid(row=6, column=1)
btn12 = ttk.Button(index, text="Vol Dn", image=btn12Img, command=lambda:api_post(n.get(),api_calls.get("vol_down"))).grid(row=6, column=2)
# Message box at the bottom; msg_box() rewrites label1's text. Note this
# rebinds label1 (earlier set to None by the grid() call above).
msg_frame1 = LabelFrame(root, text = "Message Box",)
msg_initial = "Welcome"
label1 = Label(msg_frame1)
label1['text'] = msg_initial
label1.grid()
msg_frame1.grid(sticky="s", columnspan=3)
def msg_box(msg_label):
    """Show *msg_label* in the bottom message-box Label and return the Label.

    (The original also set an unused local ``counter = 3``; removed.)
    """
    label1['text'] = msg_label
    return label1
# Attach the menu bar, then start the Tk event loop when run directly.
root.config(menu = menubar)
if __name__ == '__main__':
    root.mainloop()
|
10,644 | 7e5abb5860717555ced163c62fa3cc50f8add39d | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
class Migration(migrations.Migration):
    # Auto-generated initial migration: flavor-compound lookup tables for
    # ingredients, recipes and users, plus cached Yummly API responses.

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='IngredientFlavorCompound',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, primary_key=True, serialize=False)),
                ('ingredient_id', models.CharField(db_index=True, max_length=200)),
                ('flavor_id', models.IntegerField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Recipe',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, primary_key=True, serialize=False)),
                ('recipe_id', models.CharField(max_length=200)),
                ('flavor_id', models.IntegerField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='UserFlavorCompound',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, primary_key=True, serialize=False)),
                ('user_id', models.IntegerField(db_index=True)),
                ('flavor_id', models.IntegerField(null=True)),
                ('score', models.IntegerField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='YummlyResponse',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, primary_key=True, serialize=False)),
                ('recipe_id', models.CharField(max_length=200)),
                ('response', jsonfield.fields.JSONField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.AlterUniqueTogether(
            name='userflavorcompound',
            unique_together=set([('user_id', 'flavor_id')]),
        ),
    ]
|
10,645 | 2cabb1c2e05371835c97e77447e5afe9a1431a5f | # importing libraries
import matplotlib.pyplot as plt
import pandas as pd
import pickle
import numpy as np
import os
import glob
from keras.applications.resnet50 import ResNet50
from keras.optimizers import Adam
from keras.layers import Dense, Flatten,Input, Convolution2D, Dropout, LSTM, TimeDistributed, Embedding, Bidirectional, Activation, RepeatVector,Concatenate
from keras.models import Sequential, Model
from keras.utils import np_utils
import random
from keras.preprocessing import image, sequence
import matplotlib.pyplot as plt
from google.colab import drive
drive.mount('/content/drive')
import glob
images_directory = '/content/drive/My Drive/Flickr_Data/'
img_path = '/content/drive/My Drive/Flickr_Data/Flickr_Data/Images/'
cap_path = '/content/drive/My Drive/Flickr_Data/Flickr_Data/Flickr_TextData/Flickr8k.token.txt'
training_path = '/content/drive/My Drive/Flickr_Data/Flickr_Data/Flickr_TextData/Flickr_8k.trainImages.txt'
valid_path = '/content/drive/My Drive/Flickr_Data/Flickr_Data/Flickr_TextData/Flickr_8k.devImages.txt'
testing_path = '/content/drive/My Drive/Flickr_Data/Flickr_Data/Flickr_TextData/Flickr_8k.testImages.txt'
cap = open(cap_path, 'r').read().split("\n")
x_training = open(training_path, 'r').read().split("\n")
x_valid = open(valid_path, 'r').read().split("\n")
x_testing = open(testing_path , 'r').read().split("\n")
# Loading cap as values and images as key in dictionary
tok = {}
for item in range(len(cap)-1):
tem = cap[item].split("#") #tem[0]= imgname.jpg ..... tem[1]=0 captionn.
if tem[0] in tok:
tok[tem[0]].append(tem[1][2:])
else:
tok[tem[0]] = [tem[1][2:]] #tem[n]= imgName ... #tok[tem[n]] = list of caption
# Making 3 files with 2 colmns as 'image_id' and 'captions'
training_dataset = open('flickr_8k_train_dataset.txt','wb')
training_dataset.write(b"image_id\tcap\n")
valid_dataset = open('flickr_8k_val_dataset.txt','wb')
valid_dataset.write(b"image_id\tcap\n")
testing_dataset = open('flickr_8k_test_dataset.txt','wb')
testing_dataset.write(b"image_id\tcap\n")
# Loading image ids and captions for each of these images in the above 3 files
for img in x_training:
if img == '':
continue
for capt in tok[img]:
caption = "<start> "+ capt + " <end>"
training_dataset.write((img+"\t"+caption+"\n").encode())
training_dataset.flush()
training_dataset.close()
for img in x_testing:
if img == '':
continue
for capt in tok[img]:
caption = "<start> "+ capt + " <end>"
testing_dataset.write((img+"\t"+caption+"\n").encode())
testing_dataset.flush()
testing_dataset.close()
for img in x_valid:
if img == '':
continue
for capt in tok[img]:
caption = "<start> "+ capt + " <end>"
valid_dataset.write((img+"\t"+caption+"\n").encode())
valid_dataset.flush()
valid_dataset.close()
# Here, we're using ResNet50 Model
from IPython.core.display import display, HTML
model = ResNet50(include_top=False,weights='imagenet',input_shape=(224,224,3),pooling='avg')
model.summary()
# process images to target size
def preprocess(img_path):
    """Load the image at img_path and return it as a (1, 224, 224, 3) array.

    The batch axis is prepended so the array can be fed straight to
    model.predict().
    """
    loaded = image.load_img(img_path, target_size=(224, 224, 3))
    arr = image.img_to_array(loaded)       # (224, 224, 3)
    return np.expand_dims(arr, axis=0)     # (1, 224, 224, 3)
training_data = {}
counter=0
for item in x_training:
if item == "":
continue
if counter >= 3000:
break
counter+=1
if counter%1000==0:
print(counter)
path = img_path + item
img = preprocess(path) #to change the dimensions of the image for using ResNet model
pred = model.predict(img).reshape(2048) # shape of each image is (2048, 0)
training_data[item] = pred
# opening train_enc_img.p file and dumping content of training_data to this file
with open( "train_enc_img.p", "wb" ) as pickle_f: #obj hierarchy is converted into byte stream
pickle.dump(training_data, pickle_f )
# Storing image and its corresponding caption into a dataframe
pd_dataset = pd.read_csv("flickr_8k_train_dataset.txt", delimiter='\t')
dframe = pd_dataset.values
print(dframe.shape)
pd_dataset.head()
# Storing all the captions from dframe into a list
senten = []
for item in range(dframe.shape[0]):
senten.append(dframe[item, 1])
#senten will have 30000 length
# First 5 captions stored in senten
senten[:5]
# Splitting each captions stored in 'senten' and storing them in 'wor' as list of list
wor = [i.split() for i in senten]
# Creating a list of all unique wor
uniq = []
for i in wor:
uniq.extend(i)
uniq = list(set(uniq))
print(len(uniq))
vocabulary_size = len(uniq)
# making 2 lists to index each unique word and vice-versa
w_to_i = {val:index for index, val in enumerate(uniq)}
i_to_w = {index:val for index, val in enumerate(uniq)}
w_to_i['UNK'] = 0
w_to_i['raining'] = 8253
i_to_w[0] = 'UNK'
i_to_w[8253] = 'raining'
vocabulary_size = len(w_to_i.keys())
print(vocabulary_size)
max_len = 0
for i in senten:
i = i.split()
if len(i) > max_len:
max_len = len(i)
print(max_len) #finding longest caption
pad_seq, subsequent_wor = [], []
for item in range(dframe.shape[0]): #30000 items
part_seq = []
next_wor = []
text = dframe[item, 1].split() #diving each caption for every image into words
text = [w_to_i[i] for i in text] #finding index for each word
for i in range(1, len(text)):
part_seq.append(text[:i]) #start, 1st word, ... , last word
next_wor.append(text[i]) #1st word, ... , last word, end
pad_part_seq = sequence.pad_sequences(part_seq, max_len, padding='post')
next_wor_1hot = np.zeros([len(next_wor), vocabulary_size], dtype=np.bool)
for i,next_word in enumerate(next_wor):
next_wor_1hot[i, next_word] = 1
pad_seq.append(pad_part_seq )
subsequent_wor.append(next_wor_1hot)
pad_seq = np.asarray(pad_seq)
subsequent_wor = np.asarray(subsequent_wor)
print(pad_seq.shape)
print(subsequent_wor.shape)
print(pad_seq[0])
for item in range(len(pad_seq[0])):
for y in range(max_len):
print(i_to_w[pad_seq[0][item][y]],)
print("\n")
print(len(pad_seq[0]))
num_imgs = 2000
cap = np.zeros([0, max_len])
next_wor = np.zeros([0, vocabulary_size])
for item in range(num_imgs): #img_to_padded_seqs.shape[0]):
cap = np.concatenate([cap, pad_seq[item]])
next_wor = np.concatenate([next_wor, subsequent_wor[item]])
np.save("cap.npy", cap)
np.save("next_wor.npy", next_wor)
print(cap.shape)
print(next_wor.shape)
with open('train_enc_img.p', 'rb') as f:
enc_img = pickle.load(f, encoding="bytes")
imgs = []
for item in range(dframe.shape[0]): #30000
if dframe[item, 0] in enc_img.keys(): #dframe[0,0], [1,0], ... , [4,0] match with 0th key of enc_img
imgs.append(list(enc_img[dframe[item, 0]]))
imgs = np.asarray(imgs)
print(imgs.shape)
images = []
img_names = []
for item in range(num_imgs): #2000
for y in range(pad_seq[item].shape[0]): #14
images.append(imgs[item]) #1st iteration: 14 times name of image in byte form
img_names.append(dframe[item, 0]) # normal form
images = np.asarray(images) #images contains image_name in byte form
np.save("images.npy", images)
img_names = np.asarray(img_names) #img_names contains image_name normally
np.save("img_names.npy", img_names)
print(images.shape)
print(len(img_names))
cap = np.load("cap.npy")
next_wor = np.load("next_wor.npy")
print(cap.shape)
print(next_wor.shape)
images = np.load("images.npy")
print(images.shape)
imag = np.load("img_names.npy")
print(imag.shape)
embed_size = 128
max_len = 40
img_model = Sequential()
img_model.add(Dense(embed_size, input_shape=(2048,), activation='relu'))
img_model.add(RepeatVector(max_len))
img_model.summary()
lang_model = Sequential()
lang_model.add(Embedding(input_dim=vocabulary_size, output_dim=embed_size, input_length=max_len))
lang_model.add(LSTM(256, return_sequences=True))
lang_model.add(TimeDistributed(Dense(embed_size)))
lang_model.summary()
concat = Concatenate()([img_model.output, lang_model.output])
x = LSTM(128, return_sequences=True)(concat)
x = LSTM(512, return_sequences=False)(x)
x = Dense(vocabulary_size)(x)
out = Activation('softmax')(x)
model = Model(inputs=[img_model.input, lang_model.input], outputs = out)
model.compile(loss='categorical_crossentropy', optimizer='RMSprop', metrics=['accuracy'])
model.summary()
hist = model.fit([images, cap], next_wor, batch_size=512, epochs=210)
for label in ["loss"]:
plt.plot(hist.history[label],label=label)
plt.legend()
plt.xlabel("epochs")
plt.ylabel("loss")
plt.show()
for label in ["accuracy"]:
plt.plot(hist.history[label],label=label)
plt.legend()
plt.xlabel("epochs")
plt.ylabel("accuracy")
plt.show()
model.save_weights("model_weights.h5")
# NOTE(review): this redefines the identical preprocess() declared earlier in
# the script; keeping a single copy would be cleaner.
def preprocess(img_path):
    """Load the image at img_path as a (1, 224, 224, 3) array for ResNet50."""
    im = image.load_img(img_path, target_size=(224,224,3))
    im = image.img_to_array(im) #(224,224,3)
    im = np.expand_dims(im, axis=0) #(1,224,224,3)
    return im
def get_encode(model, img):
    """Return the model's 2048-dim feature vector for the image file *img*.

    *model* is the headless, average-pooled ResNet50; *img* is a file path.
    """
    # Local renamed from `image` so it no longer shadows the keras `image`
    # module imported at the top of the script.
    batch = preprocess(img)
    return model.predict(batch).reshape(2048)
resnet = ResNet50(include_top=False,weights='imagenet',input_shape=(224,224,3),pooling='avg')
img = "/content/drive/My Drive/Flickr_Data/Flickr_Data/Images/3376942201_2c45d99237.jpg"
test_img = get_encode(resnet, img)
def predict_cap(image):
    """Greedily decode a caption for an encoded image.

    *image* is the 2048-dim feature vector produced by get_encode().  Starting
    from "<start>", the partial caption is padded to max_len, the trained
    model predicts the next word, and decoding stops at "<end>" or when the
    caption reaches max_len words.  Uses the module-level globals w_to_i,
    i_to_w, model, max_len and keras' sequence helper.
    """
    start_wor = ["<start>"]
    while True:
        par_cap = [w_to_i[i] for i in start_wor] #par_cap list is made
        par_cap = sequence.pad_sequences([par_cap], maxlen=max_len, padding='post') #convert list to sequence of len = 40
        preds = model.predict([np.array([image]), np.array(par_cap)]) # PREDICTION
        xx = np.argmax(preds[0])
        word_pred = i_to_w[xx] # convert 5972 to DOG
        start_wor.append(word_pred) # [dog] is added in list
        if word_pred == "<end>" or len(start_wor) > max_len:
            break
    # NOTE(review): [1:-1] drops the last generated word even when the loop
    # exits via the length limit (no "<end>" was produced) — confirm intended.
    return ' '.join(start_wor[1:-1])
final_caption = predict_cap(test_img)
from IPython.display import Image,display
z = Image(filename=img)
display(z)
print(final_caption)
img = "/content/drive/My Drive/Flickr_Data/Flickr_Data/Images/1.jpg"
test_img = get_encode(resnet, img)
from IPython.display import Image,display
final_caption = predict_cap(test_img)
z = Image(filename=img)
display(z)
print(final_caption)
img = "/content/drive/My Drive/Flickr_Data/Flickr_Data/Images/car.jpg"
test_img = get_encode(resnet, img)
from IPython.display import Image,display
final_caption = predict_cap(test_img)
z = Image(filename=img)
display(z)
print(final_caption)
img = "/content/drive/My Drive/Flickr_Data/Flickr_Data/Images/bike.jpg"
test_img = get_encode(resnet, img)
from IPython.display import Image,display
final_caption = predict_cap(test_img)
z = Image(filename=img)
display(z)
print(final_caption)
img = "/content/drive/My Drive/Flickr_Data/Flickr_Data/Images/tennis.jpg"
test_img = get_encode(resnet, img)
from IPython.display import Image,display
final_caption = predict_cap(test_img)
z = Image(filename=img)
display(z)
print(final_caption) |
10,646 | 6d34a90a4ae3373e17f0309229597a5145264cb7 | # while loop
# print("hello world") 10 times
########################################################################
# i = 1
# while i <= 10:
# print("hello world")
# i +=1
########### Example:1 For loop same program #######################################
########### Example:1 For loop versions of the while-loop program ##########
# The loop variable is reassigned by `for` itself on every iteration, so the
# manual `i += 1` statements in the original were no-ops and were removed.
for i in range(10):     # range(10) -> 0..9, i.e. 10 iterations
    print("hello world")

for i in range(1, 11):  # range(1, 11) -> 1..10
    print("how r u")

####### Example:2 Sum from 1 to 10 ##############################
num = 10
sum_num = 0
for i in range(num + 1):  # 0..10; adding 0 first is harmless
    sum_num += i
print(sum_num)

############ Example 3: count of each character in a user-supplied name ####
# e.g. for "Amol Chavan" print every distinct character with its count.
name = input("Enter user's name: ")
len_name = len(name)
print(len_name)

### collect the unique characters, preserving first-seen order ###
uq_name = ''
for ch in name:
    if ch not in uq_name:
        uq_name += ch

#### print the occurrence count of each unique character ##
for ch in uq_name:
    print(f"{ch} : {name.count(ch)}")
|
10,647 | 5d85454e89aed0c096b8e007e645fcbb59b95b12 | import os
import sys
from google.cloud import storage
from setup.init_client import create_client
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = './service_account.json'
def create_bucket(bucket_name):
    """create new bucket in specific location with storage class"""
    # initialize client & get bucket
    storage_client, bucket, _ = create_client(bucket_name)
    # set storage class, by default STANDARD
    bucket.storage_class = "COLDLINE"
    # create new bucket
    new_bucket = storage_client.create_bucket(bucket, location='us-central1')
    # print details of the bucket that was actually created (the original
    # printed the pre-creation `bucket` object and ignored `new_bucket`)
    print(vars(new_bucket))
    return None
def delete_bucket(bucket_name):
    """Delete the named bucket; it must already be empty."""
    _, bucket, _ = create_client(bucket_name)
    bucket.delete()
    print("bucket {} deleted".format(bucket_name))
def get_specific_bucket(bucket_name):
    """Fetch and return a single bucket handle by name."""
    # create_client() returns (client, bucket, blob); only the bucket is needed.
    return create_client(bucket_name)[1]
def get_list_of_buckets():
    """get list of all buckets"""
    # initialize client
    storage_client = storage.Client()
    # collect just the bucket names
    return [bucket.name for bucket in storage_client.list_buckets()]
def upload_to_bucket(bucket_name, path_to_source_file, upload_file_name):
    """upload file to bucket"""
    # initialize client & get blob
    _, _, blob = create_client(bucket_name, upload_file_name)
    # Upload; any exception simply propagates to the caller.  The original's
    # `except ...: raise err` was equivalent to not catching at all, and the
    # sys.exit(1) placed after `raise` was unreachable dead code.
    blob.upload_from_filename(path_to_source_file)
    print(f"upload file '{path_to_source_file}' succeed")
    return None
def download_specific_blob(bucket_name, path_to_storage_file_name, download_file_name):
    """download specific blob from bucket"""
    # initialize client & get blob
    _, _, blob = create_client(bucket_name, path_to_storage_file_name)
    # Download; exceptions propagate.  The original's sys.exit(1) after
    # `raise err` was unreachable dead code, so the try/except added nothing.
    blob.download_to_filename(download_file_name)
    print(f"download blob '{path_to_storage_file_name}' succeed")
    return None
def get_list_of_blobs(bucket_name, prefix=None, delimiter=None):
    """get lists of all the blobs in the bucket"""
    client = storage.Client()
    blob_iter = client.list_blobs(bucket_name, prefix=prefix, delimiter=delimiter)
    # Print every blob name; with a delimiter, list_blobs also exposes the
    # "directory" prefixes after iteration.
    for item in blob_iter:
        print(item.name)
    if delimiter:
        print("Prefixes:")
        for pfx in blob_iter.prefixes:
            print(pfx)
    return None
def copy_blob(bucket_name, blob_name, destination_bucket_name, destination_blob_name):
    """copies an blob from one bucket to another with a new name"""
    # client, source bucket and source blob come from the shared helper
    client, src_bucket, src_blob = create_client(bucket_name, blob_name)
    dest_bucket = client.bucket(destination_bucket_name)
    copied = src_bucket.copy_blob(src_blob, dest_bucket, destination_blob_name)
    print(
        "blob {} in bucket {} copied to blob {} in bucket {}.".format(
            src_blob.name,
            src_bucket.name,
            copied.name,
            dest_bucket.name,
        )
    )
def delete_blob(bucket_name, blob_name):
    """delete an blob from the bucket"""
    _, _, target = create_client(bucket_name, blob_name)
    target.delete()
    print("blob {} deleted".format(blob_name))
bucket_name = 'agi_dummy_bucket'
src_file_name1 = './src/ready_to_upload_txt.txt'
src_file_name2 = './src/ready_to_upload_txt2.txt'
src_file_name3 = './src/ready_to_upload_img.jpg'
src_file_name4 = './src/ready_to_upload_img2.jpg'
# create_bucket(bucket_name)
# delete_bucket(bucket_name)
# print(get_specific_bucket(bucket_name))
# print(get_list_of_buckets())
# upload_to_bucket(bucket_name, src_file_name1, 'src/download_txt.txt')
# upload_to_bucket(bucket_name, src_file_name2, 'src/download_txt2.txt')
# upload_to_bucket(bucket_name, src_file_name2, 'src/src2/download_txt2.txt')
# upload_to_bucket(bucket_name, src_file_name3, 'download_img.jpg')
# upload_to_bucket(bucket_name, src_file_name4, 'download_img2.jpg')
# download_specific_blob(bucket_name, 'download_img2.jpg', './src/download_img.jpg')
# get_list_of_blobs(bucket_name)
# get_list_of_blobs(bucket_name, 'src/', '/')
# copy_blob(bucket_name, 'download_img.jpg', bucket_name, 'src/src2/copied_img.jpg')
# delete_blob(bucket_name, 'src/src2/copied_img.jpg') |
10,648 | bf0b968d16da6dd9db56d3381d2a07172a2f0238 | #comparing isoforms between single tissue analyses
#this will be easier that before as all of the single tissue analyses have the combined sexes isoform ID added to them, so I can just compare IDs rather than compare sequences, locations, etc.
#will use exon counts file for input for single tissues to remove any converted isoform ids that don't match up correctly (Gene_Isoform_Counts.py does this)
#to run script: python3 Compare_single_tissues.py <single tissue exon counts file female liver> <single tissue exon file male liver> <single tissue exon file female brain> <single tissue exon file male brain> <single tissue exon file female pronephros> <single tissue exon file male pronephros> <single tissue exon file ovary> <single tissue classification file testis>
#have 8 single tissue files for this analysis
#Author: Alice Naftaly, March 2020
import sys
#read in exon counts files for each single tissue
#return lists of isoform IDs
def _read_isoform_ids(arg_position):
    """Return the isoform IDs from the exon-counts file at sys.argv[arg_position].

    An ID is the first whitespace-delimited column of every line that starts
    with "PB"; all other lines are skipped.
    """
    isoforms = []
    with open(sys.argv[arg_position], 'r') as info:
        for line in info:
            if line.startswith("PB"):
                isoforms.append(line.split()[0])
    return isoforms


# Read in exon counts files for each single tissue; each returns a list of
# isoform IDs.  The eight readers were previously copy-pasted bodies that
# differed only in which command-line argument they opened, so they now all
# delegate to _read_isoform_ids.
def read_female_liver_file():
    """Isoform IDs of the female liver file (sys.argv[1])."""
    return _read_isoform_ids(1)


def read_male_liver_file():
    """Isoform IDs of the male liver file (sys.argv[2])."""
    return _read_isoform_ids(2)


def read_female_brain_file():
    """Isoform IDs of the female brain file (sys.argv[3])."""
    return _read_isoform_ids(3)


def read_male_brain_file():
    """Isoform IDs of the male brain file (sys.argv[4])."""
    return _read_isoform_ids(4)


def read_female_pronephros_file():
    """Isoform IDs of the female pronephros file (sys.argv[5])."""
    return _read_isoform_ids(5)


def read_male_pronephros_file():
    """Isoform IDs of the male pronephros file (sys.argv[6])."""
    return _read_isoform_ids(6)


def read_ovary_file():
    """Isoform IDs of the ovary file (sys.argv[7])."""
    return _read_isoform_ids(7)


def read_testis_file():
    """Isoform IDs of the testis file (sys.argv[8])."""
    return _read_isoform_ids(8)
#summary read out of total number of isoforms per tissue:
def total_isoform_counts():
    """Print a count of isoform IDs for each single-tissue input file."""
    # NOTE(review): only the female-liver count is deduplicated via set();
    # every other tissue reports the raw list length — confirm this is
    # intentional (behavior preserved exactly here).
    counts = [
        ("Female Liver", len(set(read_female_liver_file()))),
        ("Male Liver", len(read_male_liver_file())),
        ("Female Brain", len(read_female_brain_file())),
        ("Male Brain", len(read_male_brain_file())),
        ("Female Pronephros", len(read_female_pronephros_file())),
        ("Male Pronephros", len(read_male_pronephros_file())),
        ("Ovary", len(read_ovary_file())),
        ("Testis", len(read_testis_file())),
    ]
    for tissue, count in counts:
        print("Total number of Isoforms in " + tissue)
        print(count)
#need to compare isoforms
def compare():
    """Print how many isoform IDs are shared across tissue/sex groupings.

    Each input file is read once into a set, then the intersections are
    reported in a fixed order (same headers and order as before).
    """
    fl = set(read_female_liver_file())
    ml = set(read_male_liver_file())
    fb = set(read_female_brain_file())
    mb = set(read_male_brain_file())
    fp = set(read_female_pronephros_file())
    mp = set(read_male_pronephros_file())
    ov = set(read_ovary_file())
    te = set(read_testis_file())
    comparisons = [
        ("set intersection for all tissues/sexes",
         fl.intersection(ml, fb, mb, fp, mp, ov, te)),
        ("set intersection for all somatic tissues",
         fl.intersection(ml, fb, mb, fp, mp)),
        ("set intersection for all female tissues",
         fl.intersection(fb, fp, ov)),
        ("set intersection for all male tissues",
         ml.intersection(mb, mp, te)),
        ("set intersections for liver samples", fl.intersection(ml)),
        ("set intersections for brain samples", fb.intersection(mb)),
        ("set intersections for pronephros samples", fp.intersection(mp)),
        ("set intersections for gonads samples", ov.intersection(te)),
        ("set intersection for liver and brain",
         fl.intersection(ml, fb, mb)),
        ("set intersection for liver and pronephros",
         fl.intersection(ml, fp, mp)),
        ("set intersection for liver and gonads",
         fl.intersection(ml, ov, te)),
        ("set intersection for brain and Pronephros",
         fb.intersection(mb, fp, mp)),
        ("set intersection for brain and gonads",
         fb.intersection(mb, ov, te)),
        ("set intersection for pronephros and gonads",
         fp.intersection(mp, ov, te)),
    ]
    for header, shared in comparisons:
        print(header)
        print(len(shared))
#call all functions
def call():
    """Run the per-tissue counts, then the cross-tissue comparisons."""
    # Both functions print their results and return None, so the original's
    # assignments of their return values were dropped.
    total_isoform_counts()
    compare()

call()
|
10,649 | 5856239347d49d4b6d29de05118d6bd5637d040a | # Generated by Django 3.0.9 on 2020-08-07 12:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter Contact.contact to a TextField."""

    dependencies = [
        ('cases', '0003_auto_20200807_1021'),
    ]

    operations = [
        migrations.AlterField(
            model_name='contact',
            name='contact',
            # NOTE(review): max_length on a TextField is not enforced at the
            # database level — Django only uses it for form-widget validation.
            field=models.TextField(max_length=512),
        ),
    ]
|
10,650 | 236e9fa45550b8339bcbe7b10fee8b222abd8f62 | import pytest
from ipxeboot import main
from webtest import TestApp as WebTestApp
class Test_Root_IPXE_Agent:
    """Requests to '/' with an iPXE user agent must receive an iPXE script."""

    def setup(self):
        # pytest runs setup() before each test: build a fresh WSGI test app.
        app = main()
        self.testapp = WebTestApp(app)

    def ipxeget(self, *args, **kwargs):
        # Helper: GET with the User-Agent header an iPXE client sends.
        return self.testapp.get(
            *args,
            headers={'User-Agent': 'iPXE/1.0.0'},
            **kwargs,
        )

    def test_get(self):
        # The root URL must answer 200 for an iPXE client.
        self.ipxeget('/', status=200)

    def test_ipxe_script_response(self):
        # The body must start with the iPXE shebang so the client executes it.
        resp = self.ipxeget('/', status=200)
        assert resp.body.startswith(b'#!ipxe')
class Test_Root_Non_IPXE_Agent:
    """Non-iPXE user agents (including near-miss spellings) get a non-script body."""

    def setup(self):
        # Fresh WSGI test app per test, mirroring the iPXE-agent test class.
        app = main()
        self.testapp = WebTestApp(app)

    @pytest.mark.parametrize("agent", (
        'Mozilla/5.0',
        'iPXE',          # bare product token without a version suffix
        'ipxe/version',  # lowercase spelling must not match
    ))
    def test_not_ipxe_script_response(self, agent):
        resp = self.testapp.get('/', status=200, headers={'User-Agent': agent})
        assert not resp.body.startswith(b'#!ipxe')
|
10,651 | b649ef8e1c0685bb890cd47462cca13790127035 | # -*- coding: utf-8 -*-
"""
Created on Wed Jun 23 11:09:32 2021
@author: Administrator
"""
import utils
import numpy as np
import pandas as pd
import ot
import partial_gw as pgw
import matplotlib.pyplot as plt
n_unl = 800
n_pos = 400
nb_reps = 10
nb_dummies = 10
"""
prior = 0.518
perfs_mushrooms, perfs_list_mushrooms = pgw.compute_perf_emd('mushrooms', 'mushrooms', n_unl, n_pos, prior, nb_reps, nb_dummies)
avg_mush_emd_groups = perfs_mushrooms['emd_groups']
prior = 0.786
perfs_shuttle, perfs_list_shuttle = pgw.compute_perf_emd('shuttle', 'shuttle', n_unl, n_pos, prior, nb_reps, nb_dummies)
avg_shut_emd_groups = perfs_shuttle['emd_groups']
prior = 0.898
perfs_pageblocks, perfs_list_pageblocks = pgw.compute_perf_emd('pageblocks', 'pageblocks', n_unl, n_pos, prior, nb_reps, nb_dummies)
avg_page_emd_groups = perfs_pageblocks['emd_groups']
prior = 0.167
perfs_usps, perfs_list_usps = pgw.compute_perf_emd('usps', 'usps', n_unl, n_pos, prior, nb_reps, nb_dummies)
avg_usps_emd_groups = perfs_usps['emd_groups']
prior = 0.394
perfs_spambase, perfs_list_spambase = pgw.compute_perf_emd('spambase', 'spambase', n_unl, n_pos, prior, nb_reps, nb_dummies)
avg_spambase_emd_groups = perfs_spambase['emd_groups']
prior = 0.5
perfs_house, perfs_list_house = pgw.compute_perf_emd('house', 'house', n_unl, n_pos, prior, nb_reps, nb_dummies)
avg_house_emd_groups = perfs_house['emd_groups']
"""
# Active experiment: partial-GW EMD performance on MNIST with a balanced
# (0.5) positive-class prior; the other datasets above are commented out.
prior = 0.5
perfs_mnist, perfs_list_mnist = pgw.compute_perf_emd('mnist', 'mnist', n_unl, n_pos, prior, nb_reps, nb_dummies)
avg_mnist_emd_groups = perfs_mnist['emd_groups']
10,652 | 0aa6617efd3c64e894bb5f9d4812640a8191b5cb | from ddblog.celery import app
from tools.sms import YuoknTongXin
@app.task
def send_sms(phone, code):
    """Celery task: send the verification *code* to *phone* via YuoknTongXin.

    Returns the provider response so it is stored as the task result (the
    original only printed it, despite its "return info" step comment).
    """
    # SECURITY(review): credentials are hardcoded in source; move them to
    # Django settings / environment variables.
    aid = '8aaf0708773733a80177433d7c750705'
    atoken = 'e0718509c4f942a29bfb9be971afedea'
    appid = '8aaf0708773733a80177433d7d3f070b'
    tid = '1'
    # 1 create the cloud-communication client
    x = YuoknTongXin(aid, atoken, appid, tid)
    # 2 send the SMS
    res = x.run(phone, code)
    # 3 log and return the provider response
    print(res)
    return res
10,653 | 75b6e93d055471cc6c2edb42021b1c7e0f279618 | ### Hacked together by Johnson Thomas
### Annotation UI created in Streamlit
### Can be used for binary or multilabel annotation
### Importing libraries
from streamlit.hashing import _CodeHasher
from streamlit.report_thread import get_report_ctx
from streamlit.server.server import Server
import streamlit as st
from PIL import Image
import os
import pandas as pd
import re
### Creating a 3 column layout in streamlit
col1, col2, col3= st.beta_columns([3, 1,1])
### Folder where the image files are kept. This path is in windows format.
### If you are runnin it in Linux, chnage the path appropriately.
#source_dir = r'C:\Users\JOHNY\CV_recepies\cv\images'
source_dir = None
csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv'
proj_file = r'C:\Users\JOHNY\CV_recepies\St_annot.csv'
### Function to create a python list cotaning paths to image files in a specific folder
### This function is decorated with @st.cache to avoid rerunning
extensions = ['.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG']
@st.cache(allow_output_mutation=True)
def get_file_list(root_dir):
    """Recursively collect paths of image files under root_dir, sorted.

    A file is kept when its name ends with one of the module-level
    `extensions`; results are cached by Streamlit across reruns.
    """
    file_list = []
    for root, _directories, filenames in os.walk(root_dir):
        for filename in filenames:
            # endswith() tests the real suffix; the previous substring test
            # (`ext in filename`) also matched names like "photo.jpg.bak".
            # The unused `counter` bookkeeping was removed.
            if filename.endswith(tuple(extensions)):
                file_list.append(os.path.join(root, filename))
    return sorted(file_list)
### Creating the side bar
add_proj_text = st.sidebar.write('Start new project')
add_textbox = st.sidebar.text_input('Project name')
add_foldbox = st.sidebar.text_input('Folder name' )
add_newproj_btn = st.sidebar.button('Create new project')
st.sidebar.write(' ')
add_proj_load = st.sidebar.write('Load project')
#proj_list =new_installation(proj_file)
add_csvbox = st.sidebar.selectbox('Pick your project',"exp1")
add_loadproj_btn = st.sidebar.button('Load project')
### store file names to a list and find the number of files in the list
#file_to_anot = get_file_list(source_dir)
#file_to_anot = get_file_list(source_dir)
#max_ind= len(file_to_anot) -1
### Creating a list to store the annotations
### @st.cache(allow_output_mutation=True) - is used to preserve the current state and to allow modification of the list
@st.cache(allow_output_mutation=True)
def init_anot(file_to_anot):
    """Return six independent None-filled lists, one slot per image:
    overall label, composition, echogenicity, shape, margin, and foci."""
    n = len(file_to_anot)
    anot, comp, echo, shape, marg, foci = ([None] * n for _ in range(6))
    return anot, comp, echo, shape, marg, foci
### Creating a list to store just the file names
@st.cache(allow_output_mutation=True)
def init_base_f(file_to_anot):
    """Return a None-filled list with one slot per image file name."""
    return [None] * len(file_to_anot)
#anotf,comp_list,echo_list,shape_list,marg_list,foci_list = init_anot(file_to_anot)
#base_f = init_base_f(file_to_anot)
### Given an index this function converts path in the index to windows readable path
### then load the imaeg and returns the loaded image
def get_image(ind_no,file_to_anot):
    """Load and return the PIL image at index ind_no of file_to_anot."""
    file_name = file_to_anot[ind_no]
    # NOTE(review): this doubles every backslash in the path before opening;
    # Image.open accepts the original Windows path, so the re.sub is likely
    # unnecessary — confirm before removing.
    im_file =re.sub("\\\\","\\\\\\\\", file_name)
    loaded_image = Image.open(im_file)
    return loaded_image
### Get just the image file name from the complete path string
def extract_basename(path):
    """Return the last path component (file or directory name) of *path*.

    Handles both '/' and '\\' separators and tolerates a single trailing
    separator; returns None when no component is found (e.g. empty string).
    """
    match = re.search(r'[^\\/]+(?=[\\/]?$)', path)
    return match.group(0) if match else None
def get_index(dta_ar, out_string):
    """Return the index of the first occurrence of *out_string* in *dta_ar*.

    Stops at the first match instead of scanning the whole sequence, and
    raises ValueError when the value is absent (the original raised an
    accidental UnboundLocalError in that case).
    """
    for idx, value in enumerate(dta_ar):
        if value == out_string:
            return idx
    raise ValueError(f"{out_string!r} not found")
def main():
    """Streamlit entry point: renders the 3-column annotation UI and drives
    per-image label state through the session-state helper."""
    # NOTE(review): _get_state is not defined in this chunk; it is the common
    # Streamlit session-state helper built on _CodeHasher/Server — confirm it
    # exists elsewhere in this file.
    state = _get_state()

    def set_index_in(in_num):
        # Persist the currently selected radio options (stored as indices
        # into the option lists) for image number in_num.
        state.comp_list[in_num] = get_index(comp_options, composition)
        state.echo_list[in_num] = get_index(echo_options, echo)
        state.shape_list[in_num]= get_index(shape_options, shape)
        state.marg_list[in_num] = get_index(margin_options, margin)
        state.foci_list[in_num]= get_index(foci_options, echogenic_foci)

    def update_choices(ind_num):
        ''' This function collects the values of labels/tags for the next or previous image,
        then displays it in the user interface.
        This function is called each time Next or Previous button is pressed.
        '''
        # Restore saved selections (or default to index 0) for image ind_num.
        if state.comp_list[ind_num] != None:
            state.comp = state.comp_list[ind_num]
        else:
            state.comp = 0
        if state.echo_list[ind_num] != None:
            state.echo = state.echo_list[ind_num]
        else:
            state.echo = 0
        if state.shape_list[ind_num] !=None:
            state.shape = state.shape_list[ind_num]
        else:
            state.shape = 0
        if state.marg_list[ind_num] != None:
            state.margin = state.marg_list[ind_num]
        else:
            state.margin = 0
        if state.foci_list[ind_num] != None:
            state.foci = state.foci_list[ind_num]
        else:
            state.foci = 0
        #print("This is from update", state.comp, state.echo, state.shape, state.margin, state.foci)

    # Initializing a state variable input
    if state.input == None:
        state.input = 0
        state.last_anot =0
        state.comp = 0
        state.echo = 0
        state.shape = 0
        state.margin = 0
        state.foci = 0

    # Creating the UI
    comp_options = ['cystic','spongiform', 'mixed cystic','solid']
    echo_options = ['anechoic','hyperechoic','isoechoic','hypoechoic','very hypoechoic']
    shape_options =['wider than tall','taller than wide']
    margin_options = ['smooth','ill defined','lobulated', 'irregular', 'ete']
    foci_options = ['none','comet tail artifacts','macrocalcifications','peripheral calcifications','punctate echogenic foci']
    with col2:
        prev_button = st.button('Previous')
        if state.active_project == True:
            composition = st.radio('Composition',comp_options, state.comp)
            echo = st.radio('Echogenicity',echo_options, state.echo)
            shape = st.radio('Shape',shape_options, state.shape)
            state.started = True
    with col3:
        next_button = st.button('Next')
        if state.active_project == True:
            margin = st.radio('Margin',margin_options, state.margin)
            echogenic_foci = st.radio('Echogenic Foci', foci_options, state.foci)
    with col1:
        #if state.input ==0:
        if next_button and state.active_project == True:
            if state.input == state.max_ind:
                e =RuntimeError('Reached end of images in the folder')
                st.exception(e)
            else:
                # Save the current image's labels, advance, restore the next
                # image's saved labels (if any).
                set_index_in(state.input)
                #update_choices(state.input,comp_list)
                state.input = state.input + 1
                update_choices(state.input)
                if state.input > state.last_anot:
                    state.last_anot = state.input
        if prev_button and state.active_project == True:
            if state.input == 0:
                e =RuntimeError('Reached the first image in the folder')
                st.exception(e)
            else:
                set_index_in(state.input)
                #update_choices(state.input,state.comp_list)
                state.input = state.input -1
                update_choices(state.input)
        if add_newproj_btn and add_foldbox != "":
            # Starting a new project: index the folder and reset all state.
            state.file_to_anot = get_file_list(add_foldbox)
            state.max_ind= len(state.file_to_anot) -1
            ### initializing variables
            state.active_project = True
            state.input = 0
            state.last_anot =0
            state.comp = 0
            state.echo = 0
            state.shape = 0
            state.margin = 0
            state.foci = 0
            state.started = False
            state.anot_list,state.comp_list,state.echo_list,state.shape_list,state.marg_list,state.foci_list = init_anot(state.file_to_anot)
            state.base_f = init_base_f(state.file_to_anot)
        if add_foldbox != "" and state.started == True:
            # Show the current image and record its label and description.
            st.image(get_image(state.input,state.file_to_anot),use_column_width=True)
            desc_nod, lbl, fln= gen_desc_save(composition, echo, shape, margin, echogenic_foci,state.input,state.file_to_anot )
            #print("anot list",state.anot_list)
            state.anot_list[state.input] = lbl
            state.base_f[state.input] = fln
            col1.write( desc_nod)

    ### Save button ########################################################
    save_button = st.button('Save')
    if save_button:
        set_index_in(state.input)
        df = pd.DataFrame(list(zip(state.base_f, state.anot_list)), columns =["IM_FILENAME", "LABELS"])
        cwd = os.getcwd()
        csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv'
        #print("printing curr file name")
        #print(csv_to_log)
        df.to_csv(csv_to_log)
        #proj = pd.read_csv(proj_file)
        #ind_pr= proj.index[proj['Project_name'] == curr_proj_name].tolist()
        # NOTE(review): ind_pr is never assigned (its producer above is
        # commented out), so this print raises NameError when Save is pressed.
        print(ind_pr)

    state.sync()
def gen_desc_save(composition, echo, shape, margin, echogenic_foci, ind_no, file_to_anot):
    """Build the report text, CSV label and base filename for one annotation.

    Args:
        composition, echo, shape, margin, echogenic_foci: currently selected
            radio-button values for the nodule being annotated.
        ind_no: index of the current image in ``file_to_anot``.
        file_to_anot: list of image file paths for the active project.

    Returns:
        (description, label_to_log, file_only): human-readable description,
        comma-joined label string for the CSV log, and the image's base name.
    """
    comp = composition.capitalize()
    if echogenic_foci == "none":
        # BUGFIX: corrected typo "artiacts" -> "artifacts" in the report text.
        echo_foc = "no calcification or comet tail artifacts"
    else:
        echo_foc = echogenic_foci
    desc = f"{comp} {echo} {shape} thyroid nodule with {margin} margin and {echo_foc}."
    file_only = extract_basename(file_to_anot[ind_no])
    label_to_log = ",".join([composition, echo, shape, margin, echogenic_foci])
    return desc, label_to_log, file_only
class _SessionState:
    """Per-browser-session key/value store for Streamlit.

    Attribute access and item access both read/write the same internal
    ``data`` dict; unknown keys return None instead of raising. ``sync``
    triggers a rerun when the state hash changed since the last run.
    """
    def __init__(self, session, hash_funcs):
        """Initialize SessionState instance."""
        # State is written via self.__dict__ directly so the __setattr__
        # override below does not recurse during construction.
        self.__dict__["_state"] = {
            "data": {},
            "hash": None,
            "hasher": _CodeHasher(hash_funcs),
            "is_rerun": False,
            "session": session,
        }
    def __call__(self, **kwargs):
        """Initialize state data once."""
        # Only fills keys that are not present yet; existing values win.
        for item, value in kwargs.items():
            if item not in self._state["data"]:
                self._state["data"][item] = value
    def __getitem__(self, item):
        """Return a saved state value, None if item is undefined."""
        return self._state["data"].get(item, None)
    def __getattr__(self, item):
        """Return a saved state value, None if item is undefined."""
        # Only called for names not found normally, so _state itself
        # (stored in __dict__) never reaches this path.
        return self._state["data"].get(item, None)
    def __setitem__(self, item, value):
        """Set state value."""
        self._state["data"][item] = value
    def __setattr__(self, item, value):
        """Set state value."""
        # Every attribute assignment lands in the data dict, not __dict__.
        self._state["data"][item] = value
    def clear(self):
        """Clear session state and request a rerun."""
        self._state["data"].clear()
        self._state["session"].request_rerun()
    def sync(self):
        """Rerun the app with all state values up to date from the beginning to fix rollbacks."""
        # Ensure to rerun only once to avoid infinite loops
        # caused by a constantly changing state value at each run.
        #
        # Example: state.value += 1
        if self._state["is_rerun"]:
            self._state["is_rerun"] = False
        elif self._state["hash"] is not None:
            if self._state["hash"] != self._state["hasher"].to_bytes(self._state["data"], None):
                self._state["is_rerun"] = True
                self._state["session"].request_rerun()
        self._state["hash"] = self._state["hasher"].to_bytes(self._state["data"], None)
def _get_session():
    """Return the Streamlit Session object for the current report context."""
    ctx = get_report_ctx()
    info = Server.get_current()._get_session_info(ctx.session_id)
    if info is None:
        raise RuntimeError("Couldn't get your Streamlit Session object.")
    return info.session
def _get_state(hash_funcs=None):
    """Get (lazily creating on first call) the _SessionState for this session."""
    session = _get_session()
    state = getattr(session, "_custom_session_state", None)
    if state is None:
        state = _SessionState(session, hash_funcs)
        session._custom_session_state = state
    return state
if __name__ == "__main__":
    # Script entry point: launch the Streamlit annotation UI.
    main()
    #main(file_to_anot, anotf, base_f,comp_list,echo_list,shape_list,marg_list,foci_list)
|
10,654 | 65587455c046f7db6f0d61b7cd48290829ea48b8 | # -*- coding: utf-8 -*-
"""
# https://mp.weixin.qq.com/s/wlqvAvKvqPCclZm8AvkUSw
# hppts://github.com/miguelgrinberg/merry
pip3 install merry
"""
from merry import Merry
import requests
from requests import ConnectTimeout
# version 1.0
def process_v1_0(num1, num2, file):
    """Divide num1 by num2 and write the result to *file* — no error handling."""
    quotient = num1 / num2
    with open(file, "w", encoding="utf-8") as out:
        out.write(str(quotient))
# version 1.1
def process_v1_1(num1, num2, file):
    """Divide num1 by num2 and write the result to *file*.

    Division-by-zero, missing target directories and any other failure are
    reported on stdout instead of being raised to the caller.
    """
    try:
        quotient = num1 / num2
        with open(file, "w", encoding="utf-8") as out:
            out.write(str(quotient))
    except ZeroDivisionError:
        print(f"{num2} can not be zero")
    except FileNotFoundError:
        print(f"file {file} not found")
    except Exception as e:
        print(f"exception, {e.args}")
# version 2.0
# Module-level Merry instance: routes exceptions raised inside the @merry._try
# function to the @merry._except handlers below, keeping the business logic
# free of try/except blocks.
merry = Merry()
merry.logger.disabled = True
@merry._try
def process_v2_0(num1, num2, file):
    # Same divide-and-write as v1.x; errors are dispatched by merry.
    result = num1 / num2
    with open(file, "w", encoding = "utf-8") as f:
        f.write(str(result))
@merry._except(ZeroDivisionError)
def process_zero_division_error(e):
    # Invoked by merry when process_v2_0 raises ZeroDivisionError.
    print("zero_division_error", e)
@merry._except(FileNotFoundError)
def process_file_not_found_error(e):
    print("file_not_found_error", e)
@merry._except(Exception)
def process_exception(e):
    # Fallback handler — presumably merry prefers the most specific
    # registered handler; confirm against the merry documentation.
    print("exception", type(e), e)
# version 3.0
# NOTE(review): this rebinds the module-level `merry` created in the
# version 2.0 section above, so the v2 handlers are registered on a
# different (now orphaned) instance from this point on.
merry = Merry()
merry.logger.disabled = True
catch = merry._try  # alias so entry points read as @catch
class BaseClass(object):
    """Shared merry exception handlers; subclasses decorate their entry
    points with @catch to route errors into these static handlers."""
    @staticmethod
    @merry._except(ZeroDivisionError)
    def process_zero_division_error(e):
        print("zero_division_error", e)
    @staticmethod
    @merry._except(FileNotFoundError)
    def process_file_not_found_error(e):
        print("file_not_found_error", e)
    @staticmethod
    @merry._except(Exception)
    def process_exception(e):
        # Fallback for anything the specific handlers do not cover.
        print("exception", type(e), e)
    @staticmethod
    @merry._except(ConnectTimeout)
    def process_connect_timeout(e):
        # For requests-based subclasses (see Fetcher below).
        print("connect_timeout", e)
class Caculator(BaseClass):
    """Divide-and-write example using merry via @catch.

    NOTE(review): class name keeps its original spelling ("Caculator")
    because the __main__ block below instantiates it by that name.
    """
    @catch
    def process_v3_0(self, num1, num2, file):
        # Exceptions propagate to the BaseClass handlers through @catch.
        result = num1 / num2
        with open(file, "w", encoding = "utf-8") as f:
            f.write(str(result))
class Fetcher(BaseClass):
    """HTTP GET example; connection timeouts are handled by BaseClass."""
    @catch
    def process(self, url):
        # 1-second timeout; ConnectTimeout is routed to process_connect_timeout.
        response = requests.get(url, timeout = 1)
        if response.status_code == 200:
            print(response.text)
if __name__ == "__main__":
    # Demo runs for every version; error cases print rather than raise.
    # process_v1_0(1, 2, "result/result.txt")
    # process_v1_0(1, 0, "result.txt")
    # process_v1_0(1, [2], "result.txt")
    process_v1_1(1, 2, "result/result.txt")
    process_v1_1(1, 0, "result.txt")
    process_v1_1(1, [2], "result.txt")
    process_v2_0(1, 2, "result/result.txt")
    process_v2_0(1, 0, "result.txt")
    process_v2_0(1, 2, "result.txt")
    process_v2_0(1, [1], "result.txt")
    c = Caculator()
    c.process_v3_0(1, 0, "result.txt")
    f = Fetcher()
    f.process("http://notfound.com")
|
10,655 | cfbc51929b383ff2de90e28c23a3e996ba59aac2 | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import pickle
import os
import sys
import unittest
import networkx as nx
import numpy as np
from topologic.embedding import laplacian_embedding
class TestLaplacianSpectralEmbedding(unittest.TestCase):
    """Unit tests for topologic.embedding.laplacian_embedding.

    Every call fixes svd_seed so the randomized SVD is deterministic and
    the hard-coded expected matrices below remain valid.
    """
    def test_laplacian_embedding(self):
        # Weighted undirected path a-b-c; elbow_cut=1 keeps one dimension.
        graph = nx.Graph([('a', 'b', {'weight': 1.0}), ('b', 'c', {'weight': 2.0})])
        result = laplacian_embedding(
            graph,
            elbow_cut=1,
            svd_seed=1234
        )
        self.assertIsNotNone(result)
        (matrix, labels) = result
        self.assertIsInstance(matrix, np.ndarray)
        self.assertIsInstance(labels, list)
        self.assertEqual(2, matrix.ndim)
        expected_matrix = np.array([[0.408248],
                                    [0.707107],
                                    [0.577350]])
        expected_label = ['a', 'b', 'c']
        np.testing.assert_allclose(expected_matrix, matrix, rtol=1e-5)
        self.assertListEqual(expected_label, labels)
    def test_laplacian_embedding_digraph(self):
        # Same graph but directed; the embedding has two columns here.
        graph = nx.DiGraph([('a', 'b', {'weight': 1.0}), ('b', 'c', {'weight': 2.0})])
        result = laplacian_embedding(
            graph,
            elbow_cut=1,
            svd_seed=1234
        )
        self.assertIsNotNone(result)
        (matrix, labels) = result
        self.assertIsInstance(matrix, np.ndarray)
        self.assertIsInstance(labels, list)
        self.assertEqual(2, matrix.ndim)
        expected_matrix = np.array([[0.527046, 0.235702],
                                    [0.781736, 0.62361],
                                    [0.333333, 0.745356]])
        expected_label = ['a', 'b', 'c']
        np.testing.assert_allclose(expected_matrix, matrix, rtol=1e-5)
        self.assertListEqual(expected_label, labels)
    @unittest.skipIf(
        sys.platform.startswith('darwin') and os.getenv("SKIP_TEST_35", "false") == "true",
        "Test not supported on MacOS Github Actions, see: https://github.com/microsoft/topologic/issues/35"
    )
    def test_laplacian_embedding_elbowcut_none(self):
        # With elbow_cut=None no dimensions are dropped from the embedding.
        graph = nx.Graph([('a', 'b', {'weight': 2.0}), ('b', 'c', {'weight': 2.0})])
        result = laplacian_embedding(
            graph,
            elbow_cut=None,
            svd_seed=1234
        )
        self.assertIsNotNone(result)
        (matrix, labels) = result
        self.assertIsInstance(matrix, np.ndarray)
        self.assertIsInstance(labels, list)
        self.assertEqual(2, matrix.ndim)
        expected_matrix = np.array([[5.000000e-01, 4.714045e-01],
                                    [7.071068e-01, -3.333333e-01],
                                    [5.000000e-01, -1.425006e-16]])
        expected_label = ['a', 'b', 'c']
        np.testing.assert_allclose(expected_matrix, matrix, rtol=1e-5)
        self.assertListEqual(expected_label, labels)
    def test_laplacian_embedding_gpickle(self):
        # Embedding results must survive a pickle round trip unchanged.
        graph = nx.Graph([('a', 'b', {'weight': 1.0}), ('b', 'c', {'weight': 2.0})])
        result = laplacian_embedding(graph, svd_seed=1234)
        pickled = pickle.dumps(result)
        unpickled = pickle.loads(pickled)
        np.testing.assert_array_equal(result.embedding, unpickled.embedding)
        np.testing.assert_array_equal(result.vertex_labels, unpickled.vertex_labels)
|
10,656 | ffdf46791741184ea1adf52e8e404eeeaabef429 | from .provider import XingProvider as XingProvider
from allauth.socialaccount.providers.oauth.client import OAuth as OAuth
from allauth.socialaccount.providers.oauth.views import OAuthAdapter as OAuthAdapter, OAuthCallbackView as OAuthCallbackView, OAuthLoginView as OAuthLoginView
from typing import Any
class XingAPI(OAuth):
    # Type stub: OAuth client exposing the XING user-info endpoint.
    url: str = ...
    def get_user_info(self): ...
class XingOAuthAdapter(OAuthAdapter):
    # Type stub: OAuth1 adapter wiring the XING endpoints into allauth.
    provider_id: Any = ...
    request_token_url: str = ...
    access_token_url: str = ...
    authorize_url: str = ...
    def complete_login(self, request: Any, app: Any, token: Any, response: Any): ...
oauth_login: Any
oauth_callback: Any
|
10,657 | bcc4ab4af84f18df1a86a871391deba0118b5c1c | import math
import torch.nn as nn
__all__ = [
'ConvFC', 'ConvFCSimple'
]
class ConvFCBase(nn.Module):
    """Three conv/pool stages (3 -> 32 -> 64 -> 128 channels) followed by a
    1152 -> 1000 -> 1000 -> num_classes fully connected head.

    The flattened conv output is 1152 = 128 * 3 * 3, which matches
    3x32x32 inputs.
    """
    def __init__(self, num_classes):
        super(ConvFCBase, self).__init__()
        conv_layers = [
            nn.Conv2d(3, 32, kernel_size=5, padding=2),
            nn.ReLU(True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(32, 64, kernel_size=5, padding=2),
            nn.ReLU(True),
            nn.MaxPool2d(3, 2),
            nn.Conv2d(64, 128, kernel_size=5, padding=2),
            nn.ReLU(True),
            nn.MaxPool2d(3, 2),
        ]
        self.conv_part = nn.Sequential(*conv_layers)
        self.fc_part = nn.Sequential(
            nn.Linear(1152, 1000),
            nn.ReLU(True),
            nn.Linear(1000, 1000),
            nn.ReLU(True),
            nn.Linear(1000, num_classes),
        )
        # He-style initialization (std = sqrt(2/fan_out)) for conv filters;
        # biases start at zero.
        for module in self.conv_part.modules():
            if isinstance(module, nn.Conv2d):
                fan_out = module.kernel_size[0] * module.kernel_size[1] * module.out_channels
                module.weight.data.normal_(0, math.sqrt(2. / fan_out))
                module.bias.data.zero_()
    def forward(self, x):
        """Run the conv stages, flatten per sample, then the FC head."""
        features = self.conv_part(x)
        flat = features.view(features.size(0), -1)
        return self.fc_part(flat)
class ConvFCSimpleBase(nn.Module):
    """Lightweight variant of ConvFCBase: three 8-channel conv/pool stages
    and a 392 -> 100 -> num_classes head (392 = 8 * 7 * 7, matching
    3x32x32 inputs)."""
    def __init__(self, num_classes):
        super(ConvFCSimpleBase, self).__init__()
        conv_layers = [
            nn.Conv2d(3, 8, kernel_size=3, padding=2),
            nn.ReLU(True),
            nn.MaxPool2d(kernel_size=3, stride=1),
            nn.Conv2d(8, 8, kernel_size=3, padding=1),
            nn.ReLU(True),
            nn.MaxPool2d(3, 2),
            nn.Conv2d(8, 8, kernel_size=3, padding=1),
            nn.ReLU(True),
            nn.MaxPool2d(3, 2),
        ]
        self.conv_part = nn.Sequential(*conv_layers)
        self.fc_part = nn.Sequential(
            nn.Linear(392, 100),
            nn.ReLU(True),
            nn.Linear(100, num_classes),
        )
        # He-style initialization for conv weights, zero biases.
        for module in self.conv_part.modules():
            if isinstance(module, nn.Conv2d):
                fan_out = module.kernel_size[0] * module.kernel_size[1] * module.out_channels
                module.weight.data.normal_(0, math.sqrt(2. / fan_out))
                module.bias.data.zero_()
    def forward(self, x):
        """Conv stages, flatten, FC head."""
        features = self.conv_part(x)
        return self.fc_part(features.view(features.size(0), -1))
class ConvFC:
    # Model descriptor: the module class plus its constructor kwargs —
    # presumably consumed by a training harness; confirm against callers.
    base = ConvFCBase
    kwargs = {}
class ConvFCSimple:
    # Descriptor for the lightweight variant (same contract as ConvFC).
    base = ConvFCSimpleBase
    kwargs = {}
|
10,658 | 844a532c614176be173f7cbe75a187ee359b4c27 | import argparse
import numpy as np
from os.path import join
from train.models import DreyeveNet
from train.config import dreyeve_test_seq
from train.config import frames_per_seq
from train.config import h
from train.config import w
from tqdm import tqdm
from metrics.metrics import kld_numeric
from computer_vision_utils.io_helper import read_image
from computer_vision_utils.tensor_manipulation import resize_tensor
def translate_tensor(x, pixels):
    """Shift *x* horizontally (last axis) by *pixels*, mirror-padding the
    vacated border with the columns that would otherwise fall off the edge.

    Positive pixels shift right, negative shift left; pixels == 0 returns
    the input array itself. The input is never mutated otherwise.
    """
    if pixels == 0:
        return x
    width = x.shape[-1]
    if pixels < 0:
        shift = -pixels
        # Columns pushed off the left edge, reversed, fill the right border.
        border = x[..., (width - shift):][..., ::-1]
        shifted = np.roll(x, -shift, axis=-1)
        shifted[..., (width - shift):] = border
    else:
        # Columns pushed off the right edge, reversed, fill the left border.
        border = x[..., :pixels][..., ::-1]
        shifted = np.roll(x, pixels, axis=-1)
        shifted[..., :pixels] = border
    return shifted
def translate_batch(batch, pixels):
    """Apply translate_tensor to every input and target tensor in *batch*.

    The shift is expressed in full-resolution (1920 px wide) coordinates and
    rescaled to each tensor's own width. Operates on copies, so *batch* is
    left untouched.
    """
    inputs, targets = [[np.copy(t) for t in group] for group in batch]
    def _shift_in_place(tensors):
        # Rescale the full-resolution shift to this tensor's width.
        for idx, tensor in enumerate(tensors):
            scaled = (tensor.shape[-1] * pixels) // 1920
            tensors[idx] = translate_tensor(tensor, pixels=scaled)
    _shift_in_place(inputs)
    _shift_in_place(targets)
    return inputs, targets
def load_dreyeve_sample(sequence_dir, stop, mean_dreyeve_image):
    """
    Load one DR(eye)VE sample: frames, optical flow, segmentation and GT maps.

    :param sequence_dir: string, sequence directory (e.g. 'Z:/DATA/04/').
    :param stop: int, sample to load in (15, 7499). N.B. this is the sample where prediction occurs!
    :param mean_dreyeve_image: mean dreyeve image, subtracted to each frame.
    :return: ([I_ff, I_s, I_c, OF_ff, OF_s, OF_c, SEG_ff, SEG_s, SEG_c], [Y_sal, Y_fix])
    """
    # Small-resolution streams are a quarter of the full resolution.
    h_c = h_s = h // 4
    # BUGFIX: width was previously derived from h (w_c = w_s = h // 4),
    # which is only correct when h == w; it now uses w.
    w_c = w_s = w // 4
    I_ff = np.zeros(shape=(1, 3, 1, h, w), dtype='float32')
    I_s = np.zeros(shape=(1, 3, frames_per_seq, h_s, w_s), dtype='float32')
    I_c = np.zeros(shape=(1, 3, frames_per_seq, h_c, w_c), dtype='float32')
    OF_ff = np.zeros(shape=(1, 3, 1, h, w), dtype='float32')
    OF_s = np.zeros(shape=(1, 3, frames_per_seq, h_s, w_s), dtype='float32')
    OF_c = np.zeros(shape=(1, 3, frames_per_seq, h_c, w_c), dtype='float32')
    SEG_ff = np.zeros(shape=(1, 19, 1, h, w), dtype='float32')
    SEG_s = np.zeros(shape=(1, 19, frames_per_seq, h_s, w_s), dtype='float32')
    SEG_c = np.zeros(shape=(1, 19, frames_per_seq, h_c, w_c), dtype='float32')
    Y_sal = np.zeros(shape=(1, 1, h, w), dtype='float32')
    Y_fix = np.zeros(shape=(1, 1, h, w), dtype='float32')
    # NOTE(review): the cropped streams (I_c, OF_c, SEG_c) are allocated but
    # never filled here — presumably zero crops are acceptable for this
    # evaluation; confirm against the training-time loader.
    for fr in range(0, frames_per_seq):  # range (not xrange) for Py3 compat
        offset = stop - frames_per_seq + 1 + fr  # tricky
        # read image, mean-subtracted
        x = read_image(join(sequence_dir, 'frames', '{:06d}.jpg'.format(offset)),
                       channels_first=True, resize_dim=(h, w)) - mean_dreyeve_image
        I_s[0, :, fr, :, :] = resize_tensor(x, new_size=(h_s, w_s))
        # read optical flow (frame offset+1) and remove its per-channel mean
        of = read_image(join(sequence_dir, 'optical_flow', '{:06d}.png'.format(offset + 1)),
                        channels_first=True, resize_dim=(h, w))
        of -= np.mean(of, axis=(1, 2), keepdims=True)  # remove mean
        OF_s[0, :, fr, :, :] = resize_tensor(of, new_size=(h_s, w_s))
        # read semantic segmentation (19 classes)
        seg = resize_tensor(np.load(join(sequence_dir, 'semseg', '{:06d}.npz'.format(offset)))['arr_0'][0],
                            new_size=(h, w))
        SEG_s[0, :, fr, :, :] = resize_tensor(seg, new_size=(h_s, w_s))
    # Full-frame streams hold only the last (prediction) frame of the clip.
    I_ff[0, :, 0, :, :] = x
    OF_ff[0, :, 0, :, :] = of
    SEG_ff[0, :, 0, :, :] = seg
    Y_sal[0, 0] = read_image(join(sequence_dir, 'saliency', '{:06d}.png'.format(stop)), channels_first=False,
                             color=False, resize_dim=(h, w))
    Y_fix[0, 0] = read_image(join(sequence_dir, 'saliency_fix', '{:06d}.png'.format(stop)), channels_first=False,
                             color=False, resize_dim=(h, w))
    return [I_ff, I_s, I_c, OF_ff, OF_s, OF_c, SEG_ff, SEG_s, SEG_c], [Y_sal, Y_fix]
class DataLoader:
    """Serves network-ready DR(eye)VE samples from the attentive test-set
    subsequences listed in ``subsequences.txt``."""
    def __init__(self, dreyeve_root):
        self.dreyeve_root = dreyeve_root
        self.dreyeve_data_root = join(dreyeve_root, 'DATA')
        self.subseq_file = join(dreyeve_root, 'subsequences.txt')
        # load subsequences
        self.subseqs = np.loadtxt(self.subseq_file, dtype=str)
        # filter attentive ('k' flag in the last column)
        self.subseqs = self.subseqs[self.subseqs[:, -1] == 'k']
        # cast to int (drops the flag column)
        self.subseqs = np.int32(self.subseqs[:, :-1])
        # filter test sequences
        self.subseqs = np.array([seq for seq in self.subseqs if seq[0] in dreyeve_test_seq])
        # filter too short sequences
        self.subseqs = np.array([seq for seq in self.subseqs if seq[2] - seq[1] >= frames_per_seq])
        self.len = len(self.subseqs)
        self.counter = 0
        # load mean dreyeve image (subtracted from every frame in the loader)
        self.mean_dreyeve_image = read_image(join(self.dreyeve_data_root, 'dreyeve_mean_frame.png'),
                                             channels_first=True, resize_dim=(h, w))
    def __len__(self):
        # Number of (attentive, test, long-enough) subsequences.
        return self.len
    def get_sample(self):
        # compute center of this subsequence
        seq, start, stop = self.subseqs[self.counter]
        # start = (start + stop) / 2 - frames_per_seq / 2
        # NOTE(review): the subsequence bounds above are ignored — a random
        # window anywhere in the 7500-frame run is sampled instead; confirm
        # this is intended.
        start = np.random.randint(0, 7500 - frames_per_seq)
        stop = start + frames_per_seq
        # compute sequence dir
        sequence_dir = join(self.dreyeve_data_root, '{:02d}'.format(seq))
        batch = load_dreyeve_sample(sequence_dir, stop, self.mean_dreyeve_image)
        self.counter += 1
        return batch
if __name__ == '__main__':
    # parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--checkpoint_file', type=str)
    args = parser.parse_args()
    assert args.checkpoint_file is not None, 'Please provide a checkpoint model to load.'
    # get the models
    dreyevenet_model = DreyeveNet(frames_per_seq=frames_per_seq, h=h, w=w)
    dreyevenet_model.compile(optimizer='adam', loss='kld')  # do we need this?
    dreyevenet_model.load_weights(args.checkpoint_file)  # load weights
    dreyeve_root = '/majinbu/public/DREYEVE'
    # Horizontal shifts (in full-resolution pixels) used to probe the model.
    shifts = np.arange(-800, 801, step=200)
    # get data_loader
    loader = DataLoader(dreyeve_root)
    # set up array for kld results: one row per clip, one column per shift
    kld_results = np.zeros(shape=(len(loader), len(shifts)))
    for clip_idx in tqdm(range(0, len(loader))):
        batch = loader.get_sample()
        # compute shifted versions of the same sample
        X_list = []
        GT_list = []
        for s in shifts:
            X, GT = translate_batch(batch, pixels=s)
            X_list.append(X)
            GT_list.append(GT)
        # Stack all shifted variants along the batch axis so a single
        # predict() call evaluates every shift at once.
        X_batch = [np.concatenate(l) for l in zip(*X_list)]
        # [1] selects the fixation ground truth (Y list is [Y_sal, Y_fix]).
        GT_batch = [np.concatenate(l) for l in zip(*GT_list)][1]
        # [0] keeps the model's first output — presumably the saliency map;
        # confirm against DreyeveNet's output ordering.
        P_batch = dreyevenet_model.predict(X_batch)[0]
        for shift_idx, (p, gt) in enumerate(zip(P_batch, GT_batch)):
            kld_results[clip_idx, shift_idx] = kld_numeric(gt, p)
    # Saved text file: row 0 = mean KLD per shift, row 1 = std per shift.
    np.savetxt(args.checkpoint_file + '.txt',
               X=np.concatenate(
                   (
                       np.mean(kld_results, axis=0, keepdims=True),
                       np.std(kld_results, axis=0, keepdims=True)
                   ),
                   axis=0
               ))
|
10,659 | 0f2bbf29caa0f76c7be97df4b9aa039691515b35 | from .sopt_gateway import SoptGateway
|
10,660 | 11370b03c3ae80aade9d476511a4cd472af95d1a | #!/usr/bin/env python
"""
This script is used to parse the log in CFS sever
Example:
import cfsServerLogParser
missLogData = cfsServerLogParser.parseRatingMissLogFile("logfile")
print("Missing log file: " + missLogData["file"])
print("Missing log line count: " + str(missLogData["lineCount"]))
print("Missing log err line count: " + str(missLogData["errLineCount"]))
print("Missing log OK line count: " + str(missLogData["okLineCount"]))
for e in missLogData["logList"]:
print("domain: " + e["domain"])
print("path: " + e["path"])
"""
import sys
def parseRatingMissLogLine(logLine, printError = False):
    """
    Parse one rating-missing log line into a dict.

    Expected whitespace-separated layout:
        Sep 10 02:24:48 sjl0vm-cfs401 webcfsd: dk3.lunrac.com /api/test UNRATED
    Path and message tokens are optional; a path of "NULL" maps to "".
    Returns an empty dict for lines that do not match the format.
    """
    tokens = logLine.split()
    # A valid line needs at least the 3-token timestamp, the server name,
    # the literal "webcfsd:" marker and a domain.
    if len(tokens) < 6 or tokens[4] != "webcfsd:":
        if printError:
            print("Ignore log line with invalid format: " + logLine)
        return {}
    entry = {
        "time": " ".join(tokens[0:3]),
        "server": tokens[3],
        "logType": tokens[4],
        "domain": tokens[5],
        "path": tokens[6] if len(tokens) > 6 else "",
        "msg": tokens[7] if len(tokens) > 7 else "",
    }
    # "NULL" is the logger's placeholder for a missing path.
    if entry["path"] == "NULL":
        entry["path"] = ""
    return entry
def parseRatingMissLogFile(logFile):
    """
    Parse a rating-missing log file.

    Returns a dict:
    {
        "file": <log file name>,
        "lineCount": <total lines read>,
        "errLineCount": <lines that failed to parse>,
        "okLineCount": <lines parsed successfully>,
        "logList": <list of parsed line dicts, see parseRatingMissLogLine>
    }
    """
    parsed_lines = []
    total = 0
    bad = 0
    with open(logFile) as f:
        # Iterate the file directly instead of the readline() loop.
        for line in f:
            total += 1
            entry = parseRatingMissLogLine(line)
            if entry.get("domain") is None:
                bad += 1
            else:
                parsed_lines.append(entry)
    return {
        "file": logFile,
        "lineCount": total,
        "errLineCount": bad,
        "okLineCount": len(parsed_lines),
        "logList": parsed_lines,
    }
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Please provide log file path")
        sys.exit()
    missLogFile = sys.argv[1]
    missLogData = parseRatingMissLogFile(missLogFile)
    print("Missing log file: " + missLogData["file"])
    print("Missing log line count: " + str(missLogData["lineCount"]))
    print("Missing log err line count: " + str(missLogData["errLineCount"]))
    print("Missing log OK line count: " + str(missLogData["okLineCount"]))
    # NOTE(review): "Mising" typo in the banner string below.
    print("\nMising domain + path examples: \n\n")
    # Prints up to 11 entries: limit is decremented after printing and the
    # loop breaks only when it goes negative.
    limit = 10
    for e in missLogData["logList"]:
        print(e["domain"] + e["path"])
        limit -= 1
        if limit < 0:
            break
10,661 | 11b8b712d7612e17c6b39b0f26895bfc9fdedb9b | import numpy as np
import matplotlib.pyplot as plt
from decimal import *
plt.rcParams.update({'font.size': 22})
q2x = np.load('q2x.npy')
q2y = np.load('q2y.npy')
N = len(q2x)
x_poly = np.column_stack([np.ones((N,1)), q2x])
#q2(d)i
def unweighted_linear_regression(x_poly, y):
    """Ordinary least squares fit.

    Solves the normal equations (X^T X) w = X^T y with np.linalg.solve
    instead of forming the explicit inverse, which is numerically more
    stable and cheaper; the result is the same coefficient vector.

    :param x_poly: (N, d) design matrix.
    :param y: (N,) targets.
    :return: (d,) coefficient vector w.
    """
    xTx = np.dot(np.transpose(x_poly), x_poly)
    return np.linalg.solve(xTx, np.dot(np.transpose(x_poly), y))
def plot_unweighted(w, x_poly, x, y):
    """Scatter the data points and overlay the unweighted least-squares fit."""
    fig = plt.figure(0)
    fig.set_size_inches(18.5, 10.5, forward=True)
    fig.canvas.set_window_title('q2(d) i.')
    # Predictions at the training inputs from the fitted coefficients.
    pred = np.dot(x_poly, np.transpose(w))
    plt.plot(x, y, 'ob')
    plt.plot(x, pred,'-g')
    # NOTE(review): "unwieighted" typo in the legend label below.
    plt.legend(('point(x,y)', 'unwieighted'), shadow=True, loc=(0.01, 0.48), handlelength=1.5, fontsize=16)
    plt.xlabel('X')
    plt.ylabel('Y')
w = unweighted_linear_regression(x_poly, q2y)
plot_unweighted(w, x_poly, q2x, q2y)
#q2(d)ii
# w = (XTRX)^-1 * XTRY
def weighted_linear_regression(x_query, x_poly, x, y, tau):
    """Locally weighted linear regression.

    For each query point q, fit w minimizing sum_i r_i (x_i^T w - y_i)^2 with
    Gaussian kernel weights r_i = exp(-(x_i - q)^2 / (2 tau^2)), then predict
    at q with the local fit.

    :param x_query: iterable of query locations.
    :param x_poly: (N, 2) design matrix [1, x].
    :param x: (N,) raw inputs used for the kernel weights.
    :param y: (N,) targets.
    :param tau: kernel bandwidth.
    :return: (W, Pred) lists of per-query coefficient vectors and predictions.
    """
    W = []
    Pred = []
    for xq in x_query:
        # Gaussian weights centred on the query point.
        r = np.exp(-np.square(x - xq) / (2 * tau ** 2))
        R = np.diag(r)
        # Solve (X^T R X) w = X^T R y directly instead of chaining
        # np.linalg.inv — numerically more stable, same result.
        XtR = np.dot(np.transpose(x_poly), R)
        w = np.linalg.solve(np.dot(XtR, x_poly), np.dot(XtR, y))
        W.append(w)
        Pred.append(np.dot(np.array([1, xq]), w))
    return W, Pred
def plot_query_weighted(Pred, x_query, x, y):
    """Scatter the data and overlay the locally weighted fit (tau = 0.8)."""
    fig = plt.figure(1)
    fig.set_size_inches(18.5, 10.5, forward=True)
    fig.canvas.set_window_title('q2(d) ii.')
    plt.plot(x, y, 'ob')
    # Pred holds one prediction per query point in x_query.
    plt.plot(x_query, Pred, '-g')
    plt.legend(('point(x,y)', 'τ = 0.8'), shadow=True, loc=(0.01, 0.48), handlelength=1.5, fontsize=16)
    plt.xlabel('X')
    plt.ylabel('Y')
#print(f'The closed form coefficients for {lamd} : \n {w_r}')
q2x_max = np.max(q2x)
q2x_min = np.min(q2x)
x_query = np.linspace(q2x_min,q2x_max,50)
W0, Pred = weighted_linear_regression(x_query, x_poly, q2x, q2y, 0.8)
plot_query_weighted(Pred, x_query, q2x, q2y)
##q2(d) iii
def plot_multiple_query_weighted(Pred0, Pred1, Pred2, Pred3, x_query, x, y):
    """Overlay the locally weighted fits for four bandwidths
    (tau = 0.1, 0.3, 2, 10) on one scatter plot."""
    fig = plt.figure(4)
    fig.set_size_inches(18.5, 10.5, forward=True)
    fig.canvas.set_window_title('q2(d) iii.')
    plt.plot(x, y, 'ob')
    plt.plot(x_query, Pred0, '-g')
    plt.plot(x_query, Pred1, '-r')
    plt.plot(x_query, Pred2, '-b')
    plt.plot(x_query, Pred3, '-m')
    plt.legend(('point(x,y)', 'τ = 0.1', 'τ = 0.3', 'τ = 2', 'τ = 10'), shadow=True, loc=(0.01, 0.48), handlelength=1.5, fontsize=16)
    plt.xlabel('X')
    plt.ylabel('Y')
print(f'q2 (d) iii:\n When τ is small, the fitting curve followed the data points closely. On the other hand, the curve did not follow closely to data points when τ is large')
W0, Pred0 = weighted_linear_regression(x_query, x_poly, q2x, q2y, 0.1)
W1, Pred1 = weighted_linear_regression(x_query, x_poly, q2x, q2y, 0.3)
W2, Pred2 = weighted_linear_regression(x_query, x_poly, q2x, q2y, 2)
W3, Pred3 = weighted_linear_regression(x_query, x_poly, q2x, q2y, 10)
plot_multiple_query_weighted(Pred0, Pred1, Pred2, Pred3, x_query, q2x, q2y)
plt.show()
|
10,662 | 7af2daf51857ef17eec6362e4920fc4e25ea5fbf | import numpy as np
from scipy.linalg import lstsq
#temperature data
# Known Fahrenheit/Celsius pairs; the linear fit should recover
# C = (F - 32) * 5/9, i.e. intercept ~ -17.78 and slope ~ 0.5556.
fahrenheit = np.array([5,14,23,32,41,50])
celsius = np.array([-15,-10,-5,0,5,10])
# Vandermonde-style design matrix [1, F] for a first-degree fit.
M = fahrenheit[:, np.newaxis]**[0, 1]
model, _, _, _ = lstsq(M,celsius)
# NOTE: bare print statements — this script targets Python 2.
print "Intercept =", model[0]
print "fahrenheit =", model[1]
|
10,663 | 7f9193d0534fb3a6119fb779ee06bdf27d91b222 | # -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.nsx_policy.infra.
#---------------------------------------------------------------------------
"""
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class DhcpRelayConfigs(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.dhcp_relay_configs'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _DhcpRelayConfigsStub)
self._VAPI_OPERATION_IDS = {}
def delete(self,
dhcp_relay_config_id,
):
"""
Delete DHCP relay configuration
:type dhcp_relay_config_id: :class:`str`
:param dhcp_relay_config_id: DHCP relay config ID (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('delete',
{
'dhcp_relay_config_id': dhcp_relay_config_id,
})
def get(self,
dhcp_relay_config_id,
):
"""
Read DHCP relay configuration
:type dhcp_relay_config_id: :class:`str`
:param dhcp_relay_config_id: DHCP relay config ID (required)
:rtype: :class:`com.vmware.nsx_policy.model_client.DhcpRelayConfig`
:return: com.vmware.nsx_policy.model.DhcpRelayConfig
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get',
{
'dhcp_relay_config_id': dhcp_relay_config_id,
})
def list(self,
cursor=None,
include_mark_for_delete_objects=None,
included_fields=None,
page_size=None,
sort_ascending=None,
sort_by=None,
):
"""
Paginated list of all DHCP relay config instances
:type cursor: :class:`str` or ``None``
:param cursor: Opaque cursor to be used for getting next page of records (supplied
by current result page) (optional)
:type include_mark_for_delete_objects: :class:`bool` or ``None``
:param include_mark_for_delete_objects: Include objects that are marked for deletion in results (optional,
default to false)
:type included_fields: :class:`str` or ``None``
:param included_fields: Comma separated list of fields that should be included in query
result (optional)
:type page_size: :class:`long` or ``None``
:param page_size: Maximum number of results to return in this page (server may return
fewer) (optional, default to 1000)
:type sort_ascending: :class:`bool` or ``None``
:param sort_ascending: (optional)
:type sort_by: :class:`str` or ``None``
:param sort_by: Field by which records are sorted (optional)
:rtype: :class:`com.vmware.nsx_policy.model_client.DhcpRelayConfigListResult`
:return: com.vmware.nsx_policy.model.DhcpRelayConfigListResult
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('list',
{
'cursor': cursor,
'include_mark_for_delete_objects': include_mark_for_delete_objects,
'included_fields': included_fields,
'page_size': page_size,
'sort_ascending': sort_ascending,
'sort_by': sort_by,
})
def patch(self,
dhcp_relay_config_id,
dhcp_relay_config,
):
"""
If DHCP relay config with the dhcp-relay-config-id is not already
present, create a new DHCP relay config instance. If it already exists,
update the DHCP relay config instance with specified attributes.
:type dhcp_relay_config_id: :class:`str`
:param dhcp_relay_config_id: DHCP relay config ID (required)
:type dhcp_relay_config: :class:`com.vmware.nsx_policy.model_client.DhcpRelayConfig`
:param dhcp_relay_config: (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('patch',
{
'dhcp_relay_config_id': dhcp_relay_config_id,
'dhcp_relay_config': dhcp_relay_config,
})
def update(self,
dhcp_relay_config_id,
dhcp_relay_config,
):
"""
If DHCP relay config with the dhcp-relay-config-id is not already
present, create a new DHCP relay config instance. If it already exists,
replace the DHCP relay config instance with this object.
:type dhcp_relay_config_id: :class:`str`
:param dhcp_relay_config_id: DHCP relay config ID (required)
:type dhcp_relay_config: :class:`com.vmware.nsx_policy.model_client.DhcpRelayConfig`
:param dhcp_relay_config: (required)
:rtype: :class:`com.vmware.nsx_policy.model_client.DhcpRelayConfig`
:return: com.vmware.nsx_policy.model.DhcpRelayConfig
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('update',
{
'dhcp_relay_config_id': dhcp_relay_config_id,
'dhcp_relay_config': dhcp_relay_config,
})
class DhcpServerConfigs(VapiInterface):
    """
    Client stub for the
    ``com.vmware.nsx_policy.infra.dhcp_server_configs`` service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.dhcp_server_configs'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _DhcpServerConfigsStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, dhcp_server_config_id):
        """
        Delete the DHCP server configuration with the given ID.

        :type  dhcp_server_config_id: :class:`str`
        :param dhcp_server_config_id: DHCP server config ID (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'dhcp_server_config_id': dhcp_server_config_id}
        return self._invoke('delete', payload)

    def get(self, dhcp_server_config_id):
        """
        Read the DHCP server configuration with the given ID.

        :type  dhcp_server_config_id: :class:`str`
        :param dhcp_server_config_id: DHCP server config ID (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.DhcpServerConfig`
        :return: com.vmware.nsx_policy.model.DhcpServerConfig
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'dhcp_server_config_id': dhcp_server_config_id}
        return self._invoke('get', payload)

    def list(self, cursor=None, include_mark_for_delete_objects=None,
             included_fields=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        Return a paginated list of all DHCP server config instances.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Also return objects that are
            marked for deletion (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.DhcpServerConfigListResult`
        :return: com.vmware.nsx_policy.model.DhcpServerConfigListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        query = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', query)

    def patch(self, dhcp_server_config_id, dhcp_server_config):
        """
        Create a new DHCP server config instance under the given ID if none
        exists yet; otherwise update the existing instance with the
        specified attributes. Realized entities of this API can be found
        using the path of the Tier-0 this config is applied on.

        :type  dhcp_server_config_id: :class:`str`
        :param dhcp_server_config_id: DHCP server config ID (required)
        :type  dhcp_server_config: :class:`com.vmware.nsx_policy.model_client.DhcpServerConfig`
        :param dhcp_server_config: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'dhcp_server_config_id': dhcp_server_config_id,
            'dhcp_server_config': dhcp_server_config,
        }
        return self._invoke('patch', payload)

    def update(self, dhcp_server_config_id, dhcp_server_config):
        """
        Create a new DHCP server config instance under the given ID if none
        exists yet; otherwise replace the existing instance with the
        supplied object. Realized entities of this API can be found using
        the path of the Tier-0 this config is applied on.

        :type  dhcp_server_config_id: :class:`str`
        :param dhcp_server_config_id: DHCP server config ID (required)
        :type  dhcp_server_config: :class:`com.vmware.nsx_policy.model_client.DhcpServerConfig`
        :param dhcp_server_config: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.DhcpServerConfig`
        :return: com.vmware.nsx_policy.model.DhcpServerConfig
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'dhcp_server_config_id': dhcp_server_config_id,
            'dhcp_server_config': dhcp_server_config,
        }
        return self._invoke('update', payload)
class DnsForwarderZones(VapiInterface):
    """
    Client stub for the
    ``com.vmware.nsx_policy.infra.dns_forwarder_zones`` service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.dns_forwarder_zones'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _DnsForwarderZonesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, dns_forwarder_zone_id):
        """
        Delete the DNS Forwarder Zone with the given ID.

        :type  dns_forwarder_zone_id: :class:`str`
        :param dns_forwarder_zone_id: DNS Forwarder Zone ID (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'dns_forwarder_zone_id': dns_forwarder_zone_id}
        return self._invoke('delete', payload)

    def get(self, dns_forwarder_zone_id):
        """
        Read the DNS Forwarder Zone with the given ID.

        :type  dns_forwarder_zone_id: :class:`str`
        :param dns_forwarder_zone_id: DNS Forwarder Zone ID (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.PolicyDnsForwarderZone`
        :return: com.vmware.nsx_policy.model.PolicyDnsForwarderZone
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'dns_forwarder_zone_id': dns_forwarder_zone_id}
        return self._invoke('get', payload)

    def list(self, cursor=None, include_mark_for_delete_objects=None,
             included_fields=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        Return a paginated list of all DNS Forwarder Zones.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Also return objects that are
            marked for deletion (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.PolicyDnsForwarderZoneListResult`
        :return: com.vmware.nsx_policy.model.PolicyDnsForwarderZoneListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        query = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', query)

    def patch(self, dns_forwarder_zone_id, policy_dns_forwarder_zone):
        """
        Create or update the DNS Forwarder Zone.

        :type  dns_forwarder_zone_id: :class:`str`
        :param dns_forwarder_zone_id: DNS Forwarder Zone ID (required)
        :type  policy_dns_forwarder_zone: :class:`com.vmware.nsx_policy.model_client.PolicyDnsForwarderZone`
        :param policy_dns_forwarder_zone: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'dns_forwarder_zone_id': dns_forwarder_zone_id,
            'policy_dns_forwarder_zone': policy_dns_forwarder_zone,
        }
        return self._invoke('patch', payload)

    def update(self, dns_forwarder_zone_id, policy_dns_forwarder_zone):
        """
        Create or update the DNS Forwarder Zone, returning the resulting
        object.

        :type  dns_forwarder_zone_id: :class:`str`
        :param dns_forwarder_zone_id: DNS Forwarder Zone ID (required)
        :type  policy_dns_forwarder_zone: :class:`com.vmware.nsx_policy.model_client.PolicyDnsForwarderZone`
        :param policy_dns_forwarder_zone: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.PolicyDnsForwarderZone`
        :return: com.vmware.nsx_policy.model.PolicyDnsForwarderZone
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'dns_forwarder_zone_id': dns_forwarder_zone_id,
            'policy_dns_forwarder_zone': policy_dns_forwarder_zone,
        }
        return self._invoke('update', payload)
class Domains(VapiInterface):
    """
    Client stub for the ``com.vmware.nsx_policy.infra.domains`` service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.domains'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _DomainsStub)
        self._VAPI_OPERATION_IDS = {}

    def get(self, domain_id):
        """
        Read a domain.

        :type  domain_id: :class:`str`
        :param domain_id: Domain ID (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.Domain`
        :return: com.vmware.nsx_policy.model.Domain
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'domain_id': domain_id}
        return self._invoke('get', payload)

    def list(self, cursor=None, include_mark_for_delete_objects=None,
             included_fields=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        Return a paginated list of all domains for infra.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Also return objects that are
            marked for deletion (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.DomainListResult`
        :return: com.vmware.nsx_policy.model.DomainListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        query = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', query)
class Drafts(VapiInterface):
    """
    Client stub for the ``com.vmware.nsx_policy.infra.drafts`` service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.drafts'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _DraftsStub)
        self._VAPI_OPERATION_IDS = {}

    def abort(self, draft_id):
        """
        Abort publishing attempts after a publish failure, bringing the
        system back to the configuration it had just before the publish was
        invoked. If draft publishing is not in progress or has already
        completed, this is a NO-OP.

        :type  draft_id: :class:`str`
        :param draft_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'draft_id': draft_id}
        return self._invoke('abort', payload)

    def delete(self, draft_id):
        """
        Delete a manual draft.

        :type  draft_id: :class:`str`
        :param draft_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'draft_id': draft_id}
        return self._invoke('delete', payload)

    def get(self, draft_id):
        """
        Read the draft for a given draft identifier.

        :type  draft_id: :class:`str`
        :param draft_id: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.PolicyDraft`
        :return: com.vmware.nsx_policy.model.PolicyDraft
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'draft_id': draft_id}
        return self._invoke('get', payload)

    def list(self, auto_drafts=None, cursor=None,
             include_mark_for_delete_objects=None, included_fields=None,
             page_size=None, sort_ascending=None, sort_by=None):
        """
        List policy drafts.

        :type  auto_drafts: :class:`bool` or ``None``
        :param auto_drafts: Fetch list of draft based on is_auto_draft flag
            (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Also return objects that are
            marked for deletion (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.PolicyDraftListResult`
        :return: com.vmware.nsx_policy.model.PolicyDraftListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        query = {
            'auto_drafts': auto_drafts,
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', query)

    def patch(self, draft_id, policy_draft):
        """
        Create a new manual draft when the specified draft id does not
        correspond to an existing draft; update the manual draft otherwise.
        Auto drafts can not be updated.

        :type  draft_id: :class:`str`
        :param draft_id: (required)
        :type  policy_draft: :class:`com.vmware.nsx_policy.model_client.PolicyDraft`
        :param policy_draft: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'draft_id': draft_id,
            'policy_draft': policy_draft,
        }
        return self._invoke('patch', payload)

    def publish(self, draft_id, infra):
        """
        Read a draft and publish it by applying its changes onto the
        current configuration.

        :type  draft_id: :class:`str`
        :param draft_id: (required)
        :type  infra: :class:`com.vmware.nsx_policy.model_client.Infra`
        :param infra: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'draft_id': draft_id,
            'infra': infra,
        }
        return self._invoke('publish', payload)

    def update(self, draft_id, policy_draft):
        """
        Create a new manual draft when the specified draft id does not
        correspond to an existing draft; update the manual draft otherwise.
        Auto drafts can not be updated.

        :type  draft_id: :class:`str`
        :param draft_id: (required)
        :type  policy_draft: :class:`com.vmware.nsx_policy.model_client.PolicyDraft`
        :param policy_draft: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.PolicyDraft`
        :return: com.vmware.nsx_policy.model.PolicyDraft
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'draft_id': draft_id,
            'policy_draft': policy_draft,
        }
        return self._invoke('update', payload)
class GroupAssociations(VapiInterface):
    """
    Client stub for the
    ``com.vmware.nsx_policy.infra.group_associations`` service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.group_associations'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _GroupAssociationsStub)
        self._VAPI_OPERATION_IDS = {}

    def list(self, intent_path, cursor=None, enforcement_point_path=None,
             include_mark_for_delete_objects=None, included_fields=None,
             page_size=None, sort_ascending=None, sort_by=None):
        """
        Get policy groups for which the given object is a member.

        :type  intent_path: :class:`str`
        :param intent_path: String path of the intent object (required)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page (optional)
        :type  enforcement_point_path: :class:`str` or ``None``
        :param enforcement_point_path: String Path of the enforcement point
            (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Also return objects that are
            marked for deletion (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.PolicyResourceReferenceForEPListResult`
        :return: com.vmware.nsx_policy.model.PolicyResourceReferenceForEPListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        query = {
            'intent_path': intent_path,
            'cursor': cursor,
            'enforcement_point_path': enforcement_point_path,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', query)
class IpfixCollectorProfiles(VapiInterface):
    """
    Client stub for the
    ``com.vmware.nsx_policy.infra.ipfix_collector_profiles`` service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.ipfix_collector_profiles'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _IpfixCollectorProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, ipfix_collector_profile_id):
        """
        Delete an IPFIX collector profile; flow forwarding to the collector
        will be stopped. This API is deprecated. Please use the following
        API:
        https://<policy-mgr>/policy/api/v1/infra/ipfix-l2-collector-profiles

        :type  ipfix_collector_profile_id: :class:`str`
        :param ipfix_collector_profile_id: IPFIX collector Profile id (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'ipfix_collector_profile_id': ipfix_collector_profile_id}
        return self._invoke('delete', payload)

    def get(self, ipfix_collector_profile_id):
        """
        Return the details of an IPFIX collector profile, or 404 if the
        profile does not exist. This API is deprecated. Please use the
        following API:
        https://<policy-mgr>/policy/api/v1/infra/ipfix-l2-collector-profiles

        :type  ipfix_collector_profile_id: :class:`str`
        :param ipfix_collector_profile_id: IPFIX collector profile id (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXCollectorProfile`
        :return: com.vmware.nsx_policy.model.IPFIXCollectorProfile
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'ipfix_collector_profile_id': ipfix_collector_profile_id}
        return self._invoke('get', payload)

    def list(self, cursor=None, include_mark_for_delete_objects=None,
             included_fields=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        Return the list of all IPFIX collector profiles and their details.
        This API is deprecated. Please use the following API:
        https://<policy-mgr>/policy/api/v1/infra/ipfix-l2-collector-profiles

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Also return objects that are
            marked for deletion (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXCollectorProfileListResult`
        :return: com.vmware.nsx_policy.model.IPFIXCollectorProfileListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        query = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', query)

    def patch(self, ipfix_collector_profile_id, i_pfix_collector_profile):
        """
        Create a new IPFIX collector profile if one with the given id does
        not already exist; otherwise patch the existing profile. This API
        is deprecated. Please use the following API:
        https://<policy-mgr>/policy/api/v1/infra/ipfix-l2-collector-profiles

        :type  ipfix_collector_profile_id: :class:`str`
        :param ipfix_collector_profile_id: IPFIX collector profile id (required)
        :type  i_pfix_collector_profile: :class:`com.vmware.nsx_policy.model_client.IPFIXCollectorProfile`
        :param i_pfix_collector_profile: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'ipfix_collector_profile_id': ipfix_collector_profile_id,
            'i_pfix_collector_profile': i_pfix_collector_profile,
        }
        return self._invoke('patch', payload)

    def update(self, ipfix_collector_profile_id, i_pfix_collector_profile):
        """
        Create or replace an IPFIX collector profile; IPFIX data will be
        sent to the IPFIX collector port. This API is deprecated. Please
        use the following API:
        https://<policy-mgr>/policy/api/v1/infra/ipfix-l2-collector-profiles

        :type  ipfix_collector_profile_id: :class:`str`
        :param ipfix_collector_profile_id: IPFIX collector profile id (required)
        :type  i_pfix_collector_profile: :class:`com.vmware.nsx_policy.model_client.IPFIXCollectorProfile`
        :param i_pfix_collector_profile: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXCollectorProfile`
        :return: com.vmware.nsx_policy.model.IPFIXCollectorProfile
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'ipfix_collector_profile_id': ipfix_collector_profile_id,
            'i_pfix_collector_profile': i_pfix_collector_profile,
        }
        return self._invoke('update', payload)
class IpfixDfwCollectorProfiles(VapiInterface):
    """
    Client stub for the NSX policy IPFIX DFW collector profile service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.ipfix_dfw_collector_profiles'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used to create the underlying stub.
        """
        VapiInterface.__init__(self, config, _IpfixDfwCollectorProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, ipfix_dfw_collector_profile_id):
        """
        Delete the IPFIX dfw collector profile; flow forwarding to the
        collector will be stopped.

        :type  ipfix_dfw_collector_profile_id: :class:`str`
        :param ipfix_dfw_collector_profile_id: IPFIX dfw collector Profile id (required)
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_dfw_collector_profile_id': ipfix_dfw_collector_profile_id,
        }
        return self._invoke('delete', payload)

    def get(self, ipfix_dfw_collector_profile_id):
        """
        Return details of the IPFIX dfw collector profile; the service
        responds with 404 if the profile does not exist.

        :type  ipfix_dfw_collector_profile_id: :class:`str`
        :param ipfix_dfw_collector_profile_id: IPFIX dfw collector profile id (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXDFWCollectorProfile`
        :return: the requested profile
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_dfw_collector_profile_id': ipfix_dfw_collector_profile_id,
        }
        return self._invoke('get', payload)

    def list(self, cursor=None, include_mark_for_delete_objects=None,
             included_fields=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        List all IPFIX dfw collector profiles and their details.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for the next page of records, supplied
            by the current result page. (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Include objects marked for
            deletion in the results. (optional, default false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXDFWCollectorProfileListResult`
        :return: paginated list of profiles
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def patch(self, ipfix_dfw_collector_profile_id, i_pfixdfw_collector_profile):
        """
        Create a new IPFIX dfw collector profile if one with the given id
        does not already exist; otherwise patch the existing profile.

        :type  ipfix_dfw_collector_profile_id: :class:`str`
        :param ipfix_dfw_collector_profile_id: (required)
        :type  i_pfixdfw_collector_profile: :class:`com.vmware.nsx_policy.model_client.IPFIXDFWCollectorProfile`
        :param i_pfixdfw_collector_profile: (required)
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_dfw_collector_profile_id': ipfix_dfw_collector_profile_id,
            'i_pfixdfw_collector_profile': i_pfixdfw_collector_profile,
        }
        return self._invoke('patch', payload)

    def update(self, ipfix_dfw_collector_profile_id, i_pfixdfw_collector_profile):
        """
        Create or fully replace the IPFIX dfw collector profile; IPFIX data
        will be sent to the collector port.

        :type  ipfix_dfw_collector_profile_id: :class:`str`
        :param ipfix_dfw_collector_profile_id: IPFIX dfw collector profile id (required)
        :type  i_pfixdfw_collector_profile: :class:`com.vmware.nsx_policy.model_client.IPFIXDFWCollectorProfile`
        :param i_pfixdfw_collector_profile: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXDFWCollectorProfile`
        :return: the stored profile
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_dfw_collector_profile_id': ipfix_dfw_collector_profile_id,
            'i_pfixdfw_collector_profile': i_pfixdfw_collector_profile,
        }
        return self._invoke('update', payload)
class IpfixDfwProfiles(VapiInterface):
    """
    Client stub for the NSX policy IPFIX DFW profile service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.ipfix_dfw_profiles'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used to create the underlying stub.
        """
        VapiInterface.__init__(self, config, _IpfixDfwProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, ipfix_dfw_profile_id):
        """
        Delete the IPFIX DFW Profile; the selected IPFIX collectors will
        stop receiving flows.

        :type  ipfix_dfw_profile_id: :class:`str`
        :param ipfix_dfw_profile_id: IPFIX DFW Profile ID (required)
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_dfw_profile_id': ipfix_dfw_profile_id,
        }
        return self._invoke('delete', payload)

    def get(self, ipfix_dfw_profile_id):
        """
        Return details of the IPFIX DFW profile.

        :type  ipfix_dfw_profile_id: :class:`str`
        :param ipfix_dfw_profile_id: IPFIX DFW collection id (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXDFWProfile`
        :return: the requested profile
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_dfw_profile_id': ipfix_dfw_profile_id,
        }
        return self._invoke('get', payload)

    def list(self, cursor=None, include_mark_for_delete_objects=None,
             included_fields=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        List the IPFIX DFW profiles available on the selected logical DFW.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for the next page of records, supplied
            by the current result page. (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Include objects marked for
            deletion in the results. (optional, default false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXDFWProfileListResult`
        :return: paginated list of profiles
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def patch(self, ipfix_dfw_profile_id, i_pfixdfw_profile):
        """
        Create a new IPFIX DFW profile if one with the given id does not
        already exist; otherwise patch the existing profile.

        :type  ipfix_dfw_profile_id: :class:`str`
        :param ipfix_dfw_profile_id: IPFIX DFW Profile ID (required)
        :type  i_pfixdfw_profile: :class:`com.vmware.nsx_policy.model_client.IPFIXDFWProfile`
        :param i_pfixdfw_profile: (required)
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_dfw_profile_id': ipfix_dfw_profile_id,
            'i_pfixdfw_profile': i_pfixdfw_profile,
        }
        return self._invoke('patch', payload)

    def update(self, ipfix_dfw_profile_id, i_pfixdfw_profile):
        """
        Create or fully replace the IPFIX DFW profile; the config will start
        forwarding data to the provided IPFIX DFW collector.

        :type  ipfix_dfw_profile_id: :class:`str`
        :param ipfix_dfw_profile_id: IPFIX DFW Profile ID (required)
        :type  i_pfixdfw_profile: :class:`com.vmware.nsx_policy.model_client.IPFIXDFWProfile`
        :param i_pfixdfw_profile: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXDFWProfile`
        :return: the stored profile
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_dfw_profile_id': ipfix_dfw_profile_id,
            'i_pfixdfw_profile': i_pfixdfw_profile,
        }
        return self._invoke('update', payload)
class IpfixL2CollectorProfiles(VapiInterface):
    """
    Client stub for the NSX policy IPFIX L2 collector profile service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.ipfix_l2_collector_profiles'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used to create the underlying stub.
        """
        VapiInterface.__init__(self, config, _IpfixL2CollectorProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, ipfix_l2_collector_profile_id):
        """
        Delete the IPFIX collector profile; flow forwarding to the collector
        will be stopped.

        :type  ipfix_l2_collector_profile_id: :class:`str`
        :param ipfix_l2_collector_profile_id: IPFIX collector Profile id (required)
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_l2_collector_profile_id': ipfix_l2_collector_profile_id,
        }
        return self._invoke('delete', payload)

    def get(self, ipfix_l2_collector_profile_id):
        """
        Return details of the IPFIX collector profile.

        :type  ipfix_l2_collector_profile_id: :class:`str`
        :param ipfix_l2_collector_profile_id: IPFIX collector profile id (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXL2CollectorProfile`
        :return: the requested profile
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_l2_collector_profile_id': ipfix_l2_collector_profile_id,
        }
        return self._invoke('get', payload)

    def list(self, cursor=None, include_mark_for_delete_objects=None,
             included_fields=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        List all IPFIX collector profiles and their details.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for the next page of records, supplied
            by the current result page. (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Include objects marked for
            deletion in the results. (optional, default false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXL2CollectorProfileListResult`
        :return: paginated list of profiles
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def patch(self, ipfix_l2_collector_profile_id, i_pfix_l2_collector_profile):
        """
        Create a new IPFIX collector profile if one with the given id does
        not already exist; otherwise patch the existing profile.

        :type  ipfix_l2_collector_profile_id: :class:`str`
        :param ipfix_l2_collector_profile_id: IPFIX collector profile id (required)
        :type  i_pfix_l2_collector_profile: :class:`com.vmware.nsx_policy.model_client.IPFIXL2CollectorProfile`
        :param i_pfix_l2_collector_profile: (required)
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_l2_collector_profile_id': ipfix_l2_collector_profile_id,
            'i_pfix_l2_collector_profile': i_pfix_l2_collector_profile,
        }
        return self._invoke('patch', payload)

    def update(self, ipfix_l2_collector_profile_id, i_pfix_l2_collector_profile):
        """
        Create or fully replace the IPFIX collector profile; IPFIX data will
        be sent to the IPFIX collector.

        :type  ipfix_l2_collector_profile_id: :class:`str`
        :param ipfix_l2_collector_profile_id: IPFIX collector profile id (required)
        :type  i_pfix_l2_collector_profile: :class:`com.vmware.nsx_policy.model_client.IPFIXL2CollectorProfile`
        :param i_pfix_l2_collector_profile: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXL2CollectorProfile`
        :return: the stored profile
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_l2_collector_profile_id': ipfix_l2_collector_profile_id,
            'i_pfix_l2_collector_profile': i_pfix_l2_collector_profile,
        }
        return self._invoke('update', payload)
class IpfixL2Profiles(VapiInterface):
    """
    Client stub for the NSX policy IPFIX L2 profile service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.ipfix_l2_profiles'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used to create the underlying stub.
        """
        VapiInterface.__init__(self, config, _IpfixL2ProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, ipfix_l2_profile_id):
        """
        Delete the IPFIX L2 Profile; flow forwarding to the selected
        collector will be stopped.

        :type  ipfix_l2_profile_id: :class:`str`
        :param ipfix_l2_profile_id: IPFIX L2 Profile ID (required)
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_l2_profile_id': ipfix_l2_profile_id,
        }
        return self._invoke('delete', payload)

    def get(self, ipfix_l2_profile_id):
        """
        Return details of the IPFIX L2 profile.

        :type  ipfix_l2_profile_id: :class:`str`
        :param ipfix_l2_profile_id: IPFIX L2 profile id (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXL2Profile`
        :return: the requested profile
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_l2_profile_id': ipfix_l2_profile_id,
        }
        return self._invoke('get', payload)

    def list(self, cursor=None, include_mark_for_delete_objects=None,
             included_fields=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        List the IPFIX L2 Profiles available on the selected logical l2.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for the next page of records, supplied
            by the current result page. (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Include objects marked for
            deletion in the results. (optional, default false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXL2ProfileListResult`
        :return: paginated list of profiles
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def patch(self, ipfix_l2_profile_id, i_pfix_l2_profile):
        """
        Create a new IPFIX L2 profile if one with the given id does not
        already exist; otherwise patch the existing profile.

        :type  ipfix_l2_profile_id: :class:`str`
        :param ipfix_l2_profile_id: IPFIX L2 Profile ID (required)
        :type  i_pfix_l2_profile: :class:`com.vmware.nsx_policy.model_client.IPFIXL2Profile`
        :param i_pfix_l2_profile: (required)
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_l2_profile_id': ipfix_l2_profile_id,
            'i_pfix_l2_profile': i_pfix_l2_profile,
        }
        return self._invoke('patch', payload)

    def update(self, ipfix_l2_profile_id, i_pfix_l2_profile):
        """
        Create or fully replace the IPFIX L2 Profile. The profile is a
        reusable entity; a single profile can be attached to multiple
        bindings, e.g. group, segment and port.

        :type  ipfix_l2_profile_id: :class:`str`
        :param ipfix_l2_profile_id: IPFIX L2 Profile ID (required)
        :type  i_pfix_l2_profile: :class:`com.vmware.nsx_policy.model_client.IPFIXL2Profile`
        :param i_pfix_l2_profile: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPFIXL2Profile`
        :return: the stored profile
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'ipfix_l2_profile_id': ipfix_l2_profile_id,
            'i_pfix_l2_profile': i_pfix_l2_profile,
        }
        return self._invoke('update', payload)
class IpsecVpnDpdProfiles(VapiInterface):
    """
    Client stub for the NSX policy IPSec VPN dead peer detection (DPD)
    profile service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.ipsec_vpn_dpd_profiles'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used to create the underlying stub.
        """
        VapiInterface.__init__(self, config, _IpsecVpnDpdProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, dpd_profile_id):
        """
        Delete a custom dead peer detection (DPD) profile. The profile can
        not be deleted while references to it exist.

        :type  dpd_profile_id: :class:`str`
        :param dpd_profile_id: (required)
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'dpd_profile_id': dpd_profile_id,
        }
        return self._invoke('delete', payload)

    def get(self, dpd_profile_id):
        """
        Get the custom dead peer detection (DPD) profile with the given id.

        :type  dpd_profile_id: :class:`str`
        :param dpd_profile_id: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPSecVpnDpdProfile`
        :return: the requested profile
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'dpd_profile_id': dpd_profile_id,
        }
        return self._invoke('get', payload)

    def list(self, cursor=None, include_mark_for_delete_objects=None,
             included_fields=None, page_size=None, sort_ascending=None,
             sort_by=None):
        """
        Get a paginated list of all DPD Profiles.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for the next page of records, supplied
            by the current result page. (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Include objects marked for
            deletion in the results. (optional, default false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer. (optional, default 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted. (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPSecVpnDpdProfileListResult`
        :return: paginated list of DPD profiles
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def patch(self, dpd_profile_id, ip_sec_vpn_dpd_profile):
        """
        Create or patch a dead peer detection (DPD) profile. Any change in
        the profile affects all sessions consuming it. The system is
        provisioned with a system-owned, editable default DPD profile.

        :type  dpd_profile_id: :class:`str`
        :param dpd_profile_id: (required)
        :type  ip_sec_vpn_dpd_profile: :class:`com.vmware.nsx_policy.model_client.IPSecVpnDpdProfile`
        :param ip_sec_vpn_dpd_profile: (required)
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'dpd_profile_id': dpd_profile_id,
            'ip_sec_vpn_dpd_profile': ip_sec_vpn_dpd_profile,
        }
        return self._invoke('patch', payload)

    def update(self, dpd_profile_id, ip_sec_vpn_dpd_profile):
        """
        Create or replace a dead peer detection (DPD) profile. Any change in
        the profile affects all sessions consuming it. The system is
        provisioned with a system-owned, editable default DPD profile.
        Revision is optional for creation and required for update.

        :type  dpd_profile_id: :class:`str`
        :param dpd_profile_id: (required)
        :type  ip_sec_vpn_dpd_profile: :class:`com.vmware.nsx_policy.model_client.IPSecVpnDpdProfile`
        :param ip_sec_vpn_dpd_profile: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPSecVpnDpdProfile`
        :return: the stored profile
        :raise: standard :mod:`com.vmware.vapi.std.errors_client` errors
            (ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized, NotFound)
        """
        payload = {
            'dpd_profile_id': dpd_profile_id,
            'ip_sec_vpn_dpd_profile': ip_sec_vpn_dpd_profile,
        }
        return self._invoke('update', payload)
class IpsecVpnIkeProfiles(VapiInterface):
    """
    Client bindings for the policy IPSec VPN IKE profile service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.ipsec_vpn_ike_profiles'
    # Canonical identifier of the service.

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _IpsecVpnIkeProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, ike_profile_id):
        """
        Remove a custom IKE profile. The request is rejected while other
        objects still hold references to the profile.

        :type  ike_profile_id: :class:`str`
        :param ike_profile_id: (required)

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'ike_profile_id': ike_profile_id}
        return self._invoke('delete', args)

    def get(self, ike_profile_id):
        """
        Fetch the custom IKE profile with the given identifier.

        :type  ike_profile_id: :class:`str`
        :param ike_profile_id: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPSecVpnIkeProfile`
        :return: com.vmware.nsx_policy.model.IPSecVpnIkeProfile

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'ike_profile_id': ike_profile_id}
        return self._invoke('get', args)

    def list(self,
             cursor=None,
             include_mark_for_delete_objects=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Return a paginated listing of all IKE profiles.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor supplied by the current result page,
            used to fetch the next page of records (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Also include objects that are
            marked for deletion (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Upper bound on results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPSecVpnIkeProfileListResult`
        :return: com.vmware.nsx_policy.model.IPSecVpnIkeProfileListResult

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)

    def patch(self, ike_profile_id, ip_sec_vpn_ike_profile):
        """
        Create or patch a custom internet key exchange (IKE) profile. An IKE
        profile is a reusable profile capturing IKE and phase-one negotiation
        parameters. The system is pre-provisioned with a system-owned editable
        default IKE profile plus suggested profiles for peering with popular
        remote peers such as AWS VPN; custom profiles may be added as needed.
        Any change to a profile affects every session consuming it.

        :type  ike_profile_id: :class:`str`
        :param ike_profile_id: (required)
        :type  ip_sec_vpn_ike_profile: :class:`com.vmware.nsx_policy.model_client.IPSecVpnIkeProfile`
        :param ip_sec_vpn_ike_profile: (required)

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'ike_profile_id': ike_profile_id,
            'ip_sec_vpn_ike_profile': ip_sec_vpn_ike_profile,
        }
        return self._invoke('patch', args)

    def update(self, ike_profile_id, ip_sec_vpn_ike_profile):
        """
        Create or fully replace a custom internet key exchange (IKE) profile.
        An IKE profile is a reusable profile capturing IKE and phase-one
        negotiation parameters. The system is pre-provisioned with a
        system-owned editable default IKE profile plus suggested profiles for
        peering with popular remote peers such as AWS VPN; custom profiles may
        be added as needed. Any change to a profile affects every session
        consuming it. Revision is optional for creation and required for
        update.

        :type  ike_profile_id: :class:`str`
        :param ike_profile_id: (required)
        :type  ip_sec_vpn_ike_profile: :class:`com.vmware.nsx_policy.model_client.IPSecVpnIkeProfile`
        :param ip_sec_vpn_ike_profile: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPSecVpnIkeProfile`
        :return: com.vmware.nsx_policy.model.IPSecVpnIkeProfile

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'ike_profile_id': ike_profile_id,
            'ip_sec_vpn_ike_profile': ip_sec_vpn_ike_profile,
        }
        return self._invoke('update', args)
class IpsecVpnTunnelProfiles(VapiInterface):
    """
    Client bindings for the policy IPSec VPN tunnel profile service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.ipsec_vpn_tunnel_profiles'
    # Canonical identifier of the service.

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _IpsecVpnTunnelProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, tunnel_profile_id):
        """
        Remove a custom IPSec tunnel profile. The request is rejected while
        other objects still hold references to the profile.

        :type  tunnel_profile_id: :class:`str`
        :param tunnel_profile_id: (required)

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'tunnel_profile_id': tunnel_profile_id}
        return self._invoke('delete', args)

    def get(self, tunnel_profile_id):
        """
        Fetch the custom IPSec tunnel profile with the given identifier.

        :type  tunnel_profile_id: :class:`str`
        :param tunnel_profile_id: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPSecVpnTunnelProfile`
        :return: com.vmware.nsx_policy.model.IPSecVpnTunnelProfile

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'tunnel_profile_id': tunnel_profile_id}
        return self._invoke('get', args)

    def list(self,
             cursor=None,
             include_mark_for_delete_objects=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Return a paginated listing of all IPSec tunnel profiles.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor supplied by the current result page,
            used to fetch the next page of records (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Also include objects that are
            marked for deletion (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Upper bound on results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPSecVpnTunnelProfileListResult`
        :return: com.vmware.nsx_policy.model.IPSecVpnTunnelProfileListResult

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)

    def patch(self, tunnel_profile_id, ip_sec_vpn_tunnel_profile):
        """
        Create or patch a custom IPSec tunnel profile. An IPSec tunnel
        profile is a reusable profile capturing phase-two negotiation
        parameters and tunnel properties. The system is provisioned with a
        system-owned editable default IPSec tunnel profile. Any change to a
        profile affects every session consuming it.

        :type  tunnel_profile_id: :class:`str`
        :param tunnel_profile_id: (required)
        :type  ip_sec_vpn_tunnel_profile: :class:`com.vmware.nsx_policy.model_client.IPSecVpnTunnelProfile`
        :param ip_sec_vpn_tunnel_profile: (required)

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'tunnel_profile_id': tunnel_profile_id,
            'ip_sec_vpn_tunnel_profile': ip_sec_vpn_tunnel_profile,
        }
        return self._invoke('patch', args)

    def update(self, tunnel_profile_id, ip_sec_vpn_tunnel_profile):
        """
        Create or fully replace a custom IPSec tunnel profile. An IPSec
        tunnel profile is a reusable profile capturing phase-two negotiation
        parameters and tunnel properties. The system is provisioned with a
        system-owned editable default IPSec tunnel profile. Any change to a
        profile affects every session consuming it. Revision is optional for
        creation and required for update.

        :type  tunnel_profile_id: :class:`str`
        :param tunnel_profile_id: (required)
        :type  ip_sec_vpn_tunnel_profile: :class:`com.vmware.nsx_policy.model_client.IPSecVpnTunnelProfile`
        :param ip_sec_vpn_tunnel_profile: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.IPSecVpnTunnelProfile`
        :return: com.vmware.nsx_policy.model.IPSecVpnTunnelProfile

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'tunnel_profile_id': tunnel_profile_id,
            'ip_sec_vpn_tunnel_profile': ip_sec_vpn_tunnel_profile,
        }
        return self._invoke('update', args)
class PartnerServices(VapiInterface):
    """
    Client bindings for the read-only policy partner services catalog.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.partner_services'
    # Canonical identifier of the service.

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _PartnerServicesStub)
        self._VAPI_OPERATION_IDS = {}

    def get(self, service_name):
        """
        Read the partner service identified by the given name.

        :type  service_name: :class:`str`
        :param service_name: Name of the service (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.ServiceDefinition`
        :return: com.vmware.nsx_policy.model.ServiceDefinition

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'service_name': service_name}
        return self._invoke('get', args)

    def list(self,
             cursor=None,
             include_mark_for_delete_objects=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Read all partner services available for service insertion.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor supplied by the current result page,
            used to fetch the next page of records (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Also include objects that are
            marked for deletion (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Upper bound on results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.ServiceInsertionServiceListResult`
        :return: com.vmware.nsx_policy.model.ServiceInsertionServiceListResult

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)
class PortMirroringProfiles(VapiInterface):
    """
    Client bindings for the policy port mirroring profile service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.port_mirroring_profiles'
    # Canonical identifier of the service.

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _PortMirroringProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, port_mirroring_profile_id):
        """
        Delete a port mirroring profile; mirroring from the source to the
        destination ports is stopped.

        :type  port_mirroring_profile_id: :class:`str`
        :param port_mirroring_profile_id: Port Mirroring Profile Id (required)

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'port_mirroring_profile_id': port_mirroring_profile_id}
        return self._invoke('delete', args)

    def get(self, port_mirroring_profile_id):
        """
        Return the details of a port mirroring profile.

        :type  port_mirroring_profile_id: :class:`str`
        :param port_mirroring_profile_id: Port Mirroring Profile Id (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.PortMirroringProfile`
        :return: com.vmware.nsx_policy.model.PortMirroringProfile

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'port_mirroring_profile_id': port_mirroring_profile_id}
        return self._invoke('get', args)

    def list(self,
             cursor=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        List all port mirroring profiles.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor supplied by the current result page,
            used to fetch the next page of records (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Upper bound on results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.PortMirroringProfileListResult`
        :return: com.vmware.nsx_policy.model.PortMirroringProfileListResult

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)

    def patch(self, port_mirroring_profile_id, port_mirroring_profile):
        """
        Create a new port mirroring profile if one with the given id does not
        already exist; otherwise patch the existing profile. Realized
        entities of this API can be found via the path of the monitoring
        profile binding map that applies this profile.

        :type  port_mirroring_profile_id: :class:`str`
        :param port_mirroring_profile_id: Port Mirroring Profile Id (required)
        :type  port_mirroring_profile: :class:`com.vmware.nsx_policy.model_client.PortMirroringProfile`
        :param port_mirroring_profile: (required)

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'port_mirroring_profile_id': port_mirroring_profile_id,
            'port_mirroring_profile': port_mirroring_profile,
        }
        return self._invoke('patch', args)

    def update(self, port_mirroring_profile_id, port_mirroring_profile):
        """
        Create or fully replace a port mirroring profile. Packets are
        mirrored from the source group, segment, or port to the destination
        group. Realized entities of this API can be found via the path of
        the monitoring profile binding map that applies this profile.

        :type  port_mirroring_profile_id: :class:`str`
        :param port_mirroring_profile_id: Port Mirroring Profiles Id (required)
        :type  port_mirroring_profile: :class:`com.vmware.nsx_policy.model_client.PortMirroringProfile`
        :param port_mirroring_profile: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.PortMirroringProfile`
        :return: com.vmware.nsx_policy.model.PortMirroringProfile

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'port_mirroring_profile_id': port_mirroring_profile_id,
            'port_mirroring_profile': port_mirroring_profile,
        }
        return self._invoke('update', args)
class Segments(VapiInterface):
    """
    Client bindings for the policy infra segments service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.segments'
    # Canonical identifier of the service.

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _SegmentsStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, segment_id):
        """
        Delete an infra segment.

        :type  segment_id: :class:`str`
        :param segment_id: Segment ID (required)

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'segment_id': segment_id}
        return self._invoke('delete', args)

    def delete_0(self, segment_id):
        """
        Force-delete an infra segment, bypassing validations performed during
        normal segment deletion. This may leave connectivity in an
        inconsistent state.

        :type  segment_id: :class:`str`
        :param segment_id: (required)

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'segment_id': segment_id}
        return self._invoke('delete_0', args)

    def get(self, segment_id):
        """
        Read an infra segment.

        :type  segment_id: :class:`str`
        :param segment_id: Segment ID (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.Segment`
        :return: com.vmware.nsx_policy.model.Segment

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'segment_id': segment_id}
        return self._invoke('get', args)

    def list(self,
             cursor=None,
             include_mark_for_delete_objects=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Return a paginated listing of every segment under infra.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor supplied by the current result page,
            used to fetch the next page of records (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Also include objects that are
            marked for deletion (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Upper bound on results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.SegmentListResult`
        :return: com.vmware.nsx_policy.model.SegmentListResult

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)

    def patch(self, segment_id, segment):
        """
        Create the segment if no segment with this id exists yet; otherwise
        update the existing segment with the specified attributes.

        :type  segment_id: :class:`str`
        :param segment_id: Segment ID (required)
        :type  segment: :class:`com.vmware.nsx_policy.model_client.Segment`
        :param segment: (required)

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'segment_id': segment_id,
            'segment': segment,
        }
        return self._invoke('patch', args)

    def update(self, segment_id, segment):
        """
        Create the segment if no segment with this id exists yet; otherwise
        fully replace the existing segment with this object.

        :type  segment_id: :class:`str`
        :param segment_id: Segment ID (required)
        :type  segment: :class:`com.vmware.nsx_policy.model_client.Segment`
        :param segment: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.Segment`
        :return: com.vmware.nsx_policy.model.Segment

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'segment_id': segment_id,
            'segment': segment,
        }
        return self._invoke('update', args)
class Services(VapiInterface):
    """
    Client bindings for the policy infra services service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.services'
    # Canonical identifier of the service.

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ServicesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self, service_id):
        """
        Delete a service.

        :type  service_id: :class:`str`
        :param service_id: Service ID (required)

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'service_id': service_id}
        return self._invoke('delete', args)

    def get(self, service_id):
        """
        Read a service.

        :type  service_id: :class:`str`
        :param service_id: Service ID (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.Service`
        :return: com.vmware.nsx_policy.model.Service

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {'service_id': service_id}
        return self._invoke('get', args)

    def list(self,
             cursor=None,
             default_service=None,
             include_mark_for_delete_objects=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Return a paginated listing of services for infra.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor supplied by the current result page,
            used to fetch the next page of records (optional)
        :type  default_service: :class:`bool` or ``None``
        :param default_service: Fetch all default services (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Also include objects that are
            marked for deletion (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Upper bound on results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.ServiceListResult`
        :return: com.vmware.nsx_policy.model.ServiceListResult

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'cursor': cursor,
            'default_service': default_service,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)

    def patch(self, service_id, service):
        """
        Create the service if one with the given ID does not already exist,
        creating any nested service entries populated in the payload. If the
        service already exists, patch it, including the nested service
        entries.

        :type  service_id: :class:`str`
        :param service_id: Service ID (required)
        :type  service: :class:`com.vmware.nsx_policy.model_client.Service`
        :param service: (required)

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'service_id': service_id,
            'service': service,
        }
        return self._invoke('patch', args)

    def update(self, service_id, service):
        """
        Create the service if one with the given ID does not already exist,
        creating any nested service entries populated in the payload. If the
        service already exists, update it, including the nested service
        entries. This is a full replace.

        :type  service_id: :class:`str`
        :param service_id: Service ID (required)
        :type  service: :class:`com.vmware.nsx_policy.model_client.Service`
        :param service: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.Service`
        :return: com.vmware.nsx_policy.model.Service

        May raise :class:`com.vmware.vapi.std.errors_client` errors:
        ``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
        ``Unauthorized``, ``NotFound``.
        """
        args = {
            'service_id': service_id,
            'service': service,
        }
        return self._invoke('update', args)
class _DhcpRelayConfigsStub(ApiInterfaceStub):
    """
    Generated stub for the ``com.vmware.nsx_policy.infra.dhcp_relay_configs``
    interface. Declares the wire-level metadata (input/output types, error
    maps, REST routing) for the delete/get/list/patch/update operations on
    DHCP relay configs under ``/policy/api/v1/infra/dhcp-relay-configs``.
    """
    def __init__(self, config):
        # Every operation on this interface reports the same five standard
        # vAPI errors; build a fresh id -> error-type map per operation.
        def _standard_errors():
            pairs = (
                ('com.vmware.vapi.std.errors.service_unavailable', 'ServiceUnavailable'),
                ('com.vmware.vapi.std.errors.invalid_request', 'InvalidRequest'),
                ('com.vmware.vapi.std.errors.internal_server_error', 'InternalServerError'),
                ('com.vmware.vapi.std.errors.unauthorized', 'Unauthorized'),
                ('com.vmware.vapi.std.errors.not_found', 'NotFound'),
            )
            return {
                error_id: type.ReferenceType('com.vmware.vapi.std.errors_client', name)
                for error_id, name in pairs
            }

        # Shorthand for one entry of the ``operations`` table below.
        def _operation(input_type, output_type, errors,
                       input_validators, output_validators):
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': input_validators,
                'output_validator_list': output_validators,
                'task_type': TaskType.NONE,
            }

        # --- delete -----------------------------------------------------
        delete_input_type = type.StructType('operation-input', {
            'dhcp_relay_config_id': type.StringType(),
        })
        delete_error_dict = _standard_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/policy/api/v1/infra/dhcp-relay-configs/{dhcp-relay-config-id}',
            path_variables={'dhcp_relay_config_id': 'dhcp-relay-config-id'},
            query_parameters={},
            content_type='application/json')

        # --- get --------------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'dhcp_relay_config_id': type.StringType(),
        })
        get_error_dict = _standard_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/dhcp-relay-configs/{dhcp-relay-config-id}',
            path_variables={'dhcp_relay_config_id': 'dhcp-relay-config-id'},
            query_parameters={},
            content_type='application/json')

        # --- list (paginated collection read) ---------------------------
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _standard_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/dhcp-relay-configs',
            path_variables={},
            # Each pagination knob is forwarded under its own name.
            query_parameters={name: name for name in (
                'cursor', 'include_mark_for_delete_objects',
                'included_fields', 'page_size', 'sort_ascending',
                'sort_by')},
            content_type='application/json')

        # --- patch (merge semantics, empty response body) ---------------
        patch_input_type = type.StructType('operation-input', {
            'dhcp_relay_config_id': type.StringType(),
            'dhcp_relay_config': type.ReferenceType('com.vmware.nsx_policy.model_client', 'DhcpRelayConfig'),
        })
        patch_error_dict = _standard_errors()
        patch_input_value_validator_list = [HasFieldsOfValidator()]
        patch_output_validator_list = []
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template='/policy/api/v1/infra/dhcp-relay-configs/{dhcp-relay-config-id}',
            request_body_parameter='dhcp_relay_config',
            path_variables={'dhcp_relay_config_id': 'dhcp-relay-config-id'},
            query_parameters={},
            content_type='application/json')

        # --- update (full replace, returns the stored object) -----------
        update_input_type = type.StructType('operation-input', {
            'dhcp_relay_config_id': type.StringType(),
            'dhcp_relay_config': type.ReferenceType('com.vmware.nsx_policy.model_client', 'DhcpRelayConfig'),
        })
        update_error_dict = _standard_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/policy/api/v1/infra/dhcp-relay-configs/{dhcp-relay-config-id}',
            request_body_parameter='dhcp_relay_config',
            path_variables={'dhcp_relay_config_id': 'dhcp-relay-config-id'},
            query_parameters={},
            content_type='application/json')

        _model = 'com.vmware.nsx_policy.model_client'
        operations = {
            'delete': _operation(delete_input_type, type.VoidType(),
                                 delete_error_dict,
                                 delete_input_value_validator_list,
                                 delete_output_validator_list),
            'get': _operation(get_input_type,
                              type.ReferenceType(_model, 'DhcpRelayConfig'),
                              get_error_dict,
                              get_input_value_validator_list,
                              get_output_validator_list),
            'list': _operation(list_input_type,
                               type.ReferenceType(_model, 'DhcpRelayConfigListResult'),
                               list_error_dict,
                               list_input_value_validator_list,
                               list_output_validator_list),
            'patch': _operation(patch_input_type, type.VoidType(),
                                patch_error_dict,
                                patch_input_value_validator_list,
                                patch_output_validator_list),
            'update': _operation(update_input_type,
                                 type.ReferenceType(_model, 'DhcpRelayConfig'),
                                 update_error_dict,
                                 update_input_value_validator_list,
                                 update_output_validator_list),
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.dhcp_relay_configs',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _DhcpServerConfigsStub(ApiInterfaceStub):
    """
    Generated stub for the ``com.vmware.nsx_policy.infra.dhcp_server_configs``
    interface. Declares the wire-level metadata (input/output types, error
    maps, REST routing) for the delete/get/list/patch/update operations on
    DHCP server configs under ``/policy/api/v1/infra/dhcp-server-configs``.
    """
    def __init__(self, config):
        # Every operation on this interface reports the same five standard
        # vAPI errors; build a fresh id -> error-type map per operation.
        def _standard_errors():
            pairs = (
                ('com.vmware.vapi.std.errors.service_unavailable', 'ServiceUnavailable'),
                ('com.vmware.vapi.std.errors.invalid_request', 'InvalidRequest'),
                ('com.vmware.vapi.std.errors.internal_server_error', 'InternalServerError'),
                ('com.vmware.vapi.std.errors.unauthorized', 'Unauthorized'),
                ('com.vmware.vapi.std.errors.not_found', 'NotFound'),
            )
            return {
                error_id: type.ReferenceType('com.vmware.vapi.std.errors_client', name)
                for error_id, name in pairs
            }

        # Shorthand for one entry of the ``operations`` table below.
        def _operation(input_type, output_type, errors,
                       input_validators, output_validators):
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': input_validators,
                'output_validator_list': output_validators,
                'task_type': TaskType.NONE,
            }

        # --- delete -----------------------------------------------------
        delete_input_type = type.StructType('operation-input', {
            'dhcp_server_config_id': type.StringType(),
        })
        delete_error_dict = _standard_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/policy/api/v1/infra/dhcp-server-configs/{dhcp-server-config-id}',
            path_variables={'dhcp_server_config_id': 'dhcp-server-config-id'},
            query_parameters={},
            content_type='application/json')

        # --- get --------------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'dhcp_server_config_id': type.StringType(),
        })
        get_error_dict = _standard_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/dhcp-server-configs/{dhcp-server-config-id}',
            path_variables={'dhcp_server_config_id': 'dhcp-server-config-id'},
            query_parameters={},
            content_type='application/json')

        # --- list (paginated collection read) ---------------------------
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _standard_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/dhcp-server-configs',
            path_variables={},
            # Each pagination knob is forwarded under its own name.
            query_parameters={name: name for name in (
                'cursor', 'include_mark_for_delete_objects',
                'included_fields', 'page_size', 'sort_ascending',
                'sort_by')},
            content_type='application/json')

        # --- patch (merge semantics, empty response body) ---------------
        patch_input_type = type.StructType('operation-input', {
            'dhcp_server_config_id': type.StringType(),
            'dhcp_server_config': type.ReferenceType('com.vmware.nsx_policy.model_client', 'DhcpServerConfig'),
        })
        patch_error_dict = _standard_errors()
        patch_input_value_validator_list = [HasFieldsOfValidator()]
        patch_output_validator_list = []
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template='/policy/api/v1/infra/dhcp-server-configs/{dhcp-server-config-id}',
            request_body_parameter='dhcp_server_config',
            path_variables={'dhcp_server_config_id': 'dhcp-server-config-id'},
            query_parameters={},
            content_type='application/json')

        # --- update (full replace, returns the stored object) -----------
        update_input_type = type.StructType('operation-input', {
            'dhcp_server_config_id': type.StringType(),
            'dhcp_server_config': type.ReferenceType('com.vmware.nsx_policy.model_client', 'DhcpServerConfig'),
        })
        update_error_dict = _standard_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/policy/api/v1/infra/dhcp-server-configs/{dhcp-server-config-id}',
            request_body_parameter='dhcp_server_config',
            path_variables={'dhcp_server_config_id': 'dhcp-server-config-id'},
            query_parameters={},
            content_type='application/json')

        _model = 'com.vmware.nsx_policy.model_client'
        operations = {
            'delete': _operation(delete_input_type, type.VoidType(),
                                 delete_error_dict,
                                 delete_input_value_validator_list,
                                 delete_output_validator_list),
            'get': _operation(get_input_type,
                              type.ReferenceType(_model, 'DhcpServerConfig'),
                              get_error_dict,
                              get_input_value_validator_list,
                              get_output_validator_list),
            'list': _operation(list_input_type,
                               type.ReferenceType(_model, 'DhcpServerConfigListResult'),
                               list_error_dict,
                               list_input_value_validator_list,
                               list_output_validator_list),
            'patch': _operation(patch_input_type, type.VoidType(),
                                patch_error_dict,
                                patch_input_value_validator_list,
                                patch_output_validator_list),
            'update': _operation(update_input_type,
                                 type.ReferenceType(_model, 'DhcpServerConfig'),
                                 update_error_dict,
                                 update_input_value_validator_list,
                                 update_output_validator_list),
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.dhcp_server_configs',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _DnsForwarderZonesStub(ApiInterfaceStub):
    """
    Generated stub for the ``com.vmware.nsx_policy.infra.dns_forwarder_zones``
    interface. Declares the wire-level metadata (input/output types, error
    maps, REST routing) for the delete/get/list/patch/update operations on
    DNS forwarder zones under ``/policy/api/v1/infra/dns-forwarder-zones``.
    """
    def __init__(self, config):
        # Every operation on this interface reports the same five standard
        # vAPI errors; build a fresh id -> error-type map per operation.
        def _standard_errors():
            pairs = (
                ('com.vmware.vapi.std.errors.service_unavailable', 'ServiceUnavailable'),
                ('com.vmware.vapi.std.errors.invalid_request', 'InvalidRequest'),
                ('com.vmware.vapi.std.errors.internal_server_error', 'InternalServerError'),
                ('com.vmware.vapi.std.errors.unauthorized', 'Unauthorized'),
                ('com.vmware.vapi.std.errors.not_found', 'NotFound'),
            )
            return {
                error_id: type.ReferenceType('com.vmware.vapi.std.errors_client', name)
                for error_id, name in pairs
            }

        # Shorthand for one entry of the ``operations`` table below.
        def _operation(input_type, output_type, errors,
                       input_validators, output_validators):
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': input_validators,
                'output_validator_list': output_validators,
                'task_type': TaskType.NONE,
            }

        # --- delete -----------------------------------------------------
        delete_input_type = type.StructType('operation-input', {
            'dns_forwarder_zone_id': type.StringType(),
        })
        delete_error_dict = _standard_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/policy/api/v1/infra/dns-forwarder-zones/{dns-forwarder-zone-id}',
            path_variables={'dns_forwarder_zone_id': 'dns-forwarder-zone-id'},
            query_parameters={},
            content_type='application/json')

        # --- get --------------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'dns_forwarder_zone_id': type.StringType(),
        })
        get_error_dict = _standard_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/dns-forwarder-zones/{dns-forwarder-zone-id}',
            path_variables={'dns_forwarder_zone_id': 'dns-forwarder-zone-id'},
            query_parameters={},
            content_type='application/json')

        # --- list (paginated collection read) ---------------------------
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _standard_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/dns-forwarder-zones',
            path_variables={},
            # Each pagination knob is forwarded under its own name.
            query_parameters={name: name for name in (
                'cursor', 'include_mark_for_delete_objects',
                'included_fields', 'page_size', 'sort_ascending',
                'sort_by')},
            content_type='application/json')

        # --- patch (merge semantics, empty response body) ---------------
        patch_input_type = type.StructType('operation-input', {
            'dns_forwarder_zone_id': type.StringType(),
            'policy_dns_forwarder_zone': type.ReferenceType('com.vmware.nsx_policy.model_client', 'PolicyDnsForwarderZone'),
        })
        patch_error_dict = _standard_errors()
        patch_input_value_validator_list = [HasFieldsOfValidator()]
        patch_output_validator_list = []
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template='/policy/api/v1/infra/dns-forwarder-zones/{dns-forwarder-zone-id}',
            request_body_parameter='policy_dns_forwarder_zone',
            path_variables={'dns_forwarder_zone_id': 'dns-forwarder-zone-id'},
            query_parameters={},
            content_type='application/json')

        # --- update (full replace, returns the stored object) -----------
        update_input_type = type.StructType('operation-input', {
            'dns_forwarder_zone_id': type.StringType(),
            'policy_dns_forwarder_zone': type.ReferenceType('com.vmware.nsx_policy.model_client', 'PolicyDnsForwarderZone'),
        })
        update_error_dict = _standard_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/policy/api/v1/infra/dns-forwarder-zones/{dns-forwarder-zone-id}',
            request_body_parameter='policy_dns_forwarder_zone',
            path_variables={'dns_forwarder_zone_id': 'dns-forwarder-zone-id'},
            query_parameters={},
            content_type='application/json')

        _model = 'com.vmware.nsx_policy.model_client'
        operations = {
            'delete': _operation(delete_input_type, type.VoidType(),
                                 delete_error_dict,
                                 delete_input_value_validator_list,
                                 delete_output_validator_list),
            'get': _operation(get_input_type,
                              type.ReferenceType(_model, 'PolicyDnsForwarderZone'),
                              get_error_dict,
                              get_input_value_validator_list,
                              get_output_validator_list),
            'list': _operation(list_input_type,
                               type.ReferenceType(_model, 'PolicyDnsForwarderZoneListResult'),
                               list_error_dict,
                               list_input_value_validator_list,
                               list_output_validator_list),
            'patch': _operation(patch_input_type, type.VoidType(),
                                patch_error_dict,
                                patch_input_value_validator_list,
                                patch_output_validator_list),
            'update': _operation(update_input_type,
                                 type.ReferenceType(_model, 'PolicyDnsForwarderZone'),
                                 update_error_dict,
                                 update_input_value_validator_list,
                                 update_output_validator_list),
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.dns_forwarder_zones',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _DomainsStub(ApiInterfaceStub):
    """
    Generated stub for the ``com.vmware.nsx_policy.infra.domains``
    interface. Declares the wire-level metadata for the read-only get/list
    operations on domains under ``/policy/api/v1/infra/domains``.
    """
    def __init__(self, config):
        # Both operations on this interface report the same five standard
        # vAPI errors; build a fresh id -> error-type map per operation.
        def _standard_errors():
            pairs = (
                ('com.vmware.vapi.std.errors.service_unavailable', 'ServiceUnavailable'),
                ('com.vmware.vapi.std.errors.invalid_request', 'InvalidRequest'),
                ('com.vmware.vapi.std.errors.internal_server_error', 'InternalServerError'),
                ('com.vmware.vapi.std.errors.unauthorized', 'Unauthorized'),
                ('com.vmware.vapi.std.errors.not_found', 'NotFound'),
            )
            return {
                error_id: type.ReferenceType('com.vmware.vapi.std.errors_client', name)
                for error_id, name in pairs
            }

        # --- get --------------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'domain_id': type.StringType(),
        })
        get_error_dict = _standard_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/domains/{domain-id}',
            path_variables={'domain_id': 'domain-id'},
            query_parameters={},
            content_type='application/json')

        # --- list (paginated collection read) ---------------------------
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _standard_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/domains',
            path_variables={},
            # Each pagination knob is forwarded under its own name.
            query_parameters={name: name for name in (
                'cursor', 'include_mark_for_delete_objects',
                'included_fields', 'page_size', 'sort_ascending',
                'sort_by')},
            content_type='application/json')

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx_policy.model_client', 'Domain'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx_policy.model_client', 'DomainListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.domains',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _DraftsStub(ApiInterfaceStub):
    """REST stub for com.vmware.nsx_policy.infra.drafts.

    Exposes abort/delete/get/list/patch/publish/update operations on
    PolicyDraft resources under /policy/api/v1/infra/drafts.
    """

    def __init__(self, config):
        # Every operation on this interface declares the same five standard
        # vAPI errors; build a fresh mapping per operation, exactly as the
        # expanded per-operation dicts would.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        def model_ref(name):
            # Reference into the NSX policy model bindings.
            return type.ReferenceType('com.vmware.nsx_policy.model_client', name)

        def op_spec(input_type, output_type, in_validators, out_validators):
            # One entry of the ApiInterfaceStub operations table.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': std_errors(),
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        def item_rest(method, template, body=None):
            # REST metadata for a single-draft URL ({draft-id} path variable);
            # request_body_parameter is only passed when the operation has one.
            kwargs = {'request_body_parameter': body} if body else {}
            return OperationRestMetadata(
                http_method=method,
                url_template=template,
                path_variables={'draft_id': 'draft-id'},
                query_parameters={},
                content_type='application/json',
                **kwargs)

        draft_key = {'draft_id': type.StringType()}
        item_url = '/policy/api/v1/infra/drafts/{draft-id}'

        # Optional paging/filtering fields of the 'list' operation; the REST
        # query-parameter names are identical to the field names.
        list_fields = {
            'auto_drafts': type.OptionalType(type.BooleanType()),
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        }

        operations = {
            'abort': op_spec(
                type.StructType('operation-input', dict(draft_key)),
                type.VoidType(), [], []),
            'delete': op_spec(
                type.StructType('operation-input', dict(draft_key)),
                type.VoidType(), [], []),
            'get': op_spec(
                type.StructType('operation-input', dict(draft_key)),
                model_ref('PolicyDraft'), [], [HasFieldsOfValidator()]),
            'list': op_spec(
                type.StructType('operation-input', dict(list_fields)),
                model_ref('PolicyDraftListResult'), [], [HasFieldsOfValidator()]),
            'patch': op_spec(
                type.StructType('operation-input',
                                dict(draft_key, policy_draft=model_ref('PolicyDraft'))),
                type.VoidType(), [HasFieldsOfValidator()], []),
            'publish': op_spec(
                type.StructType('operation-input',
                                dict(draft_key, infra=model_ref('Infra'))),
                type.VoidType(), [HasFieldsOfValidator()], []),
            'update': op_spec(
                type.StructType('operation-input',
                                dict(draft_key, policy_draft=model_ref('PolicyDraft'))),
                model_ref('PolicyDraft'),
                [HasFieldsOfValidator()], [HasFieldsOfValidator()]),
        }
        rest_metadata = {
            'abort': item_rest('POST', item_url + '?action=abort'),
            'delete': item_rest('DELETE', item_url),
            'get': item_rest('GET', item_url),
            'list': OperationRestMetadata(
                http_method='GET',
                url_template='/policy/api/v1/infra/drafts',
                path_variables={},
                query_parameters={name: name for name in list_fields},
                content_type='application/json'
            ),
            'patch': item_rest('PATCH', item_url, body='policy_draft'),
            'publish': item_rest('POST', item_url + '?action=publish', body='infra'),
            'update': item_rest('PUT', item_url, body='policy_draft'),
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.drafts',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _GroupAssociationsStub(ApiInterfaceStub):
    """REST stub for com.vmware.nsx_policy.infra.group_associations.

    Read-only interface: a single 'list' operation over
    /policy/api/v1/infra/group-associations.
    """

    def __init__(self, config):
        # 'list' takes a required intent_path plus the usual optional
        # paging/sorting fields; REST query-parameter names equal field names.
        list_fields = {
            'intent_path': type.StringType(),
            'cursor': type.OptionalType(type.StringType()),
            'enforcement_point_path': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        }
        # The five standard vAPI errors this operation can raise.
        list_errors = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        operations = {
            'list': {
                'input_type': type.StructType('operation-input', dict(list_fields)),
                'output_type': type.ReferenceType('com.vmware.nsx_policy.model_client', 'PolicyResourceReferenceForEPListResult'),
                'errors': list_errors,
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'list': OperationRestMetadata(
                http_method='GET',
                url_template='/policy/api/v1/infra/group-associations',
                path_variables={},
                query_parameters={name: name for name in list_fields},
                content_type='application/json'
            ),
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.group_associations',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpfixCollectorProfilesStub(ApiInterfaceStub):
    """REST stub for com.vmware.nsx_policy.infra.ipfix_collector_profiles.

    delete/get/list/patch/update operations on IPFIXCollectorProfile
    resources under /policy/api/v1/infra/ipfix-collector-profiles.
    """

    def __init__(self, config):
        # Every operation declares the same five standard vAPI errors;
        # build a fresh mapping per operation.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        def model_ref(name):
            # Reference into the NSX policy model bindings.
            return type.ReferenceType('com.vmware.nsx_policy.model_client', name)

        def op_spec(input_type, output_type, in_validators, out_validators):
            # One entry of the ApiInterfaceStub operations table.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': std_errors(),
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        id_field = {'ipfix_collector_profile_id': type.StringType()}
        item_url = '/policy/api/v1/infra/ipfix-collector-profiles/{ipfix-collector-profile-id}'
        # Name of the request-body parameter as generated by the bindings.
        body_param = 'i_PFIX_collector_profile'

        def item_rest(method, with_body=False):
            # REST metadata for the single-profile URL; the body parameter is
            # only declared for the operations that actually send one.
            kwargs = {'request_body_parameter': body_param} if with_body else {}
            return OperationRestMetadata(
                http_method=method,
                url_template=item_url,
                path_variables={'ipfix_collector_profile_id': 'ipfix-collector-profile-id'},
                query_parameters={},
                content_type='application/json',
                **kwargs)

        # Optional paging/sorting fields of 'list'; REST query-parameter
        # names equal the field names.
        list_fields = {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        }

        operations = {
            'delete': op_spec(
                type.StructType('operation-input', dict(id_field)),
                type.VoidType(), [], []),
            'get': op_spec(
                type.StructType('operation-input', dict(id_field)),
                model_ref('IPFIXCollectorProfile'), [], [HasFieldsOfValidator()]),
            'list': op_spec(
                type.StructType('operation-input', dict(list_fields)),
                model_ref('IPFIXCollectorProfileListResult'), [], [HasFieldsOfValidator()]),
            'patch': op_spec(
                type.StructType('operation-input',
                                dict(id_field, **{body_param: model_ref('IPFIXCollectorProfile')})),
                type.VoidType(), [HasFieldsOfValidator()], []),
            'update': op_spec(
                type.StructType('operation-input',
                                dict(id_field, **{body_param: model_ref('IPFIXCollectorProfile')})),
                model_ref('IPFIXCollectorProfile'),
                [HasFieldsOfValidator()], [HasFieldsOfValidator()]),
        }
        rest_metadata = {
            'delete': item_rest('DELETE'),
            'get': item_rest('GET'),
            'list': OperationRestMetadata(
                http_method='GET',
                url_template='/policy/api/v1/infra/ipfix-collector-profiles',
                path_variables={},
                query_parameters={name: name for name in list_fields},
                content_type='application/json'
            ),
            'patch': item_rest('PATCH', with_body=True),
            'update': item_rest('PUT', with_body=True),
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.ipfix_collector_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpfixDfwCollectorProfilesStub(ApiInterfaceStub):
    """REST stub for com.vmware.nsx_policy.infra.ipfix_dfw_collector_profiles.

    delete/get/list/patch/update operations on IPFIXDFWCollectorProfile
    resources under /policy/api/v1/infra/ipfix-dfw-collector-profiles.
    """

    def __init__(self, config):
        # Every operation declares the same five standard vAPI errors;
        # build a fresh mapping per operation.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        def model_ref(name):
            # Reference into the NSX policy model bindings.
            return type.ReferenceType('com.vmware.nsx_policy.model_client', name)

        def op_spec(input_type, output_type, in_validators, out_validators):
            # One entry of the ApiInterfaceStub operations table.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': std_errors(),
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        id_field = {'ipfix_dfw_collector_profile_id': type.StringType()}
        item_url = '/policy/api/v1/infra/ipfix-dfw-collector-profiles/{ipfix-dfw-collector-profile-id}'
        # Name of the request-body parameter as generated by the bindings.
        body_param = 'i_PFIXDFW_collector_profile'

        def item_rest(method, with_body=False):
            # REST metadata for the single-profile URL; the body parameter is
            # only declared for the operations that actually send one.
            kwargs = {'request_body_parameter': body_param} if with_body else {}
            return OperationRestMetadata(
                http_method=method,
                url_template=item_url,
                path_variables={'ipfix_dfw_collector_profile_id': 'ipfix-dfw-collector-profile-id'},
                query_parameters={},
                content_type='application/json',
                **kwargs)

        # Optional paging/sorting fields of 'list'; REST query-parameter
        # names equal the field names.
        list_fields = {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        }

        operations = {
            'delete': op_spec(
                type.StructType('operation-input', dict(id_field)),
                type.VoidType(), [], []),
            'get': op_spec(
                type.StructType('operation-input', dict(id_field)),
                model_ref('IPFIXDFWCollectorProfile'), [], [HasFieldsOfValidator()]),
            'list': op_spec(
                type.StructType('operation-input', dict(list_fields)),
                model_ref('IPFIXDFWCollectorProfileListResult'), [], [HasFieldsOfValidator()]),
            'patch': op_spec(
                type.StructType('operation-input',
                                dict(id_field, **{body_param: model_ref('IPFIXDFWCollectorProfile')})),
                type.VoidType(), [HasFieldsOfValidator()], []),
            'update': op_spec(
                type.StructType('operation-input',
                                dict(id_field, **{body_param: model_ref('IPFIXDFWCollectorProfile')})),
                model_ref('IPFIXDFWCollectorProfile'),
                [HasFieldsOfValidator()], [HasFieldsOfValidator()]),
        }
        rest_metadata = {
            'delete': item_rest('DELETE'),
            'get': item_rest('GET'),
            'list': OperationRestMetadata(
                http_method='GET',
                url_template='/policy/api/v1/infra/ipfix-dfw-collector-profiles',
                path_variables={},
                query_parameters={name: name for name in list_fields},
                content_type='application/json'
            ),
            'patch': item_rest('PATCH', with_body=True),
            'update': item_rest('PUT', with_body=True),
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.ipfix_dfw_collector_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpfixDfwProfilesStub(ApiInterfaceStub):
    """Stub wiring CRUD operations for /policy/api/v1/infra/ipfix-dfw-profiles."""

    def __init__(self, config):
        _ERR_NS = 'com.vmware.vapi.std.errors'
        _ERR_MOD = 'com.vmware.vapi.std.errors_client'
        _MODEL_MOD = 'com.vmware.nsx_policy.model_client'
        _URL_BASE = '/policy/api/v1/infra/ipfix-dfw-profiles'
        _URL_ITEM = _URL_BASE + '/{ipfix-dfw-profile-id}'
        _PATH_VARS = {'ipfix_dfw_profile_id': 'ipfix-dfw-profile-id'}

        def _std_errors():
            # Every operation declares the same five standard vAPI errors.
            return {
                _ERR_NS + '.service_unavailable':
                    type.ReferenceType(_ERR_MOD, 'ServiceUnavailable'),
                _ERR_NS + '.invalid_request':
                    type.ReferenceType(_ERR_MOD, 'InvalidRequest'),
                _ERR_NS + '.internal_server_error':
                    type.ReferenceType(_ERR_MOD, 'InternalServerError'),
                _ERR_NS + '.unauthorized':
                    type.ReferenceType(_ERR_MOD, 'Unauthorized'),
                _ERR_NS + '.not_found':
                    type.ReferenceType(_ERR_MOD, 'NotFound'),
            }

        def _id_input():
            # delete/get address a single profile by its identifier only.
            return type.StructType('operation-input', {
                'ipfix_dfw_profile_id': type.StringType(),
            })

        def _body_input():
            # patch/update carry the profile payload in addition to the id.
            return type.StructType('operation-input', {
                'ipfix_dfw_profile_id': type.StringType(),
                'i_PFIXDFW_profile': type.ReferenceType(_MODEL_MOD, 'IPFIXDFWProfile'),
            })

        # -- delete --
        delete_input_type = _id_input()
        delete_error_dict = _std_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=_URL_ITEM,
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- get --
        get_input_type = _id_input()
        get_error_dict = _std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_URL_ITEM,
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- list (paged collection query) --
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_URL_BASE,
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )

        # -- patch --
        patch_input_type = _body_input()
        patch_error_dict = _std_errors()
        patch_input_value_validator_list = [HasFieldsOfValidator()]
        patch_output_validator_list = []
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=_URL_ITEM,
            request_body_parameter='i_PFIXDFW_profile',
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- update --
        update_input_type = _body_input()
        update_error_dict = _std_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=_URL_ITEM,
            request_body_parameter='i_PFIXDFW_profile',
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        def _op(input_type, output_type, errors, in_validators, out_validators):
            # Assemble one operation descriptor in the shape ApiInterfaceStub expects.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        operations = {
            'delete': _op(delete_input_type, type.VoidType(), delete_error_dict,
                          delete_input_value_validator_list, delete_output_validator_list),
            'get': _op(get_input_type,
                       type.ReferenceType(_MODEL_MOD, 'IPFIXDFWProfile'),
                       get_error_dict, get_input_value_validator_list,
                       get_output_validator_list),
            'list': _op(list_input_type,
                        type.ReferenceType(_MODEL_MOD, 'IPFIXDFWProfileListResult'),
                        list_error_dict, list_input_value_validator_list,
                        list_output_validator_list),
            'patch': _op(patch_input_type, type.VoidType(), patch_error_dict,
                         patch_input_value_validator_list, patch_output_validator_list),
            'update': _op(update_input_type,
                          type.ReferenceType(_MODEL_MOD, 'IPFIXDFWProfile'),
                          update_error_dict, update_input_value_validator_list,
                          update_output_validator_list),
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.ipfix_dfw_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpfixL2CollectorProfilesStub(ApiInterfaceStub):
    """Stub wiring CRUD operations for /policy/api/v1/infra/ipfix-l2-collector-profiles."""

    def __init__(self, config):
        _ERR_NS = 'com.vmware.vapi.std.errors'
        _ERR_MOD = 'com.vmware.vapi.std.errors_client'
        _MODEL_MOD = 'com.vmware.nsx_policy.model_client'
        _URL_BASE = '/policy/api/v1/infra/ipfix-l2-collector-profiles'
        _URL_ITEM = _URL_BASE + '/{ipfix-l2-collector-profile-id}'
        _PATH_VARS = {'ipfix_l2_collector_profile_id': 'ipfix-l2-collector-profile-id'}

        def _std_errors():
            # Every operation declares the same five standard vAPI errors.
            return {
                _ERR_NS + '.service_unavailable':
                    type.ReferenceType(_ERR_MOD, 'ServiceUnavailable'),
                _ERR_NS + '.invalid_request':
                    type.ReferenceType(_ERR_MOD, 'InvalidRequest'),
                _ERR_NS + '.internal_server_error':
                    type.ReferenceType(_ERR_MOD, 'InternalServerError'),
                _ERR_NS + '.unauthorized':
                    type.ReferenceType(_ERR_MOD, 'Unauthorized'),
                _ERR_NS + '.not_found':
                    type.ReferenceType(_ERR_MOD, 'NotFound'),
            }

        def _id_input():
            # delete/get address a single profile by its identifier only.
            return type.StructType('operation-input', {
                'ipfix_l2_collector_profile_id': type.StringType(),
            })

        def _body_input():
            # patch/update carry the profile payload in addition to the id.
            return type.StructType('operation-input', {
                'ipfix_l2_collector_profile_id': type.StringType(),
                'i_PFIX_l2_collector_profile': type.ReferenceType(_MODEL_MOD, 'IPFIXL2CollectorProfile'),
            })

        # -- delete --
        delete_input_type = _id_input()
        delete_error_dict = _std_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=_URL_ITEM,
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- get --
        get_input_type = _id_input()
        get_error_dict = _std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_URL_ITEM,
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- list (paged collection query) --
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_URL_BASE,
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )

        # -- patch --
        patch_input_type = _body_input()
        patch_error_dict = _std_errors()
        patch_input_value_validator_list = [HasFieldsOfValidator()]
        patch_output_validator_list = []
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=_URL_ITEM,
            request_body_parameter='i_PFIX_l2_collector_profile',
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- update --
        update_input_type = _body_input()
        update_error_dict = _std_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=_URL_ITEM,
            request_body_parameter='i_PFIX_l2_collector_profile',
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        def _op(input_type, output_type, errors, in_validators, out_validators):
            # Assemble one operation descriptor in the shape ApiInterfaceStub expects.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        operations = {
            'delete': _op(delete_input_type, type.VoidType(), delete_error_dict,
                          delete_input_value_validator_list, delete_output_validator_list),
            'get': _op(get_input_type,
                       type.ReferenceType(_MODEL_MOD, 'IPFIXL2CollectorProfile'),
                       get_error_dict, get_input_value_validator_list,
                       get_output_validator_list),
            'list': _op(list_input_type,
                        type.ReferenceType(_MODEL_MOD, 'IPFIXL2CollectorProfileListResult'),
                        list_error_dict, list_input_value_validator_list,
                        list_output_validator_list),
            'patch': _op(patch_input_type, type.VoidType(), patch_error_dict,
                         patch_input_value_validator_list, patch_output_validator_list),
            'update': _op(update_input_type,
                          type.ReferenceType(_MODEL_MOD, 'IPFIXL2CollectorProfile'),
                          update_error_dict, update_input_value_validator_list,
                          update_output_validator_list),
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.ipfix_l2_collector_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpfixL2ProfilesStub(ApiInterfaceStub):
    """Stub wiring CRUD operations for /policy/api/v1/infra/ipfix-l2-profiles."""

    def __init__(self, config):
        _ERR_NS = 'com.vmware.vapi.std.errors'
        _ERR_MOD = 'com.vmware.vapi.std.errors_client'
        _MODEL_MOD = 'com.vmware.nsx_policy.model_client'
        _URL_BASE = '/policy/api/v1/infra/ipfix-l2-profiles'
        _URL_ITEM = _URL_BASE + '/{ipfix-l2-profile-id}'
        _PATH_VARS = {'ipfix_l2_profile_id': 'ipfix-l2-profile-id'}

        def _std_errors():
            # Every operation declares the same five standard vAPI errors.
            return {
                _ERR_NS + '.service_unavailable':
                    type.ReferenceType(_ERR_MOD, 'ServiceUnavailable'),
                _ERR_NS + '.invalid_request':
                    type.ReferenceType(_ERR_MOD, 'InvalidRequest'),
                _ERR_NS + '.internal_server_error':
                    type.ReferenceType(_ERR_MOD, 'InternalServerError'),
                _ERR_NS + '.unauthorized':
                    type.ReferenceType(_ERR_MOD, 'Unauthorized'),
                _ERR_NS + '.not_found':
                    type.ReferenceType(_ERR_MOD, 'NotFound'),
            }

        def _id_input():
            # delete/get address a single profile by its identifier only.
            return type.StructType('operation-input', {
                'ipfix_l2_profile_id': type.StringType(),
            })

        def _body_input():
            # patch/update carry the profile payload in addition to the id.
            return type.StructType('operation-input', {
                'ipfix_l2_profile_id': type.StringType(),
                'i_PFIX_l2_profile': type.ReferenceType(_MODEL_MOD, 'IPFIXL2Profile'),
            })

        # -- delete --
        delete_input_type = _id_input()
        delete_error_dict = _std_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=_URL_ITEM,
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- get --
        get_input_type = _id_input()
        get_error_dict = _std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_URL_ITEM,
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- list (paged collection query) --
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_URL_BASE,
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )

        # -- patch --
        patch_input_type = _body_input()
        patch_error_dict = _std_errors()
        patch_input_value_validator_list = [HasFieldsOfValidator()]
        patch_output_validator_list = []
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=_URL_ITEM,
            request_body_parameter='i_PFIX_l2_profile',
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- update --
        update_input_type = _body_input()
        update_error_dict = _std_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=_URL_ITEM,
            request_body_parameter='i_PFIX_l2_profile',
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        def _op(input_type, output_type, errors, in_validators, out_validators):
            # Assemble one operation descriptor in the shape ApiInterfaceStub expects.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        operations = {
            'delete': _op(delete_input_type, type.VoidType(), delete_error_dict,
                          delete_input_value_validator_list, delete_output_validator_list),
            'get': _op(get_input_type,
                       type.ReferenceType(_MODEL_MOD, 'IPFIXL2Profile'),
                       get_error_dict, get_input_value_validator_list,
                       get_output_validator_list),
            'list': _op(list_input_type,
                        type.ReferenceType(_MODEL_MOD, 'IPFIXL2ProfileListResult'),
                        list_error_dict, list_input_value_validator_list,
                        list_output_validator_list),
            'patch': _op(patch_input_type, type.VoidType(), patch_error_dict,
                         patch_input_value_validator_list, patch_output_validator_list),
            'update': _op(update_input_type,
                          type.ReferenceType(_MODEL_MOD, 'IPFIXL2Profile'),
                          update_error_dict, update_input_value_validator_list,
                          update_output_validator_list),
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.ipfix_l2_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpsecVpnDpdProfilesStub(ApiInterfaceStub):
    """Stub wiring CRUD operations for /policy/api/v1/infra/ipsec-vpn-dpd-profiles."""

    def __init__(self, config):
        _ERR_NS = 'com.vmware.vapi.std.errors'
        _ERR_MOD = 'com.vmware.vapi.std.errors_client'
        _MODEL_MOD = 'com.vmware.nsx_policy.model_client'
        _URL_BASE = '/policy/api/v1/infra/ipsec-vpn-dpd-profiles'
        _URL_ITEM = _URL_BASE + '/{dpd-profile-id}'
        _PATH_VARS = {'dpd_profile_id': 'dpd-profile-id'}

        def _std_errors():
            # Every operation declares the same five standard vAPI errors.
            return {
                _ERR_NS + '.service_unavailable':
                    type.ReferenceType(_ERR_MOD, 'ServiceUnavailable'),
                _ERR_NS + '.invalid_request':
                    type.ReferenceType(_ERR_MOD, 'InvalidRequest'),
                _ERR_NS + '.internal_server_error':
                    type.ReferenceType(_ERR_MOD, 'InternalServerError'),
                _ERR_NS + '.unauthorized':
                    type.ReferenceType(_ERR_MOD, 'Unauthorized'),
                _ERR_NS + '.not_found':
                    type.ReferenceType(_ERR_MOD, 'NotFound'),
            }

        def _id_input():
            # delete/get address a single profile by its identifier only.
            return type.StructType('operation-input', {
                'dpd_profile_id': type.StringType(),
            })

        def _body_input():
            # patch/update carry the profile payload in addition to the id.
            return type.StructType('operation-input', {
                'dpd_profile_id': type.StringType(),
                'ip_sec_vpn_dpd_profile': type.ReferenceType(_MODEL_MOD, 'IPSecVpnDpdProfile'),
            })

        # -- delete --
        delete_input_type = _id_input()
        delete_error_dict = _std_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=_URL_ITEM,
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- get --
        get_input_type = _id_input()
        get_error_dict = _std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_URL_ITEM,
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- list (paged collection query) --
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_URL_BASE,
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )

        # -- patch --
        patch_input_type = _body_input()
        patch_error_dict = _std_errors()
        patch_input_value_validator_list = [HasFieldsOfValidator()]
        patch_output_validator_list = []
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=_URL_ITEM,
            request_body_parameter='ip_sec_vpn_dpd_profile',
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        # -- update --
        update_input_type = _body_input()
        update_error_dict = _std_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=_URL_ITEM,
            request_body_parameter='ip_sec_vpn_dpd_profile',
            path_variables=dict(_PATH_VARS),
            query_parameters={},
            content_type='application/json'
        )

        def _op(input_type, output_type, errors, in_validators, out_validators):
            # Assemble one operation descriptor in the shape ApiInterfaceStub expects.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        operations = {
            'delete': _op(delete_input_type, type.VoidType(), delete_error_dict,
                          delete_input_value_validator_list, delete_output_validator_list),
            'get': _op(get_input_type,
                       type.ReferenceType(_MODEL_MOD, 'IPSecVpnDpdProfile'),
                       get_error_dict, get_input_value_validator_list,
                       get_output_validator_list),
            'list': _op(list_input_type,
                        type.ReferenceType(_MODEL_MOD, 'IPSecVpnDpdProfileListResult'),
                        list_error_dict, list_input_value_validator_list,
                        list_output_validator_list),
            'patch': _op(patch_input_type, type.VoidType(), patch_error_dict,
                         patch_input_value_validator_list, patch_output_validator_list),
            'update': _op(update_input_type,
                          type.ReferenceType(_MODEL_MOD, 'IPSecVpnDpdProfile'),
                          update_error_dict, update_input_value_validator_list,
                          update_output_validator_list),
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.ipsec_vpn_dpd_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpsecVpnIkeProfilesStub(ApiInterfaceStub):
    """REST stub for the IPSec VPN IKE profile policy endpoint.

    Wires delete/get/list/patch/update operations for IPSecVpnIkeProfile
    resources under /policy/api/v1/infra/ipsec-vpn-ike-profiles.
    """
    def __init__(self, config):
        _MODEL = 'com.vmware.nsx_policy.model_client'
        _ERR = 'com.vmware.vapi.std.errors_client'
        _BASE_URL = '/policy/api/v1/infra/ipsec-vpn-ike-profiles'
        _ITEM_URL = _BASE_URL + '/{ike-profile-id}'

        def _std_errors():
            # Fresh error-type map per operation (each op gets its own dict).
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(_ERR, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(_ERR, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(_ERR, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(_ERR, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(_ERR, 'NotFound'),
            }

        def _id_input():
            # Operations addressing a single profile take only its id.
            return type.StructType('operation-input', {
                'ike_profile_id': type.StringType(),
            })

        def _body_input():
            # patch/update additionally carry the profile payload.
            return type.StructType('operation-input', {
                'ike_profile_id': type.StringType(),
                'ip_sec_vpn_ike_profile': type.ReferenceType(_MODEL, 'IPSecVpnIkeProfile'),
            })

        def _path_vars():
            return {
                'ike_profile_id': 'ike-profile-id',
            }

        # --- REST metadata per operation --------------------------------
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=_ITEM_URL,
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_ITEM_URL,
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_BASE_URL,
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=_ITEM_URL,
            request_body_parameter='ip_sec_vpn_ike_profile',
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=_ITEM_URL,
            request_body_parameter='ip_sec_vpn_ike_profile',
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )

        # list accepts the standard pagination/sorting knobs, all optional.
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })

        operations = {
            'delete': {
                'input_type': _id_input(),
                'output_type': type.VoidType(),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': _id_input(),
                'output_type': type.ReferenceType(_MODEL, 'IPSecVpnIkeProfile'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType(_MODEL, 'IPSecVpnIkeProfileListResult'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'patch': {
                'input_type': _body_input(),
                'output_type': type.VoidType(),
                'errors': _std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': _body_input(),
                'output_type': type.ReferenceType(_MODEL, 'IPSecVpnIkeProfile'),
                'errors': _std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.ipsec_vpn_ike_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpsecVpnTunnelProfilesStub(ApiInterfaceStub):
    """REST stub for the IPSec VPN tunnel profile policy endpoint.

    Wires delete/get/list/patch/update operations for IPSecVpnTunnelProfile
    resources under /policy/api/v1/infra/ipsec-vpn-tunnel-profiles.
    """
    def __init__(self, config):
        _MODEL = 'com.vmware.nsx_policy.model_client'
        _ERR = 'com.vmware.vapi.std.errors_client'
        _BASE_URL = '/policy/api/v1/infra/ipsec-vpn-tunnel-profiles'
        _ITEM_URL = _BASE_URL + '/{tunnel-profile-id}'

        def _std_errors():
            # Fresh error-type map per operation (each op gets its own dict).
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(_ERR, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(_ERR, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(_ERR, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(_ERR, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(_ERR, 'NotFound'),
            }

        def _id_input():
            # Operations addressing a single profile take only its id.
            return type.StructType('operation-input', {
                'tunnel_profile_id': type.StringType(),
            })

        def _body_input():
            # patch/update additionally carry the profile payload.
            return type.StructType('operation-input', {
                'tunnel_profile_id': type.StringType(),
                'ip_sec_vpn_tunnel_profile': type.ReferenceType(_MODEL, 'IPSecVpnTunnelProfile'),
            })

        def _path_vars():
            return {
                'tunnel_profile_id': 'tunnel-profile-id',
            }

        # --- REST metadata per operation --------------------------------
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=_ITEM_URL,
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_ITEM_URL,
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_BASE_URL,
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=_ITEM_URL,
            request_body_parameter='ip_sec_vpn_tunnel_profile',
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=_ITEM_URL,
            request_body_parameter='ip_sec_vpn_tunnel_profile',
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )

        # list accepts the standard pagination/sorting knobs, all optional.
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })

        operations = {
            'delete': {
                'input_type': _id_input(),
                'output_type': type.VoidType(),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': _id_input(),
                'output_type': type.ReferenceType(_MODEL, 'IPSecVpnTunnelProfile'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType(_MODEL, 'IPSecVpnTunnelProfileListResult'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'patch': {
                'input_type': _body_input(),
                'output_type': type.VoidType(),
                'errors': _std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': _body_input(),
                'output_type': type.ReferenceType(_MODEL, 'IPSecVpnTunnelProfile'),
                'errors': _std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.ipsec_vpn_tunnel_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _PartnerServicesStub(ApiInterfaceStub):
    """REST stub for the read-only partner services policy endpoint.

    Exposes only get and list operations over
    /policy/api/v1/infra/partner-services.
    """
    def __init__(self, config):
        _MODEL = 'com.vmware.nsx_policy.model_client'
        _ERR = 'com.vmware.vapi.std.errors_client'
        _BASE_URL = '/policy/api/v1/infra/partner-services'

        def _std_errors():
            # Fresh error-type map per operation (each op gets its own dict).
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(_ERR, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(_ERR, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(_ERR, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(_ERR, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(_ERR, 'NotFound'),
            }

        # get addresses a single service by name.
        get_input_type = type.StructType('operation-input', {
            'service_name': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_BASE_URL + '/{service-name}',
            path_variables={
                'service_name': 'service-name',
            },
            query_parameters={},
            content_type='application/json'
        )

        # list accepts the standard pagination/sorting knobs, all optional.
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_BASE_URL,
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType(_MODEL, 'ServiceDefinition'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType(_MODEL, 'ServiceInsertionServiceListResult'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.partner_services',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _PortMirroringProfilesStub(ApiInterfaceStub):
    """REST stub for the port mirroring profile policy endpoint.

    Wires delete/get/list/patch/update operations for PortMirroringProfile
    resources under /policy/api/v1/infra/port-mirroring-profiles.  Unlike
    the VPN-profile stubs in this module, its list operation does not take
    the include_mark_for_delete_objects flag.
    """
    def __init__(self, config):
        _MODEL = 'com.vmware.nsx_policy.model_client'
        _ERR = 'com.vmware.vapi.std.errors_client'
        _BASE_URL = '/policy/api/v1/infra/port-mirroring-profiles'
        _ITEM_URL = _BASE_URL + '/{port-mirroring-profile-id}'

        def _std_errors():
            # Fresh error-type map per operation (each op gets its own dict).
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(_ERR, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(_ERR, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(_ERR, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(_ERR, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(_ERR, 'NotFound'),
            }

        def _id_input():
            # Operations addressing a single profile take only its id.
            return type.StructType('operation-input', {
                'port_mirroring_profile_id': type.StringType(),
            })

        def _body_input():
            # patch/update additionally carry the profile payload.
            return type.StructType('operation-input', {
                'port_mirroring_profile_id': type.StringType(),
                'port_mirroring_profile': type.ReferenceType(_MODEL, 'PortMirroringProfile'),
            })

        def _path_vars():
            return {
                'port_mirroring_profile_id': 'port-mirroring-profile-id',
            }

        # --- REST metadata per operation --------------------------------
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=_ITEM_URL,
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_ITEM_URL,
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_BASE_URL,
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=_ITEM_URL,
            request_body_parameter='port_mirroring_profile',
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=_ITEM_URL,
            request_body_parameter='port_mirroring_profile',
            path_variables=_path_vars(),
            query_parameters={},
            content_type='application/json'
        )

        # list accepts the pagination/sorting knobs, all optional.
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })

        operations = {
            'delete': {
                'input_type': _id_input(),
                'output_type': type.VoidType(),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': _id_input(),
                'output_type': type.ReferenceType(_MODEL, 'PortMirroringProfile'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType(_MODEL, 'PortMirroringProfileListResult'),
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'patch': {
                'input_type': _body_input(),
                'output_type': type.VoidType(),
                'errors': _std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': _body_input(),
                'output_type': type.ReferenceType(_MODEL, 'PortMirroringProfile'),
                'errors': _std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.port_mirroring_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _SegmentsStub(ApiInterfaceStub):
    """Generated API stub for ``com.vmware.nsx_policy.infra.segments``.

    Wires the delete / delete_0 (force delete) / get / list / patch / update
    operations onto the ``/policy/api/v1/infra/segments`` REST endpoints.
    NOTE: auto-generated vAPI binding code -- do not edit by hand; each
    operation declares its input/output types, error map, validators and
    REST metadata in a fixed pattern.
    """
    def __init__(self, config):
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'segment_id': type.StringType(),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/policy/api/v1/infra/segments/{segment-id}',
            path_variables={
                'segment_id': 'segment-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete_0 operation
        # (delete_0 is the force-delete variant: same DELETE verb with
        # ?force=true appended to the URL template below.)
        delete_0_input_type = type.StructType('operation-input', {
            'segment_id': type.StringType(),
        })
        delete_0_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_0_input_value_validator_list = [
        ]
        delete_0_output_validator_list = [
        ]
        delete_0_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/policy/api/v1/infra/segments/{segment-id}?force=true',
            path_variables={
                'segment_id': 'segment-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'segment_id': type.StringType(),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
            HasFieldsOfValidator()
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/segments/{segment-id}',
            path_variables={
                'segment_id': 'segment-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
            HasFieldsOfValidator()
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/segments',
            path_variables={
            },
            query_parameters={
                'cursor': 'cursor',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # properties for patch operation
        patch_input_type = type.StructType('operation-input', {
            'segment_id': type.StringType(),
            'segment': type.ReferenceType('com.vmware.nsx_policy.model_client', 'Segment'),
        })
        patch_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        patch_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        patch_output_validator_list = [
        ]
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template='/policy/api/v1/infra/segments/{segment-id}',
            request_body_parameter='segment',
            path_variables={
                'segment_id': 'segment-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'segment_id': type.StringType(),
            'segment': type.ReferenceType('com.vmware.nsx_policy.model_client', 'Segment'),
        })
        update_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        update_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        update_output_validator_list = [
            HasFieldsOfValidator()
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/policy/api/v1/infra/segments/{segment-id}',
            request_body_parameter='segment',
            path_variables={
                'segment_id': 'segment-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        operations = {
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete_0': {
                'input_type': delete_0_input_type,
                'output_type': type.VoidType(),
                'errors': delete_0_error_dict,
                'input_value_validator_list': delete_0_input_value_validator_list,
                'output_validator_list': delete_0_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx_policy.model_client', 'Segment'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx_policy.model_client', 'SegmentListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'patch': {
                'input_type': patch_input_type,
                'output_type': type.VoidType(),
                'errors': patch_error_dict,
                'input_value_validator_list': patch_input_value_validator_list,
                'output_validator_list': patch_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx_policy.model_client', 'Segment'),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'delete_0': delete_0_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.segments',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ServicesStub(ApiInterfaceStub):
    """Generated API stub for ``com.vmware.nsx_policy.infra.services``.

    Wires the delete / get / list / patch / update operations onto the
    ``/policy/api/v1/infra/services`` REST endpoints.
    NOTE: auto-generated vAPI binding code -- do not edit by hand.
    """
    def __init__(self, config):
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'service_id': type.StringType(),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/policy/api/v1/infra/services/{service-id}',
            path_variables={
                'service_id': 'service-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'service_id': type.StringType(),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
            HasFieldsOfValidator()
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/services/{service-id}',
            path_variables={
                'service_id': 'service-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        # (list additionally supports the 'default_service' boolean filter.)
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'default_service': type.OptionalType(type.BooleanType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
            HasFieldsOfValidator()
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/services',
            path_variables={
            },
            query_parameters={
                'cursor': 'cursor',
                'default_service': 'default_service',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # properties for patch operation
        patch_input_type = type.StructType('operation-input', {
            'service_id': type.StringType(),
            'service': type.ReferenceType('com.vmware.nsx_policy.model_client', 'Service'),
        })
        patch_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        patch_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        patch_output_validator_list = [
        ]
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template='/policy/api/v1/infra/services/{service-id}',
            request_body_parameter='service',
            path_variables={
                'service_id': 'service-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'service_id': type.StringType(),
            'service': type.ReferenceType('com.vmware.nsx_policy.model_client', 'Service'),
        })
        update_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        update_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        update_output_validator_list = [
            HasFieldsOfValidator()
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/policy/api/v1/infra/services/{service-id}',
            request_body_parameter='service',
            path_variables={
                'service_id': 'service-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        operations = {
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx_policy.model_client', 'Service'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx_policy.model_client', 'ServiceListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'patch': {
                'input_type': patch_input_type,
                'output_type': type.VoidType(),
                'errors': patch_error_dict,
                'input_value_validator_list': patch_input_value_validator_list,
                'output_validator_list': patch_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx_policy.model_client', 'Service'),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.services',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class StubFactory(StubFactoryBase):
    """Factory exposing the infra service stub classes and the stub
    factories of the child packages (string entries are lazily-resolved
    dotted paths to those factories).
    """
    _attrs = {
        'DhcpRelayConfigs': DhcpRelayConfigs,
        'DhcpServerConfigs': DhcpServerConfigs,
        'DnsForwarderZones': DnsForwarderZones,
        'Domains': Domains,
        'Drafts': Drafts,
        'GroupAssociations': GroupAssociations,
        'IpfixCollectorProfiles': IpfixCollectorProfiles,
        'IpfixDfwCollectorProfiles': IpfixDfwCollectorProfiles,
        'IpfixDfwProfiles': IpfixDfwProfiles,
        'IpfixL2CollectorProfiles': IpfixL2CollectorProfiles,
        'IpfixL2Profiles': IpfixL2Profiles,
        'IpsecVpnDpdProfiles': IpsecVpnDpdProfiles,
        'IpsecVpnIkeProfiles': IpsecVpnIkeProfiles,
        'IpsecVpnTunnelProfiles': IpsecVpnTunnelProfiles,
        'PartnerServices': PartnerServices,
        'PortMirroringProfiles': PortMirroringProfiles,
        'Segments': Segments,
        'Services': Services,
        'domains': 'com.vmware.nsx_policy.infra.domains_client.StubFactory',
        'drafts': 'com.vmware.nsx_policy.infra.drafts_client.StubFactory',
        'realized_state': 'com.vmware.nsx_policy.infra.realized_state_client.StubFactory',
        'segments': 'com.vmware.nsx_policy.infra.segments_client.StubFactory',
        'services': 'com.vmware.nsx_policy.infra.services_client.StubFactory',
        'sites': 'com.vmware.nsx_policy.infra.sites_client.StubFactory',
        'tier_0s': 'com.vmware.nsx_policy.infra.tier_0s_client.StubFactory',
        'tier_1s': 'com.vmware.nsx_policy.infra.tier_1s_client.StubFactory',
    }
|
10,664 | 9e9d6e1407d7c2d3afa64cec0b484c4abcb6b7fe | #!/usr/bin/env python
#The line above tells Linux that this file is a Python script,
#and that the OS should use the Python interpreter in /usr/bin/env
#to run it. Don't forget to use "chmod +x [filename]" to make
#this script executable.
#Import the dependencies as described in example_pub.py
import rospy
from sensor_msgs.msg import JointState
import lab3 as llib
import numpy as np
import math
'''The Code below is borrowed from openCV at
https://www.learnopencv.com/rotation-matrix-to-euler-angles/
to check to see if our rotation matrix does indeed produce valid RPY angles'''
# Checks if a matrix is a valid rotation matrix.
def isRotationMatrix(R) :
    """Return True iff ``R`` is numerically orthonormal (R^T R == I)."""
    product = np.dot(np.transpose(R), R)
    identity = np.identity(3, dtype=R.dtype)
    # A proper rotation leaves R^T R equal to the identity up to round-off.
    return np.linalg.norm(identity - product) < 1e-6
# Calculates rotation matrix to euler angles
# The result is the same as MATLAB except the order
# of the euler angles ( x and z are swapped ).
def rotationMatrixToEulerAngles(R) :
    """Convert a 3x3 rotation matrix into (roll, pitch, yaw) Euler angles.

    Same convention as MATLAB except the order of the Euler angles
    (x and z are swapped).
    """
    assert(isRotationMatrix(R))
    # Magnitude of the (x, y) part of the first column; it vanishes at the
    # gimbal-lock pose, where roll and yaw become coupled.
    cos_pitch = math.sqrt(R[0, 0] ** 2 + R[1, 0] ** 2)
    if cos_pitch < 1e-6:
        # Singular case: fix yaw = 0 and recover roll from the second row.
        roll = math.atan2(-R[1, 2], R[1, 1])
        pitch = math.atan2(-R[2, 0], cos_pitch)
        yaw = 0
    else:
        roll = math.atan2(R[2, 1], R[2, 2])
        pitch = math.atan2(-R[2, 0], cos_pitch)
        yaw = math.atan2(R[1, 0], R[0, 0])
    return np.array([roll, pitch, yaw])
#end openCV code
#Define the callback method which is called whenever this node receives a
#message on its subscribed topic. The received message is passed as the
#first argument to callback().
def callback(message):
    """Joint-state callback: compute the forward-kinematics transform and
    print its rotation as RPY angles.

    The joint angles are pulled out of ``message.position`` in a hand-picked
    index order before being fed to ``llib.gst``.
    # NOTE(review): assumes a fixed joint ordering in /robot/joint_states
    # (indices 4,5,2,3,6,7,8) -- TODO confirm against the robot's URDF.
    """
    #Print the contents of the message to the console
    print 'This is our produced rotation matrix'
    joints = [message.position[4], message.position[5], message.position[2], message.position[3], message.position[6], message.position[7], message.position[8]]
    # Full 4x4 homogeneous transform from our lab-3 kinematics library.
    ours = llib.gst(joints)
    print ours
    # Upper-left 3x3 block is the rotation part of the transform.
    rotation = ours[:3,:3]
    print 'This is openCV RPY angles: '
    print rotationMatrixToEulerAngles(rotation)
#Define the method which contains the node's main functionality
def forward_kin():
    """Start the forward-kinematics node and block until shutdown.

    Subscribes to /robot/joint_states and prints the computed transform
    (via ``callback``) for every received JointState message.
    """
    #Run this program as a new node in the ROS computation graph
    #called /listener_<id>, where <id> is a randomly generated numeric
    #string. This randomly generated name means we can start multiple
    #copies of this node without having multiple nodes with the same
    #name, which ROS doesn't allow.
    rospy.init_node('forward_kin', anonymous=True)
    #Create a new instance of the rospy.Subscriber object which we can
    #use to receive messages of type std_msgs/String from the topic /chatter_talk.
    #Whenever a new message is received, the method callback() will be called
    #with the received message as its first argument.
    rospy.Subscriber("/robot/joint_states", JointState, callback)
    #Wait for messages to arrive on the subscribed topics, and exit the node
    #when it is killed with Ctrl+C
    rospy.spin()
#Python's syntax for a main() method
if __name__ == '__main__':
    # Entry point: launch the forward-kinematics listener node.
    forward_kin()
|
10,665 | f42169a8c1aa3be3171bf363762a7ce32c0bc935 | """Matmul layer test cases"""
from typing import (
List,
Callable
)
import sys
import copy
import logging
import cProfile
import numpy as np
from common.constant import (
TYPE_FLOAT,
TYPE_LABEL,
)
# Defect fixed: every imported name was listed twice in this import.
from common.function import (
    softmax,
    relu,
    transform_X_T,
    softmax_cross_entropy_log_loss,
    compose,
)
import common.weights as weights
from testing.config import (
GRADIENT_DIFF_CHECK_TRIGGER,
GRADIENT_DIFF_ACCEPTANCE_RATIO,
GRADIENT_DIFF_ACCEPTANCE_VALUE,
)
from testing.layer import (
expected_gradients_from_relu_neuron,
expected_gradient_from_log_loss,
validate_against_expected_gradient
)
from data import (
spiral,
venn_of_circle_a_not_b
)
from layer import (
Standardization,
Matmul,
ReLU,
CrossEntropyLogLoss
)
from layer.utility import (
forward_outputs,
backward_outputs
)
from optimizer import (
Optimizer,
SGD
)
from testing.config import (
ENFORCE_STRICT_ASSERT,
)
from testing.layer import (
validate_against_numerical_gradient
)
np.set_printoptions(threshold=sys.maxsize)
np.set_printoptions(linewidth=1024)
Logger = logging.getLogger(__name__)
def build(
        M1: int,
        W1: np.ndarray,
        M2: int,
        W2: np.ndarray,
        log_loss_function: Callable,
        optimizer,
        log_level
):
    """Assemble a 2-layer network: matmul01 -> relu01 -> matmul02 -> relu02 -> log loss.

    Each layer's ``objective`` is the composition of all downstream layers,
    so calling ``layer.objective(Y)`` evaluates the loss from that point on.

    Args:
        M1: number of nodes in layer 1
        W1: layer-1 weight, shape (M1, D+1)
        M2: number of nodes in layer 2
        W2: layer-2 weight, shape (M2, M1+1)
        log_loss_function: cross-entropy log-loss function for the loss layer
        optimizer: gradient-descent optimizer shared by both matmul layers
        log_level: logging level passed to every layer

    Returns:
        (objective, predict, loss, activation02, matmul02, activation01, matmul01)
    """
    # --------------------------------------------------------------------------------
    # Instantiate a CrossEntropyLogLoss layer
    # --------------------------------------------------------------------------------
    loss = CrossEntropyLogLoss(
        name="loss",
        num_nodes=M2,
        log_loss_function=log_loss_function,
        log_level=log_level
    )
    # --------------------------------------------------------------------------------
    # Instantiate the 2nd ReLU layer
    # --------------------------------------------------------------------------------
    activation02 = ReLU(
        name="relu02",
        num_nodes=M2,
        log_level=log_level
    )
    activation02.objective = loss.function
    # --------------------------------------------------------------------------------
    # Instantiate the 2nd Matmul layer
    # --------------------------------------------------------------------------------
    matmul02 = Matmul(
        name="matmul02",
        num_nodes=M2,
        W=W2,                   # (M2, M1+1)
        optimizer=optimizer,
        log_level=log_level
    )
    matmul02.objective = compose(activation02.function, activation02.objective)
    # --------------------------------------------------------------------------------
    # Instantiate the 1st ReLU layer
    # --------------------------------------------------------------------------------
    activation01 = ReLU(
        name="relu01",
        num_nodes=M1,
        log_level=log_level
    )
    activation01.objective = compose(matmul02.function, matmul02.objective)
    # --------------------------------------------------------------------------------
    # Instantiate the 1st Matmul layer
    # --------------------------------------------------------------------------------
    matmul01 = Matmul(
        name="matmul01",
        num_nodes=M1,
        W=W1,                   # (M1, D+1)
        optimizer=optimizer,
        log_level=log_level
    )
    matmul01.objective = compose(activation01.function, activation01.objective)
    # --------------------------------------------------------------------------------
    # Instantiate a Normalization layer
    # Need to apply the same mean and std to the non-training data set.
    # --------------------------------------------------------------------------------
    # # norm = Standardization(
    #    name="standardization",
    #    num_nodes=D
    # )
    # X = np.copy(X)
    # X = norm.function(X)
    # --------------------------------------------------------------------------------
    # Network objective function f: L=f(X)
    # --------------------------------------------------------------------------------
    objective = compose(matmul01.function, matmul01.objective)
    predict = compose(
        matmul01.predict,
        activation01.predict,
        matmul02.predict,
        # TODO: Understand why including last activation make the prediction fail.
        # The venn diagram, (A and B and C and D)
        # activation02.predict,
    )
    return objective, predict, loss, activation02, matmul02, activation01, matmul01
def expected_gradients_relu(
        N,
        T,
        P,
        Y02,
        matmul02,
        Y01,
        matmul01
):
    """Compute the analytically expected gradients for both relu+matmul layers.

    Args:
        N: batch size
        T: labels
        P: softmax probabilities at the loss layer
        Y02: output of matmul02, matmul02: the 2nd Matmul layer
        Y01: output of matmul01, matmul01: the 1st Matmul layer

    Returns:
        (EDA02, EDY02, EDW02, EDA01, EDY01, EDW01, EDX) expected gradients.
    """
    # dL/dA02 = (P - T) / N at the softmax cross-entropy log loss.
    grad_a02 = expected_gradient_from_log_loss(P=P, T=T, N=N)
    # Back-propagate analytically through layer 02, then layer 01.
    grad_y02, grad_w02, grad_a01 = expected_gradients_from_relu_neuron(grad_a02, Y02, matmul02)
    grad_y01, grad_w01, grad_x = expected_gradients_from_relu_neuron(grad_a01, Y01, matmul01)
    return grad_a02, grad_y02, grad_w02, grad_a01, grad_y01, grad_w01, grad_x
def __backward(
        back_propagation,
        activation,
        matmul,
        EDA,
        EDY,
        EDW,
        EDX,
        test_numerical_gradient
):
    """Run backward + gradient descent through one (relu, matmul) pair,
    asserting each analytical gradient against its expected value.

    Args:
        back_propagation: dL/dA arriving from the downstream layer
        activation: the ReLU layer of this pair
        matmul: the Matmul layer of this pair
        EDA: expected dL/dA into the ReLU layer
        EDY: expected dL/dY into the Matmul layer
        EDW: expected dL/dW of the Matmul weight
        EDX: expected dL/dX out of the Matmul layer
        test_numerical_gradient: also compare against numerical gradients

    Returns:
        dL/dX back-propagated out of the Matmul layer.
    """
    # ================================================================================
    # Layer backward path
    # 1. Calculate the analytical gradient dL/dX=matmul.gradient(dL/dY) with a dL/dY.
    # 2. Gradient descent to update Wn+1 = Wn - lr * dL/dX.
    # ================================================================================
    # Snapshot W so we can detect whether update() actually changed it.
    before = copy.deepcopy(matmul.W)
    # ********************************************************************************
    # Constraint:
    # EDA should match the gradient dL/dA back-propagated from the log-loss layer.
    # ********************************************************************************
    dA = back_propagation
    assert validate_against_expected_gradient(EDA, dA), \
        "dA should match EDA. dA=\n%s\nEDA=\n%s\nDiff=\n%s\n" \
        % (dA, EDA, (dA - EDA))
    # ********************************************************************************
    # Constraint:
    # EDY should match the gradient dL/dY back-propagated from the ReLU layer.
    # ********************************************************************************
    dY = activation.gradient(dA)    # dL/dY: (N, M2)
    assert validate_against_expected_gradient(EDY, dY), \
        "dY should match EDY. dY=\n%s\nEDY=\n%s\nDiff=\n%s\n" \
        % (dY, EDY, (dY - EDY))
    # ********************************************************************************
    # Constraint:
    # EDX should match the gradient dL/dX back-propagated from the Matmul layer.
    # ********************************************************************************
    dX = matmul.gradient(dY)        # dL/dX: (N, M1)
    # dX mismatch is only a hard failure under ENFORCE_STRICT_ASSERT;
    # otherwise it is logged and execution continues.
    if not np.allclose(
            a=dX,
            b=EDX,
            atol=GRADIENT_DIFF_ACCEPTANCE_VALUE,
            rtol=GRADIENT_DIFF_ACCEPTANCE_RATIO
    ):
        Logger.error(
            "dX should match EDX. dX=\n%s\nEDX=\n%s\nDiff=\n%s\n",
            dX, EDX, (dX - EDX)
        )
        assert ENFORCE_STRICT_ASSERT
    # ================================================================================
    # Layer gradient descent
    # ================================================================================
    # ********************************************************************************
    # Constraint.
    # W in the Matmul is updated by the gradient descent.
    # ********************************************************************************
    dS = matmul.update()            # [dL/dW: (M2, M1+1)]
    Logger.debug("W after is \n%s", matmul.W)
    if np.array_equal(before, matmul.W):
        Logger.warning(
            "W has not been updated. Before=\n%s\nAfter=\n%s\nDiff=\n%s\ndW=\n%s\n",
            before, matmul.W, (before - matmul.W), dS[0]
        )
    # ********************************************************************************
    # Constraint:
    # EDW should match the gradient dL/dW in the Matmul layer.
    # ********************************************************************************
    dW = dS[0]
    assert validate_against_expected_gradient(EDW, dW), \
        "dW should match EDW. dW=\n%s\nEDW=\n%s\nDiff=\n%s\n" \
        % (dW, EDW, (dW - EDW))
    # ================================================================================
    # Layer numerical gradient
    # ================================================================================
    if test_numerical_gradient:
        gn = matmul.gradient_numerical()        # [dL/dX: (N,M1), dL/dW: (M,M+1)]
        validate_against_numerical_gradient([dX] + dS, gn, Logger)
    return dX
def backward(
        back_propagation,
        activation02,
        matmul02,
        activation01,
        matmul01,
        EDA02,
        EDY02,
        EDW02,
        EDA01,
        EDY01,
        EDW01,
        EDX,
        test_numerical_gradient
):
    """Run the backward pass + gradient descent through both layer pairs,
    validating each gradient against its expected value.

    Defects fixed: removed the unused ``before01 = copy.deepcopy(matmul01.W)``
    snapshot (``__backward`` takes its own snapshot), and the dL/dX result of
    the first layer is now returned instead of silently discarded
    (backward-compatible: callers ignoring the return value are unaffected).

    Args:
        back_propagation: dL/dA from the log-loss layer
        activation02/matmul02: 2nd (relu, matmul) layer pair
        activation01/matmul01: 1st (relu, matmul) layer pair
        EDA02, EDY02, EDW02: expected gradients for layer pair 02
        EDA01, EDY01, EDW01, EDX: expected gradients for layer pair 01
        test_numerical_gradient: also compare against numerical gradients

    Returns:
        dL/dX back-propagated out of the first Matmul layer.
    """
    # Layer pair 02: loss gradient -> relu02 -> matmul02; expected output dL/dA01.
    dA01 = __backward(
        back_propagation,
        activation02,
        matmul02,
        EDA02,
        EDY02,
        EDW02,
        EDA01,
        test_numerical_gradient
    )
    # Layer pair 01: dA01 -> relu01 -> matmul01; expected output dL/dX.
    dX = __backward(
        dA01,
        activation01,
        matmul01,
        EDA01,
        EDY01,
        EDW01,
        EDX,
        test_numerical_gradient
    )
    return dX
def train_two_layer_classifier(
        N: int,
        D: int,
        X: np.ndarray,
        T: np.ndarray,
        M1: int,
        W1: np.ndarray,
        M2: int,
        W2: np.ndarray,
        log_loss_function: Callable,
        optimizer: Optimizer,
        num_epochs: int = 100,
        test_numerical_gradient: bool = False,
        log_level: int = logging.ERROR,
        callback: Callable = None
):
    """Test case for binary classification with matmul + log loss.
    Args:
        N: Batch size of X
        D: Number of features in X
        X: train data
        T: labels
        M1: Number of nodes in layer 1.
        W1: weight for layer 1
        M2: Number of nodes in layer 2.
        W2: weight for layer 2
        log_loss_function: cross entropy logg loss function
        optimizer: Optimizer
        num_epochs: Number of epochs to run
        test_numerical_gradient: Flag if test the analytical gradient with the numerical one.
        log_level: logging level
        callback: callback function to invoke at the each epoch end.
    Returns:
        (matmul01.W, matmul02.W, objective, prediction, history) where
        history is the list of loss values observed per epoch.
    """
    name = __name__
    # --------------------------------------------------------------------------------
    # Validate input shapes/dtypes before building the network.
    # Weight matrices carry an extra +1 column for the bias term.
    # --------------------------------------------------------------------------------
    assert \
        isinstance(T, np.ndarray) and T.ndim == 1 and T.shape[0] == N
    assert \
        isinstance(X, np.ndarray) and X.dtype == TYPE_FLOAT and \
        X.ndim == 2 and X.shape[0] == N and X.shape[1] == D
    assert \
        isinstance(W1, np.ndarray) and W1.dtype == TYPE_FLOAT and \
        W1.ndim == 2 and W1.shape[0] == M1 and W1.shape[1] == D+1
    assert \
        isinstance(W2, np.ndarray) and W2.dtype == TYPE_FLOAT and \
        W2.ndim == 2 and W2.shape[0] == M2 and W2.shape[1] == M1+1
    assert num_epochs > 0 and N > 0 and D > 0 and M1 > 1
    assert log_loss_function == softmax_cross_entropy_log_loss and M2 >= 2
    matmul01: Matmul
    matmul02: Matmul
    *network, = build(
        M1,
        W1,
        M2,
        W2,
        log_loss_function,
        optimizer,
        log_level
    )
    objective, prediction, loss, activation02, matmul02, activation01, matmul01 = network
    loss.T = T
    # ================================================================================
    # Train the classifier
    # ================================================================================
    num_no_progress: int = 0     # how many time when loss L not decreased.
    # pylint: disable=not-callable
    history: List[np.ndarray] = [objective(X)]
    for i in range(num_epochs):
        # --------------------------------------------------------------------------------
        # Forward path
        # --------------------------------------------------------------------------------
        Y01, A01, Y02, A02, L = forward_outputs(
            [
                matmul01,
                activation01,
                matmul02,
                activation02,
                loss
            ],
            X
        )
        # Re-derive the softmax probabilities directly from the layer-2 matmul
        # state for the expected-gradient computation below.
        # NOTE(review): assumes the activations are ReLU (matches
        # expected_gradients_relu) -- confirm against build().
        P = softmax(relu(np.matmul(matmul02.X, matmul02.W.T)))      # (N,M2)
        # --------------------------------------------------------------------------------
        # Verify loss
        # --------------------------------------------------------------------------------
        if not (i % 100): print(f"iteration {i} Loss {L}")
        Logger.info("%s: iteration[%s]. Loss is [%s]", name, i, L)
        # ********************************************************************************
        # Constraint: Objective/Loss L(Yn+1) after gradient descent < L(Yn)
        # ********************************************************************************
        if L >= history[-1] and i > 0:
            Logger.warning(
                "Iteration [%i]: Loss[%s] has not improved from the previous [%s] for %s times.",
                i, L, history[-1], num_no_progress + 1
            )
            # --------------------------------------------------------------------------------
            # Reduce the learning rate can make the situation worse.
            # When reduced the lr every time L >= history, the (L >= history) became successive
            # and eventually exceeded 50 successive non-improvement ending in failure.
            # Keep the learning rate make the L>=history more frequent but still up to 3
            # successive events, and the training still kept progressing.
            # --------------------------------------------------------------------------------
            num_no_progress += 1
            if num_no_progress > 5:
                # Decay the learning rates only after sustained stagnation.
                matmul01.lr = matmul01.lr * 0.95
                matmul02.lr = matmul02.lr * 0.99
            if num_no_progress > 50:
                Logger.error(
                    "The training has no progress more than %s times.", num_no_progress
                )
                break
        else:
            num_no_progress = 0
        history.append(L)
        # --------------------------------------------------------------------------------
        # Expected gradients
        # --------------------------------------------------------------------------------
        *gradients, = expected_gradients_relu(
            N, T, P, Y02, matmul02, Y01, matmul01
        )
        EDA02, EDY02, EDW02, EDA01, EDY01, EDW01, EDX = gradients
        # --------------------------------------------------------------------------------
        # Backward path
        # --------------------------------------------------------------------------------
        backward(
            loss.gradient(TYPE_FLOAT(1)),       # dL/dA02: (N, M2),
            activation02,
            matmul02,
            activation01,
            matmul01,
            EDA02,
            EDY02,
            EDW02,
            EDA01,
            EDY01,
            EDW01,
            EDX,
            test_numerical_gradient
        )
        if callback:
            callback(matmul01.W, matmul02.W)
    return matmul01.W, matmul02.W, objective, prediction, history
def test_two_layer_classifier(caplog):
    """End-to-end smoke test of the two-layer classifier training loop."""
    caplog.set_level(logging.WARNING, logger=Logger.name)

    # Input X specification: feature dimension WITHOUT the bias column.
    num_features = 2

    # Layer 01: Y01 = X @ W1.T of shape (N, M1); weights include bias (+1 col).
    nodes_layer1 = 4
    weights_layer1 = weights.he(nodes_layer1, num_features + 1)

    # Layer 02: input A01 of shape (N, M1), output Y02 = A01 @ W2.T of (N, M2).
    num_classes: int = 3            # number of categories to classify
    weights_layer2 = weights.he(num_classes, nodes_layer1 + 1)

    sgd = SGD(lr=TYPE_FLOAT(0.2))

    # Training data: "circle A not B" Venn sectors.
    # (Alternative generator: linear_separable_sectors(n=N, d=D, m=M).)
    X, T = venn_of_circle_a_not_b(
        radius=TYPE_FLOAT(1.0),
        ratio=TYPE_FLOAT(1.3),
        m=num_classes,
        n=10
    )
    batch_size = X.shape[0]
    assert X.shape[0] > 0 and X.shape == (batch_size, num_features)
    X, T = transform_X_T(X, T)

    def callback(W1, W2):
        """Dummy callback"""
        pass

    profiler = cProfile.Profile()
    profiler.enable()
    train_two_layer_classifier(
        N=batch_size,
        D=num_features,
        X=X,
        T=T,
        M1=nodes_layer1,
        W1=weights_layer1,
        M2=num_classes,
        W2=weights_layer2,
        log_loss_function=softmax_cross_entropy_log_loss,
        optimizer=sgd,
        num_epochs=10,
        test_numerical_gradient=True,
        log_level=logging.DEBUG,
        callback=callback
    )
    profiler.disable()
    profiler.print_stats(sort="cumtime")
|
10,666 | b100b28f2dbe2a7e638a9388d604311c28fc631f | import RPi.GPIO as GPIO
import subprocess
import time  # FIX: time.sleep(10) below raised NameError -- `time` was never imported

# Endlessly re-launch the sensor script every 10 seconds, releasing GPIO
# pins between runs so the child process can claim them.
while True:
    GPIO.cleanup()
    # NOTE(review): relies on simpletest.py being executable with a shebang
    # and on PATH/cwd; "python simpletest.py" would be more robust.
    subprocess.Popen("simpletest.py", shell=True)
    # sensor = Adafruit_DHT.DHT11
    # pin = 4
    # ldr = LightSensor(17)
    #
    ## while True:
    #
    #
    #    light = ldr.value
    #    print('Light Source={0:0.1f}%'.format(light*100))
    #    if (light == 0 or light < 0.15):
    #        print('Bright light available')
    #    elif (light > 0.15 and light < 0.7):
    #        print ('Partial light available')
    #    else:
    #        print ('Not sufficient light')
    #
    time.sleep(10)
    #
|
10,667 | bf50a4a64cc0a4cdabdcbd06be969a0da2a62b9f | # Generated by Django 2.1.15 on 2021-06-13 15:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django 2.1 schema migration.

    Gives Author.avatar a default image and upload directory, and makes
    Tag.name unique (max length 80).
    """
    dependencies = [
        ('app', '0010_auto_20210612_2333'),
    ]
    operations = [
        migrations.AlterField(
            model_name='author',
            name='avatar',
            field=models.ImageField(default='unnamed.jpg', upload_to='avatar/'),
        ),
        migrations.AlterField(
            model_name='tag',
            name='name',
            field=models.CharField(max_length=80, unique=True),
        ),
    ]
|
10,668 | 42607232f53547989eb434dd68eb844d3bb90ff1 | '''Problem statement¶
Given a string consisting of uppercase and lowercase ASCII
characters, write a function, case_sort, that sorts uppercase
and lowercase letters separately, such that if the $i$th place
in the original string had an uppercase character then it should
not have a lowercase character after being sorted and vice versa.
For example:
Input: fedRTSersUXJ
Output: deeJRSfrsTUX'''
def mergesort(items):
    """Sort a sequence ascending with recursive merge sort.

    Returns a new sorted list for inputs longer than one element; inputs
    of length <= 1 are returned as-is (same object), matching the
    original behaviour.
    """
    size = len(items)
    if size <= 1:
        return items
    half = size // 2
    return merge(mergesort(items[:half]), mergesort(items[half:]))
def merge(left, right):
    """Merge two ascending-sorted lists into one ascending-sorted list.

    Ties are resolved in favour of `left`, preserving stability.
    """
    out = []
    i = j = 0
    len_left, len_right = len(left), len(right)
    while i < len_left and j < len_right:
        if left[i] <= right[j]:
            out.append(left[i])
            i += 1
        else:
            out.append(right[j])
            j += 1
    # At most one of these extends with anything.
    out.extend(left[i:])
    out.extend(right[j:])
    return out
def case_sort(string):
    """Sort upper- and lowercase letters separately, preserving case slots.

    Every position that held an uppercase letter still holds one after
    sorting, and likewise for lowercase (input is assumed alphabetic, per
    the problem statement).

    Improvements over the original: uses the built-in sorted() instead of
    the hand-rolled mergesort, and consumes each sorted pool through an
    iterator instead of repeated O(n) list.pop(0) calls, making the whole
    pass O(n log n).
    """
    uppers = iter(sorted(ch for ch in string if ch.isupper()))
    lowers = iter(sorted(ch for ch in string if not ch.isupper()))
    return ''.join(
        next(uppers) if ch.isupper() else next(lowers)
        for ch in string
    )
def test_function(test_case):
    """Run case_sort on test_case[0] and print Pass/False vs test_case[1]."""
    text, expected = test_case
    verdict = "Pass" if case_sort(text) == expected else "False"
    print(verdict)
# Exercise case_sort against the example from the problem statement.
test_string = 'fedRTSersUXJ'
solution = "deeJRSfrsTUX"
test_function([test_string, solution])
|
10,669 | 267bb550159efc9138f20430a5e6ac474479b5e8 | from networkx import Graph
from networkx import degree_centrality, closeness_centrality, eigenvector_centrality, betweenness_centrality
from networkx import find_cliques
import logging
import conf
import datetime
import numpy as np
import networkx as nx
from myexceptions import network_big
from google.appengine.ext import db
from obj import obj_network
conf = conf.Config()
def getusernetwork(uidin):
    """Fetch the Network entity for uid, creating and persisting one if absent."""
    query = db.GqlQuery("SELECT * FROM Network WHERE uid = :1", uidin)
    rows = query.fetch(1)
    if rows:
        return rows[0]
    created = obj_network.Network(uid = uidin)
    created.updated_time = datetime.datetime.now()
    created.put()
    return created
class SocialNetwork(object):
    """Object oriented interface for conducting social network analysis."""
    # Names of the measures this class can compute, taken from the config.
    valid_indexes = []
    for index_name in conf.INDEXES.keys():
        valid_indexes.append(index_name)
    # NOTE(review): class-level cache attribute is never used in this class.
    cache = None
    def __init__(self):
        """Create an empty undirected graph and null result slots per measure."""
        logging.info("libSNA object created.")
        self.graph = Graph()
        self.measures = {}        # measure name -> scalar result
        self.nodesmeasures = {}   # measure name -> per-node values (or None)
        self.edgesmeasures = {}   # measure name -> per-edge/pair values (or None)
        for index in self.valid_indexes: self.measures[index] = None
    def getNodes(self):
        """Return the graph's nodes sorted in place.

        NOTE(review): assumes networkx 1.x where graph.nodes() returns a
        list; 2.x returns a NodeView with no .sort() -- confirm version.
        """
        nodes = self.graph.nodes()
        nodes.sort()
        return nodes
    def getEdges(self):
        """Return the graph's edges."""
        edges = self.graph.edges()
        return edges
    def loadGraph(self, nodes, edges):
        """Populate the graph from iterables of nodes and (u, v) edge pairs."""
        logging.info("Loading network from input variables")
        for node in nodes:
            self.graph.add_node(node)
        for edge in edges:
            self.graph.add_edge(edge[0], edge[1])
        self.graph.name = "Social Network"
        logging.info("Finished loading network.")
    def runMeasure(self, measure_name, backend):
        """Dispatch to calculate_<measure_name>(backend) if the name is valid."""
        if measure_name in self.valid_indexes:
            # NOTE(review): eval-based dispatch; safe only because
            # measure_name is validated against valid_indexes first.
            # getattr(self, 'calculate_' + ...)(backend) would be safer.
            eval('self.calculate_' + measure_name.replace(' ', '_') + '(backend)')
        else:
            logging.error("Unable to calculate the measure (%s)"%(measure_name))
    def returnResults(self, measure_name, value = 'value'):
        """Return the stored result: 'value' scalar, 'nodes' or 'edges' detail."""
        if measure_name in self.valid_indexes:
            if value == 'value': return self.measures[measure_name]
            elif value == 'nodes': return self.nodesmeasures[measure_name]
            elif value == 'edges': return self.edgesmeasures[measure_name]
        else:
            return None
    def displayResults(self, measure_name, value = 'value'):
        """Log the stored result using the format string from the config."""
        if measure_name in self.valid_indexes:
            if value == 'value': logging.info((conf.INDEX_TYPES[measure_name] + '.') % self.measures[measure_name])
            elif value == 'nodes': logging.info(str(self.nodesmeasures[measure_name] or '<null>'))
            elif value == 'edges': logging.info(str(self.edgesmeasures[measure_name] or '<null>'))
        else:
            logging.error("Unable to calculate the measure (%s)"%(measure_name))
    def calculateMeasures(self, backend=False):
        """Compute every valid measure.

        NOTE(review): passes backend=False regardless of the `backend`
        argument -- probably should forward `backend`.
        """
        for measure_name in self.valid_indexes:
            self.runMeasure(measure_name, backend=False)
    def calculate_density(self, backend=False):
        """Density as percentage of possible undirected edges present."""
        logging.info("Calculating density.")
        nodes = self.graph.nodes()
        edges = self.graph.edges()
        tot_edges = float(len(nodes) * (len(nodes)-1))
        tot_edges = tot_edges / 2
        num_edges = float(len(edges))
        # Adjacency indicator over all ordered node pairs (O(n^2) memory).
        w = {}
        for n1 in nodes:
            for n2 in nodes:
                w[n1,n2] = 0.0
        for n1,n2 in edges:
            w[n1,n2] = 1.0
        self.measures['density'] = num_edges / tot_edges * 100
        self.nodesmeasures['density'] = None
        self.edgesmeasures['density'] = w
    def calculate_geodesic(self, backend=False):
        """Mean shortest-path length over connected node pairs."""
        logging.info("Calculating geodesic.")
        path = self.floyd_warshall(backend)
        nodes = self.graph.nodes()
        dividend = 0
        geodesic = float(0)
        geodesic_edges = {}
        for i in nodes:
            for j in nodes:
                try:
                    geodesic_edges[i,j] = path[i,j]
                    geodesic += path[i,j]
                    dividend += 1
                except KeyError:
                    # Pair is disconnected: no entry in the distance map.
                    pass
        geodesic /= dividend
        self.measures['geodesic'] = geodesic
        self.nodesmeasures['geodesic'] = None
        self.edgesmeasures['geodesic'] = geodesic_edges
    def calculate_fragmentation(self, backend=False):
        """Percentage of node pairs with no connecting path."""
        logging.info("Calculating fragmentation.")
        nodes = self.graph.nodes()
        w = self.floyd_warshall(backend)
        fragmentation = float(0)
        for i in nodes:
            for j in nodes:
                try:
                    w[i,j]
                except KeyError:
                    # Missing distance entry => the pair is disconnected.
                    fragmentation += 1
                    pass
        fragmentation /= len(nodes)*(len(nodes)-1)
        self.measures['fragmentation'] = fragmentation * 100
        self.nodesmeasures['fragmentation'] = None
        self.edgesmeasures['fragmentation'] = w
    def calculate_diameter(self, backend=False):
        """Longest finite shortest-path distance in the graph."""
        logging.info("Calculating diameter.")
        path = self.floyd_warshall(backend)
        nodes = self.graph.nodes()
        diameter = float(0)
        for i in nodes:
            for j in nodes:
                try:
                    diameter = max(diameter, path[i,j])
                except KeyError:
                    pass
        self.measures['diameter'] = diameter
        self.nodesmeasures['diameter'] = None
        self.edgesmeasures['diameter'] = path
    def calculate_degree(self, backend=False):
        """Mean degree centrality, expressed as a percentage."""
        logging.info("Calculating degree.")
        degrees = degree_centrality(self.graph)
        degree = float(sum(degrees.values())/len(degrees.values()))
        self.measures['degree'] = degree * 100
        self.nodesmeasures['degree'] = degrees
        self.edgesmeasures['degree'] = None
    def calculate_centralization(self, backend=False):
        """Degree centralization: mean gap between max and each node's degree."""
        logging.info("Calculating centralization.")
        degrees = degree_centrality(self.graph)
        centralization = float(0)
        maxdegree = max(degrees.values())
        for degree in degrees.values():
            centralization += maxdegree-degree
        centralization /= len(degrees.values())-1
        self.measures['centralization'] = centralization * 100
        self.nodesmeasures['centralization'] = degrees
        self.edgesmeasures['centralization'] = None
    def calculate_closeness(self, backend=False):
        """Mean closeness centrality, expressed as a percentage."""
        logging.info("Calculating closeness.")
        closenesses = closeness_centrality(self.graph)
        closeness = float(sum(closenesses.values())/len(closenesses.values()))
        self.measures['closeness'] = closeness * 100
        self.nodesmeasures['closeness'] = closenesses
        self.edgesmeasures['closeness'] = None
    def calculate_eigenvector(self, backend=False):
        """Mean eigenvector centrality, expressed as a percentage."""
        logging.info("Calculating eigenvector.")
        eigenvectors = eigenvector_centrality(self.graph)
        eigenvector = float(sum(eigenvectors.values())/len(eigenvectors.values()))
        self.measures['eigenvector'] = eigenvector * 100
        self.nodesmeasures['eigenvector'] = eigenvectors
        self.edgesmeasures['eigenvector'] = None
    def calculate_betweenness(self, backend=False):
        """Mean betweenness centrality, expressed as a percentage."""
        logging.info("Calculating betweenness.")
        betweennesses = betweenness_centrality(self.graph)
        betweenness = float(sum(betweennesses.values())/len(betweennesses.values()))
        self.measures['betweenness'] = betweenness * 100
        self.nodesmeasures['betweenness'] = betweennesses
        self.edgesmeasures['betweenness'] = None
    def calculate_cliques(self, backend=False):
        """Count maximal cliques; per node, how many cliques contain it."""
        logging.info("Calculating cliques.")
        cliques = list(find_cliques(self.graph))
        w = {}
        nodes = self.graph.nodes()
        for node in nodes:
            w[node] = 0.0
            for clique in cliques:
                if node in clique:
                    w[node] += 1
        self.measures['cliques'] = len(cliques)
        self.nodesmeasures['cliques'] = w
        self.edgesmeasures['cliques'] = None
    def calculate_comembership(self, backend=False):
        """Mean number of shared maximal cliques over distinct node pairs.

        Raises NetworkTooBigException for > 500 nodes unless backend=True,
        since the pairwise co-membership map is O(n^2).
        """
        logging.info("Calculating comembership.")
        nodes = self.graph.nodes()
        n = len(nodes)
        if not backend and n > 500:
            raise network_big.NetworkTooBigException(n)
        cliques = list(find_cliques(self.graph))
        w = {}
        for clique in cliques:
            for node1 in clique:
                for node2 in clique:
                    try:
                        w[node1,node2] += 1
                    except KeyError:
                        w[node1,node2] = 1
        nodes = w.keys()
        comembership = float(0)
        for node1, node2 in nodes:
            if node1 != node2: comembership += w[node1,node2]
        num_nodes = len(self.graph.nodes())
        comembership /= num_nodes*(num_nodes-1)
        self.measures['comembership'] = comembership
        self.nodesmeasures['comembership'] = None
        self.edgesmeasures['comembership'] = w
    def calculate_components(self, backend=False):
        """Count connected components; flag nodes in non-trivial components.

        NOTE(review): nx.connected_component_subgraphs was removed in
        networkx >= 2.4 -- this assumes an older networkx.
        """
        logging.info("Calculating components.")
        components = nx.connected_component_subgraphs(self.graph)
        w = {}
        nodes = self.graph.nodes()
        for node in nodes:
            w[node] = 0.0
            for component in components:
                if len(component) > 1:
                    for node in component:
                        w[node] += 1
        self.measures['components'] = len(components)
        self.nodesmeasures['components'] = w
        self.edgesmeasures['components'] = None
    def floyd_warshall(self, backend):
        """All-pairs shortest paths as {(u, v): distance}; unreachable pairs omitted.

        Raises NetworkTooBigException for > 400 nodes unless backend=True.
        Distances are stored as int8 with 127 standing in for infinity.
        """
        nodes = self.graph.nodes()
        if not backend and len(nodes) > 400:
            raise network_big.NetworkTooBigException(len(nodes))
        logging.info("Computing Floyd-Warshall.")
        infvalue = 127 #sys.maxint
        F = nx.floyd_warshall_numpy(self.graph, dtype=np.int8, infvalue=infvalue)
        w = {}
        for i in range(0, len(nodes)):
            for j in range(0, len(nodes)):
                if not F[i,j] == infvalue:
                    w[nodes[i],nodes[j]] = F[i,j]
        return w
10,670 | d4326d33136843481c5afef7195c914b25a51e53 | #!/usr/bin/python
#import ConfigParser
import icalendar,sys,os,datetime
import stripe
import pytz
import urllib
import json
from dateutil import tz
from ..templateCommon import *
def utctolocal(dt, endofdate=False):
    """Normalize a calendar value to an aware America/New_York datetime.

    datetime inputs are converted with astimezone (naive inputs are
    interpreted as local time by Python 3.6+ -- confirm inputs are aware).
    date inputs become midnight Eastern, or 23:59:59 when endofdate=True.

    Cleanup: the original created an unused UTC tzinfo (its replace() call
    was commented out); that dead code is removed here.
    """
    to_zone = tz.gettz('America/New_York')
    if isinstance(dt, datetime.datetime):
        dt = dt.astimezone(to_zone)
    else:
        if endofdate:
            dt = datetime.datetime.combine(dt, datetime.time(hour=23, minute=59, second=59, tzinfo=to_zone))
        else:
            dt = datetime.datetime.combine(dt, datetime.time(tzinfo=to_zone))
    return dt
weekday=['Sun','Mon','Tues','Wed','Thurs','Fri','Sat'] # OUR Sunday=0 Convention!!
def crunch_calendar(rundate=None):
    """Fetch the autoplot iCal feed and decide what is billable this week.

    Args:
        rundate: optional "YYYY-MM-DD" string; defaults to the current
            Eastern-time date. The billing week runs Sunday..Saturday.

    Returns:
        (errors, warnings, debug, data, billables) where data carries the
        lease title/ids and data['Decision'] is 'bill', 'no_bill' or 'error'.

    Fix: the timezone name was 'America/New York' (missing underscore);
    dateutil's tz.gettz() returns None for unknown names, which made `now`
    naive and every comparison against the aware calendar datetimes raise
    TypeError. Corrected to 'America/New_York' (matching utctolocal).
    Also removed dead string blocks and the unused `summaries` list.
    """
    ICAL_URL = current_app.config['globalConfig'].Config.get("autoplot", "ICAL_URI")
    g = urllib.request.urlopen(ICAL_URL)
    data = g.read()
    print(data)
    cal = icalendar.Calendar.from_ical(data)
    g.close()

    # Effective run date. Keep tz-aware so week boundaries match the feed.
    if rundate:
        now = datetime.datetime.strptime(rundate, "%Y-%m-%d").replace(tzinfo=tz.gettz('America/New_York'))
    else:
        now = datetime.datetime.now().replace(tzinfo=tz.gettz('America/New_York'))

    # Convert Python's Monday=0 weekday to this module's Sunday=0 convention,
    # then derive the Sunday 00:00:00 .. Saturday 23:59:59 week window.
    dow = now.weekday()         # 0=Monday
    dow = (dow + 1) % 7         # 0=Sunday
    weeknum = int(now.strftime("%U"))
    weekstart = (now - datetime.timedelta(days=dow))
    weekstart = weekstart.replace(hour=0, minute=0, second=0, microsecond=0)
    weekend = weekstart + datetime.timedelta(days=7)
    weekend = weekend - datetime.timedelta(seconds=1)

    errors = []
    warnings = []
    billables = []
    debug = []
    data = {}
    debug.append("{2} Week #{3} - {0} through {1}".format(weekstart.strftime("%b-%d"), weekend.strftime("%b-%d"), weekstart.year, weeknum))
    data['title'] = "Auto Plot Lease {2} Week #{3} - {0} through {1}".format(weekstart.strftime("%b-%d"), weekend.strftime("%b-%d"), weekstart.year, weeknum)
    data['lease-id'] = "autoplot-lease-{2}-Week{3:02}".format(weekstart.strftime("%b-%d"), weekend.strftime("%b-%d"), weekstart.year, weeknum)
    data['weekid'] = "{2:04}-{3:02}".format(weekstart.strftime("%b-%d"), weekend.strftime("%b-%d"), weekstart.year, weeknum)

    for component in cal.walk():
        summary = {'errors': [], 'warnings': []}
        if component.name != 'VEVENT':
            print ("NOT A VEVENT!!!", component.name)
        else:
            billable = False
            members = []
            event = {}
            calstart = component['DTSTART'].dt
            calstart = utctolocal(calstart)
            calend = component['DTEND'].dt
            calend = utctolocal(calend, endofdate=True)
            if 'ORGANIZER' in component:
                for p in component['ORGANIZER'].params:
                    pass
            # Expand a WEEKLY;COUNT=n recurrence into n consecutive weeks.
            rrule = None
            weeks = 1
            if 'RRULE' in component and 'COUNT' in component['RRULE'] and 'FREQ' in component['RRULE']:
                rrule = component['RRULE']
                if rrule['FREQ'][0] == "WEEKLY":
                    weeks = rrule['COUNT'][0]
            for weekno in range(0, weeks):
                short = calstart.strftime("%b-%d %H:%M ") + component['SUMMARY']
                # Only occurrences overlapping the current billing week count.
                if (calstart <= weekend) and (weekstart < calend):
                    if 'ATTENDEE' not in component:
                        summary['errors'].append("No Attendees")
                    else:
                        if isinstance(component['ATTENDEE'], list):
                            attlist = component['ATTENDEE']
                        else:
                            attlist = [component['ATTENDEE']]
                        for a in attlist:
                            for p in a.params:
                                pass
                            if 'CUTYPE' in a.params and a.params['CUTYPE'] == 'INDIVIDUAL':
                                members.append(a.params['CN'])
                    # Billing rules by occurrence length:
                    #   <= 24h  partial day, never billed
                    #   <= 167h almost a week, billed with a warning
                    hrs = (calend - calstart).total_seconds() / 3600
                    if (hrs <= 24):
                        summary['warnings'].append("Partial day entry - NOT BILLING")
                    elif (hrs <= 167):
                        summary['warnings'].append("Entry isn't quite full week, but billing anyway")
                    if (hrs > 24):
                        if len(members) > 1:
                            summary['errors'].append("More than one member assigned: " + str(", ".join(members)))
                        elif len(members) == 0:
                            summary['errors'].append("No attendees in calendar entry")
                        else:
                            if not members[0].lower().endswith("@makeitlabs.com"):
                                summary['errors'].append("Non-MIL email: " + str(members[0]))
                            else:
                                billable = True
                                event['summary'] = short
                                event['member'] = members[0]
                    if (len(summary['errors']) == 0) and billable:
                        billables.append(event)
                    for e in summary['errors']:
                        errors.append(short + ": " + e)
                    for w in summary['warnings']:
                        warnings.append(short + ": " + w)
                # Advance to the next recurrence occurrence.
                calstart = calstart + datetime.timedelta(weeks=1)
                calend = calend + datetime.timedelta(weeks=1)

    if len(billables) == 0:
        warnings.append("WARNING - NO BILLABLES THIS WEEK!")
    elif len(billables) > 1:
        errors.append("ERROR - MULTIPLE BILLABLES THIS WEEK!")
    if (len(errors) != 0):
        data['Decision'] = 'error'
    elif (len(billables) == 0):
        data['Decision'] = 'no_bill'
    else:
        data['Decision'] = 'bill'
    return (errors, warnings, debug, data, billables)
def do_payment(customer,price,leaseid,description,test=False,pay=False):
    """Create (idempotently) and optionally pay a Stripe invoice for a lease.

    Idempotency works by tagging invoice items/invoices with the
    X-MIL-lease-id metadata and scanning existing pending items first, so
    re-running for the same leaseid never double-bills.

    Args:
        customer: Stripe customer id.
        price: Stripe price id to bill.
        leaseid: unique lease identifier used as the idempotency key.
        description: human-readable invoice description.
        test: NOTE(review): accepted but never used in this function.
        pay: when True, attempt to collect payment on the invoice.

    Returns:
        (errors, warnings, debug, status) where status is one of
        "invoiced", "already_invoiced", "already_paid_stripe", "paid",
        "payment_failed".
    """
    errors=[]
    warnings=[]
    debug=[]
    stripe.api_key = current_app.config['globalConfig'].Config.get('autoplot','stripe_token')
    #stripe.api_key = "sk_test_4eC39HqLyjWDarjtT1zdp7dc" # TEST KEY
    #print stripe.SKU.list(limit=99)
    #print stripe.Customer.list(limit=99)
    debug.append("Process Payment customer {0} Price {1} leaseid {2}".format(customer,price,leaseid))
    debug.append("Description: {0}".format(description))
    print ("""
** GET EXISTING INVOICE ITEM
""")
    # Page through existing outstanding invoice items in Stripe, collecting
    # any that were already created for a lease (keyed by X-MIL-lease-id).
    lastItem=None
    pendingleases={}
    while True:
        ii= stripe.InvoiceItem.list(
            limit=2,
            #customer="cus_J0mrDmtpzbfYOk", # Stripe Test Customer
            customer=customer,
            starting_after=lastItem        # cursor-based pagination
        )
        if ii:
            for d in ii['data']:
                lastItem=d['id']
                if 'metadata' in d:
                    if 'X-MIL-lease-id' in d['metadata']:
                        pendingleases[d['metadata']['X-MIL-lease-id']] = { 'invoice':d['invoice'],'invoiceitem':d['id']}
                        warnings.append("Lease already pending: "+d['metadata']['X-MIL-lease-id']+" in invoice "+str(d['invoice']))
                else:
                    warnings.append("No metadata in item")
        if not ii['has_more']: break
    # If this lease has no invoice item yet, create one in Stripe.
    if leaseid not in pendingleases:
        print ("""
** ADD INVOICE ITEM
""")
        ii= stripe.InvoiceItem.create(
            customer=customer,
            description=description,
            #price="sku_IpxYEyVzmdmEy6", # TEST
            price=price,
            metadata={
                'X-MIL-lease-id':leaseid,
                'X-MIL-lease-location':'autoplot'
            }
        )
        pendingleases[leaseid]= { 'invoice':None,'invoiceitem':ii['id']}
        None # We have a pending now, with no invoice
        debug.append("Created Invoice Item {0} for lease {1}".format(ii['id'],leaseid))
    # If the item is not yet attached to an invoice, create the invoice
    # (Stripe pulls in the customer's pending items automatically).
    if leaseid not in pendingleases or pendingleases[leaseid]['invoice'] is None:
        print ("""
** INVOICE
""")
        inv = stripe.Invoice.create(
            customer=customer,
            description=description,
            auto_advance=False,
            collection_method="charge_automatically",
            metadata={
                'X-MIL-lease-id':leaseid,
                'X-MIL-lease-location':'autoplot'
            }
            #period_start=,
            #period_end=json
        )
        pendingleases[leaseid]['invoice']=inv['id']
        debug.append("Created Invoice {0} for lease {1}".format(inv['id'],leaseid))
        status="invoiced"
    else:
        status="already_invoiced"
        warnings.append("Using existing Invoice {0} for lease {1}".format(pendingleases[leaseid]['invoice'],leaseid))
    # Re-fetch the invoice and, if requested, attempt collection.
    print ("INSPECT INVOICE")
    print ("***")
    inv = stripe.Invoice.retrieve(pendingleases[leaseid]['invoice'])
    print (json.dumps(inv,indent=2))
    if inv['paid'] == True and inv['status']=='paid':
        debug.append("Already paid")
        status="already_paid_stripe"
    elif pay:
        debug.append("Paying")
        try:
            stripe.Invoice.pay(inv)
            debug.append("Payment Done")
            status="paid"
        except BaseException as e:
            # BaseException: deliberately broad so a payment failure is
            # reported rather than crashing the billing run.
            errors.append("Payment failed on invoice {0}: {1}".format(inv['id'],e))
            print ("** Payment failed!")
            status="payment_failed"
    return (errors,warnings,debug,status)
|
10,671 | a9fd036dbb9a431bf8c9105efac748799b818074 | class ConfigException(Exception): pass
|
10,672 | 4c68c08c86b78dde08254fadee83bc2204e2e282 | # -*- coding: utf8 -*-
# Person (buyer) class.
class person():
    # Wallet created with the object: denomination -> count on hand.
    def __init__(self):
        self.my_money_dict = {5000 : 2 , 1000 : 1, 500 : 2 , 100 : 8}
class machine():
    """Vending machine (Python 2): takes cash, validates it, dispenses drinks."""
    # Product inventory created with the object: name -> {price, number}.
    def __init__(self):
        self.products_dict = {
            'vita500' : {'price' : 500, 'number' : 2},
            'milk' : {'price' : 700, 'number' : 13},
            'coffee' : {'price' : 900, 'number' : 8}
        }
        # Balance/change bookkeeping across purchases.
        self.re_try = 0
        self.change = 0
    # Step 1: insert money from the person's wallet until 0 is entered.
    def get_inserted_money (self, person, my_money_dict) :
        self.inserted_money_sum = 0     # running total of money inserted
        while(True):
            # Show the wallet contents ("my pocket situation").
            print '\n내 주머니 상황'
            print '--------------------'
            for key,value in sorted(my_money_dict.items()):
                print "%d원이 %d개 있어요" % (key,value)
            self.inserted_money = raw_input('\n돈을 넣으세요(입금을 마치려면 0입력) : ')
            # raw_input returns a string even for digits -> convert to int.
            self.inserted_money = int(self.inserted_money)
            # 0 ends the insertion loop.
            if self.inserted_money == 0:
                print '돈입력이 완료돠었습니다.'
                break
            # Exception 1: not a valid denomination (not a wallet key).
            elif self.inserted_money not in person.my_money_dict:
                print '\n--------------------'
                print '잘못된 돈을 넣으셨습니다. 올바른 돈을 넣어주시기 바랍니다.'
                print '100, 200, 500, 1000, 5000'
                self.inserted_money = 0
            # Exception 2: no coins/bills of that denomination left.
            elif person.my_money_dict[self.inserted_money] == 0:
                print '\n--------------------'
                print '넣으신 돈의 잔액(수)가 부족합니다.'
                print '다시 한번 확인해 주세'
                self.inserted_money = 0
            # Normal case: not the exit value and no exception applies.
            else:
                # Remove the inserted coin/bill from the wallet.
                person.my_money_dict[self.inserted_money] -= 1
                # Accumulate the inserted total.
                self.inserted_money_sum += self.inserted_money
                print '지금까지 %d가 투입되었네요' % self.inserted_money_sum
        # Nothing was deposited at all.
        if self.inserted_money_sum <= 0:
            print '돈이 입금되지 않았네요. 프로그램 종료 합니다'
        # Show the amount inserted so far.
        print '지금까지 %d가 투입되었네요' % self.inserted_money_sum
    # Choose a drink, validating selection, stock, and balance.
    def ready_get_product(self, person, my_money_dict):
        print '\n--------------------'
        print '구매가능 음료수'
        for key in self.products_dict:
            print key, '가격 %d, 수량%d' % (self.products_dict[key]['price'], self.products_dict[key]['number'])
        while(True):
            # Select a drink.
            self.choice_product = raw_input('\n 드실 음료수를 선택해주세요 : ')
            # Exception 1: not a valid product name.
            if self.choice_product not in self.products_dict:
                print '올바른 상품을 선택하지 않으셨습니다.'
                print '상품을 다시 골라주시기 바랍니다'
            # Exception 2: product sold out.
            elif self.products_dict[self.choice_product]['number'] ==0:
                print '수량이 부족합니다'
                print '다른 상품을 골라주세요'
            # Exception 3: insufficient balance -> prompt for more money.
            elif self.products_dict[self.choice_product]['price'] > self.inserted_money_sum:
                print '잔액이 부족합니다.'
                self.get_inserted_money(person, my_money_dict)
            # Exception 4: on a repeat purchase, compare against the change left.
            elif (self.re_try == 1) & (self.products_dict[self.choice_product]['price'] > self.change):
                print '잔액이 부족합니다.'
                self.get_inserted_money(person, my_money_dict)
            else:
                break
        # Dispense the selected product.
        self.get_product(person, my_money_dict)
    # Dispense the product, update stock/change, and offer another purchase.
    def get_product(self, person, my_money_dict):
        print '\n상품이 나왔습니다'
        print self.choice_product
        print '---------------------'
        # Decrement machine stock.
        self.products_dict[self.choice_product]['number'] -= 1
        # Compute remaining change.
        self.change = self.inserted_money_sum - self.products_dict[self.choice_product]['price']
        print '남은 잔액 %d' % self.change
        while(True):
            self.re_try = input('다른 상품을 고르시겠습니까? (yes : 1, no : 0) : ')
            if self.re_try ==0:
                print '반환금액 %d' % self.change
                print '이용해주셔서 감사합니다'
                break
            # Re-enter product selection for another purchase.
            elif self.re_try == 1:
                self.ready_get_product(person, my_money_dict)
                break
            # Invalid answer: ask again.
            else:
                print '입력이 잘못되었습니다. 다시 시도해주세요.'
# Main logic.
if __name__ == '__main__':
    # Create the instances: a person (buyer) and a machine.
    youngnam = person()
    nhn_next = machine()
    # Insert money (passing the buyer and the buyer's wallet dict),
    # then run the product-selection flow.
    nhn_next.get_inserted_money(youngnam, youngnam.my_money_dict)
    nhn_next.ready_get_product(youngnam, youngnam.my_money_dict)
|
10,673 | 13dad9c6228912bca3a1edad3541a34cb0ac662f | # ngrams.py
# A program that tallies n-gram counts in given text files
# Eric Alexander
# CS 111
def getOneGrams(filename):
    """Count one-grams (single words) in the given text file.

    Each line is cleaned by converting the punctuation set below to
    spaces, lower-cased, and split on whitespace; tokens are then tallied.
    Returns a collections.Counter (a dict subclass, so callers treating
    the result as a plain dict keep working).
    """
    from collections import Counter
    # Build the punctuation->space translation table once; str.translate
    # does the whole substitution in a single C-level pass per line,
    # replacing the original chained str.replace loop.
    punctuation = '!@#$%^&*()_+-=;:",./<>?\\'
    table = str.maketrans(punctuation, ' ' * len(punctuation))
    counts = Counter()
    with open(filename, 'r') as f:
        for line in f:
            counts.update(line.translate(table).lower().split())
    return counts
def getTwoGrams(filename):
    """Count two-grams (adjacent word pairs) in the given text file.

    Implements the previously empty stub. Lines are cleaned exactly like
    getOneGrams (punctuation to spaces, lower-cased, split), and pairs are
    formed within each line only. Keys are the two words joined by one
    space so printTopN can display them directly.
    """
    counts = {}
    with open(filename, 'r') as f:
        for line in f:
            for ch in '!@#$%^&*()_+-=;:",./<>?\\':
                line = line.replace(ch, ' ')
            words = line.lower().split()
            # zip pairs each word with its successor; the last word has none.
            for first, second in zip(words, words[1:]):
                gram = first + ' ' + second
                counts[gram] = counts.get(gram, 0) + 1
    return counts
def getNGrams(filename, n):
    """Count n-grams (runs of n adjacent words) in the given text file.

    Implements the previously empty stub, generalizing getOneGrams/
    getTwoGrams. Lines are cleaned the same way (punctuation to spaces,
    lower-cased, split) and n-grams are formed within each line only.
    Keys are the n words joined by single spaces. Lines shorter than n
    words contribute nothing.
    """
    counts = {}
    with open(filename, 'r') as f:
        for line in f:
            for ch in '!@#$%^&*()_+-=;:",./<>?\\':
                line = line.replace(ch, ' ')
            words = line.lower().split()
            for i in range(len(words) - n + 1):
                gram = ' '.join(words[i:i + n])
                counts[gram] = counts.get(gram, 0) + 1
    return counts
def printTopN(countDict, n):
    """Print the top-n n-grams by count, one per line as "<gram> <count>".

    Ties in count are broken alphabetically (same result as the original
    two stable sorts, done in one keyed sort). Fix: the original indexed
    dictItems[i] for i in range(n) and raised IndexError whenever the
    dictionary had fewer than n entries; slicing prints what exists.
    """
    ranked = sorted(countDict.items(), key=lambda pair: (-pair[1], pair[0]))
    for gram, count in ranked[:n]:
        print('{} {}'.format(gram, count))
def main():
    """Entry point: report the 50 most frequent words in Hamlet."""
    counts = getOneGrams('Shakespeare/Hamlet.txt')
    printTopN(counts, 50)

if __name__ == '__main__':
    main()
10,674 | 57091a09fc93aa997c989747fef01adbdc10fd0f | import csv
import pprint
import matplotlib.pyplot as plt
import numpy as np
def analyzeSolution(fname):
    """Plot, per difficulty, the probability of a proper puzzle vs givens.

    Reads a CSV with columns difficulty (1-4), givens (1-81) and proper
    (0/1), prints summary statistics, and shows a matplotlib scatter plot.
    NOTE(review): near-duplicate of analyzeSudoku below except for the
    normalization -- worth factoring the shared tallying out.
    """
    # Dataformat: [[total, proper],[...]] index is difficulty level -1
    data = [[[0,0] for givens in range(81)] for difficulty in range(4)]
    with open(fname) as f:
        c = csv.DictReader(f)
        for r in c:
            data[ int(r['difficulty'])-1 ][int(r['givens'])-1][0]+=1
            if int(r['proper']):
                data[ int(r['difficulty'])-1 ][int(r['givens'])-1][1]+=1
    pprint.pprint(data)
    x=np.array(range(1,82))
    difficulty = ["Simple","Easy", "Intermediate", "Expert"]
    for dif in range(4):
        # Normalize proper counts by the count at 81 givens.
        # NOTE(review): divides by data[dif][-1][1]; ZeroDivisionError if no
        # proper 81-given rows exist for this difficulty -- confirm inputs.
        t = np.array([float(i[1])/(data[dif][-1][1]) for i in data[dif]])
        plt.plot(x, t, 'o', label=difficulty[dif])
        # Python 2 print statement.
        print "Difficulty: %s|| Mean: %s, Variance: %s, Std: %s" % (difficulty[dif], np.mean(t),np.var(t),np.std(t))
    plt.xlabel('Number of Givens')
    plt.ylabel('Probability of proper')
    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
    plt.show()
def analyzeSudoku(fname):
    '''Plot raw proper-puzzle counts per givens count, one series per difficulty.

    Same CSV layout as analyzeSolution, but plots the unnormalised proper
    counts (the y-axis label "Probability of proper" is inherited from the
    sibling function and arguably misleading here).
    '''
    #Dataformat: [[total, proper],[...]] index is difficulty level -1
    data = [[[0,0] for givens in range(81)] for difficulty in range(4)]
    with open(fname) as f:
        c = csv.DictReader(f)
        for r in c:
            data[ int(r['difficulty'])-1 ][int(r['givens'])-1][0]+=1
            if int(r['proper']):
                data[ int(r['difficulty'])-1 ][int(r['givens'])-1][1]+=1
    pprint.pprint(data)
    x=np.array(range(1,82))
    difficulty = ["Simple","Easy", "Intermediate", "Expert"]
    for dif in range(4):
        t = np.array([float(i[1]) for i in data[dif]])
        plt.plot(x, t, 'o', label=difficulty[dif])
    plt.xlabel('Number of Givens')
    plt.ylabel('Probability of proper')
    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
    plt.show()
# analyzeSudoku('sudokuData.csv')
analyzeSolution('data.csv') |
10,675 | 427223eb56f9fa066a85bb0de142eede918828d6 | import pickle
import numpy as np
ds_ans_dict = pickle.load(open("ds_ans_dict.dat","rb"))
ds_cluster_centers = pickle.load(open("ds_ans_cluster_centers.dat","rb"))
#ds_cluster_centers_indices = pickle.load(open("ds_cluster_centers_indices.dat","rb"))
num_clusters = ds_cluster_centers.shape
print("cluster_centers shape:", num_clusters)
from sklearn.metrics.pairwise import euclidean_distances
def computeNearByClusters(cluster_centers):
    """Map each cluster index to the indices of its 5 nearest other clusters.

    For every center the distances to all centers are ranked; ranks 1-5 are
    kept (rank 0 is the center's zero distance to itself).
    """
    neighbours = {}
    for idx, center in enumerate(cluster_centers):
        # Column vector of distances from every center to this one.
        dists = euclidean_distances(cluster_centers, [center])
        # Ranks 1..5 of the sorted distance column.
        nearest = [row[0] for row in dists.argsort(axis=0)[1:6]]
        neighbours.setdefault(idx, nearest)
    return neighbours
nearby_centers_dict = computeNearByClusters(ds_cluster_centers)
print(nearby_centers_dict[2])
def findAllAnsInCluster(answers_dict):
    """Group ids by cluster label.

    ``answers_dict`` maps an id to a tuple whose second element is the
    cluster label; the result maps each label to the list of ids in it,
    in insertion order.
    """
    by_cluster = {}
    for item_id, value in answers_dict.items():
        by_cluster.setdefault(value[1], []).append(item_id)
    return by_cluster
questions_in_a_cluster_dict = findAllAnsInCluster(ds_ans_dict)
import math
# Vector helpers used for cosine-similarity scoring of embeddings.
def computeMagnitude(embedding):
    """Return the Euclidean norm of *embedding*, ignoring NaN entries."""
    total = sum(value * value for value in embedding if not math.isnan(value))
    return math.sqrt(total)
def computeCosine(ques_1, ques_2):
    """Cosine similarity of two vectors; positions where either entry is NaN
    are skipped in the dot product (NaNs are likewise excluded from the
    norms by computeMagnitude)."""
    dot = sum(
        ques_1[i] * ques_2[i]
        for i in range(len(ques_1))
        if not math.isnan(ques_1[i]) and not math.isnan(ques_2[i])
    )
    return dot / (computeMagnitude(ques_1) * computeMagnitude(ques_2))
import pandas as pd
answersDF = pd.read_csv('DS_answers.csv')
answers ={}
#Building a dictionary of answerId with associated questionId
for index, row in answersDF.iterrows():
answers[row['Id']] = row['ParentId']
#lookup parentID(Question ID) for answers
def lookupParentId(ansId):
    """Return the question id (ParentId) for the given answer id.

    Relies on the module-level ``answers`` dict built from DS_answers.csv;
    raises KeyError for an unknown answer id.
    """
    return answers[ansId]
from sklearn.metrics.pairwise import cosine_similarity
def generateAnsRecommendations(ques_id):
    """Return up to 31 (question_id, cosine score) pairs for one answer embedding.

    ``ques_id`` is a key of ``ds_ans_dict`` whose value is a tuple
    (embedding, cluster_label).  Candidates are all answers in the same
    cluster plus its nearby clusters; each candidate is scored by cosine
    similarity of the embeddings and mapped to its parent question id.
    """
    total_questions = []
    cluster_label = ds_ans_dict[ques_id][1]
    # NOTE(review): despite the original "10 nearby clusters" comment,
    # computeNearByClusters returns 5 neighbours per cluster.
    nearby_clusters = nearby_centers_dict[cluster_label]
    # Gather candidate answer ids from the neighbour clusters and the
    # home cluster itself.
    for cluster in nearby_clusters:
        total_questions.extend(questions_in_a_cluster_dict[cluster])
    total_questions.extend(questions_in_a_cluster_dict[cluster_label])
    # Score every candidate against the query embedding; keep
    # (parent question id, similarity) pairs.
    cosine_scores = []
    for ans in total_questions:
        ques = lookupParentId(ans)
        cosine_scores.append((ques,cosine_similarity(ds_ans_dict[ques_id][0].reshape(1,-1),ds_ans_dict[ans][0].reshape(1,-1))[0][0]))
    cosine_scores.sort(key=lambda x: x[1],reverse=True)
    # Top 31; self-matches are filtered out later downstream.
    return cosine_scores[0:31]
# Build per-question recommendation lists from the answer embeddings.
# NOTE(review): each key of ds_ans_dict appears to be an *answer* id
# (lookupParentId maps it to its question) -- confirm against how
# ds_ans_dict was built; the original comments called it a question id.
ds_ans_recommendations = {}
for key, value in ds_ans_dict.items():
    # Map the answer id to its parent question and accumulate that
    # question's recommendations (a question can have several answers).
    qid = lookupParentId(key)
    if qid not in ds_ans_recommendations.keys():
        ds_ans_recommendations[qid] = generateAnsRecommendations(key)
    else:
        ds_ans_recommendations[qid].extend(generateAnsRecommendations(key))
def combine(ans_recommendations, ques_recommendations):
    """Merge two {question_id: [(qid, score), ...]} dicts into one.

    Lists for a key present in both inputs are concatenated (answer-based
    recommendations first).  The inputs are left untouched: the original
    version stored the input lists themselves in the result, so merging the
    second dict extended (mutated) the lists inside ``ans_recommendations``.
    """
    merged = {}
    for source in (ans_recommendations, ques_recommendations):
        for key, recs in source.items():
            if key in merged:
                merged[key].extend(recs)
            else:
                # Copy so the merged list is independent of the input.
                merged[key] = list(recs)
    return merged
ds_ques_recommendations = pickle.load(open("ds_ques_recommendations.dat", "rb"))
combinedRec = combine(ds_ans_recommendations, ds_ques_recommendations)
def Sort_Tuple(tup):
    """Return the pairs sorted by their second element, descending."""
    return sorted(tup, key=lambda pair: pair[1], reverse=True)
# Post-process the merged recommendations: drop self-references,
# de-duplicate, and order each list by score (descending).
for key, value in combinedRec.items():
    deduped = {tup for tup in combinedRec[key] if tup[0] != key}
    combinedRec[key] = Sort_Tuple(deduped)
# Persist the results; context managers ensure the files are closed even
# if pickling fails (the original passed unclosed open() handles).
with open("ds_ans_recommendations.dat", "wb") as out_file:
    pickle.dump(ds_ans_recommendations, out_file)
with open("ds_combined_recommendations.dat", "wb") as out_file:
    pickle.dump(combinedRec, out_file)
|
10,676 | 1d0e8ceaae51a359def93f0d54a35054242d84fd | import pandas as pd
from sklearn import model_selection
from transformers import AdamW, get_linear_schedule_with_warmup
from torch.utils.data import DataLoader
from services.text_similarity.application.ai.model import BERTClassifier
from services.text_similarity.application.ai.training.src import utils
from services.text_similarity.application.ai.training.src.dataset import BERTDataset
from services.text_similarity.application.ai.training.src.engine import Engine
from services.text_similarity.application.ai.training.src.preprocess import Preprocess
from services.text_similarity.settings import Settings
class Train:
    """End-to-end fine-tuning driver for the BERT text-similarity classifier.

    Loads the question-pair CSV, builds train/validation data loaders,
    instantiates the model/optimizer/scheduler and runs the training loop
    with early stopping.  Entry point is :meth:`run`.
    """
    def __init__(self):
        # initialize required class
        self.settings = Settings
        self.engine = Engine()
        self.preprocess = Preprocess()
        self.early_stopping = utils.EarlyStopping(patience=self.settings.patience,
                                                  mode=self.settings.mode)
        # initialize required variables (populated by __initialize/__load_data)
        self.bert_text_model = None
        self.optimizer = None
        self.scheduler = None
        self.train_data_loader = None
        self.val_data_loader = None
        self.total_steps = None
        self.param_optimizer = None
        self.optimizer_parameters = None
    def __initialize(self):
        """Build the model, optimizer and LR scheduler (needs __load_data first)."""
        # Instantiate Bert Classifier
        self.bert_text_model = BERTClassifier()
        self.bert_text_model.to(self.settings.DEVICE)
        self.__optimizer_params()
        # Create the optimizer
        # NOTE(review): the learning rate is hard-coded to 5e-5 here even
        # though Settings exists -- confirm this is intentional.
        self.optimizer = AdamW(self.optimizer_parameters,
                               lr=5e-5,  # Default learning rate
                               eps=1e-8  # Default epsilon value
                               )
        # Set up the learning rate scheduler
        self.scheduler = get_linear_schedule_with_warmup(self.optimizer,
                                                         num_warmup_steps=0,  # Default value
                                                         num_training_steps=self.total_steps)
    def __optimizer_params(self):
        """Split model parameters into decayed and no-decay groups."""
        self.param_optimizer = list(self.bert_text_model.named_parameters())
        # Parameters whose names match settings.no_decay (e.g. biases,
        # LayerNorm weights) get zero weight decay.
        self.optimizer_parameters = [
            {
                "params": [
                    p for n, p in self.param_optimizer if not any(nd in n for nd in self.settings.no_decay)
                ],
                "weight_decay": 0.001,
            },
            {
                "params": [
                    p for n, p in self.param_optimizer if any(nd in n for nd in self.settings.no_decay)
                ],
                "weight_decay": 0.0,
            },
        ]
    def __create_data_loaders(self, sentence1, sentence2, targets, batch_size, num_workers):
        """Wrap sentence pairs and targets in a BERTDataset and DataLoader."""
        dataset = BERTDataset(sentence_1=sentence1,
                              sentence_2=sentence2,
                              targets=targets)
        data_loader = DataLoader(dataset, batch_size=batch_size, num_workers=num_workers)
        return data_loader
    def __load_data(self, csv_data_path):
        """Read the CSV, make a stratified train/validation split and build loaders."""
        df = pd.read_csv(csv_data_path).dropna().reset_index(drop=True)
        df_train, df_valid = model_selection.train_test_split(
            df,
            random_state=self.settings.seed_value,
            test_size=self.settings.test_size,
            stratify=df.is_duplicate.values
        )
        df_train = df_train.reset_index(drop=True)
        df_valid = df_valid.reset_index(drop=True)
        # creating Data Loaders
        # train data loader
        self.train_data_loader = self.__create_data_loaders(sentence1=df_train.question1.values,
                                                            sentence2=df_train.question2.values,
                                                            targets=df_train.is_duplicate.values,
                                                            num_workers=self.settings.TRAIN_NUM_WORKERS,
                                                            batch_size=self.settings.TRAIN_BATCH_SIZE)
        # validation data loader
        self.val_data_loader = self.__create_data_loaders(sentence1=df_valid.question1.values,
                                                          sentence2=df_valid.question2.values,
                                                          targets=df_valid.is_duplicate.values,
                                                          num_workers=self.settings.VAL_NUM_WORKERS,
                                                          batch_size=self.settings.VALID_BATCH_SIZE)
        # Total optimisation steps, used by the warmup scheduler.
        self.total_steps = int(len(df_train) / self.settings.TRAIN_BATCH_SIZE * self.settings.EPOCHS)
    def __train(self):
        """Run the epoch loop with validation and early stopping."""
        for epochs in range(self.settings.EPOCHS):
            self.engine.train_fn(data_loader=self.train_data_loader,
                                 model=self.bert_text_model,
                                 optimizer=self.optimizer,
                                 device=self.settings.DEVICE,
                                 scheduler=self.scheduler)
            val_loss, val_accuracy = self.engine.eval_fn(data_loader=self.val_data_loader,
                                                         model=self.bert_text_model,
                                                         device=self.settings.DEVICE)
            print(f"Validation accuracy = {val_accuracy}")
            # Early stopping checkpoints the model when val accuracy improves.
            self.early_stopping(epoch_score=val_accuracy,
                                model=self.bert_text_model,
                                model_path=self.settings.WEIGHTS_PATH)
            if self.early_stopping.early_stop:
                print("Early stopping")
                break
    def run(self):
        """Public entry point: load data, build the model and train.

        Exceptions are caught and printed rather than propagated.
        """
        try:
            print("Loading and Preparing the Dataset-----!! ")
            self.__load_data(csv_data_path=self.settings.TRAIN_DATA)
            print("Dataset Successfully Loaded and Prepared-----!! ")
            print()
            print("Loading and Initializing the Bert Model -----!! ")
            self.__initialize()
            print("Model Successfully Loaded and Initialized-----!! ")
            print()
            print("------------------Starting Training-----------!!")
            self.engine.set_seed()
            self.__train()
            print("Training complete-----!!!")
        except BaseException as ex:
            print("Following Exception Occurred---!! ", str(ex))
|
10,677 | 118b36e79bf1f9e5f9fa9c548099ed0c423d77b1 | from prettytable import PrettyTable
# Bisection method for locating a root of func(y) on [0.5, 2.5],
# printing each iteration in a PrettyTable.
XL, XU, x = .5, 2.5, 0
Xr = (XL + XU)/2
# Approximate relative error of the current bracket, in percent.
PE = abs((XU-XL)/(XU+XL)) * 100
def func(y):
    """Open-channel flow residual; a root is the sought depth y."""
    return 1-(400/(9.81*(3*y+(y**2)/2)**3))*(3+y)
# NOTE(review): the column is labelled "F(XR)*F(XU)" but the value added
# below is func(XL) * func(Xr) -- confirm which was intended.
t = PrettyTable(["Iteration", "Xl", "XU", "F(XL)", "F(XU)", "F(XR)*F(XU)", "XR", "F(XR)", "PE"])
t.add_row([x, "%.4f" % XL, "%.4f" % XU, "%.4f" % func(XL), "%.4f" % func(XU), "%.4f" % float(func(XL) * func(Xr)),
           "%.4f" % Xr, "%.4f" % func(Xr), "%.4f" % PE])
# Iterate until the error drops below 1% or 10 iterations have run.
while PE > 1 and x < 10:
    # Keep the half-interval whose endpoints bracket the sign change.
    if func(XL)*func(Xr) < 0:
        XU = Xr
    else:
        XL = Xr
    Xr = (XL + XU) / 2
    PE = abs((XU - XL) / (XU + XL)) * 100
    x = x+1
    t.add_row([x, "%.4f" % XL, "%.4f" % XU, "%.4f" % func(XL), "%.4f" % func(XU), "%.4f" % float(func(XL) * func(Xr)),
               "%.4f" % Xr, "%.4f" % func(Xr), "%.4f" % PE])
print(t)
# Keep the console window open until the user presses Enter.
end = input()
10,678 | 11e6ebdefc55ef7ce723fd85efd857b2102c26e9 | from setuptools import setup
def readme():
    """Return the contents of README.rst for use as the long description."""
    with open('README.rst') as handle:
        contents = handle.read()
    return contents
setup(name='logomaker',
version='0.8.0',
description='Package for making Sequence Logos',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Bio-Informatics',
],
keywords='Sequence Logos',
url='http://logomaker.readthedocs.io',
author='Ammar Tareen and Justin B. Kinney',
author_email='tareen@cshl.edu',
license='MIT',
packages=['logomaker'],
include_package_data=True,
install_requires=[
'numpy',
'matplotlib>=2.2.2',
'pandas'
],
zip_safe=False) |
10,679 | b835e4c4512c10a0ca92ec1c232b370f74c60794 | n = int(input())
P = tuple(map(int, input().split()))
Q = tuple(map(int, input().split()))

import itertools

# itertools.permutations yields permutations of 1..n in lexicographic
# order, so a permutation's list position is its lexicographic rank.
perms = list(itertools.permutations(range(1, n + 1)))

# list.index replaces the original manual search loops, which also left
# `a`/`b` unbound (NameError) if a permutation were somehow missing.
a = perms.index(P)
b = perms.index(Q)
print(abs(a - b))
|
10,680 | 30f3f8501096c8085ab41eee5d190ed6c3c238e1 | from flask import Blueprint, render_template, redirect, url_for, abort
from nsweb.models.analyses import (Analysis, AnalysisSet, TopicAnalysis,
TermAnalysis)
import json
import re
from flask_user import login_required, current_user
from nsweb.initializers import settings
from nsweb.controllers import error_page
from os.path import join
bp = Blueprint('analyses', __name__, url_prefix='/analyses')
### TOP INDEX ###
@bp.route('/')
def list_analyses():
    """Render the analyses index page with the current term-analysis count."""
    n_terms = TermAnalysis.query.count()
    return render_template('analyses/index.html', n_terms=n_terms)
### ROUTES COMMON TO ALL ANALYSES ###
def find_analysis(name, type=None):
    ''' Retrieve analysis by either id (when int) or name (when string).

    Purely numeric names are treated as primary keys; anything else is a
    name lookup, optionally restricted to the given analysis type.
    Returns None when nothing matches.
    '''
    # Raw string fixes the invalid "\d" escape sequence (a
    # DeprecationWarning today and a SyntaxError in future Python).
    if re.match(r'\d+$', name):
        return Analysis.query.get(name)
    query = Analysis.query.filter_by(name=name)
    if type is not None:
        query = query.filter_by(type=type)
    return query.first()
@bp.route('/<string:id>/')
def show_analysis(id):
    """Redirect an analysis id/name to its type-specific page.

    Unknown analyses render the missing-analysis page.
    NOTE(review): an analysis whose type is neither 'term' nor 'topic'
    falls through and returns None -- confirm all types are covered.
    """
    analysis = find_analysis(id)
    if analysis is None:
        return render_template('analyses/missing.html', analysis=id)
    if analysis.type == 'term':
        return redirect(url_for('analyses.show_term', term=analysis.name))
    elif analysis.type == 'topic':
        return redirect(url_for('analyses.show_topic', number=analysis.number,
                                topic_set=analysis.analysis_set.name))
@bp.route('/terms/')
def list_terms():
    """Render the term-analyses index page."""
    return render_template('analyses/terms/index.html')
@bp.route('/terms/<string:term>/')
def show_term(term):
    """Render the page for a single term analysis (or the missing page)."""
    analysis = find_analysis(term, type='term')
    if analysis is None:
        return render_template('analyses/missing.html', analysis=term)
    # cog_atlas is stored as a JSON string; fall back to an empty object.
    return render_template('analyses/terms/show.html',
                           analysis=analysis,
                           cog_atlas=json.loads(analysis.cog_atlas or '{}'))
### TOPIC-SPECIFIC ROUTES ###
@bp.route('/topics/')
def list_topic_sets():
    """Render the index of all topic analysis sets."""
    topic_sets = AnalysisSet.query.filter_by(type='topics')
    return render_template('analyses/topics/index.html',
                           topic_sets=topic_sets)
@bp.route('/topics/<string:topic_set>/')
def show_topic_set(topic_set):
    """Render the page for one topic set, looked up by name."""
    # Re-binds the route parameter to the AnalysisSet row (None if absent).
    topic_set = AnalysisSet.query.filter_by(name=topic_set).first()
    return render_template('analyses/topics/show_set.html',
                           topic_set=topic_set)
@bp.route('/topics/<string:topic_set>/<string:number>')
def show_topic(topic_set, number):
    """Render a single topic, hyperlinking its terms that have term analyses."""
    topic = TopicAnalysis.query.join(AnalysisSet).filter(
        TopicAnalysis.number == number, AnalysisSet.name == topic_set).first()
    if topic is None:
        return render_template('analyses/missing.html', analysis=None)
    # All known term-analysis names, used to decide which terms to link.
    terms = [t[0] for t in TermAnalysis.query.with_entities(
        TermAnalysis.name).all()]
    top = topic.terms.split(', ')
    def map_url(x):
        # Wrap the term in a link when a term analysis page exists for it.
        if x in terms:
            return '<a href="%s">%s</a>' % (url_for('analyses.show_term',
                                                    term=x), x)
        return x
    # NOTE(review): this overwrites topic.terms on the mapped ORM object
    # with HTML markup for rendering -- confirm the session is not flushed.
    topic.terms = ', '.join(map(map_url, top))
    return render_template('analyses/topics/show.html',
                           analysis_set=topic.analysis_set, analysis=topic)
# Show custom analysis page for explanation
@bp.route('/custom/')
def list_custom_analyses():
    """Render the static explanation page for custom analyses."""
    return render_template('analyses/custom/index.html')
|
10,681 | 00d41d0d2b9d48418a69bcbe63a36a621ca3d7bb | coords = [[1, 5, 9], [2, 6, 10], [3, 7, 11], [4, 8, 12]]
# Print every coordinate value from the grid, row by row.
for row in coords:
    for value in row:
        print(value)
|
10,682 | 70150cf97cf8769c6d8eec559057afe7734f24b8 | """Blog Model."""
from flask_blog import db
class Blog(db.Model):
    """SQLAlchemy model for a blog, owned by an Author via ``admin``."""
    id = db.Column(db.Integer, primary_key=True)  # surrogate primary key
    name = db.Column(db.String(80))  # display name of the blog
    admin = db.Column(db.Integer, db.ForeignKey('author.id'))  # owning author id
    def __init__(self, name, admin):
        """Create a blog with the given name and owning author id."""
        self.name = name
        self.admin = admin
    def __repr__(self):
        """Debug representation showing the blog name."""
        return '<Blog %r>' % self.name
10,683 | c3cbff41c932d247125efc896dbf2c90a6cc1f46 | from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
import settings
import circuits.views
import os
# URL routing table.  NOTE(review): patterns() is the pre-Django-1.10
# style -- this file targets a legacy Django version.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'quantummechanic.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^$', circuits.views.index, name='index'),
    url(r'^admin/', include(admin.site.urls)),
    # sandbox accepts an optional numeric id of up to 10 digits.
    url(r'^sandbox/(?P<n>\d{0,10})', circuits.views.sandbox, name='sandbox'),
    url(r'^sandbox/', circuits.views.sandbox_default, name='sandbox_default'),
    url(r'^puzzle/', circuits.views.puzzle, name='puzzle'),
    url(r'^addgate/', circuits.views.addgate, name='addgate'),
    url(r'^undo/', circuits.views.undo, name='undo'),
    url(r'^clear/', circuits.views.clear, name='clear'),
)
# Serve media files through Django itself only in DEBUG404 mode.
if settings.DEBUG404:
    urlpatterns += patterns('',
        (r'^media/(?P<path>.*)$', 'django.views.static.serve',
        {'document_root': os.path.join(os.path.dirname(__file__),'media')})
    )
|
10,684 | a0b0749cc43508d6bf17971f2ef67d178cfb2c36 | from .dict_menu import DictMenu
from .dict_table import DictTable
from .dict_dialog import DictDialog
from .dict_tree import DictTree
|
10,685 | 14f9a5f60836c4756c0857006e0637c840b58416 | vowels = "aeiou"
consonants = "bcdfghjklmnpqrstvwxyz"
def decode(tone):
    """Decode a mnemonic word into its numeric PIN string.

    Each consonant/vowel pair encodes one number: consonant index * 5 +
    vowel index, appended to the PIN as a decimal string.  A vowel that
    appears before any consonant is skipped (the original crashed there
    with an unbound ``idx``).  Intermediate state and the final PIN are
    printed (parenthesised prints work on both Python 2 and 3), and the
    PIN is also returned so callers can use it programmatically.
    """
    s = tone.lower()
    pin = ''
    idx = None  # index of the most recent consonant, if any
    for ch in s:
        if ch in consonants:
            idx = consonants.find(ch)
        elif ch in vowels:
            if idx is None:
                # Vowel with no preceding consonant: nothing to pair with.
                continue
            idx2 = vowels.find(ch)
            print('pin {0} idx {1} idx2 {2}'.format(pin, idx, idx2))
            pin = str(pin) + str(idx * 5 + idx2)
    print(pin)
    return pin
if __name__ == '__main__':
decode('bomelela')
decode('bomeluco')
|
10,686 | ab2747edf5ebe27a8ae0421704f9227ffce1a3bb | import numpy as np
import pandas as pd
import scipy.io as scio
import h5py
import sys
import datetime
# Impute a single-cell expression matrix with a Network-Enhanced (NE)
# similarity matrix: argv[1] = .mat similarity file, argv[2] = expression
# data (csv/tsv), argv[3] = output path for the imputed tsv.
open_file = sys.argv[1]
data_file = sys.argv[2]
save_file = sys.argv[3]
# Cell/gene label files live alongside the dataset file.
dataset_len = len(data_file.split('/')[-1])
dir_path = data_file[:-dataset_len]
cell_file = dir_path + 'cell_filtered.txt'
gene_file = dir_path + 'gene_filtered.txt'
cells = np.loadtxt(cell_file, dtype=str, delimiter='\n')
genes = np.loadtxt(gene_file, dtype=str, delimiter='\n')
# MATLAB v7.3 files must be read through h5py; older ones via scipy.io.
V7 = True
path = open_file
if V7 == True:
    matrix = {}
    with h5py.File(path,'r') as f:
        for k, v in f.items():
            matrix[k] = np.array(v)
else:
    matrix = scio.loadmat(path)
print('start:',datetime.datetime.now())
matrix = matrix['W_singlecell_NE']
# sim_sh boosts each cell's self-similarity before row normalisation.
sim_sh = 1.5
f_sh = 0.01
print('matrix load finished')
for i in range(len(matrix)):
    matrix[i][i] = sim_sh * max(matrix[i])
# Row-normalise so each row of Mnorm sums to 1 (a weighting over cells).
Mnorm = matrix.copy()
for i in range(len(Mnorm)):
    Mnorm[i] = Mnorm[i]/(sum(Mnorm[i]))
path = data_file
file_type = path.split('.')[-1]
if file_type == 'tsv':
    data = pd.read_csv(path, index_col=0, sep='\t').values.T
else:
    data = pd.read_csv(path).values
print('data load finished')
print(data.shape)
# Imputation = similarity-weighted average of expression profiles.
data_impute = Mnorm.dot(data)
'''
for i in range(len(data_impute)):
    r = np.max(data[i])/np.max(data_impute[i])
    data_impute[i] = data_impute[i] * r
mask = data.copy()
mask[mask > 0] = -1
mask = mask + 1
'''
impute = data_impute
#impute = mask * data_impute + data
print('imputed')
print('end:', datetime.datetime.now())
# Write the imputed matrix as genes x cells tsv.
path = save_file
impute = impute.T
impute = pd.DataFrame(impute, index=genes, columns=cells)
impute.to_csv(path, index=True, header=True, sep='\t')
print('save imputed data finished')
print(datetime.datetime.now(),'NE end')
10,687 | 606afbbe6ad09efeb45a098b83b867a3dc261a3d | import argparse
import sys
import os
import time
import math
import tensorflow as tf
NUM_CLASSES = 10
# 28x28
IMAGE_SIZE = 28
IMAGE_PIXELS = IMAGE_SIZE * IMAGE_SIZE
# function for creating Artificial neural network
# function for creating Artificial neural network
def inference(images, hidden_units1,hidden_units2):
    """Build a 2-hidden-layer MLP over flattened MNIST images.

    Returns the unactivated logits tensor of shape [batch, NUM_CLASSES];
    softmax is applied by the loss function, not here.  The local names
    ``weights``/``b`` are deliberately reused in each name scope.
    """
    with tf.name_scope('H_layer_1'):
        # hidden layer 1 : 784 -> hidden_units1, stddev scaled by fan-in
        weights = tf.Variable(
            tf.truncated_normal(
                [IMAGE_PIXELS,hidden_units1],stddev=1.0 / math.sqrt(float(IMAGE_PIXELS)),
            ),dtype=tf.float32,name='weight_1'
        )
        b = tf.Variable(tf.zeros([hidden_units1]),name='biases')
        # create first hidden layer
        hidden1 = tf.nn.relu(tf.matmul(images,weights) + b,name='Hidden_1')
    with tf.name_scope('H_layer_2'):
        # hidden layer 2: hidden_units1 -> hidden_units2
        weights = tf.Variable(
            tf.truncated_normal([hidden_units1,hidden_units2],stddev=1.0/math.sqrt(float(hidden_units1))),
            dtype=tf.float32,
            name='weights_2'
        )
        b = tf.Variable(tf.zeros(hidden_units2),name='biases')
        hidden2 = tf.nn.relu(tf.matmul(hidden1,weights)+b,name = 'hidden_2')
    with tf.name_scope('H_layer_output'):
        # output projection: hidden_units2 -> NUM_CLASSES
        weights = tf.Variable(tf.truncated_normal([hidden_units2,NUM_CLASSES],stddev=0.1/math.sqrt(float(hidden_units2))), name='weights_3')
        b = tf.Variable(tf.zeros([NUM_CLASSES]),name='biases')
        # logits # don't use activation function here, as it's being taken care by the loss function
        # cross entropy
        output_layer = tf.matmul(hidden2,weights)+b
        return output_layer
def loss(ac_labels,pred_logits):
    """Mean sparse softmax cross-entropy between integer labels and logits."""
    cross_ent = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=ac_labels,
        logits=pred_logits,
        name='Cross_Entropy'
    )
    return tf.reduce_mean(cross_ent,name='cross_ent_mean')
def training(loss,learning_rate):
    """Create the training op for *loss*.

    Adds a scalar loss summary, maintains a global-step counter, and
    returns a gradient-descent op.  The optimizer now actually uses the
    ``learning_rate`` argument; the original hard-coded 0.3 and silently
    ignored the parameter.
    """
    # add loss summary for visalization
    tf.summary.scalar('loss',loss)
    # global step, incremented once per minimize() call
    global_step = tf.Variable(0,dtype=tf.int32,trainable=False,name='global_step')
    # training
    train = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(loss,global_step=global_step)
    return train
def evaluation(logits,labels):
    """Return the number of examples whose top-1 prediction matches its label."""
    return tf.reduce_sum(tf.cast(tf.nn.in_top_k(logits,labels,1),tf.int32))
|
10,688 | 40765a4c52795050ef81ebbed08c5d936e7957a7 | import unittest
import pytest
from os import environ
environ['CI'] = environ.get('CI') or 'true'
import rl
class DQNTest(unittest.TestCase):
    """Smoke tests: each rl runner executes headless on CartPole-v0.

    The three tests shared identical post-condition assertions; they are
    factored into ``_check_sys_vars`` to remove the triplication.
    """

    def _check_sys_vars(self, sys_vars):
        """Shared checks: rendering stays off, a dict is returned, and at
        least one episode completed."""
        assert(sys_vars['RENDER'] == False)
        # ensure it runs, and returns the sys_vars
        assert(isinstance(sys_vars, dict))
        assert(sys_vars['epi'] > 0)

    def test_run_gym_tour(self):
        problem = 'CartPole-v0'
        param = {'e_anneal_steps': 10000,
                 'learning_rate': 0.01,
                 'n_epoch': 1,
                 'gamma': 0.99}
        self._check_sys_vars(rl.run_gym_tour.run_session(problem, param))

    def test_run_tabular_q(self):
        problem = 'CartPole-v0'
        param = {'e_anneal_steps': 10000,
                 'learning_rate': 0.01,
                 'gamma': 0.99}
        self._check_sys_vars(rl.run_tabular_q.run_session(problem, param))

    def test_run_dqn(self):
        problem = 'CartPole-v0'
        param = {'e_anneal_steps': 10000,
                 'learning_rate': 0.01,
                 'n_epoch': 1,
                 'gamma': 0.99}
        self._check_sys_vars(rl.run_dqn.run_session(problem, param))
|
10,689 | 8b007fee129c90a6a7633853f66a1a08008ebb96 | import unittest
from unittest.mock import Mock, patch
from mstrio.microstrategy import Connection
import json
from nose.tools import assert_true
class TestMicrostrategy(unittest.TestCase):
    """Unit tests for mstrio Connection version parsing, with the REST
    login/projects calls mocked out."""
    @patch('mstrio.api.projects.projects', autospec=True)
    @patch('mstrio.api.authentication.login', autospec=True)
    def setUp(self, mock_login, mock_projects):
        """Build a Connection against canned credentials with mocked auth."""
        with open('production/tests/api-responses/misc/server_status.json') as server_status:
            self.__server_status =json.load(server_status)
        self.username = 'user'
        self.password = 'pass'
        self.base_url = 'https://test-env.customer.cloud.microstrategy.com/MicroStrategyLibrary/api/'
        self.project_id = 'B7CA92F04B9FAE8D941C3E9B7E0CD754'
        # Expected versions parsed from the canned server_status payload.
        self.__web_version = '11.1.0400'
        self.__iserver_version = '11.1.0400'
        mock_login.return_value.ok = True
        mock_projects.return_value.ok = True
        self.conn = Connection(self.base_url, self.username, self.password,
                               self.project_id)
    @patch('mstrio.api.misc.server_status', autospec=True)
    def test_check_version(self, mock_server_status):
        """Test that response is parsed correctly and the version numbers are properly set."""
        # Configure the mock to return a response with an OK status code.
        # NOTE(review): this assignment is immediately overwritten by the
        # Mock(ok=True) below and has no effect.
        mock_server_status.return_value.ok = True
        # mock the response from misc.server_status()
        mock_server_status.return_value = Mock(ok=True)
        mock_server_status.return_value.json.return_value = self.__server_status
        # __check_version and the version fields are name-mangled private
        # members of Connection, hence the _Connection__ prefix.
        self.assertIsInstance(self.conn._Connection__check_version(), bool)
        assert_true(mock_server_status.called)
        self.assertEqual(self.conn._Connection__web_version, self.__web_version)
        self.assertEqual(self.conn._Connection__iserver_version,
                         self.__iserver_version)
if __name__ == '__main__':
unittest.main()
|
10,690 | 0541992af70daa6c3bfa01b0fb29e6a3e8b37cc8 | from keras.models import Model
from keras.optimizers import Adam
from keras.layers import *
def build_model(vectorizer, embed_dim, num_layers, recurrent_dim,
                lr, dropout, num_classes=2):
    """Build and compile a stacked bidirectional-LSTM sequence tagger.

    Syllable ids are embedded (with masking for padding), passed through
    ``num_layers`` summed bidirectional LSTM layers -- each followed by
    dropout -- and projected per timestep to a softmax over
    ``num_classes``.
    """
    input_ = Input(shape=(vectorizer.max_len,), dtype='int32')
    hidden = Embedding(input_dim=len(vectorizer.syll2idx),
                       output_dim=embed_dim,
                       mask_zero=True,
                       input_length=vectorizer.max_len)(input_)
    hidden = Dropout(dropout)(hidden)

    for layer_idx in range(num_layers):
        hidden = Bidirectional(LSTM(units=recurrent_dim,
                                    return_sequences=True,
                                    activation='tanh',
                                    recurrent_dropout=dropout,
                                    name='enc_lstm_' + str(layer_idx + 1)),
                               merge_mode='sum')(hidden)
        hidden = Dropout(dropout)(hidden)

    dense = TimeDistributed(Dense(num_classes, activation='relu'),
                            name='dense')(hidden)
    output_ = Activation('softmax', name='out')(dense)

    model = Model(inputs=input_, outputs=output_)
    model.compile(optimizer=Adam(lr=lr),
                  loss={'out': 'categorical_crossentropy'},
                  metrics=['accuracy'])
    return model
10,691 | c9cc4c0441410617309e45c5cbda271c10322c34 | import json
import os
from pathlib import Path
from shutil import rmtree
from subprocess import DEVNULL, PIPE, CalledProcessError, run # nosec
from tempfile import TemporaryDirectory
from typing import Any, Dict, Optional, Set
import click
import typer
from cookiecutter.generate import generate_files
from git import Repo
from .utils import (
example,
generate_cookiecutter_context,
get_cookiecutter_repo,
get_cruft_file,
is_project_updated,
json_dumps,
)
try:
import toml # type: ignore
except ImportError: # pragma: no cover
toml = None # type: ignore
CruftState = Dict[str, Any]
@example(skip_apply_ask=False)
@example()
def update(
    project_dir: Path = Path("."),
    cookiecutter_input: bool = False,
    skip_apply_ask: bool = True,
    skip_update: bool = False,
    checkout: Optional[str] = None,
    strict: bool = True,
) -> bool:
    """Update specified project's cruft to the latest and greatest release.

    Renders the linked cookiecutter template at both the recorded commit
    and the latest commit, diffs the two outputs, applies the diff to the
    project and records the new commit in the .cruft.json file.

    Returns True on success (including the already-up-to-date no-op) and
    False when the git working tree is dirty or the user declines the
    update.
    """
    pyproject_file = project_dir / "pyproject.toml"
    cruft_file = get_cruft_file(project_dir)
    # If the project dir is a git repository, we ensure
    # that the user has a clean working directory before proceeding.
    if not _is_project_repo_clean(project_dir):
        typer.secho(
            "Cruft cannot apply updates on an unclean git project."
            " Please make sure your git working tree is clean before proceeding.",
            fg=typer.colors.RED,
        )
        return False
    cruft_state = json.loads(cruft_file.read_text())
    with TemporaryDirectory() as compare_directory_str:
        # Initial setup
        compare_directory = Path(compare_directory_str)
        template_dir = compare_directory / "template"
        repo = get_cookiecutter_repo(cruft_state["template"], template_dir, checkout)
        directory = cruft_state.get("directory", None)
        if directory:
            template_dir = template_dir / directory
        last_commit = repo.head.object.hexsha
        # Bail early if the repo is already up to date
        if is_project_updated(repo, cruft_state["commit"], last_commit, strict):
            typer.secho(
                "Nothing to do, project's cruft is already up to date!", fg=typer.colors.GREEN
            )
            return True
        # Generate clean outputs via the cookiecutter
        # from the current cruft state commit of the cookiectter and the updated
        # cookiecutter.
        old_main_directory, new_main_directory, new_context = _generate_project_updates(
            compare_directory, cruft_state, template_dir, cookiecutter_input, repo
        )
        # Get all paths that we are supposed to skip before generating the diff and applying updates
        skip_paths = _get_skip_paths(cruft_state, pyproject_file)
        # We also get the list of paths that were deleted from the project
        # directory but were present in the template that the project is linked against
        # This is to avoid introducing changes that won't apply cleanly to the current project.
        deleted_paths = _get_deleted_files(old_main_directory, project_dir)
        # We now remove both the skipped and deleted paths from the new and old project
        _remove_paths(old_main_directory, new_main_directory, skip_paths | deleted_paths)
        # Given the two versions of the cookiecutter outputs based
        # on the current project's context we calculate the diff and
        # apply the updates to the current project.
        if _apply_project_updates(
            old_main_directory, new_main_directory, project_dir, skip_update, skip_apply_ask
        ):
            # Update the cruft state and dump the new state
            # to the cruft file
            cruft_state["commit"] = last_commit
            cruft_state["context"] = new_context
            cruft_state["directory"] = directory
            cruft_file.write_text(json_dumps(cruft_state))
            typer.secho(
                "Good work! Project's cruft has been updated and is as clean as possible!",
                fg=typer.colors.GREEN,
            )
        return True
#####################################
# Generating clean outputs for diff #
#####################################
def _generate_output(
    cruft_state: CruftState, template_dir: Path, cookiecutter_input: bool, new_output_dir: Path
):
    """Render the cookiecutter template into *new_output_dir*.

    Builds the cookiecutter context from the stored cruft context
    (prompting the user only when ``cookiecutter_input`` is True) and
    returns (context, generated project directory).
    """
    new_context = generate_cookiecutter_context(
        cruft_state["template"],
        template_dir,
        extra_context=cruft_state["context"]["cookiecutter"],
        no_input=not cookiecutter_input,
    )
    project_dir = generate_files(
        repo_dir=template_dir,
        context=new_context,
        overwrite_if_exists=True,
        output_dir=new_output_dir,
    )
    return new_context, Path(project_dir)
def _generate_project_updates(
    compare_directory: Path,
    cruft_state: CruftState,
    template_dir: Path,
    cookiecutter_input: bool,
    repo: Repo,
):
    """Render the template at both the latest and the recorded commit.

    Returns (old output dir, new output dir, new context): the "new"
    output reflects the repo's current HEAD, the "old" one the commit
    stored in the cruft state (the repo is hard-reset to it).
    """
    new_output_dir = compare_directory / "new_output"
    new_context, new_main_directory = _generate_output(
        cruft_state, template_dir, cookiecutter_input, new_output_dir
    )
    repo.head.reset(commit=cruft_state["commit"], working_tree=True)
    old_output_dir = compare_directory / "old_output"
    # We should not prompt for the cookiecutter input for the current
    # project state
    _, old_main_directory = _generate_output(cruft_state, template_dir, False, old_output_dir)
    return old_main_directory, new_main_directory, new_context
##############################
# Removing unnecessary files #
##############################
def _get_skip_paths(cruft_state: CruftState, pyproject_file: Path) -> Set[Path]:
    """Collect paths to exclude from the update, from the cruft state plus
    the optional [tool.cruft] section of pyproject.toml (when toml is
    importable)."""
    skip_entries = cruft_state.get("skip", [])
    if toml and pyproject_file.is_file():
        tool_config = toml.loads(pyproject_file.read_text())
        cruft_config = tool_config.get("tool", {}).get("cruft", {})
        skip_entries.extend(cruft_config.get("skip", []))
    return {Path(entry) for entry in skip_entries}
def _get_deleted_files(template_dir: Path, project_dir: Path):
cwd = Path.cwd()
os.chdir(template_dir)
template_paths = set(Path(".").glob("**/*"))
os.chdir(cwd)
os.chdir(project_dir)
deleted_paths = set(filter(lambda path: not path.exists(), template_paths))
os.chdir(cwd)
return deleted_paths
def _remove_paths(old_main_directory: Path, new_main_directory: Path, paths_to_remove: Set[Path]):
for path_to_remove in paths_to_remove:
old_path = old_main_directory / path_to_remove
new_path = new_main_directory / path_to_remove
for path in (old_path, new_path):
if path.is_dir():
rmtree(path)
elif path.is_file():
path.unlink()
#################################################
# Calculating project diff and applying updates #
#################################################
def _get_diff(old_main_directory: Path, new_main_directory: Path):
    """Return the textual git diff between the two generated output trees.

    Uses ``git diff --no-index`` so no repository is required; external
    diff drivers and color are disabled to keep the output machine
    applicable.  The absolute directory prefixes are stripped so the diff
    applies relative to a project root.
    """
    diff = run(
        [
            "git",
            "diff",
            "--no-index",
            "--no-ext-diff",
            "--no-color",
            str(old_main_directory),
            str(new_main_directory),
        ],
        stdout=PIPE,
        stderr=PIPE,
    ).stdout.decode()
    diff = diff.replace(str(old_main_directory), "").replace(str(new_main_directory), "")
    return diff
def _view_diff(old_main_directory: Path, new_main_directory: Path):
    """Show the pending update interactively via git's own diff pager."""
    run(["git", "diff", "--no-index", str(old_main_directory), str(new_main_directory)])
def _is_git_repo(directory: Path):
    """Report whether *directory* lies inside a git working tree.

    Taken from https://stackoverflow.com/a/16925062; works from any
    sub-folder of a repository.  git errors are discarded and simply
    yield False.
    """
    result = run(
        ["git", "rev-parse", "--is-inside-work-tree"],
        stdout=PIPE,
        stderr=DEVNULL,
        cwd=directory,
    )
    return b"true" in result.stdout
def _is_project_repo_clean(directory: Path):
    """True when *directory* is not a git repo at all, or its working tree
    has no pending changes (empty ``git status --porcelain``)."""
    if not _is_git_repo(directory):
        return True
    status = run(["git", "status", "--porcelain"], stdout=PIPE, stderr=DEVNULL, cwd=directory)
    return not status.stdout.strip()
def _apply_patch_with_rejections(diff: str, expanded_dir_path: Path):
    """Apply *diff* with ``git apply --reject``.

    Clean hunks are applied; conflicting hunks are written to ``*.rej``
    files for the user to resolve by hand.
    """
    apply_command = ["git", "apply", "--reject"]
    try:
        run(
            apply_command,
            input=diff.encode(),
            stdout=PIPE,
            stderr=PIPE,
            check=True,
            cwd=expanded_dir_path,
        )
    except CalledProcessError as error:
        typer.secho(error.stderr.decode(), err=True)
        conflict_note = (
            "Project directory may have *.rej files reflecting merge conflicts with the update."
            " Please resolve those conflicts manually."
        )
        typer.secho(conflict_note, fg=typer.colors.YELLOW)
def _apply_three_way_patch(diff: str, expanded_dir_path: Path):
    """Apply *diff* with a git three-way merge (``git apply -3``).

    On failure the git stderr is echoed; if the project repository is still
    clean (nothing was partially applied), the simpler ``--reject`` strategy
    is retried instead.
    """
    try:
        run(
            ["git", "apply", "-3"],
            input=diff.encode(),
            stderr=PIPE,
            stdout=PIPE,
            check=True,
            cwd=expanded_dir_path,
        )
    except CalledProcessError as error:
        typer.secho(error.stderr.decode(), err=True)
        if _is_project_repo_clean(expanded_dir_path):
            typer.secho(
                # Fixed typo in the user-facing message: "stratergy" -> "strategy".
                "Failed to apply the update. Retrying again with a different update strategy.",
                fg=typer.colors.YELLOW,
            )
            _apply_patch_with_rejections(diff, expanded_dir_path)
def _apply_patch(diff: str, expanded_dir_path: Path):
    """Apply *diff* to the project, choosing the best available strategy.

    A git three-way merge is the most robust way to apply patches, but it
    only works inside a git repository.  Outside one we fall back to
    ``git apply --reject``, which applies clean hunks and writes ``*.rej``
    files where there are conflicts.
    """
    handler = (
        _apply_three_way_patch
        if _is_git_repo(expanded_dir_path)
        else _apply_patch_with_rejections
    )
    handler(diff, expanded_dir_path)
def _apply_project_updates(
    old_main_directory: Path,
    new_main_directory: Path,
    project_dir: Path,
    skip_update: bool,
    skip_apply_ask: bool,
) -> bool:
    """Compute the template diff and (interactively, unless suppressed) apply it.

    Returns False only when the user cancels ("n").  Answering "s" marks the
    project up-to-date without applying; "v" re-displays the diff and asks
    again.  When both skip flags are given, the diff is applied silently.
    """
    diff = _get_diff(old_main_directory, new_main_directory)
    if not skip_apply_ask and not skip_update:
        # Seed with "v" so the prompt loop runs at least once.
        input_str: str = "v"
        while input_str == "v":
            typer.echo(
                'Respond with "s" to intentionally skip the update while marking '
                "your project as up-to-date or "
                'respond with "v" to view the changes that will be applied.'
            )
            input_str = typer.prompt(
                "Apply diff and update?",
                type=click.Choice(("y", "n", "s", "v")),
                show_choices=True,
                default="y",
            )
            if input_str == "v":
                if diff.strip():
                    _view_diff(old_main_directory, new_main_directory)
                else:
                    click.secho("There are no changes.", fg=typer.colors.YELLOW)
        if input_str == "n":
            typer.echo("User cancelled Cookiecutter template update.")
            return False
        elif input_str == "s":
            # Treat as applied without touching the project files.
            skip_update = True
    # Only patch when there is something to apply.
    if not skip_update and diff.strip():
        _apply_patch(diff, project_dir)
    return True
|
10,692 | 09cf5a787adbc13d732ee4edf840668a49668449 | '''
@Description:
@Author: wjx
@Date: 2018-09-23 17:06:51
@LastEditors: wjx
@LastEditTime: 2018-11-20 16:03:54
'''
# coding=utf-8
import pytest
from workspace.pages.login_page import LoginPage
from workspace.config.running_config import get_driver
class TestLoginCSC():
    """
    Login test cases for CSC (WinCloud Cloud Service Center).
    """

    @pytest.fixture(scope='function')
    def init(self):
        '''
        Acquire a browser driver before each test and quit it afterwards.
        '''
        self.driver = get_driver()
        yield
        self.driver.quit()

    # Parametrized credential combinations; the Chinese strings below are
    # runtime test data (case names and expected UI messages) and must match
    # the application exactly, so they are left untranslated.
    @pytest.mark.parametrize('casename, username, password, asserts',
                             [("用户名为空", '', 'password', '账号不能为空!'),
                              ("密码为空", 'admin', '', '密码不能为空!'),
                              ("本地登录", 'admin', '1234567890', '系统管理员'),
                              ("AD登录", 'wjx', 'Admin123', 'weijiaxin有一个超级长的名字')])
    def test_login_csc(self, init, casename, username, password, asserts):
        '''
        Exercise the login page with different username/password combinations.
        '''
        login_page = LoginPage(self.driver)  # create a login-page object
        login_page.open()
        login_page.input_username(username)
        login_page.input_password(password)
        if casename == 'AD登录':  # switch to AD-domain login for that case
            login_page.switch_usertype()
        login_page.click_submit()
        if login_page.on_page('WinCloud-CSC'):  # logged in: check the displayed user
            login_page.assert_login(asserts)
        else:  # login failed: check the tip message
            assert login_page.show_msg() == asserts
if __name__ == '__main__':
    # Run just this test module when executed directly.
    pytest.main(['./workspace/testcase/test_login.py'])
|
10,693 | 23278451016ae499af2608658c84d5dbe40db3fd | #!/usr/bin/env python
# Send tracebacks to a log file before anything else can fail.
from asterisk import agitb
agitb.enable(display=False, logdir='/var/log/asterisk/')

# import our libs
import sys
import asterisk.agi
import soundcloud  # kept: imported by the original script (unused here)
import random      # kept: imported by the original script (unused here)

# Initialise the AGI session and re-point the traceback handler at it.
agi = asterisk.agi.AGI()
agitb.enable(agi, display=False, logdir='/var/log/asterisk/')

agi.verbose("before reading args")

# Collect the two-digit code the caller keyed in.
try:
    my_var = agi.execapp('Read', 'digit,custom/search-by-track,2')
    # Bug fix: the original line was missing its closing parenthesis,
    # which made the whole script a SyntaxError.
    agi.verbose("digit pressed " + str(my_var))
except asterisk.agi.AGIException:
    agi.verbose('MyVar not set, exiting...', 3)
    agitb.handler()
    sys.exit(1)

# Two-digit "multi-tap" style encoding: the first digit selects the phone
# key, the second selects the letter on that key; 99 is space and 1X maps
# to the digits 0-9.  Replaces the original 40-branch if/elif chain.
# NOTE(review): codes are compared as ints, exactly as the original did --
# confirm asterisk.agi's Read result really arrives as an int.
KEY_CODE_TO_CHAR = {
    21: "a", 22: "b", 23: "c",
    31: "d", 32: "e", 33: "f",
    41: "g", 42: "h", 43: "i",
    51: "j", 52: "k", 53: "l",
    61: "m", 62: "n", 63: "o",
    71: "p", 72: "q", 73: "r", 74: "s",
    81: "t", 82: "u", 83: "v",
    91: "w", 92: "x", 93: "y", 94: "z",
    99: " ",
    10: "0", 11: "1", 12: "2", 13: "3", 14: "4",
    15: "5", 16: "6", 17: "7", 18: "8", 19: "9",
}

letter = KEY_CODE_TO_CHAR.get(my_var)
if letter is None:
    # Unrecognised code: fall back to an empty letter and play the error
    # prompt.  (The original crashed with NameError when my_var was falsy,
    # because no branch assigned `letter`; fixed here.)
    letter = ""
    agi.stream_file('error')

agi.set_variable('newLetter', letter)
agi.say_alpha(letter)
10,694 | 467c7e3749e31744e21071c70029ada028734501 | from django.views.generic import FormView
from api.admin.apis import IonicApi
from api.admin.forms import NotificationsIonicForm
class NotificationsIonic(FormView):
    """Admin form view that sends push notifications through the Ionic API."""

    # Optional form fields forwarded to the Ionic API when the user filled them in.
    SUPPORTED_PAYLOAD_KEYS = []

    template_name = 'admin/notifications/ionic.html'
    form_class = NotificationsIonicForm

    def form_valid(self, form):
        """Send the notification and re-render the form with the API result."""
        data = form.cleaned_data
        # Bug fix: the original wrote data.get('key', NotificationsIonic) --
        # the literal string 'key' with the class itself as default -- which
        # is always truthy, so every supported key was forwarded regardless
        # of its submitted value.  Look up the actual key instead.
        payload = {key: data[key] for key in self.SUPPORTED_PAYLOAD_KEYS if data.get(key)}
        filters = dict(
            bmi_from=data['bmi_from'],
            bmi_to=data['bmi_to'],
        )
        success, errors, warnings = IonicApi().notify(
            data['title'], data['message'], filters=filters, **payload
        )
        return self.render_to_response(self.get_context_data(
            form=form,
            success=success.replace('\n', '<br>'),
            errors=errors,
            warnings=warnings,
        ))
|
10,695 | 97d08a9e53b281969582916c1c33c57194a94f59 | import os
# Run the export script, clear the screen, then start the new-month script.
for command in ('python exFefe.py', 'clear', 'python newMonth.py'):
    os.system(command)
10,696 | c8b7f44d0e2e6db6adce46abc5e7c72dbb156d63 | import sys, requests, json, datetime, argparse
from queryAPI import getSearch, getUTCtime
from FormatForDB import formatForDB

# Command-line interface: API credentials, target account, the query string,
# and the output file path.
parser = argparse.ArgumentParser()
parser.add_argument("username")
parser.add_argument("password")
parser.add_argument("accountFqdn")
parser.add_argument("query")
parser.add_argument("outputFile")
args = parser.parse_args()

# Search window: midnight at the start of yesterday (fromdt) up to midnight
# today (untildt), stepped in one-day increments.
untildt = datetime.datetime.today() - datetime.timedelta(days=0)
untildt = untildt.replace(hour=0, minute=0, second=0, microsecond=0)
fromdt = datetime.datetime.today() - datetime.timedelta(days=1)
fromdt = fromdt.replace(hour=0, minute=0, second=0, microsecond=0)
step = 86400  # one day, in seconds

# Python 2 print statements: this script is Python-2 only.
print "From: " + str(fromdt)
print "Until: " + str(untildt)

# Pack arguments for search API call
API_request = (args.query, formatForDB)
API_dates = (fromdt, untildt, step)
# NOTE(review): the trailing (1, 1) flags' meaning is defined in queryAPI -- confirm.
API_output = (args.outputFile, 1, 1)
API_credentials = (args.accountFqdn, args.username, args.password)

# launch search
getSearch(*API_request + API_dates + API_output + API_credentials)
10,697 | 342a8597fe1161c488116661511097f165c00d5b | import xbmc, xbmcgui
import pigpio
import time
from os import system
class TimerDialogCallback:
    """Show a cancellable Kodi countdown dialog when a GPIO button is pressed.

    The given GPIO pin is configured as a pulled-up input; a falling edge
    (button press pulling the pin to ground) opens a progress dialog that
    counts down ``timeout`` seconds and then invokes ``callback`` unless
    the user cancels the dialog.
    """

    def __init__(self, pi, gpio, timeout, text, callback):
        # pi: connected pigpio.pi() handle; gpio: pin number the button is on.
        self.pi = pi
        self.gpio = gpio
        self.timeout = timeout      # countdown length, in seconds
        self.text = text            # dialog heading
        self.callback = callback    # invoked when the countdown completes
        self._last_tick = 0         # pigpio tick of the last accepted press (for debounce)
        pi.set_mode(self.gpio, pigpio.INPUT)
        pi.set_pull_up_down(self.gpio, pigpio.PUD_UP)
        # Register _pulse to fire on every falling edge of the pin.
        self.cb = self.pi.callback(self.gpio, pigpio.FALLING_EDGE, self._pulse)

    def _pulse(self, gpio, level, tick):
        """Edge handler: debounce, then run the countdown dialog."""
        # Debounce: ignore edges within 1 s (1000000 us) of the last one.
        # NOTE(review): pigpio ticks are 32-bit microseconds and wrap roughly
        # every 72 minutes; this subtraction does not handle wraparound -- confirm.
        if tick - self._last_tick < 1000000:
            return
        self._last_tick = tick;
        dialog = xbmcgui.DialogProgress()
        dialog.create(self.text)
        secs = 0
        increment = 100 / self.timeout  # progress-bar percent per elapsed second
        cancelled = False
        while secs <= self.timeout:
            if (dialog.iscanceled()):
                cancelled = True
                break
            if secs != 0:
                # Skip the sleep on the first pass so the dialog shows immediately.
                xbmc.sleep(1000)
            secs_left = self.timeout - secs
            if secs_left == 0:
                percent = 100
            else:
                percent = increment * secs
            remaining_display = ('shutting down in %s seconds') % secs_left
            dialog.update(percent, self.text, remaining_display)
            secs += 1
        # Final update with the last computed percent.
        remaining_display = 'shutting down'
        dialog.update(percent, self.text, remaining_display)
        if cancelled == False:
            self.callback()
def my_func():
    """Halt the machine; invoked once the countdown dialog completes."""
    system('sudo shutdown -h now')
if __name__ == '__main__':
    # Block until the pigpio daemon is running before trying to connect.
    while system('pidof pigpiod') != 0:
        time.sleep(1)
    time.sleep(1) # wait for pigpio to get a socket-connection...
    pi = pigpio.pi()
    # GPIO 10, 10-second countdown.
    timer = TimerDialogCallback(pi, 10, 10, "Shutting Down", my_func)
    # Keep the script alive so the GPIO edge callback stays registered.
    while 1:
        time.sleep(1)
|
10,698 | 1a5b99de232773aa8cd8fb9e2984c3a3d99efade | import sys
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from scipy.interpolate import griddata
import h5py
plt.style.use('seaborn-paper')
class sim_avg:
    """Grid a CSV slice of simulation/experiment velocity data and cache it.

    Workflow: call ``csv_slice_to_npy()`` once to interpolate the raw CSV
    onto a regular grid and save ``.npy`` files, then ``load()`` on later
    runs to read the cached arrays and compute the experimental-region
    bounding box.
    """

    def __init__(self,csv_filename,save_directory):
        # csv_filename: raw CSV with columns uexp, uexp1, u, v, w, x, y, z
        # save_directory: cache directory for the interpolated .npy arrays
        self.csv_filename=csv_filename
        self.save_directory=save_directory

    def csv_slice_to_npy(self):
        """Read the CSV, non-dimensionalize, interpolate to a grid, save .npy files."""
        d=np.genfromtxt(self.csv_filename,delimiter=',',skip_header=1)
        # Column order: two experimental u fields, simulated u/v/w, coordinates x/y/z.
        uexp,uexp1,u,v,w,x,y,z=[ d[:,i] for i in range(0,8) ]
        # non-dimensionalize everything
        D_ref = 0.00745   # reference length -- presumably a diameter in metres; TODO confirm
        U_ref = 27.5      # reference velocity -- presumably m/s; TODO confirm
        x = x/D_ref
        y = y/D_ref
        z = z/D_ref
        uexp=uexp/U_ref
        uexp1=uexp1/U_ref
        u=u/U_ref
        v=v/U_ref
        w=w/U_ref
        # interpolate to grid (complex steps give the number of mgrid points)
        nx=3000j
        ny=260j
        self.Xall,self.Yall=np.mgrid[0:30:nx,-4:4:ny] # make grid of slice down the middle (if more data is given, can do slice of whole domain to get the whole experimental region)
        #self.XCL,self.YCL=np.mgrid[0:30:nx,0:1:1j] # make grid of slice down the middle (if more data is given, can do slice of whole domain to get the whole experimental region)
        points=np.array([x,y]).T
        self.Uexp=griddata(points,uexp,(self.Xall,self.Yall),method='linear')
        self.Uexp1=griddata(points,uexp1,(self.Xall,self.Yall),method='linear')
        self.U=griddata(points,u,(self.Xall,self.Yall),method='linear')
        #self.UCL=griddata(points,uavg,(self.XCL,self.YCL),method='linear')
        #self.VCL=griddata(points,vavg,(self.XCL,self.YCL),method='linear')
        # save interpolated data
        np.save(self.save_directory+'/uexp',self.Uexp)
        np.save(self.save_directory+'/uexp1',self.Uexp1)
        np.save(self.save_directory+'/u',self.U)
        #np.save(self.save_directory+'/ucl',self.UCL)
        #np.save(self.save_directory+'/vcl',self.VCL)
        np.save(self.save_directory+'/X',self.Xall)
        np.save(self.save_directory+'/Y',self.Yall)
        #np.save(self.save_directory+'/Xcl',self.XCL)
        #np.save(self.save_directory+'/Ycl',self.YCL)

    def load(self):
        """Load the cached .npy arrays and compute the experimental-region bounds."""
        # load data
        self.Uexp=np.load(self.save_directory+'/uexp.npy')
        self.Uexp1=np.load(self.save_directory+'/uexp1.npy')
        self.U=np.load(self.save_directory+'/u.npy')
        #self.UCL =np.load(self.save_directory+'/ucl.npy')
        #self.VCL =np.load(self.save_directory+'/vcl.npy')
        self.Xall=np.load(self.save_directory+'/X.npy')
        self.Yall=np.load(self.save_directory+'/Y.npy')
        #self.XCL=np.load(self.save_directory+'/Xcl.npy')
        #self.YCL=np.load(self.save_directory+'/Ycl.npy')
        # Mark experimental samples: zeros count as "no data" (NaN); every
        # remaining finite value is set to a sentinel so isfinite() below
        # yields a boolean mask of the experimental region.
        temp=self.Uexp.copy()
        temp[self.Uexp==0]=np.nan
        temp[np.isfinite(temp)]=1000
        # get bounding values for exp data
        self.exp_range=np.isfinite(temp)
        self.xmin=np.min(self.Xall[self.exp_range])
        self.xmax=np.max(self.Xall[self.exp_range])
        self.ymin=np.min(self.Yall[self.exp_range])
        self.ymax=np.max(self.Yall[self.exp_range])
        self.xwidth=self.xmax-self.xmin
        self.ywidth=self.ymax-self.ymin
def plot_data(sim):
    """Plot the instantaneous streamwise velocity with a zoomed inset.

    Left panel: full-domain filled contour of ``sim.U`` with a red rectangle
    marking the experimental region.  Right panel: a zoomed view of a
    sub-window, connected to the full view with patch lines.  Writes
    ``Instantaneous__u.pdf`` into ``sim.save_directory``.
    """
    fig = plt.figure(figsize=(6.0,5))
    #fig = plt.figure(figsize=(7.36,5))
    # Whole-domain axes on the left half of a 4x100 layout grid.
    ax_whole_domain = plt.subplot2grid((4,100),(0,0),rowspan=4,colspan=50,aspect='equal',adjustable='box-forced')
    colormap=np.linspace(np.nanmin(sim.U),np.nanmax(sim.U),300)
    #ax_whole_domain.contour (-sim.Yall,sim.Xall,sim.U,colormap,cmap='jet')
    whole_colorbar=ax_whole_domain.contourf(-sim.Yall,sim.Xall,sim.U,colormap,cmap='jet')
    plt.colorbar(whole_colorbar,ax=ax_whole_domain,ticks=[0.,0.25,0.5,0.75,1.,1.25])
    # get rid of white lines in pdf
    for c in whole_colorbar.collections:
        c.set_edgecolor("face")
    # Red rectangles marking the experimental region on the whole-domain axes.
    ax_whole_domain.add_patch(patches.Rectangle(
        (-sim.ymin,sim.xmin),
        -sim.ywidth,
        1.5,
        fill=False,
        edgecolor='red',
        linewidth=2,
        ))
    ax_whole_domain.add_patch(patches.Rectangle(
        (-sim.ymin,sim.xmin),
        -sim.ywidth,
        0.75,
        fill=False,
        edgecolor='red',
        linewidth=2,
        hatch='////'
        ))
    # add axes for zoomed in area (right half of the layout grid)
    ax_zoomed = plt.subplot2grid((4,100),(1,50),rowspan=2,colspan=50,aspect='equal',adjustable='box-forced')
    # Boolean mask selecting the zoom window 0 < -y < 2, 14 < x < 15.5.
    zoomed_area=(
        (-sim.Yall>0) &
        (-sim.Yall<2) &
        (sim.Xall>14) &
        (sim.Xall<15.5))
    colormap=np.linspace(np.nanmin(sim.U[zoomed_area]),np.nanmax(sim.U[zoomed_area]),300)
    #ax_zoomed.tricontour(-sim.Yall[zoomed_area],sim.Xall[zoomed_area],sim.U[zoomed_area],colormap,cmap='jet')
    zoomed_colorbar=ax_zoomed.tricontourf(-sim.Yall[zoomed_area],sim.Xall[zoomed_area],sim.U[zoomed_area],colormap,cmap='jet')
    plt.colorbar(zoomed_colorbar,ax=ax_zoomed,ticks=[0.02,0.1,0.2,0.3,0.4,0.5])
    # get rid of white lines in pdf
    for c in zoomed_colorbar.collections:
        c.set_edgecolor("face")
    #zoomed_colorbar.solids.set_edgecolor("face")
    ax_whole_domain.set_xlabel(r'$y/D$')
    ax_whole_domain.set_ylabel(r'$x/D$')
    ax_zoomed.set_xlabel(r'$y/D$')
    ax_zoomed.set_ylabel(r'$x/D$')
    ax_zoomed.axis('image')
    # Connection lines from the zoom window corners to the whole-domain view.
    xyA=(0.01,14.01)
    xyB=(2.,14.)
    ax_zoomed.add_artist(patches.ConnectionPatch(
        xyA=xyA,
        xyB=xyB,
        coordsA='data',
        coordsB='data',
        axesA=ax_zoomed,
        axesB=ax_whole_domain,
        linewidth=2,
        ))
    xyA=(0.01,15.49)
    xyB=(2.,15.5)
    ax_zoomed.add_artist(patches.ConnectionPatch(
        xyA=xyA,
        xyB=xyB,
        coordsA='data',
        coordsB='data',
        axesA=ax_zoomed,
        axesB=ax_whole_domain,
        linewidth=2,
        ))
    #ax_zoomed.axhline(14.75,color='k',linewidth=2)
    # Repeat the hatched experimental-region rectangle on the zoomed axes.
    ax_zoomed.add_patch(patches.Rectangle(
        (-sim.ymin,sim.xmin),
        -sim.ywidth,
        0.75,
        fill=False,
        edgecolor='red',
        linewidth=2,
        hatch='/'
        ))
    fig.tight_layout()
    plt.savefig(sim.save_directory+'/Instantaneous__u.pdf',bbox_inches='tight')
# Driver script: load the cached instantaneous-field data and plot it.
csv_filename='../Create_Plots/DA_paper/Updated_BC/Reference/Instantaneous/Instant.csv'
save_directory='../Create_Plots/DA_paper/Updated_BC/Reference/Instantaneous'
s_avg=sim_avg(csv_filename,save_directory)
## Convert the CSV slice into cached .npy files (only needs to run once).
#s_avg.csv_slice_to_npy()
## Load the cached .npy arrays.
s_avg.load()
# Render and save the figure, then display it.
plot_data(s_avg)
plt.show()
|
10,699 | b64293706b7a460b67aeb31b1486f79fb72d8a2f | '''
网络测速模块
'''
import psutil
import time
def speed_test():
    """Measure the current download speed across all network interfaces.

    Samples the total received-byte counters, waits one second, samples
    again, and returns the delta formatted as a string such as
    ``'12.34kb/s'``.

    Note: the original called ``time.clock()`` here; its result was unused
    and the function was removed in Python 3.8, so the call is dropped.
    """
    before = sum(nic.bytes_recv for nic in psutil.net_io_counters(pernic=True).values())
    time.sleep(1)
    after = sum(nic.bytes_recv for nic in psutil.net_io_counters(pernic=True).values())
    received = after - before
    # Counters are in bytes; report kB/s with two decimal places.
    return str('%.2f' % (received / 1024)) + 'kb/s'
if __name__ == '__main__':
    # Print the measured download speed once per second, forever.
    while True:
        print(speed_test())
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.