hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
66417885fd18a033be08fc835a9713f2744a15e6 | 1,772 | py | Python | mow/strong/phase2/split_data.py | tychen5/Audio_Tagging_Challenge | 4602400433d37958d95ebf40a3c0798d17cc53c6 | [
"MIT"
] | 3 | 2019-01-22T03:14:32.000Z | 2019-08-17T02:22:06.000Z | mow/strong/phase2/split_data.py | tychen5/Audio_Tagging_Challenge | 4602400433d37958d95ebf40a3c0798d17cc53c6 | [
"MIT"
] | null | null | null | mow/strong/phase2/split_data.py | tychen5/Audio_Tagging_Challenge | 4602400433d37958d95ebf40a3c0798d17cc53c6 | [
"MIT"
] | null | null | null | import os
import sys
import time
import math
import random
import pickle
import numpy as np
import pandas as pd
from keras.utils import to_categorical
with open('./map.pkl', 'rb') as f:
map_dict = pickle.load(f)
train_label_path = os.path.join('/tmp2/b03902110', 'Y_train_ens_verified.csv')
Y_train = pd.read_csv(train_label_path)
Y_dict = Y_train['label_verified'].map(map_dict)
Y_dict = np.array(Y_dict)
Y_all = []
for i in Y_dict:
Y_all.append(to_categorical(i, num_classes=41))
Y_all = np.array(Y_all)
print(Y_all)
np.save('/tmp2/b03902110/Y_train_ens_verified.npy', Y_all)
exit()
X_train_path = os.path.join(base_path, 'X_train.npy')
X_all = np.load(X_train_path)
X_all = X_all[verified, :]
idx = list(range(X_all.shape[0]))
random.shuffle(idx)
xSize = math.ceil(X_all.shape[0] / num_fold)
split_X_path = os.path.join(base_path, 'X')
split_y_path = os.path.join(base_path, 'y')
split_fname_path = os.path.join(base_path, 'fname')
if not os.path.exists(split_X_path):
os.makedirs(split_X_path)
if not os.path.exists(split_y_path):
os.makedirs(split_y_path)
if not os.path.exists(split_fname_path):
os.makedirs(split_fname_path)
for i in range(num_fold):
X = X_all[idx[i*xSize:i*xSize+xSize]]
y = Y_all[idx[i*xSize:i*xSize+xSize]]
fname = fname_all[idx[i*xSize:i*xSize+xSize]]
print('Saving fold {}'.format(i+1))
print('X.shape:', X.shape)
print('y.shape:', y.shape)
print('fname.shape:', fname.shape)
filename = os.path.join(split_X_path, 'X' + str(i+1) + '.npy')
np.save(filename, X)
filename = os.path.join(split_y_path, 'y' + str(i+1) + '.npy')
np.save(filename, y)
filename = os.path.join(split_fname_path, 'fname' + str(i+1) + '.npy')
np.save(filename, fname)
time.sleep(1)
| 26.848485 | 78 | 0.69921 |
11f4d6e9d19e19114cfb55374ce83b82d623833d | 2,007 | py | Python | language/nql/nql/symbol_test.py | naveenjafer/language | efc5183855a7aeecac3e81fe12ce60fc824f8ca7 | [
"Apache-2.0"
] | 1,199 | 2018-10-16T01:30:18.000Z | 2022-03-31T21:05:24.000Z | language/nql/nql/symbol_test.py | naveenjafer/language | efc5183855a7aeecac3e81fe12ce60fc824f8ca7 | [
"Apache-2.0"
] | 116 | 2018-10-18T03:31:46.000Z | 2022-03-24T13:40:50.000Z | language/nql/nql/symbol_test.py | naveenjafer/language | efc5183855a7aeecac3e81fe12ce60fc824f8ca7 | [
"Apache-2.0"
] | 303 | 2018-10-22T12:35:12.000Z | 2022-03-27T17:38:17.000Z | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for symbol."""
from nql import symbol
import tensorflow.compat.v2 as tf
class TestSymbolTable(tf.test.TestCase):
    """Unit tests for symbol.SymbolTable."""

    def _fresh_table(self, symbols='abcdefg'):
        """Return a new SymbolTable pre-populated with the given symbols."""
        table = symbol.SymbolTable()
        for item in symbols:
            table.insert(item)
        return table

    def test_fixed_freeze_none(self):
        table = self._fresh_table()
        table.freeze(unknown_marker=None)
        # With no unknown marker, unseen symbols map to None.
        self.assertEqual(table.get_id('Z'), None)

    def test_unk(self):
        table = self._fresh_table()
        self.assertEqual(table.get_max_id(), 7)
        self.assertEqual(table.get_unk_id(), None)
        # Freezing introduces an explicit UNK entry.
        table.freeze()
        self.assertEqual(table.get_unk_id(), 7)
        self.assertEqual(table.get_max_id(), 8)
        # Unseen strings all collapse onto the UNK id...
        self.assertEqual(table.get_id('h'), table.get_id('z'))
        # ...even after an attempted insert.
        table.insert('h')
        self.assertEqual(table.get_max_id(), 8)
        self.assertEqual(table.get_id('h'), table.get_id('z'))
        self.assertEqual(table.get_id('h'), 7)

    def test_padding(self):
        table = self._fresh_table()
        self.assertTrue(table.has_id('a'))
        self.assertEqual(table.get_max_id(), 7)
        # Padding inflates the vocabulary to a fixed size.
        table.pad_to_vocab_size(20)
        self.assertEqual(table.get_max_id(), 20)
        # Reset clears the symbols but keeps the padded size.
        table.reset()
        for item in 'tuvwx':
            table.insert(item)
        self.assertEqual(table.get_max_id(), 20)
        self.assertTrue(table.has_id('x'))
        self.assertFalse(table.has_id('a'))
if __name__ == '__main__':
    # Delegate test discovery and execution to the TensorFlow test runner.
    tf.test.main()
| 30.409091 | 74 | 0.688092 |
21ec6a1b49e18f409aa0b0dea04d2a2f2e62c837 | 857 | py | Python | tatudashboard/dashboards/tatu/pat/panel.py | pinodeca/tatu-dashboard | 1685f7c862c2d3502abd51d9ff4f2a12b147680c | [
"Apache-2.0"
] | null | null | null | tatudashboard/dashboards/tatu/pat/panel.py | pinodeca/tatu-dashboard | 1685f7c862c2d3502abd51d9ff4f2a12b147680c | [
"Apache-2.0"
] | null | null | null | tatudashboard/dashboards/tatu/pat/panel.py | pinodeca/tatu-dashboard | 1685f7c862c2d3502abd51d9ff4f2a12b147680c | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2016 Huawei, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import horizon
from django.utils.translation import ugettext_lazy as _
from openstack_dashboard.dashboards.project import dashboard
class PAT(horizon.Panel):
    """Horizon dashboard panel for PAT gateways."""
    # Display name shown in the dashboard navigation.
    name = _("PAT Gateways")
    # URL slug / unique identifier for this panel.
    slug = 'tatu_pat'


# Attach the panel to the Project dashboard so Horizon renders it.
dashboard.Project.register(PAT)
| 32.961538 | 78 | 0.735123 |
7e6750016008d461725153ed02a925585c4817cb | 7,142 | py | Python | bin/mapEncodeTSStoRegions.py | gaofeng21cn/IDP-fusion | 052ac4f868fd416e10165ef4891804d25edc8862 | [
"Apache-2.0"
] | 5 | 2017-09-18T18:21:26.000Z | 2020-05-01T23:29:26.000Z | bin/mapEncodeTSStoRegions.py | gaofeng21cn/IDP-fusion | 052ac4f868fd416e10165ef4891804d25edc8862 | [
"Apache-2.0"
] | 3 | 2018-11-26T16:35:12.000Z | 2021-11-22T07:16:51.000Z | bin/mapEncodeTSStoRegions.py | gaofeng21cn/IDP-fusion | 052ac4f868fd416e10165ef4891804d25edc8862 | [
"Apache-2.0"
] | 4 | 2018-01-02T13:08:14.000Z | 2020-10-30T01:07:58.000Z | #!/usr/bin/python
import sys
from operator import itemgetter, attrgetter
import bisect
import re
# Minimum TSS prediction strength (6th ':'-field of the description column)
# required for a mapped peak to survive filtering.
TSS_THRESHOLD = 0.99
# Minimum overlap fraction between a TSS peak and a region for it to count.
OVERLAP_THRESHOLD = 0.0
### parse_tss_file
##################
def parse_tss_file(tss_file):
    """Parse a BED-like TSS file into a per-chromosome dictionary.

    Each line is whitespace-separated: chrom, start, end, description, ...
    Returns a one-element list holding a dict that maps chromosome name to a
    list of [start, end, original_line] entries, where end is converted from
    BED's exclusive convention to inclusive.
    """
    tss_dict = {}
    for line in tss_file:
        fields = line.split()
        rname = fields[0]
        start_pos = int(fields[1])
        end_pos = int(fields[2]) - 1  # BED end is exclusive; store inclusive.
        # dict.has_key() was removed in Python 3; setdefault is equivalent
        # and avoids the double lookup. rstrip('\n') (instead of line[:-1])
        # no longer chops a real character off a final line lacking a newline.
        tss_dict.setdefault(rname, []).append([start_pos, end_pos, line.rstrip('\n')])
    return [tss_dict]
### sort_tss_dict
##################
def sort_tss_dict(tss_dict):
    """Build per-chromosome coordinate lists sorted by TSS start and by end.

    For each chromosome, produces parallel lists of the start (resp. end)
    coordinates in ascending order together with the original indices of the
    corresponding TSS entries, enabling bisect-based range queries later.
    Returns [start_positions, start_indices, end_positions, end_indices].
    """
    start_positions = {}
    start_indices = {}
    end_positions = {}
    end_indices = {}
    for chrom, entries in tss_dict.items():
        # Tag each entry with its original index so it survives sorting.
        tagged = [entry + [idx] for idx, entry in enumerate(entries)]
        by_start = sorted(tagged, key=itemgetter(0))
        start_positions[chrom] = [item[0] for item in by_start]
        start_indices[chrom] = [item[-1] for item in by_start]
        by_end = sorted(tagged, key=itemgetter(1))
        end_positions[chrom] = [item[1] for item in by_end]
        end_indices[chrom] = [item[-1] for item in by_end]
    return [start_positions, start_indices, end_positions, end_indices]
### parse_region_file
#####################
def parse_region_file(region_file):
    """Parse a region file into a per-chromosome dictionary.

    Each line holds a description (the chromosome name is its prefix up to
    the first '_' or ':') followed by ascending boundary coordinates.
    Consecutive boundaries b[i], b[i+1] define the region [b[i], b[i+1]-1]
    with an inclusive end. Returns a one-element list holding a dict
    mapping chromosome -> list of [regions_list, description] entries.
    """
    region_dict = {}
    for line in region_file:
        fields = line.split()
        # Chromosome name is the prefix of the description field.
        rname = re.split(r'_|:', fields[0])[0]
        description = fields[0]
        boundaries = [int(value) for value in fields[1:]]
        regions_list = [[boundaries[i], boundaries[i + 1] - 1]
                        for i in range(len(boundaries) - 1)]
        # dict.has_key() was removed in Python 3; setdefault is equivalent.
        region_dict.setdefault(rname, []).append([regions_list, description])
    return [region_dict]
### overlap
############
def overlap(region, tss):
    """Return the fraction of the TSS interval covered by the region.

    Both arguments are [start, end] pairs with inclusive ends. Returns 0
    when the two intervals are disjoint.
    """
    lo = max(region[0], tss[0])
    hi = min(region[1], tss[1])
    if hi < lo:
        return 0
    # Float division: fraction of the TSS span inside the region.
    return float(hi - lo) / (tss[1] - tss[0])
### map_tss_to_regions
########################
def map_tss_to_regions(tss_dict, region_dict, tss_start_pos_dict, tss_start_sorted_index_dict,
                       tss_end_pos_dict, tss_end_sorted_index_dict, overlap_threshold ):
    """For every region of every gene, collect the TSS peaks overlapping it.

    Returns a nested dict: chromosome -> gene description -> "start-end"
    region name -> list of report lines (original TSS line plus the overlap
    fraction, tab-separated, newline-terminated).
    NOTE(review): relies on dict.has_key(), so this function is Python 2 only.
    """
    output_dict = {}
    tss_dict_len = len(tss_start_pos_dict);  # NOTE(review): unused variable
    CHR_LIST = region_dict.keys()
    for chr in CHR_LIST:
        output_dict[chr] = {}
        for gene in region_dict[chr]:
            #print '***********'
            output_dict[chr][gene[1]] = {}
            if (tss_start_pos_dict.has_key(chr)):
                # Candidate TSS entries are those starting at or before the
                # end of the gene's last region AND ending at or after the
                # start of its first region; found via bisect on the sorted
                # coordinate lists, intersected by original TSS index.
                start_index = bisect.bisect_right(tss_start_pos_dict[chr], gene[0][-1][1])
                end_index = bisect.bisect_left(tss_end_pos_dict[chr], gene[0][0][0])
                tss_candidates = set(tss_start_sorted_index_dict[chr][:start_index]) & set(tss_end_sorted_index_dict[chr][end_index:])
            else:
                tss_candidates = set()
            #print tss_candidates
            #print gene[0]
            for region in gene[0]:
                region_name = str(region[0]) + '-' + str(region[1])
                output_dict[chr][gene[1]][region_name] = []
                for tss_idx in tss_candidates:
                    # Keep the TSS only if its overlap fraction with this
                    # region exceeds the threshold.
                    if (overlap(region, tss_dict[chr][tss_idx][0:2]) > overlap_threshold):
                        tss = tss_dict[chr][tss_idx][-1] + '\t' + str(overlap(region, tss_dict[chr][tss_idx][0:2])) + '\n'
                        output_dict[chr][gene[1]][region_name].append(tss)
                        #print gene[1]
                        #print region, overlap(region, tss_dict[chr][tss_idx][0:2])
                        #print tss_dict[chr][tss_idx][0:2]
            #print '-----------------------'
    return output_dict
### filter_mapped_tss_prediction
#########################
def filter_mapped_tss_prediction(output_dict, threshold):
    """Drop mapped TSS entries whose prediction strength is below threshold.

    The prediction strength is the 6th ':'-separated token of the
    description column (fields[3]) of each stored TSS line. Mutates
    output_dict in place and returns it.
    """
    # Original banner comment mislabeled this function as
    # generate_output_file; fixed above. The original also parsed an unused
    # `idr` value from fields[7], needlessly requiring >= 8 columns.
    for chr_genes in output_dict.values():
        for gene in chr_genes.values():
            for region_name, region_tss_list in gene.items():
                if not region_tss_list:
                    continue
                kept = []
                for tss in region_tss_list:
                    fields = tss.split()
                    tss_pred_strength = float(fields[3].split(':')[5])
                    if tss_pred_strength >= threshold:
                        kept.append(tss)
                # Re-assigning an existing key is safe while iterating items().
                gene[region_name] = kept
    return output_dict
###generate_output_file
#########################
def generate_output_file(output_file_str, output_dict):
    """Write the mapped-TSS report for every gene to output_file_str.

    Per gene: a header line (gene, chromosome, region count), one line of
    region names and one line of per-region TSS counts (both in 30-char
    left-justified columns), then the raw TSS lines of each region in order.
    """
    with open(output_file_str, 'w') as out:
        for chrom, genes in output_dict.items():
            for gene_name in sorted(genes):
                regions = genes[gene_name]
                region_names = sorted(regions)
                out.write('%s\t%s\t%s\n' % (gene_name, chrom, len(regions)))
                out.write(''.join(name.ljust(30) for name in region_names))
                out.write('\n')
                out.write(''.join(str(len(regions[name])).ljust(30) for name in region_names))
                out.write('\n')
                for name in region_names:
                    for tss_line in regions[name]:
                        out.write(tss_line)
### Main
########
def main():
    """Entry point: map ENCODE TSS peaks onto gene regions, filter, report.

    Usage: mapEncodeTSStoRegions.py <tss_file> <region_file>
    Output is written to 'encodeTSS_mapped_regions.txt' in the CWD.
    """
    # Read input parameters
    tss_file_str = sys.argv[1]
    region_file_str = sys.argv[2]
    output_file_str = 'encodeTSS_mapped_regions.txt'

    tss_file = open(tss_file_str, 'r')
    [tss_dict] = parse_tss_file(tss_file)
    # Pre-sort TSS coordinates so overlapping peaks can be found via bisect.
    [tss_start_pos_dict, tss_start_sorted_index_dict, tss_end_pos_dict, tss_end_sorted_index_dict] = sort_tss_dict(tss_dict)
    tss_file.close()

    region_file = open(region_file_str, 'r')
    [region_dict] = parse_region_file(region_file)
    region_file.close()

    output_dict = map_tss_to_regions(tss_dict, region_dict, tss_start_pos_dict, tss_start_sorted_index_dict,
                                     tss_end_pos_dict, tss_end_sorted_index_dict, OVERLAP_THRESHOLD)
    # Keep only confidently-predicted TSS peaks.
    output_dict = filter_mapped_tss_prediction(output_dict, TSS_THRESHOLD)
    generate_output_file(output_file_str, output_dict)
if __name__ == '__main__':
    # Run only when executed as a script, not when imported.
    main()
| 33.688679 | 134 | 0.573789 |
b31b2b4a0fbd27df0f7578d74da3028e2d3a0d88 | 1,284 | py | Python | ludwig/encoders/base.py | carlogrisetti/ludwig | 5c0887f14867e1577e0ddc3806c5cf7a781fb665 | [
"Apache-2.0"
] | null | null | null | ludwig/encoders/base.py | carlogrisetti/ludwig | 5c0887f14867e1577e0ddc3806c5cf7a781fb665 | [
"Apache-2.0"
] | null | null | null | ludwig/encoders/base.py | carlogrisetti/ludwig | 5c0887f14867e1577e0ddc3806c5cf7a781fb665 | [
"Apache-2.0"
] | null | null | null | #! /usr/bin/env python
# coding=utf-8
# Copyright (c) 2020 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from abc import ABC, abstractmethod
from ludwig.utils.registry import DEFAULT_KEYS
from ludwig.utils.torch_utils import LudwigModule
class Encoder(LudwigModule, ABC):
    """Abstract base class for Ludwig encoders."""

    @abstractmethod
    def forward(self, inputs, training=None, mask=None):
        """Encode ``inputs``; concrete encoders must implement this."""
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def register(cls, name):
        """Register this encoder class under ``name`` in the encoder registry."""
        raise NotImplementedError

    @classmethod
    def register_default(cls):
        """Register this encoder under every default registry key."""
        for key in DEFAULT_KEYS:
            cls.register(name=key)

    @property
    def name(self):
        # Human-readable identifier: the concrete subclass's name.
        return self.__class__.__name__
2df0deb44e3b346da84f372e632b8b53116ab426 | 3,014 | py | Python | src/controller/LoginController.py | tinfins/CMSC495-Group-3 | 0b7ea3c885322631d6dd3ef7ee96b6a98ba2392e | [
"MIT"
] | 1 | 2021-02-11T01:18:08.000Z | 2021-02-11T01:18:08.000Z | src/controller/LoginController.py | tinfins/CMSC495-Group-3 | 0b7ea3c885322631d6dd3ef7ee96b6a98ba2392e | [
"MIT"
] | 1 | 2021-01-28T00:03:15.000Z | 2021-01-28T00:03:15.000Z | src/controller/LoginController.py | tinfins/CMSC495-Group-3 | 0b7ea3c885322631d6dd3ef7ee96b6a98ba2392e | [
"MIT"
] | 3 | 2021-02-13T22:45:14.000Z | 2021-02-16T02:57:11.000Z | import logging.config
# Big Teacher module imports
import src.gui.LoginGui as LoginGui
import src.gui.MessageBox as MessageBox
from src.utils.Settings import Settings
import src.controller.MainPageController as MainPageController
from src.utils.Connector import Connector
class LoginController:
    """Login controller: wires the login view to database authentication."""

    def __init__(self, master, controller):
        """
        Initialize LoginController and display the login GUI.
        :param master: tk.Tk() master window
        :param controller: common controller for all views (MainApplication)
        """
        self.logger = logging.getLogger(__name__)
        self.master = master
        self.controller = controller
        self.login_view = LoginGui.LoginGui(self.master, self.controller)
        # Wire the LoginGui buttons to controller callbacks.
        self.login_view.reset_button.config(command=self.reset_entries)
        self.login_view.login_button.config(command=self.login)

    def reset_entries(self):
        """Clear the username and password entry fields."""
        self.login_view.username.set('')
        self.login_view.password.set('')

    def login(self):
        """Authenticate against the configured database; on success hand
        control to the main page, otherwise report the failure."""
        username = self.login_view.username.get()
        password = self.login_view.password.get()
        # Read database connection settings from config.ini.
        settings = Settings('config.ini')
        config_values = settings.db_config_read('sqldb')
        try:
            self.connector = Connector(username, password, config_values)
            engine = self.connector.create_engine(self.connector.settings_model)
            conn = self.connector.login(engine)
            if conn:
                self.master.status_bar.status_set(f'{self.connector.settings_model.username} logged in')
                self.logger.info(f"{self.connector.settings_model.username} successfully logged in")
                # Tear down the login frame before showing the main page.
                self.login_view.master_frame.destroy()
                MainPageController.MainPageController(self.master, self.controller, self.connector)
            else:
                # Credentials rejected by the database.
                self.logger.warning(f"{self.connector.settings_model.username} FAILED login attempt")
                MessageBox.MessageBox().onWarn('Invalid Login Credentials')
                self.master.status_bar.status_set('Invalid Login Credentials')
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed. Reached when config values are missing (e.g.
        # no [sqldb] section) or the engine cannot be created.
        except Exception:
            self.master.status_bar.status_set('Unable to login. Check your configuration settings.')
            # Bug fix: self.logger is a Logger object, not callable -- the
            # original `self.logger('...')` raised TypeError here.
            self.logger.error('Unable to login. Check settings')
            MessageBox.MessageBox().onInfo('Unable to login\nGo to Edit -> Settings and configure your database settings.')
3e79a95ab68035628caa64fdd46e2a6a04a25bbc | 1,509 | py | Python | losses.py | 7enTropy7/ravdl | b478f21e8da7b3d3641456ceec4952a3d79ec515 | [
"MIT"
] | null | null | null | losses.py | 7enTropy7/ravdl | b478f21e8da7b3d3641456ceec4952a3d79ec515 | [
"MIT"
] | null | null | null | losses.py | 7enTropy7/ravdl | b478f21e8da7b3d3641456ceec4952a3d79ec515 | [
"MIT"
] | 4 | 2021-03-19T08:01:21.000Z | 2021-06-16T08:11:01.000Z | import ravop.core as R
class Loss():
    """Collection of loss functions built on ravop tensor ops.

    Each method takes ground-truth (y_true) and predicted (y_pred) ravop
    tensors and returns a ravop expression for the corresponding loss.
    """

    def __init__(self):
        # Reusable scalar constants.
        self.one = R.Scalar(1)
        self.zero = R.Scalar(0)

    def cross_entropy(self, y_true, y_pred):
        """Categorical cross-entropy averaged over the batch."""
        return R.Scalar(-1).multiply(R.sum(y_true.multiply(R.natlog(y_pred)))).div(R.Scalar(y_pred.shape[0]))

    def mean_squared_error(self, y_true, y_pred):
        """Mean of squared differences."""
        return R.square(y_true.sub(y_pred)).mean()

    def mean_absolute_error(self, y_true, y_pred):
        """Mean of absolute differences."""
        return R.abs(y_pred.sub(y_true)).mean()

    def KL_divergence(self, y_true, y_pred):
        """Kullback-Leibler divergence; terms where y_true == 0 contribute 0."""
        return R.sum((y_true.multiply(R.natlog(y_true.div(y_pred)))).where(self.zero, condition=y_true != self.zero))

    def cosine_similarity(self, y_true, y_pred):
        """Dot product normalized by the two vectors' Euclidean norms."""
        return (y_true.dot(y_pred)).div((R.square_root(R.sum(R.square(y_true)))).multiply(R.square_root(R.sum(R.square(y_pred)))))

    def poisson(self, y_true, y_pred):
        """Poisson loss: y_pred - y_true * log(y_pred)."""
        return y_pred - y_true * R.natlog(y_pred)

    def huber(self, y_true, y_pred, delta=0.1):
        """Huber loss: quadratic within +/-delta of the target, linear outside."""
        quadratic = R.Scalar(0.5).multiply(R.square(y_true.sub(y_pred)))
        linear = (R.Scalar(delta).multiply(R.abs(y_true.sub(y_pred)))).sub(R.Scalar(0.5).multiply(R.square(R.Scalar(delta))))
        return R.sum(quadratic.where(linear, condition=R.abs(y_true.sub(y_pred)) < R.Scalar(delta)))

    def logcosh(self, y_true, y_pred):
        """log(cosh(y_pred - y_true)) via the exp identity."""
        x = y_pred - y_true
        # Bug fix: the original called R.neg(X) with an undefined capital-X
        # name, raising NameError at runtime; the intended operand is x.
        return R.natlog(R.div(R.exp(x) + R.exp(R.neg(x)), 2))

    def hinge(self, y_true, y_pred):
        """Hinge loss: max(0, 1 - y_true * y_pred) elementwise."""
        x = self.one - (y_true * y_pred)
        return x.where(x * self.zero, condition=x > self.zero)
189e67fab78cd14dc2f8db42b0fc5db06447d076 | 49,500 | py | Python | django/db/models/fields/related_descriptors.py | stefb965/django | a599ae6018f748f66e774604d12989911ea09d33 | [
"PSF-2.0",
"BSD-3-Clause"
] | 4 | 2020-04-08T17:57:46.000Z | 2021-11-08T08:56:16.000Z | django/db/models/fields/related_descriptors.py | stefb965/django | a599ae6018f748f66e774604d12989911ea09d33 | [
"PSF-2.0",
"BSD-3-Clause"
] | 7 | 2018-06-18T17:56:50.000Z | 2020-06-24T16:51:04.000Z | django/db/models/fields/related_descriptors.py | stefb965/django | a599ae6018f748f66e774604d12989911ea09d33 | [
"PSF-2.0",
"BSD-3-Clause"
] | 2 | 2019-08-19T20:41:48.000Z | 2019-10-10T17:29:52.000Z | """
Accessors for related objects.
When a field defines a relation between two models, each model class provides
an attribute to access related instances of the other model class (unless the
reverse accessor has been disabled with related_name='+').
Accessors are implemented as descriptors in order to customize access and
assignment. This module defines the descriptor classes.
Forward accessors follow foreign keys. Reverse accessors trace them back. For
example, with the following models::
class Parent(Model):
pass
class Child(Model):
parent = ForeignKey(Parent, related_name='children')
``child.parent`` is a forward many-to-one relation. ``parent.children`` is a
reverse many-to-one relation.
There are three types of relations (many-to-one, one-to-one, and many-to-many)
and two directions (forward and reverse) for a total of six combinations.
1. Related instance on the forward side of a many-to-one relation:
``ForwardManyToOneDescriptor``.
Uniqueness of foreign key values is irrelevant to accessing the related
instance, making the many-to-one and one-to-one cases identical as far as
the descriptor is concerned. The constraint is checked upstream (unicity
validation in forms) or downstream (unique indexes in the database).
2. Related instance on the forward side of a one-to-one
relation: ``ForwardOneToOneDescriptor``.
It avoids querying the database when accessing the parent link field in
a multi-table inheritance scenario.
3. Related instance on the reverse side of a one-to-one relation:
``ReverseOneToOneDescriptor``.
One-to-one relations are asymmetrical, despite the apparent symmetry of the
name, because they're implemented in the database with a foreign key from
one table to another. As a consequence ``ReverseOneToOneDescriptor`` is
slightly different from ``ForwardManyToOneDescriptor``.
4. Related objects manager for related instances on the reverse side of a
many-to-one relation: ``ReverseManyToOneDescriptor``.
Unlike the previous two classes, this one provides access to a collection
of objects. It returns a manager rather than an instance.
5. Related objects manager for related instances on the forward or reverse
sides of a many-to-many relation: ``ManyToManyDescriptor``.
Many-to-many relations are symmetrical. The syntax of Django models
requires declaring them on one side but that's an implementation detail.
They could be declared on the other side without any change in behavior.
Therefore the forward and reverse descriptors can be the same.
If you're looking for ``ForwardManyToManyDescriptor`` or
``ReverseManyToManyDescriptor``, use ``ManyToManyDescriptor`` instead.
"""
from django.db import connections, router, transaction
from django.db.models import Q, signals
from django.db.models.query import QuerySet
from django.utils.functional import cached_property
class ForwardManyToOneDescriptor:
    """
    Accessor to the related object on the forward side of a many-to-one or
    one-to-one (via ForwardOneToOneDescriptor subclass) relation.

    In the example::

        class Child(Model):
            parent = ForeignKey(Parent, related_name='children')

    ``child.parent`` is a ``ForwardManyToOneDescriptor`` instance.
    """

    def __init__(self, field_with_rel):
        # ``field_with_rel`` is the relational field (e.g. the ForeignKey).
        self.field = field_with_rel

    @cached_property
    def RelatedObjectDoesNotExist(self):
        # The exception can't be created at initialization time since the
        # related model might not be resolved yet; `rel.model` might still be
        # a string model reference.
        return type(
            'RelatedObjectDoesNotExist',
            (self.field.remote_field.model.DoesNotExist, AttributeError),
            {}
        )

    def is_cached(self, instance):
        """Return whether the related object is already cached on ``instance``."""
        return self.field.is_cached(instance)

    def get_queryset(self, **hints):
        """Return a queryset over the related model, carrying ``hints`` for DB routing."""
        return self.field.remote_field.model._base_manager.db_manager(hints=hints).all()

    def get_prefetch_queryset(self, instances, queryset=None):
        """Return the (queryset, matchers, ...) tuple consumed by prefetch_related()."""
        if queryset is None:
            queryset = self.get_queryset()
        queryset._add_hints(instance=instances[0])

        rel_obj_attr = self.field.get_foreign_related_value
        instance_attr = self.field.get_local_related_value
        instances_dict = {instance_attr(inst): inst for inst in instances}
        related_field = self.field.foreign_related_fields[0]
        remote_field = self.field.remote_field

        # FIXME: This will need to be revisited when we introduce support for
        # composite fields. In the meantime we take this practical approach to
        # solve a regression on 1.6 when the reverse manager in hidden
        # (related_name ends with a '+'). Refs #21410.
        # The check for len(...) == 1 is a special case that allows the query
        # to be join-less and smaller. Refs #21760.
        if remote_field.is_hidden() or len(self.field.foreign_related_fields) == 1:
            query = {'%s__in' % related_field.name: {instance_attr(inst)[0] for inst in instances}}
        else:
            query = {'%s__in' % self.field.related_query_name(): instances}
        queryset = queryset.filter(**query)

        # Since we're going to assign directly in the cache,
        # we must manage the reverse relation cache manually.
        if not remote_field.multiple:
            for rel_obj in queryset:
                instance = instances_dict[rel_obj_attr(rel_obj)]
                remote_field.set_cached_value(rel_obj, instance)
        return queryset, rel_obj_attr, instance_attr, True, self.field.get_cache_name(), False

    def get_object(self, instance):
        """Fetch the related object for ``instance`` from the database."""
        qs = self.get_queryset(instance=instance)
        # Assuming the database enforces foreign keys, this won't fail.
        return qs.get(self.field.get_reverse_related_filter(instance))

    def __get__(self, instance, cls=None):
        """
        Get the related instance through the forward relation.

        With the example above, when getting ``child.parent``:

        - ``self`` is the descriptor managing the ``parent`` attribute
        - ``instance`` is the ``child`` instance
        - ``cls`` is the ``Child`` class (we don't need it)
        """
        if instance is None:
            return self

        # The related instance is loaded from the database and then cached
        # by the field on the model instance state. It can also be pre-cached
        # by the reverse accessor (ReverseOneToOneDescriptor).
        try:
            rel_obj = self.field.get_cached_value(instance)
        except KeyError:
            val = self.field.get_local_related_value(instance)
            if None in val:
                rel_obj = None
            else:
                rel_obj = self.get_object(instance)
                remote_field = self.field.remote_field
                # If this is a one-to-one relation, set the reverse accessor
                # cache on the related object to the current instance to avoid
                # an extra SQL query if it's accessed later on.
                if not remote_field.multiple:
                    remote_field.set_cached_value(rel_obj, instance)
            self.field.set_cached_value(instance, rel_obj)
        if rel_obj is None and not self.field.null:
            raise self.RelatedObjectDoesNotExist(
                "%s has no %s." % (self.field.model.__name__, self.field.name)
            )
        else:
            return rel_obj

    def __set__(self, instance, value):
        """
        Set the related instance through the forward relation.

        With the example above, when setting ``child.parent = parent``:

        - ``self`` is the descriptor managing the ``parent`` attribute
        - ``instance`` is the ``child`` instance
        - ``value`` is the ``parent`` instance on the right of the equal sign
        """
        # An object must be an instance of the related class.
        if value is not None and not isinstance(value, self.field.remote_field.model._meta.concrete_model):
            raise ValueError(
                'Cannot assign "%r": "%s.%s" must be a "%s" instance.' % (
                    value,
                    instance._meta.object_name,
                    self.field.name,
                    self.field.remote_field.model._meta.object_name,
                )
            )
        elif value is not None:
            # Resolve which database each side lives on, and refuse the
            # assignment if the router forbids the cross-database relation.
            if instance._state.db is None:
                instance._state.db = router.db_for_write(instance.__class__, instance=value)
            elif value._state.db is None:
                value._state.db = router.db_for_write(value.__class__, instance=instance)
            elif value._state.db is not None and instance._state.db is not None:
                if not router.allow_relation(value, instance):
                    raise ValueError('Cannot assign "%r": the current database router prevents this relation.' % value)

        remote_field = self.field.remote_field
        # If we're setting the value of a OneToOneField to None, we need to clear
        # out the cache on any old related object. Otherwise, deleting the
        # previously-related object will also cause this object to be deleted,
        # which is wrong.
        if value is None:
            # Look up the previously-related object, which may still be available
            # since we've not yet cleared out the related field.
            # Use the cache directly, instead of the accessor; if we haven't
            # populated the cache, then we don't care - we're only accessing
            # the object to invalidate the accessor cache, so there's no
            # need to populate the cache just to expire it again.
            related = self.field.get_cached_value(instance, default=None)

            # If we've got an old related object, we need to clear out its
            # cache. This cache also might not exist if the related object
            # hasn't been accessed yet.
            if related is not None:
                remote_field.set_cached_value(related, None)

            for lh_field, rh_field in self.field.related_fields:
                setattr(instance, lh_field.attname, None)

        # Set the values of the related field.
        else:
            for lh_field, rh_field in self.field.related_fields:
                setattr(instance, lh_field.attname, getattr(value, rh_field.attname))

        # Set the related instance cache used by __get__ to avoid an SQL query
        # when accessing the attribute we just set.
        self.field.set_cached_value(instance, value)

        # If this is a one-to-one relation, set the reverse accessor cache on
        # the related object to the current instance to avoid an extra SQL
        # query if it's accessed later on.
        if value is not None and not remote_field.multiple:
            remote_field.set_cached_value(value, instance)
class ForwardOneToOneDescriptor(ForwardManyToOneDescriptor):
    """
    Accessor to the related object on the forward side of a one-to-one relation.

    In the example::

        class Restaurant(Model):
            place = OneToOneField(Place, related_name='restaurant')

    ``restaurant.place`` is a ``ForwardOneToOneDescriptor`` instance.
    """

    def get_object(self, instance):
        """Return the parent object, building it from local data for parent links
        when possible instead of issuing a query."""
        if self.field.remote_field.parent_link:
            deferred = instance.get_deferred_fields()
            # Because it's a parent link, all the data is available in the
            # instance, so populate the parent model with this data.
            rel_model = self.field.remote_field.model
            fields = [field.attname for field in rel_model._meta.concrete_fields]

            # If any of the related model's fields are deferred, fallback to
            # fetching all fields from the related model. This avoids a query
            # on the related model for every deferred field.
            if not any(field in fields for field in deferred):
                kwargs = {field: getattr(instance, field) for field in fields}
                obj = rel_model(**kwargs)
                obj._state.adding = instance._state.adding
                obj._state.db = instance._state.db
                return obj
        return super().get_object(instance)

    def __set__(self, instance, value):
        super().__set__(instance, value)
        # If the primary key is a link to a parent model and a parent instance
        # is being set, update the value of the inherited pk(s).
        if self.field.primary_key and self.field.remote_field.parent_link:
            opts = instance._meta
            # Inherited primary key fields from this object's base classes.
            inherited_pk_fields = [
                field for field in opts.concrete_fields
                if field.primary_key and field.remote_field
            ]
            for field in inherited_pk_fields:
                rel_model_pk_name = field.remote_field.model._meta.pk.attname
                raw_value = getattr(value, rel_model_pk_name) if value is not None else None
                setattr(instance, rel_model_pk_name, raw_value)
class ReverseOneToOneDescriptor:
    """
    Accessor to the related object on the reverse side of a one-to-one
    relation.
    In the example::
        class Restaurant(Model):
            place = OneToOneField(Place, related_name='restaurant')
    ``place.restaurant`` is a ``ReverseOneToOneDescriptor`` instance.
    """
    def __init__(self, related):
        # Following the example above, `related` is an instance of OneToOneRel
        # which represents the reverse restaurant field (place.restaurant).
        self.related = related
    @cached_property
    def RelatedObjectDoesNotExist(self):
        # The exception isn't created at initialization time for the sake of
        # consistency with `ForwardManyToOneDescriptor`.
        return type(
            'RelatedObjectDoesNotExist',
            (self.related.related_model.DoesNotExist, AttributeError),
            {}
        )
    def is_cached(self, instance):
        """Return True if the related object is already cached on `instance`."""
        return self.related.is_cached(instance)
    def get_queryset(self, **hints):
        """Return a base queryset for the related model, honoring router hints."""
        return self.related.related_model._base_manager.db_manager(hints=hints).all()
    def get_prefetch_queryset(self, instances, queryset=None):
        """Return the queryset and matcher callables used by prefetch_related()."""
        if queryset is None:
            queryset = self.get_queryset()
        queryset._add_hints(instance=instances[0])
        rel_obj_attr = self.related.field.get_local_related_value
        instance_attr = self.related.field.get_foreign_related_value
        instances_dict = {instance_attr(inst): inst for inst in instances}
        query = {'%s__in' % self.related.field.name: instances}
        queryset = queryset.filter(**query)
        # Since we're going to assign directly in the cache,
        # we must manage the reverse relation cache manually.
        for rel_obj in queryset:
            instance = instances_dict[rel_obj_attr(rel_obj)]
            self.related.field.set_cached_value(rel_obj, instance)
        return queryset, rel_obj_attr, instance_attr, True, self.related.get_cache_name(), False
    def __get__(self, instance, cls=None):
        """
        Get the related instance through the reverse relation.
        With the example above, when getting ``place.restaurant``:
        - ``self`` is the descriptor managing the ``restaurant`` attribute
        - ``instance`` is the ``place`` instance
        - ``cls`` is the ``Place`` class (unused)
        Keep in mind that ``Restaurant`` holds the foreign key to ``Place``.
        """
        if instance is None:
            return self
        # The related instance is loaded from the database and then cached
        # by the field on the model instance state. It can also be pre-cached
        # by the forward accessor (ForwardManyToOneDescriptor).
        try:
            rel_obj = self.related.get_cached_value(instance)
        except KeyError:
            related_pk = instance.pk
            if related_pk is None:
                # Unsaved instance: it cannot possibly have a related row.
                rel_obj = None
            else:
                filter_args = self.related.field.get_forward_related_filter(instance)
                try:
                    rel_obj = self.get_queryset(instance=instance).get(**filter_args)
                except self.related.related_model.DoesNotExist:
                    rel_obj = None
                else:
                    # Set the forward accessor cache on the related object to
                    # the current instance to avoid an extra SQL query if it's
                    # accessed later on.
                    self.related.field.set_cached_value(rel_obj, instance)
            # Cache even a miss (None), so the query isn't repeated.
            self.related.set_cached_value(instance, rel_obj)
        if rel_obj is None:
            raise self.RelatedObjectDoesNotExist(
                "%s has no %s." % (
                    instance.__class__.__name__,
                    self.related.get_accessor_name()
                )
            )
        else:
            return rel_obj
    def __set__(self, instance, value):
        """
        Set the related instance through the reverse relation.
        With the example above, when setting ``place.restaurant = restaurant``:
        - ``self`` is the descriptor managing the ``restaurant`` attribute
        - ``instance`` is the ``place`` instance
        - ``value`` is the ``restaurant`` instance on the right of the equal sign
        Keep in mind that ``Restaurant`` holds the foreign key to ``Place``.
        """
        # The similarity of the code below to the code in
        # ForwardManyToOneDescriptor is annoying, but there's a bunch
        # of small differences that would make a common base class convoluted.
        if value is None:
            # Update the cached related instance (if any) & clear the cache.
            try:
                # Following the example above, this would be the cached
                # ``restaurant`` instance (if any).
                rel_obj = self.related.get_cached_value(instance)
            except KeyError:
                pass
            else:
                # Remove the ``restaurant`` instance from the ``place``
                # instance cache.
                self.related.delete_cached_value(instance)
                # Set the ``place`` field on the ``restaurant``
                # instance to None.
                setattr(rel_obj, self.related.field.name, None)
        elif not isinstance(value, self.related.related_model):
            # An object must be an instance of the related class.
            raise ValueError(
                'Cannot assign "%r": "%s.%s" must be a "%s" instance.' % (
                    value,
                    instance._meta.object_name,
                    self.related.get_accessor_name(),
                    self.related.related_model._meta.object_name,
                )
            )
        else:
            # Resolve which database each side lives on, then let the router
            # veto cross-database assignments.
            if instance._state.db is None:
                instance._state.db = router.db_for_write(instance.__class__, instance=value)
            elif value._state.db is None:
                value._state.db = router.db_for_write(value.__class__, instance=instance)
            elif value._state.db is not None and instance._state.db is not None:
                if not router.allow_relation(value, instance):
                    raise ValueError('Cannot assign "%r": the current database router prevents this relation.' % value)
            related_pk = tuple(getattr(instance, field.attname) for field in self.related.field.foreign_related_fields)
            # Set the value of the related field to the value of the related object's related field
            for index, field in enumerate(self.related.field.local_related_fields):
                setattr(value, field.attname, related_pk[index])
            # Set the related instance cache used by __get__ to avoid an SQL query
            # when accessing the attribute we just set.
            self.related.set_cached_value(instance, value)
            # Set the forward accessor cache on the related object to the current
            # instance to avoid an extra SQL query if it's accessed later on.
            self.related.field.set_cached_value(value, instance)
class ReverseManyToOneDescriptor:
    """
    Accessor to the related objects manager on the reverse side of a
    many-to-one relation.
    In the example::
        class Child(Model):
            parent = ForeignKey(Parent, related_name='children')
    ``parent.children`` is a ``ReverseManyToOneDescriptor`` instance.
    Most of the implementation is delegated to a dynamically defined manager
    class built by ``create_forward_many_to_many_manager()`` defined below.
    """
    def __init__(self, rel):
        self.rel = rel
        self.field = rel.field
    @cached_property
    def related_manager_cls(self):
        # Build (and memoize) a manager class specialized for this relation,
        # derived from the related model's default manager class.
        base_manager_cls = self.rel.related_model._default_manager.__class__
        return create_reverse_many_to_one_manager(base_manager_cls, self.rel)
    def __get__(self, instance, cls=None):
        """
        Return the related objects manager when accessed on an instance;
        return the descriptor itself when accessed on the class.
        With the example above, ``parent.children`` yields a manager bound
        to ``parent``.
        """
        return self if instance is None else self.related_manager_cls(instance)
    def _get_set_deprecation_msg_params(self):
        # (description of the relation side, accessor name) for the error text.
        return 'reverse side of a related set', self.rel.get_accessor_name()
    def __set__(self, instance, value):
        # Direct assignment is intentionally unsupported; callers must use .set().
        raise TypeError(
            'Direct assignment to the %s is prohibited. Use %s.set() instead.'
            % self._get_set_deprecation_msg_params(),
        )
def create_reverse_many_to_one_manager(superclass, rel):
    """
    Create a manager for the reverse side of a many-to-one relation.
    This manager subclasses another manager, generally the default manager of
    the related model, and adds behaviors specific to many-to-one relations.
    """
    class RelatedManager(superclass):
        def __init__(self, instance):
            """Bind the manager to the instance owning the reverse relation."""
            super().__init__()
            self.instance = instance
            self.model = rel.related_model
            self.field = rel.field
            # All queries are restricted to rows whose FK points at `instance`.
            self.core_filters = {self.field.name: instance}
        def __call__(self, *, manager):
            # Support `parent.children(manager='other_manager')` by rebuilding
            # the related manager class on top of the named manager.
            manager = getattr(self.model, manager)
            manager_class = create_reverse_many_to_one_manager(manager.__class__, rel)
            return manager_class(self.instance)
        # Tell the template engine not to call the manager as a function.
        do_not_call_in_templates = True
        def _apply_rel_filters(self, queryset):
            """
            Filter the queryset for the instance this manager is bound to.
            """
            db = self._db or router.db_for_read(self.model, instance=self.instance)
            empty_strings_as_null = connections[db].features.interprets_empty_strings_as_nulls
            queryset._add_hints(instance=self.instance)
            if self._db:
                queryset = queryset.using(self._db)
            queryset = queryset.filter(**self.core_filters)
            for field in self.field.foreign_related_fields:
                val = getattr(self.instance, field.attname)
                # A null (or Oracle-style empty-string) key can't match anything.
                if val is None or (val == '' and empty_strings_as_null):
                    return queryset.none()
            queryset._known_related_objects = {self.field: {self.instance.pk: self.instance}}
            return queryset
        def _remove_prefetched_objects(self):
            """Drop the prefetch cache entry for this relation, if any."""
            try:
                self.instance._prefetched_objects_cache.pop(self.field.related_query_name())
            except (AttributeError, KeyError):
                pass  # nothing to clear from cache
        def get_queryset(self):
            """Return related objects, preferring the prefetch cache when present."""
            try:
                return self.instance._prefetched_objects_cache[self.field.related_query_name()]
            except (AttributeError, KeyError):
                queryset = super().get_queryset()
                return self._apply_rel_filters(queryset)
        def get_prefetch_queryset(self, instances, queryset=None):
            """Return the queryset and matcher callables used by prefetch_related()."""
            if queryset is None:
                queryset = super().get_queryset()
            queryset._add_hints(instance=instances[0])
            queryset = queryset.using(queryset._db or self._db)
            rel_obj_attr = self.field.get_local_related_value
            instance_attr = self.field.get_foreign_related_value
            instances_dict = {instance_attr(inst): inst for inst in instances}
            query = {'%s__in' % self.field.name: instances}
            queryset = queryset.filter(**query)
            # Since we just bypassed this class' get_queryset(), we must manage
            # the reverse relation manually.
            for rel_obj in queryset:
                instance = instances_dict[rel_obj_attr(rel_obj)]
                setattr(rel_obj, self.field.name, instance)
            cache_name = self.field.related_query_name()
            return queryset, rel_obj_attr, instance_attr, False, cache_name, False
        def add(self, *objs, bulk=True):
            """Point each object's FK at this instance; persist via a single
            UPDATE when bulk=True, or per-object save() otherwise."""
            self._remove_prefetched_objects()
            objs = list(objs)
            db = router.db_for_write(self.model, instance=self.instance)
            def check_and_update_obj(obj):
                # Validate the type, then retarget the FK in memory.
                if not isinstance(obj, self.model):
                    raise TypeError("'%s' instance expected, got %r" % (
                        self.model._meta.object_name, obj,
                    ))
                setattr(obj, self.field.name, self.instance)
            if bulk:
                pks = []
                for obj in objs:
                    check_and_update_obj(obj)
                    # A bulk UPDATE can only target rows already saved on `db`.
                    if obj._state.adding or obj._state.db != db:
                        raise ValueError(
                            "%r instance isn't saved. Use bulk=False or save "
                            "the object first." % obj
                        )
                    pks.append(obj.pk)
                self.model._base_manager.using(db).filter(pk__in=pks).update(**{
                    self.field.name: self.instance,
                })
            else:
                with transaction.atomic(using=db, savepoint=False):
                    for obj in objs:
                        check_and_update_obj(obj)
                        obj.save()
        add.alters_data = True
        def create(self, **kwargs):
            kwargs[self.field.name] = self.instance
            db = router.db_for_write(self.model, instance=self.instance)
            return super(RelatedManager, self.db_manager(db)).create(**kwargs)
        create.alters_data = True
        def get_or_create(self, **kwargs):
            kwargs[self.field.name] = self.instance
            db = router.db_for_write(self.model, instance=self.instance)
            return super(RelatedManager, self.db_manager(db)).get_or_create(**kwargs)
        get_or_create.alters_data = True
        def update_or_create(self, **kwargs):
            kwargs[self.field.name] = self.instance
            db = router.db_for_write(self.model, instance=self.instance)
            return super(RelatedManager, self.db_manager(db)).update_or_create(**kwargs)
        update_or_create.alters_data = True
        # remove() and clear() are only provided if the ForeignKey can have a value of null.
        if rel.field.null:
            def remove(self, *objs, bulk=True):
                """Detach the given objects by nulling their FK."""
                if not objs:
                    return
                val = self.field.get_foreign_related_value(self.instance)
                old_ids = set()
                for obj in objs:
                    # Is obj actually part of this descriptor set?
                    if self.field.get_local_related_value(obj) == val:
                        old_ids.add(obj.pk)
                    else:
                        raise self.field.remote_field.model.DoesNotExist(
                            "%r is not related to %r." % (obj, self.instance)
                        )
                self._clear(self.filter(pk__in=old_ids), bulk)
            remove.alters_data = True
            def clear(self, *, bulk=True):
                """Detach every related object by nulling its FK."""
                self._clear(self, bulk)
            clear.alters_data = True
            def _clear(self, queryset, bulk):
                # Shared implementation for remove()/clear().
                self._remove_prefetched_objects()
                db = router.db_for_write(self.model, instance=self.instance)
                queryset = queryset.using(db)
                if bulk:
                    # `QuerySet.update()` is intrinsically atomic.
                    queryset.update(**{self.field.name: None})
                else:
                    with transaction.atomic(using=db, savepoint=False):
                        for obj in queryset:
                            setattr(obj, self.field.name, None)
                            obj.save(update_fields=[self.field.name])
            _clear.alters_data = True
        def set(self, objs, *, bulk=True, clear=False):
            """Replace the related set with `objs`, removing/adding the diff
            (or clearing first when clear=True)."""
            # Force evaluation of `objs` in case it's a queryset whose value
            # could be affected by `manager.clear()`. Refs #19816.
            objs = tuple(objs)
            if self.field.null:
                db = router.db_for_write(self.model, instance=self.instance)
                with transaction.atomic(using=db, savepoint=False):
                    if clear:
                        self.clear()
                        self.add(*objs, bulk=bulk)
                    else:
                        old_objs = set(self.using(db).all())
                        new_objs = []
                        for obj in objs:
                            if obj in old_objs:
                                old_objs.remove(obj)
                            else:
                                new_objs.append(obj)
                        self.remove(*old_objs, bulk=bulk)
                        self.add(*new_objs, bulk=bulk)
            else:
                # Non-nullable FK: nothing can be detached, only added.
                self.add(*objs, bulk=bulk)
        set.alters_data = True
    return RelatedManager
class ManyToManyDescriptor(ReverseManyToOneDescriptor):
    """
    Accessor to the related objects manager on the forward and reverse sides of
    a many-to-many relation.
    In the example::
        class Pizza(Model):
            toppings = ManyToManyField(Topping, related_name='pizzas')
    ``pizza.toppings`` and ``topping.pizzas`` are ``ManyToManyDescriptor``
    instances.
    Most of the implementation is delegated to a dynamically defined manager
    class built by ``create_forward_many_to_many_manager()`` defined below.
    """
    def __init__(self, rel, reverse=False):
        super().__init__(rel)
        self.reverse = reverse
    @property
    def through(self):
        # through is provided so that you have easy access to the through
        # model (Book.authors.through) for inlines, etc. This is done as
        # a property to ensure that the fully resolved value is returned.
        return self.rel.through
    @cached_property
    def related_manager_cls(self):
        # On the reverse side the manager yields instances of the related
        # model; on the forward side, instances of the model itself.
        model = self.rel.related_model if self.reverse else self.rel.model
        return create_forward_many_to_many_manager(
            model._default_manager.__class__,
            self.rel,
            reverse=self.reverse,
        )
    def _get_set_deprecation_msg_params(self):
        # (description of the relation side, accessor name) for the error text.
        if self.reverse:
            side, accessor = 'reverse', self.rel.get_accessor_name()
        else:
            side, accessor = 'forward', self.field.name
        return '%s side of a many-to-many set' % side, accessor
def create_forward_many_to_many_manager(superclass, rel, reverse):
    """
    Create a manager for the either side of a many-to-many relation.
    This manager subclasses another manager, generally the default manager of
    the related model, and adds behaviors specific to many-to-many relations.
    """
    class ManyRelatedManager(superclass):
        def __init__(self, instance=None):
            """Bind the manager to one side of the m2m relation for `instance`."""
            super().__init__()
            self.instance = instance
            # source/target are swapped depending on which side of the
            # relation this manager represents.
            if not reverse:
                self.model = rel.model
                self.query_field_name = rel.field.related_query_name()
                self.prefetch_cache_name = rel.field.name
                self.source_field_name = rel.field.m2m_field_name()
                self.target_field_name = rel.field.m2m_reverse_field_name()
                self.symmetrical = rel.symmetrical
            else:
                self.model = rel.related_model
                self.query_field_name = rel.field.name
                self.prefetch_cache_name = rel.field.related_query_name()
                self.source_field_name = rel.field.m2m_reverse_field_name()
                self.target_field_name = rel.field.m2m_field_name()
                self.symmetrical = False
            self.through = rel.through
            self.reverse = reverse
            self.source_field = self.through._meta.get_field(self.source_field_name)
            self.target_field = self.through._meta.get_field(self.target_field_name)
            self.core_filters = {}
            self.pk_field_names = {}
            for lh_field, rh_field in self.source_field.related_fields:
                core_filter_key = '%s__%s' % (self.query_field_name, rh_field.name)
                self.core_filters[core_filter_key] = getattr(instance, rh_field.attname)
                self.pk_field_names[lh_field.name] = rh_field.name
            self.related_val = self.source_field.get_foreign_related_value(instance)
            if None in self.related_val:
                raise ValueError('"%r" needs to have a value for field "%s" before '
                                 'this many-to-many relationship can be used.' %
                                 (instance, self.pk_field_names[self.source_field_name]))
            # Even if this relation is not to pk, we require still pk value.
            # The wish is that the instance has been already saved to DB,
            # although having a pk value isn't a guarantee of that.
            if instance.pk is None:
                raise ValueError("%r instance needs to have a primary key value before "
                                 "a many-to-many relationship can be used." %
                                 instance.__class__.__name__)
        def __call__(self, *, manager):
            # Support `pizza.toppings(manager='other_manager')` by rebuilding
            # the related manager class on top of the named manager.
            manager = getattr(self.model, manager)
            manager_class = create_forward_many_to_many_manager(manager.__class__, rel, reverse)
            return manager_class(instance=self.instance)
        # Tell the template engine not to call the manager as a function.
        do_not_call_in_templates = True
        def _build_remove_filters(self, removed_vals):
            """Build the Q filters selecting the through rows to delete."""
            filters = Q(**{self.source_field_name: self.related_val})
            # No need to add a subquery condition if removed_vals is a QuerySet without
            # filters.
            removed_vals_filters = (not isinstance(removed_vals, QuerySet) or
                                    removed_vals._has_filters())
            if removed_vals_filters:
                filters &= Q(**{'%s__in' % self.target_field_name: removed_vals})
            if self.symmetrical:
                # Also delete the mirror rows of a symmetrical self-relation.
                symmetrical_filters = Q(**{self.target_field_name: self.related_val})
                if removed_vals_filters:
                    symmetrical_filters &= Q(
                        **{'%s__in' % self.source_field_name: removed_vals})
                filters |= symmetrical_filters
            return filters
        def _apply_rel_filters(self, queryset):
            """
            Filter the queryset for the instance this manager is bound to.
            """
            queryset._add_hints(instance=self.instance)
            if self._db:
                queryset = queryset.using(self._db)
            return queryset._next_is_sticky().filter(**self.core_filters)
        def _remove_prefetched_objects(self):
            """Drop the prefetch cache entry for this relation, if any."""
            try:
                self.instance._prefetched_objects_cache.pop(self.prefetch_cache_name)
            except (AttributeError, KeyError):
                pass  # nothing to clear from cache
        def get_queryset(self):
            """Return related objects, preferring the prefetch cache when present."""
            try:
                return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
            except (AttributeError, KeyError):
                queryset = super().get_queryset()
                return self._apply_rel_filters(queryset)
        def get_prefetch_queryset(self, instances, queryset=None):
            """Return the queryset and matcher callables used by prefetch_related()."""
            if queryset is None:
                queryset = super().get_queryset()
            queryset._add_hints(instance=instances[0])
            queryset = queryset.using(queryset._db or self._db)
            query = {'%s__in' % self.query_field_name: instances}
            queryset = queryset._next_is_sticky().filter(**query)
            # M2M: need to annotate the query in order to get the primary model
            # that the secondary model was actually related to. We know that
            # there will already be a join on the join table, so we can just add
            # the select.
            # For non-autocreated 'through' models, can't assume we are
            # dealing with PK values.
            fk = self.through._meta.get_field(self.source_field_name)
            join_table = fk.model._meta.db_table
            connection = connections[queryset.db]
            qn = connection.ops.quote_name
            queryset = queryset.extra(select={
                '_prefetch_related_val_%s' % f.attname:
                '%s.%s' % (qn(join_table), qn(f.column)) for f in fk.local_related_fields})
            return (
                queryset,
                lambda result: tuple(
                    getattr(result, '_prefetch_related_val_%s' % f.attname)
                    for f in fk.local_related_fields
                ),
                lambda inst: tuple(
                    f.get_db_prep_value(getattr(inst, f.attname), connection)
                    for f in fk.foreign_related_fields
                ),
                False,
                self.prefetch_cache_name,
                False,
            )
        def add(self, *objs):
            """Add the given model instances (or raw pks) to the relation."""
            if not rel.through._meta.auto_created:
                opts = self.through._meta
                raise AttributeError(
                    "Cannot use add() on a ManyToManyField which specifies an "
                    "intermediary model. Use %s.%s's Manager instead." %
                    (opts.app_label, opts.object_name)
                )
            self._remove_prefetched_objects()
            db = router.db_for_write(self.through, instance=self.instance)
            with transaction.atomic(using=db, savepoint=False):
                self._add_items(self.source_field_name, self.target_field_name, *objs)
                # If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
                if self.symmetrical:
                    self._add_items(self.target_field_name, self.source_field_name, *objs)
        add.alters_data = True
        def remove(self, *objs):
            """Remove the given model instances (or raw pks) from the relation."""
            if not rel.through._meta.auto_created:
                opts = self.through._meta
                raise AttributeError(
                    "Cannot use remove() on a ManyToManyField which specifies "
                    "an intermediary model. Use %s.%s's Manager instead." %
                    (opts.app_label, opts.object_name)
                )
            self._remove_prefetched_objects()
            self._remove_items(self.source_field_name, self.target_field_name, *objs)
        remove.alters_data = True
        def clear(self):
            """Delete every through row for this instance (and mirrors)."""
            db = router.db_for_write(self.through, instance=self.instance)
            with transaction.atomic(using=db, savepoint=False):
                signals.m2m_changed.send(
                    sender=self.through, action="pre_clear",
                    instance=self.instance, reverse=self.reverse,
                    model=self.model, pk_set=None, using=db,
                )
                self._remove_prefetched_objects()
                filters = self._build_remove_filters(super().get_queryset().using(db))
                self.through._default_manager.using(db).filter(filters).delete()
                signals.m2m_changed.send(
                    sender=self.through, action="post_clear",
                    instance=self.instance, reverse=self.reverse,
                    model=self.model, pk_set=None, using=db,
                )
        clear.alters_data = True
        def set(self, objs, *, clear=False):
            """Replace the related set with `objs`, removing/adding the diff
            (or clearing first when clear=True)."""
            if not rel.through._meta.auto_created:
                opts = self.through._meta
                raise AttributeError(
                    "Cannot set values on a ManyToManyField which specifies an "
                    "intermediary model. Use %s.%s's Manager instead." %
                    (opts.app_label, opts.object_name)
                )
            # Force evaluation of `objs` in case it's a queryset whose value
            # could be affected by `manager.clear()`. Refs #19816.
            objs = tuple(objs)
            db = router.db_for_write(self.through, instance=self.instance)
            with transaction.atomic(using=db, savepoint=False):
                if clear:
                    self.clear()
                    self.add(*objs)
                else:
                    old_ids = set(self.using(db).values_list(self.target_field.target_field.attname, flat=True))
                    new_objs = []
                    for obj in objs:
                        # Accept both model instances and raw pk values.
                        fk_val = (
                            self.target_field.get_foreign_related_value(obj)[0]
                            if isinstance(obj, self.model) else obj
                        )
                        if fk_val in old_ids:
                            old_ids.remove(fk_val)
                        else:
                            new_objs.append(obj)
                    self.remove(*old_ids)
                    self.add(*new_objs)
        set.alters_data = True
        def create(self, **kwargs):
            # This check needs to be done here, since we can't later remove this
            # from the method lookup table, as we do with add and remove.
            if not self.through._meta.auto_created:
                opts = self.through._meta
                raise AttributeError(
                    "Cannot use create() on a ManyToManyField which specifies "
                    "an intermediary model. Use %s.%s's Manager instead." %
                    (opts.app_label, opts.object_name)
                )
            db = router.db_for_write(self.instance.__class__, instance=self.instance)
            new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
            self.add(new_obj)
            return new_obj
        create.alters_data = True
        def get_or_create(self, **kwargs):
            db = router.db_for_write(self.instance.__class__, instance=self.instance)
            obj, created = super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs)
            # We only need to add() if created because if we got an object back
            # from get() then the relationship already exists.
            if created:
                self.add(obj)
            return obj, created
        get_or_create.alters_data = True
        def update_or_create(self, **kwargs):
            db = router.db_for_write(self.instance.__class__, instance=self.instance)
            obj, created = super(ManyRelatedManager, self.db_manager(db)).update_or_create(**kwargs)
            # We only need to add() if created because if we got an object back
            # from get() then the relationship already exists.
            if created:
                self.add(obj)
            return obj, created
        update_or_create.alters_data = True
        def _add_items(self, source_field_name, target_field_name, *objs):
            # source_field_name: the PK fieldname in join table for the source object
            # target_field_name: the PK fieldname in join table for the target object
            # *objs - objects to add. Either object instances, or primary keys of object instances.
            # If there aren't any objects, there is nothing to do.
            from django.db.models import Model
            if objs:
                new_ids = set()
                for obj in objs:
                    if isinstance(obj, self.model):
                        if not router.allow_relation(obj, self.instance):
                            raise ValueError(
                                'Cannot add "%r": instance is on database "%s", value is on database "%s"' %
                                (obj, self.instance._state.db, obj._state.db)
                            )
                        fk_val = self.through._meta.get_field(
                            target_field_name).get_foreign_related_value(obj)[0]
                        if fk_val is None:
                            raise ValueError(
                                'Cannot add "%r": the value for field "%s" is None' %
                                (obj, target_field_name)
                            )
                        new_ids.add(fk_val)
                    elif isinstance(obj, Model):
                        # A model instance of the wrong model.
                        raise TypeError(
                            "'%s' instance expected, got %r" %
                            (self.model._meta.object_name, obj)
                        )
                    else:
                        # A raw pk value.
                        new_ids.add(obj)
                db = router.db_for_write(self.through, instance=self.instance)
                # Skip pairs that already exist in the join table.
                vals = (self.through._default_manager.using(db)
                        .values_list(target_field_name, flat=True)
                        .filter(**{
                            source_field_name: self.related_val[0],
                            '%s__in' % target_field_name: new_ids,
                        }))
                new_ids.difference_update(vals)
                with transaction.atomic(using=db, savepoint=False):
                    if self.reverse or source_field_name == self.source_field_name:
                        # Don't send the signal when we are inserting the
                        # duplicate data row for symmetrical reverse entries.
                        signals.m2m_changed.send(
                            sender=self.through, action='pre_add',
                            instance=self.instance, reverse=self.reverse,
                            model=self.model, pk_set=new_ids, using=db,
                        )
                    # Add the ones that aren't there already
                    self.through._default_manager.using(db).bulk_create([
                        self.through(**{
                            '%s_id' % source_field_name: self.related_val[0],
                            '%s_id' % target_field_name: obj_id,
                        })
                        for obj_id in new_ids
                    ])
                    if self.reverse or source_field_name == self.source_field_name:
                        # Don't send the signal when we are inserting the
                        # duplicate data row for symmetrical reverse entries.
                        signals.m2m_changed.send(
                            sender=self.through, action='post_add',
                            instance=self.instance, reverse=self.reverse,
                            model=self.model, pk_set=new_ids, using=db,
                        )
        def _remove_items(self, source_field_name, target_field_name, *objs):
            # source_field_name: the PK colname in join table for the source object
            # target_field_name: the PK colname in join table for the target object
            # *objs - objects to remove
            if not objs:
                return
            # Check that all the objects are of the right type
            old_ids = set()
            for obj in objs:
                if isinstance(obj, self.model):
                    fk_val = self.target_field.get_foreign_related_value(obj)[0]
                    old_ids.add(fk_val)
                else:
                    old_ids.add(obj)
            db = router.db_for_write(self.through, instance=self.instance)
            with transaction.atomic(using=db, savepoint=False):
                # Send a signal to the other end if need be.
                signals.m2m_changed.send(
                    sender=self.through, action="pre_remove",
                    instance=self.instance, reverse=self.reverse,
                    model=self.model, pk_set=old_ids, using=db,
                )
                target_model_qs = super().get_queryset()
                if target_model_qs._has_filters():
                    old_vals = target_model_qs.using(db).filter(**{
                        '%s__in' % self.target_field.target_field.attname: old_ids})
                else:
                    old_vals = old_ids
                filters = self._build_remove_filters(old_vals)
                self.through._default_manager.using(db).filter(filters).delete()
                signals.m2m_changed.send(
                    sender=self.through, action="post_remove",
                    instance=self.instance, reverse=self.reverse,
                    model=self.model, pk_set=old_ids, using=db,
                )
    return ManyRelatedManager
| 44.196429 | 119 | 0.595515 |
8170ed7bf4b502f072be97c994ab3b9d61b673e8 | 16,993 | py | Python | life/universes.py | japinol7/life | 5462a497011685754eef61503e7697c22ae8dbe9 | [
"MIT"
] | 2 | 2019-07-13T09:38:42.000Z | 2019-07-14T13:06:16.000Z | life/universes.py | japinol7/life | 5462a497011685754eef61503e7697c22ae8dbe9 | [
"MIT"
] | null | null | null | life/universes.py | japinol7/life | 5462a497011685754eef61503e7697c22ae8dbe9 | [
"MIT"
] | null | null | null | """Module universes."""
__author__ = 'Joan A. Pinol (japinol)'
from random import randint
import numpy as np
from numpy.lib.stride_tricks import as_strided
import pygame as pg
from life.cells import Cell
from life import lib_jp
from life import resources
from life import seeds
from life.settings import Settings, UniverseCalculationType, NextGenerationType
INVISIBLE_DEAD_CELLS_ALLOWED = 5
class Universe(pg.sprite.Sprite):
"""Represents a universe."""
size = None # The size of the sprite of the universe
sprite_image = None
    def __init__(self, game):
        """Create the universe for the given game.

        Builds the (initially empty) cell matrix, the graphic board mapping,
        the statistics counters and the background sprite, then registers
        the universe with the game's active sprite group.
        """
        super().__init__()
        self.id = game.current_game
        self.cells = None  # The matrix of cells
        # Dictionary of the cells used for their graphic representation on the board
        self.cells_board = {}
        self.size_u = lib_jp.Size(w=Settings.universe_size.w, h=Settings.universe_size.h)
        self.game = game
        self.start_time = 0
        self.stop_time = 0
        self.generation = 0
        # Population counters: cells currently alive bucketed by age, plus
        # cumulative dead cells bucketed by the age at which they died.
        self.stats = {'cells_total': 0,
                      'cells_age1': 0,
                      'cells_age2': 0,
                      'cells_age3': 0,
                      'cells_age4': 0,
                      'cells_age5': 0,
                      'cells_age6': 0,
                      'cells_age7': 0,
                      'cells_age8': 0,
                      'deads_age1': 0,
                      'deads_age2': 0,
                      'deads_age3': 0,
                      'deads_age4': 0,
                      'deads_age5': 0,
                      'deads_age6': 0,
                      'deads_age7': 0,
                      'deads_age8': 0,
                      }
        # Choose the calculation/rendering strategies before the first clean().
        self.set_universe_calculate_generation()
        self.set_cells_board_update_calculation()
        self.set_universe_colors(Settings.cell_one_color)
        self.clean()
        # Load and scale the shared background image only once per process.
        if not Universe.sprite_image:
            image = pg.image.load(resources.file_name_get(name='im_universe')).convert()
            image = pg.transform.smoothscale(image, Universe.size)
            Universe.sprite_image = image
        else:
            image = Universe.sprite_image
        self.image = image
        self.rect = self.image.get_rect()
        self.rect.x = Settings.universe_pos.x
        self.rect.y = Settings.universe_pos.y
        # Bottom-right corner of the universe sprite on screen.
        self.pos_limit = lib_jp.Point(x=self.rect.x + Universe.size.w,
                                      y=self.rect.y + Universe.size.h)
        # Add universe to the active sprite list
        self.game.active_sprites.add(self)
def update(self):
if not self.game.next_generation or self.game.next_generation == NextGenerationType.NONE:
return
self.next_generation()
# Clean the graphic board of dead cells
if len(self.game.cells) > self.stats['cells_total'] + INVISIBLE_DEAD_CELLS_ALLOWED:
self._clean_board_of_dead_cells()
if self.game.next_generation == NextGenerationType.ONE:
self.game.next_generation = NextGenerationType.NONE
elif self.game.next_generation == NextGenerationType.SEVERAL:
if Settings.stop_after_generation and self.generation == Settings.stop_after_generation:
self.stop_time = pg.time.get_ticks()
self.game.next_generation = NextGenerationType.NONE
if Settings.exit_auto_when_auto_play:
self.game.exit_auto = True
def clean(self):
self.generation = 1
self.cells = np.array([[0] * self.size_u.w for _ in range(self.size_u.h)], dtype=np.uint8)
nd_slice = (slice(1, -1),) * len(self.size_u)
self.cells_slice = self.cells[nd_slice]
self.n_dims = len(self.cells_slice.shape)
self.cells_board = {}
cells_old = self.cells.copy()
self.stats.update((k, 0) for k in self.stats)
self._reset_population()
self.start_time = pg.time.get_ticks()
# Change the graphic cells on the board according to the matrix of cells
self._cells_board_update(cells_old)
def set_universe_calculate_generation(self):
"""Sets the calculation function of the universe a toroidal one
or a flat one according to settings.
"""
if Settings.universe_calculation == UniverseCalculationType.NON_TOROIDAL_FAST:
self._calculate_generation = self._calculate_generation_fast
elif Settings.universe_calculation == UniverseCalculationType.TOROIDAL_LOOP:
self._calculate_generation = self._calculate_generation_toroidal_with_loop
elif Settings.universe_calculation == UniverseCalculationType.NON_TOROIDAL_LOOP:
self._calculate_generation = self._calculate_generation_with_loop
def set_cells_board_update_calculation(self):
"""Sets the calculation function of cells on the board with and without statistics."""
if Settings.stats_activated:
self._cells_board_update = self._cells_board_update_with_stats
else:
self._cells_board_update = self._cells_board_update_standard
def set_universe_colors(self, cell_one_color):
"""Sets the representation of the universe to one color for each age
when cell_one_color is False; sets it to only on color despite the age if True.
"""
if cell_one_color:
self.pprint_to_string = self.pprint_to_string_board_one_color
else:
self.pprint_to_string = self.pprint_to_string_board_age
def next_generation(self):
if self.generation == 1:
self.game.output_info.write_head()
self.game.debug_info.write_head()
if Settings.save_to_out_file:
self.game.output_info.write_generation()
self.generation += 1
# Copy the old generation
cells_old = self.cells.copy()
# Calculate generation
self._calculate_generation(cells_old)
# Change the graphic cells on the board according to the array of cells
self._cells_board_update(cells_old)
if Settings.save_to_out_file:
self.game.output_info.write_generation()
    def _cells_board_update_standard(self, cells_old):
        """Sync the sprite board with the cell matrix (no per-age stats).

        cells_old -- the matrix before this generation; comparing it with
        self.cells distinguishes survivals, births and deaths.
        """
        # Change the graphic cells on the board according to the array of cells
        for i, rows in enumerate(self.cells):
            for j, cell in enumerate(rows):
                # Board keys are (x, y) = (column, row), hence (j, i).
                cell_board = self.cells_board.get((j, i))
                if cell_board:
                    if cell == 1 and cells_old[i, j] == 1:
                        # Survivor: age it, capped at the last age bucket.
                        cell_board.age = min(cell_board.age + 1, Cell.CELL_AGES_MAX - 1)
                        cell_board.age_total += 1
                    else:
                        cell_board.age = cell
                        if not cell and cells_old[i, j] == 1:
                            # A live cell died this generation.
                            self.stats['cells_total'] -= 1
                        elif cell == 1 and not cells_old[i, j]:
                            # A cell was born on a square that kept its sprite.
                            self.stats['cells_total'] += 1
                elif cell:
                    # Birth on a square with no sprite yet.
                    self.add_cell_to_board(j, i, age=cell)
                    self.stats['cells_total'] += 1
    def _cells_board_update_with_stats(self, cells_old):
        """Change the graphic cells on the board according to the array of cells.
        Also calculates statistics for total age dead cells and cells currently alive.
        """
        for i, rows in enumerate(self.cells):
            for j, cell in enumerate(rows):
                # Board keys are (x, y) = (column, row), hence (j, i).
                cell_board = self.cells_board.get((j, i))
                if cell_board:
                    if cell == 1 and cells_old[i, j] == 1:
                        # Survivor: move it from its old age bucket to the new one.
                        self.stats[f'cells_age{cell_board.age}'] -= 1
                        cell_board.age = min(cell_board.age + 1, Cell.CELL_AGES_MAX - 1)
                        cell_board.age_total += 1
                        self.stats[f'cells_age{cell_board.age}'] += 1
                    else:
                        if not cell and cells_old[i, j] == 1:
                            # Death: record it against the age the cell reached.
                            self.stats['cells_total'] -= 1
                            self.stats[f'deads_age{cell_board.age}'] += 1
                            self.stats[f'cells_age{cell_board.age}'] -= 1
                        elif cell == 1 and not cells_old[i, j]:
                            # Birth on a square that kept its sprite.
                            self.stats['cells_total'] += 1
                            self.stats['cells_age1'] += 1
                        cell_board.age = cell
                elif cell:
                    # Birth on a square with no sprite yet.
                    self.add_cell_to_board(j, i, age=cell)
                    self.stats['cells_total'] += 1
                    self.stats[f'cells_age{cell}'] += 1
def _clean_board_of_dead_cells(self):
for key, cell_board in tuple(self.cells_board.items()):
if cell_board.age == 0:
cell_board.kill()
del self.cells_board[key]
    def _calculate_generation_fast(self, _):
        '''Calculates the next generation for each cell
        in a non toroidal universe.
        This is a faster method that takes advantage of numpy.
        '''
        # Rule lookup tables indexed by neighbor count (0..8); 1 = alive next turn.
        rule_of_life_alive = np.zeros(8 + 1, np.uint8)
        rule_of_life_dead = np.zeros(8 + 1, np.uint8)
        # If it is alive and has less than 2 neighbors, it dies by under-population.
        # Also, if it has more than 3 neighbors, it dies by over-population.
        # That is, only the cells alive with 2 or 3 neighbors survive
        rule_of_life_alive[[2, 3]] = 1
        # If the cell is dead but has exactly 3 neighbors, a new cell is born by reproduction.
        rule_of_life_dead[3] = 1
        # Calculate the neighborhoods and apply the rules
        neighborhoods = self._grid_n_dims(self.cells)
        # Sum over the trailing window axes (the 3x3x... neighborhood), then
        # remove the cell's own value from its neighbor count.
        sum_over = tuple(-(i+1) for i in range(self.n_dims))
        neighbors_no = np.sum(neighborhoods, sum_over) - self.cells_slice
        # NOTE(review): cells_slice is presumably the interior view of
        # self.cells matching the windowed shape -- confirm where it is set.
        self.cells_slice[:] = np.where(self.cells_slice, rule_of_life_alive[neighbors_no],
                                       rule_of_life_dead[neighbors_no])
def _calculate_generation_toroidal_with_loop(self, cells_old):
'''Calculates the next generation for each cell
in a toroidal universe.
'''
cells_ne = cells_old.copy()
cells_ne = self._neighborhood(cells_ne)
for x, rows in enumerate(cells_old):
for y, _ in enumerate(rows):
if cells_old[x, y] and not 2 <= cells_ne[x, y] <= 3:
# If it is alive and has less than 2 neighbors, it dies by under-population.
# Also, if it has more than 3 neighbors, it dies by over-population.
self.cells[x, y] = 0
elif cells_ne[x, y] == 3:
# If it has exactly 3 neighbors, it lives.
# Also, if it is dead, a new cell is born by reproduction.
self.cells[x, y] = 1
def _calculate_generation_with_loop(self, cells_old):
'''Calculates the next generation for each cell
in a non toroidal universe.
This is a slower method than the one used.
It can be used to test faster methods.
'''
for x, rows in enumerate(cells_old):
for y, _ in enumerate(rows):
# Calculate the number of neighbors in a no toroidal universe
neighbors_no = np.sum(cells_old[x - 1: x + 2, y - 1: y + 2]) - cells_old[x, y]
if cells_old[x, y] and not 2 <= neighbors_no <= 3:
# If it is alive and has less than 2 neighbors, it dies by under-population.
# Also, if it has more than 3 neighbors, it dies by over-population.
self.cells[x, y] = 0
elif neighbors_no == 3:
# If it has exactly 3 neighbors, it lives.
# Also, if it is dead, a new cell is born by reproduction.
self.cells[x, y] = 1
def _grid_n_dims(self, arr):
'''Calculates a sub-array for a given array and return it with a customized shape,
so we can get the neighborhoods of a cell.
'''
assert all(_len > 2 for _len in arr.shape)
n_dims = len(arr.shape)
new_shape = [_len - 2 for _len in arr.shape]
new_shape.extend([3] * n_dims)
new_strides = arr.strides + arr.strides
# Return the sub-array with our neighborhoods and a convenient way to move through them
return as_strided(arr, shape=new_shape, strides=new_strides)
def _neighborhood(self, arr):
'''Calculates the number of neighbors for each cell of a toroidal array.
To do so, it rotates the array in each direction.
Returns and array with this information.
'''
return (np.roll(np.roll(arr, 1, 1), 1, 0) + # Top, left
np.roll(arr, 1, 0) + # Top
np.roll(np.roll(arr, -1, 1), 1, 0) + # Top, right
np.roll(arr, -1, 1) + # Right
np.roll(np.roll(arr, -1, 1), -1, 0) + # Bottom, right
np.roll(arr, -1, 0) + # Bottom
np.roll(np.roll(arr, 1, 1), -1, 0) + # Bottom, left
np.roll(arr, 1, 1) # Left
)
    def switch_cell(self):
        """Toggle the cell under the selector (only when editing is allowed)."""
        if not Settings.edit_allowed:
            return
        x, y = self._cell_selected_coords()
        # Flip dead <-> alive in the matrix (rows are y, columns are x).
        self.cells[y, x] = 1 if not self.cells[y, x] else 0
        # Keep the counters in step with the toggle direction.
        self.stats['cells_total'] += 1 if self.cells[y, x] else -1
        # NOTE(review): cells_age1 is adjusted even when the removed cell was
        # older than age 1 -- confirm whether older age buckets need updating.
        self.stats['cells_age1'] += 1 if self.cells[y, x] else -1
        cell_board = self.cells_board.get((x, y))
        if cell_board:
            cell_board.age = self.cells[y, x]
        else:
            self.add_cell_to_board(x, y, age=self.cells[y, x])
def _cell_selected_coords(self):
x = (self.game.cell_selector.rect.x - Settings.universe_pos.x) // Settings.cell_size
y = (self.game.cell_selector.rect.y - Settings.universe_pos.y) // Settings.cell_size
return x, y
def cell_selected_age(self):
x, y = self._cell_selected_coords()
cell_board = self.cells_board.get((x, y))
if cell_board:
return cell_board.age
else:
return 0
    def add_cell_to_board(self, x, y, age):
        """Create a Cell sprite at grid position (x, y) and register it
        in the board under the (x, y) key."""
        cell = Cell(x, y, self.game, age=age)
        self.cells_board[x, y] = cell
    def add_cells_seed(self, x, y, seed):
        """Stamp the named seed pattern into the cell matrix at (x, y).

        NOTE(review): no bounds check -- a seed extending past the universe
        edge raises a numpy shape-mismatch error; confirm callers validate.
        """
        seed_array = np.array(seeds.seeds[seed])
        x_end, y_end = x + seed_array.shape[0], y + seed_array.shape[1]
        self.cells[x:x_end, y:y_end] = seed_array
def add_cells_random(self, n_cells):
for _ in range(n_cells):
added = False
while not added:
cell_pos_x = randint(0, self.size_u.w - 1)
cell_pos_y = randint(0, self.size_u.h - 1)
if self.cells[cell_pos_y, cell_pos_x]:
continue
self.cells[cell_pos_y, cell_pos_x] = 1
self.cells_alive += 1
added = True
self.add_cell_to_board(cell_pos_x, cell_pos_y)
def pprint_to_string_one_color(self):
res = []
for rows in self.cells:
for cell in rows:
res += [str(cell) if cell != 0 else '·']
res += ['\n']
return ''.join(res)
def pprint_to_string_board_age(self):
res = []
for i, rows in enumerate(self.cells):
for j, _ in enumerate(rows):
cell_board = self.cells_board.get((j, i))
if cell_board:
res += [str(cell_board.age) if cell_board.age != 0 else '·']
else:
res += ['·']
res += ['\n']
return ''.join(res)
def pprint_to_string_board_one_color(self):
res = []
for i, rows in enumerate(self.cells):
for j, _ in enumerate(rows):
cell_board = self.cells_board.get((j, i))
if cell_board:
res += ['1' if cell_board.age != 0 else '·']
else:
res += ['·']
res += ['\n']
return ''.join(res)
def _reset_population(self):
if Settings.empty_universe:
return
if Settings.starting_seeds:
for seed, pos in Settings.starting_seeds:
self.add_cells_seed(pos[0], pos[1], seed)
return
    @classmethod
    def init(cls):
        # Cache the universe's pixel size (grid size x cell size) on the class.
        cls.size = lib_jp.Size(w=Settings.universe_size.w * Settings.cell_size,
                               h=Settings.universe_size.h * Settings.cell_size)
| 43.34949 | 101 | 0.558348 |
6d728b6b2f39532ca7b1685f07a6bc7c957f85ce | 195 | py | Python | examples/sample_1/schema.py | sujavarghese/data-validator | e0c5d94da797cb43b17d6ee193d337cbcb602f49 | [
"MIT"
] | null | null | null | examples/sample_1/schema.py | sujavarghese/data-validator | e0c5d94da797cb43b17d6ee193d337cbcb602f49 | [
"MIT"
] | null | null | null | examples/sample_1/schema.py | sujavarghese/data-validator | e0c5d94da797cb43b17d6ee193d337cbcb602f49 | [
"MIT"
] | null | null | null | from file_validator.schema.schema import FeatureSchema
from examples.sample_1.validator import CustomRule
class ContactsSchema(FeatureSchema):
    """Validation schema for the contacts feature.

    NOTE(review): presumably FeatureSchema dispatches each named check to
    the mapped rule class -- confirm against file_validator's schema docs.
    """
    map = {
        'check_xyz': CustomRule
    }
| 21.666667 | 54 | 0.758974 |
b0b30d340e582108bb055dbe33b39f41f1f088aa | 976 | py | Python | explore.py | mbirkholzupc/btfm | 2ed2a46a9ae5d5e71651c06807f8d88f581f2613 | [
"MIT"
] | null | null | null | explore.py | mbirkholzupc/btfm | 2ed2a46a9ae5d5e71651c06807f8d88f581f2613 | [
"MIT"
] | null | null | null | explore.py | mbirkholzupc/btfm | 2ed2a46a9ae5d5e71651c06807f8d88f581f2613 | [
"MIT"
] | null | null | null | import os
import pickle
import numpy as np
from scipy.spatial.distance import euclidean
from pathlib import Path
from paths import *
# Inspect one 3DPW sequence: load its pickle and pull out subject 0's joints.
infile=open(BTFM_BASE+TDPW_TRAIN_DIR+'/courtyard_arguing_00.pkl', 'rb')
#infile=open(BTFM_BASE+TDPW_TEST_DIR+'/downtown_arguing_00.pkl', 'rb')
#infile=open(BTFM_BASE+TDPW_TEST_DIR+'/outdoors_fencing_01.pkl', 'rb')
#infile=open(BTFM_BASE+TDPW_TEST_DIR+'/office_phoneCall_00.pkl', 'rb')
# latin1 encoding: these pickles were written by Python 2.
seq00=pickle.load(infile, encoding='latin1')
infile.close()
girl=seq00['jointPositions'][0]
#guy=seq00['jointPositions'][1]
#girl_diff=[max(abs(g1-g2)) for g1, g2 in zip(girl[1:], girl[0:-1])]
# Count the frames referenced by every sequence in each dataset split.
dirs = [ BTFM_BASE+TDPW_TRAIN_DIR, BTFM_BASE+TDPW_VAL_DIR, BTFM_BASE+TDPW_TEST_DIR ]
for d in dirs:
    num_imgs=0
    pkls=list(Path(d).glob('*'))
    for p in pkls:
        infile=open(p,'rb')
        seq=pickle.load(infile,encoding='latin1')
        infile.close()
        num_imgs += len(seq['img_frame_ids'])
    print('num_imgs: ' + str(num_imgs))
| 26.378378 | 84 | 0.71209 |
a6786da3ef9b1775bf7729991c4fd30283fea5a4 | 474 | py | Python | projects/TensorMask/project_structure.py | ndaysinaiK/detectron2 | 01089051137de08175e2376a17bd8e3106cdf372 | [
"Apache-2.0"
] | null | null | null | projects/TensorMask/project_structure.py | ndaysinaiK/detectron2 | 01089051137de08175e2376a17bd8e3106cdf372 | [
"Apache-2.0"
] | null | null | null | projects/TensorMask/project_structure.py | ndaysinaiK/detectron2 | 01089051137de08175e2376a17bd8e3106cdf372 | [
"Apache-2.0"
] | null | null | null | import os
import seedir as sd # pip install seedir
def list_files(startpath):
    """Print a directory tree rooted at startpath: each directory with a
    trailing '/', its files indented one level deeper."""
    for root, dirs, files in os.walk(startpath):
        depth = root.replace(startpath, '').count(os.sep)
        dir_pad = ' ' * 4 * depth
        print('{}{}/'.format(dir_pad, os.path.basename(root)))
        file_pad = ' ' * 4 * (depth + 1)
        for name in files:
            print('{}{}'.format(file_pad, name))
# Also render the current directory with seedir (capped depth and item count).
sd.seedir(style='lines', itemlimit=10, depthlimit=2, exclude_folders='.git')
| 33.857143 | 76 | 0.597046 |
41be1c556ab8551bc97ce458b09d1f3095601f6c | 1,116 | py | Python | svm preprocessing.py | agam001/Data-Preprocessing | 0d2a7e6316fcdc1b01a9f8465eb2e9fb3b4ec1fe | [
"MIT"
] | null | null | null | svm preprocessing.py | agam001/Data-Preprocessing | 0d2a7e6316fcdc1b01a9f8465eb2e9fb3b4ec1fe | [
"MIT"
] | null | null | null | svm preprocessing.py | agam001/Data-Preprocessing | 0d2a7e6316fcdc1b01a9f8465eb2e9fb3b4ec1fe | [
"MIT"
def preprocess():
    """Reweight X_train/X_test columns by a class-separability score.

    For every feature (values assumed binned into 0..100) it tallies
    per-value class counts (W), turns them into purity ratios (P),
    collapses them into one score per feature (S) and scales both the
    train and test columns by the derived weight S_prime.

    Fix: the ``def`` line was fused with dataset-extraction residue
    ("] | null | ..."), which made the file a syntax error; the line is
    restored, the body is unchanged.

    NOTE(review): relies on module globals X_train, X_train_magnified,
    X_test, y_train, scaler, t and k defined by the surrounding script.
    """
    feature_count = X_train.shape[1]
    # W[value, class, feature]: class counts per binned feature value.
    W = np.zeros((101,2,feature_count))
    for i in range(feature_count):
        feature = X_train_magnified[:,i]
        for num in range(101):
            index = np.argwhere(feature == num)
            target_val,counts=np.unique(y_train.to_numpy()[index],return_counts=True)
            if len(counts) == 0:
                counts = np.zeros(2)
            if len(counts) < 2 and len(counts)!=0:
                # Only one class present: expand to a dense 2-slot vector.
                tmp = np.zeros(2)
                for x,y in zip(target_val,counts):
                    tmp[x] = y
                counts = tmp
            W[num,:,i] = counts
    # P[value, feature]: purity of the dominant class for that value.
    P = np.zeros((101,feature_count))
    for i in range(feature_count):
        for j in range(101):
            if W[j,:,i].sum() == 0:
                P[j,i] = 0
            else:
                P[j,i] = W[j,:,i].max()/W[j,:,i].sum()
    P_scaled = scaler.fit_transform(P)
    # S[feature]: count-weighted mean purity of the feature.
    S = np.zeros(feature_count)
    for i in range(feature_count):
        S[i] = np.sum(W[:,:,i].sum(axis=1) * P_scaled[:,i])/np.sum(W[:,:,i].sum(axis=1))
    S_prime = (t*(k/(t-np.power(S,2))))-k
    # Apply the weight to both splits so train and test stay comparable.
    for i in range(feature_count):
        X_train.iloc[:,i]= X_train.iloc[:,i] * S_prime[i]
        X_test.iloc[:,i]= X_test.iloc[:,i] * S_prime[i]
| 30.162162 | 84 | 0.575269 |
8e37424dc5f4411cbd4c80b37e968701fa4b3cc2 | 3,190 | py | Python | asset/synth.py | Kami/google-cloud-python | a14ffbaa50f7823c2792e91413a37cbc3ce687f5 | [
"Apache-2.0"
] | 1 | 2019-06-14T10:11:59.000Z | 2019-06-14T10:11:59.000Z | asset/synth.py | Kami/google-cloud-python | a14ffbaa50f7823c2792e91413a37cbc3ce687f5 | [
"Apache-2.0"
] | 1 | 2018-04-06T19:51:23.000Z | 2018-04-06T19:51:23.000Z | asset/synth.py | Kami/google-cloud-python | a14ffbaa50f7823c2792e91413a37cbc3ce687f5 | [
"Apache-2.0"
] | 1 | 2020-04-14T10:47:41.000Z | 2020-04-14T10:47:41.000Z | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
from synthtool import gcp
gapic = gcp.GAPICGenerator()
common = gcp.CommonTemplates()
versions = ["v1beta1", "v1"]
excludes = ["setup.py", "nox*.py", "README.rst", "docs/conf.py", "docs/index.rst"]
# ----------------------------------------------------------------------------
# Generate asset GAPIC layer
# ----------------------------------------------------------------------------
for version in versions:
    library = gapic.py_library(
        "asset",
        version,
        config_path=f"/google/cloud/asset/artman_cloudasset_{version}.yaml",
        artman_output_name=f"asset-{version}",
        include_protos=True,
    )
    s.move(library, excludes=excludes)
# Post-generation patches: repair the iam_policy import and reference that
# the generator emits incorrectly in the generated assets_pb2 modules.
s.replace(
    "google/cloud/asset_v*/proto/assets_pb2.py",
    "from google.iam.v1 import policy_pb2 as",
    "from google.iam.v1 import iam_policy_pb2_grpc as",
)
s.replace(
    "google/cloud/asset_v*/proto/assets_pb2.py",
    "from google.iam.v1 import iam_policy_pb2_grpc "
    "as google_dot_iam_dot_v1_dot_policy__pb2",
    "from google.iam.v1 import iam_policy_pb2 "
    "as google_dot_iam_dot_v1_dot_policy__pb2",
)
s.replace(
    "google/cloud/asset_v*/proto/assets_pb2.py",
    "_ASSET.fields_by_name\['iam_policy'\].message_type "
    "= google_dot_iam_dot_v1_dot_policy__pb2._POLICY",
    "_ASSET.fields_by_name['iam_policy'].message_type = google_dot_iam_dot"
    "_v1_dot_policy__pb2.google_dot_iam_dot_v1_dot_policy__pb2._POLICY",
)
# Replace the line-wrapped (broken) docstring emitted for Asset.name.
_BORKED_ASSET_DOCSTRING = """\
The full name of the asset. For example: ``//compute.googleapi
s.com/projects/my_project_123/zones/zone1/instances/instance1`
`. See `Resource Names <https://cloud.google.com/apis/design/r
esource_names#full_resource_name>`__ for more information.
"""
_FIXED_ASSET_DOCSTRING = """
The full name of the asset. For example:
``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``.
See https://cloud.google.com/apis/design/resource_names#full_resource_name
for more information.
"""
s.replace(
    "google/cloud/asset_v*/proto/assets_pb2.py",
    _BORKED_ASSET_DOCSTRING,
    _FIXED_ASSET_DOCSTRING,
)
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
templated_files = gcp.CommonTemplates().py_library(unit_cov_level=79, cov_level=80)
s.move(templated_files)
# Auto-format whatever was generated/moved above.
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
| 35.444444 | 95 | 0.652351 |
bf7ba5bbac8f62b4946ae15119247c797681af63 | 875 | py | Python | osm_web/users/tests/test_drf_views.py | asheeq/osm_web | 73dbb6aa343028b3a30b7a9be8c0502aa625c191 | [
"MIT"
] | null | null | null | osm_web/users/tests/test_drf_views.py | asheeq/osm_web | 73dbb6aa343028b3a30b7a9be8c0502aa625c191 | [
"MIT"
] | null | null | null | osm_web/users/tests/test_drf_views.py | asheeq/osm_web | 73dbb6aa343028b3a30b7a9be8c0502aa625c191 | [
"MIT"
] | null | null | null | import pytest
from django.test import RequestFactory
from osm_web.users.api.views import UserViewSet
from osm_web.users.models import User
# Every test in this module needs database access.
pytestmark = pytest.mark.django_db
class TestUserViewSet:
    """Tests for UserViewSet (``user`` and ``rf`` are pytest fixtures)."""
    def test_get_queryset(self, user: User, rf: RequestFactory):
        # The queryset must include the authenticated request user.
        view = UserViewSet()
        request = rf.get("/fake-url/")
        request.user = user
        view.request = request
        assert user in view.get_queryset()
    def test_me(self, user: User, rf: RequestFactory):
        # The "me" action must return the requesting user's serialized profile.
        view = UserViewSet()
        request = rf.get("/fake-url/")
        request.user = user
        view.request = request
        response = view.me(request)
        assert response.data == {
            "username": user.username,
            "email": user.email,
            "name": user.name,
            "url": f"http://testserver/api/users/{user.username}/",
        }
| 25 | 67 | 0.616 |
a9928d38f1c19715454503bd04c3039ec5b87d07 | 23,143 | py | Python | pyjiffy/cache_bench/file_cache_benchmark.py | mckarthik7/jiffy | 78202218b06669a100634fe65e3166bffe39bd83 | [
"Apache-2.0"
] | 10 | 2021-03-04T07:23:24.000Z | 2022-03-26T07:36:00.000Z | pyjiffy/cache_bench/file_cache_benchmark.py | hesuyouren/jiffy | 1bcee0593fe2ab505224f3f9c85caa5d0fe1e8c3 | [
"Apache-2.0"
] | 1 | 2021-03-12T15:20:03.000Z | 2021-03-12T15:20:03.000Z | pyjiffy/cache_bench/file_cache_benchmark.py | hesuyouren/jiffy | 1bcee0593fe2ab505224f3f9c85caa5d0fe1e8c3 | [
"Apache-2.0"
] | 7 | 2021-03-12T05:46:15.000Z | 2022-02-14T07:08:01.000Z | import argparse
from jiffy import JiffyClient, b, Flags
from jiffy.benchmark.zipf_generator import gen_zipf
import time
import threading
class FileBenchmark:
    """Base harness: fans ``num_ops`` operations out over ``num_clients``
    worker threads and aggregates their throughput, latency and cache
    statistics.
    """
    def __init__(self, clients, data_size, num_clients, num_ops):
        self.data_ = "x" * data_size
        self.num_clients = num_clients
        self.num_ops_ = int(num_ops / num_clients)
        self.clients_ = clients
        self.workers_ = [None] * self.num_clients
        self.total_bytes_ = [None] * self.num_clients
        self.cache_hit_ = [None] * self.num_clients
        self.total_access_ = [None] * self.num_clients
        self.throughput_ = [None] * self.num_clients
        self.latency_write_ = [None] * self.num_clients
        self.latency_read_ = [None] * self.num_clients
    def wait(self):
        """Join all workers; return [total throughput, mean write latency,
        mean read latency, cache hit percentage]."""
        total_throughput = 0.0
        read_latency_sum = 0.0
        write_latency_sum = 0.0
        hits = 0
        accesses = 0
        for idx in range(self.num_clients):
            self.workers_[idx].join()
            total_throughput += self.throughput_[idx]
            read_latency_sum += self.latency_read_[idx]
            write_latency_sum += self.latency_write_[idx]
            hits += self.cache_hit_[idx]
            accesses += self.total_access_[idx]
        return [total_throughput,
                write_latency_sum / self.num_clients,
                read_latency_sum / self.num_clients,
                float(hits * 100 / accesses)]
class WriteBenchmark(FileBenchmark):
    """Measures raw write performance: each worker thread writes its
    payload ``num_ops_`` times and records latency/throughput.
    """
    def __init__(self, clients, data_size, num_clients, num_ops):
        super(WriteBenchmark, self).__init__(clients, data_size, num_clients, num_ops)
    def run(self):
        """Spawn one writer thread per client."""
        for i in range(self.num_clients):
            self.workers_[i] = threading.Thread(target=self.single_thread_action, args=(i,))
        for i in range(self.num_clients):
            self.workers_[i].start()
    def single_thread_action(self, thread_index):
        """Worker body: timed write loop, then publish per-thread results."""
        bench_begin = time.time()
        tot_time = 0.0
        t0, t1 = bench_begin, bench_begin
        for _ in range(self.num_ops_):
            t0 = time.time()
            self.clients_[thread_index].write(self.data_)
            t1 = time.time()
            tot_time += (t1 - t0)
        # Bug fix: results were stored in a nonexistent ``latency_`` list
        # (AttributeError in the worker), and the read/cache slots that
        # FileBenchmark.wait() aggregates were left as None, so wait()
        # crashed. Record write latency where wait() expects it and fill
        # the read/cache slots with neutral values.
        ops_done = self.num_ops_
        self.latency_write_[thread_index] = (10 ** 6) * float(tot_time) / float(ops_done)
        self.latency_read_[thread_index] = 0.0
        self.cache_hit_[thread_index] = 0
        self.total_access_[thread_index] = ops_done  # avoids div-by-zero in wait()
        self.throughput_[thread_index] = ops_done / (t1 - bench_begin)
class ReadBenchmark(FileBenchmark):
    """Measures read performance: each worker first writes its payload
    ``num_ops_`` times, then reads it back at zipf-distributed offsets
    while tallying cache hits and bytes transferred.
    """
    def __init__(self, clients, data_size, num_clients, num_ops):
        super(ReadBenchmark, self).__init__(clients, data_size, num_clients, num_ops)
        # Zipf-skewed byte offsets, one per operation, spanning the region
        # written below (num_ops payloads of len(data_) bytes).
        self.reading_offsets = gen_zipf(1, len(self.data_) * num_ops, num_ops)
    def run(self):
        # One worker thread per client; FileBenchmark.wait() joins them.
        for i in range(self.num_clients):
            self.workers_[i] = threading.Thread(target = self.single_thread_action, args = (i,))
        for i in range(self.num_clients):
            self.workers_[i].start()
    def single_thread_action(self, thread_index):
        """Worker body: timed write phase, then timed zipf-offset read phase."""
        cache_hit = 0
        total_access = 0
        total_bytes = 0
        write_begin = time.time()
        write_time = 0.0
        t0, t1 = write_begin, write_begin
        for j in range(self.num_ops_):
            t0 = time.time()
            self.clients_[thread_index].write(self.data_)
            t1 = time.time()
            write_time += (t1 - t0)
        self.clients_[thread_index].seek(0)
        read_begin = time.time()
        read_time = 0.0
        t0, t1 = read_begin, read_begin
        for j in range(self.num_ops_):
            self.clients_[thread_index].seek(self.reading_offsets[j])
            t0 = time.time()
            resp = self.clients_[thread_index].read(len(self.data_))
            t1 = time.time()
            read_time += (t1 - t0)
            # resp layout appears to be (payload, accesses, cache hits) --
            # TODO confirm against jiffy's cached-file read API.
            total_access += resp[1]
            cache_hit += resp[2]
            total_bytes += len(resp[0])
        # Publish per-thread results where FileBenchmark.wait() reads them.
        self.total_access_[thread_index] = total_access
        self.cache_hit_[thread_index] = cache_hit
        self.total_bytes_[thread_index] = total_bytes
        self.latency_write_[thread_index] = (10 ** 6) * float(write_time) / float(self.num_ops_)
        self.latency_read_[thread_index] = (10 ** 6) * float(read_time) / float(self.num_ops_)
        self.throughput_[thread_index] = total_bytes / (t1 - read_begin)
def file_bp_zipf():
    """Benchmark a Jiffy cached file, sweeping cache block size
    (1000..32000) against prefetch size (5..50) at a fixed cache size,
    for a write pass and a zipf-read pass. Results go to
    ./file_bp_zipf.txt; returns 0.

    Fixes: Python-2 ``print >> data`` (a SyntaxError under Python 3,
    which the rest of this file targets) -> ``print(..., file=data)``;
    the read branch was a second ``if`` whose ``else`` aborted every
    "write" run; ``with`` closes the results file on the early return.
    """
    address = "frog.zoo.cs.yale.edu"
    service_port = 9090
    lease_port = 9091
    num_blocks = 1
    chain_length = 1
    num_ops = 100000
    data_size = 64
    op_type_set = ["write", "read"]
    path = "/tmp"
    backing_path = "local://tmp"
    file_name = './file_bp_zipf.txt'
    with open(file_name, 'w+') as data:
        # Output all the configuration parameters:
        print("host: ", address, file=data)
        print("service-port: ", service_port, file=data)
        print("lease-port: ", lease_port, file=data)
        print("num-blocks: ", num_blocks, file=data)
        print("chain-length: ", chain_length, file=data)
        print("num-ops: ", num_ops, file=data)
        print("data-size: ", data_size, file=data)
        print("path: ", path, file=data)
        print("backing-path: ", backing_path, file=data)
        for op_type in op_type_set:
            count = 1
            while count <= 1:
                loading = 0
                num_clients = count
                cache_size = 200
                client = JiffyClient(address, service_port, lease_port)
                ht_clients = [None] * num_clients
                for cache_block_size in range(1000, 32001, 1000):
                    for prefetch_size in range(5, 51, 5):
                        for i in range(num_clients):
                            ht_clients[i] = client.open_or_create_file(path, backing_path, num_blocks, chain_length, cache_size, cache_block_size, prefetch_size)
                        if op_type == "write":
                            benchmark = WriteBenchmark(ht_clients, data_size, num_clients, num_ops)
                        elif op_type == "read":
                            benchmark = ReadBenchmark(ht_clients, data_size, num_clients, num_ops)
                        else:
                            print("Incorrect operation type for file: ", op_type, file=data)
                            return 0
                        benchmark.run()
                        result = benchmark.wait()
                        client.remove(path)
                        print("===== ", "Block Size= ", cache_block_size, "Prefetch Size= ", prefetch_size, " ======", file=data)
                        print("\t", num_ops, " requests completed in ", (float(num_ops) / result[0]), " s", file=data)
                        print("\t", num_clients, " parallel clients", file=data)
                        print("\t", data_size, " payload", file=data)
                        print("\tAverage write latency: ", result[1], "us", file=data)
                        print("\tAverage read latency: ", result[2], "us", file=data)
                        print("\tAverage total latency: ", result[1] + result[2], "us", file=data)
                        print("\tThroughput: ", result[0], " bytes per second", file=data)
                        print("\tHit_rate: ", round(result[3], 4), "%", file=data)
                        print("\n", file=data)
                        loading += 1
                        print("Loading -- ", round(float(loading * 100 / 320), 1), "%")
                count *= 2
    return 0
def file_bp():
    """Same sweep as file_bp_zipf (cache block size x prefetch size at a
    fixed cache size), written to ./file_bp.txt; returns 0.

    Fixes: Python-2 ``print >> data`` -> ``print(..., file=data)``;
    ``if read`` -> ``elif`` (the old second ``if`` aborted "write" runs);
    ``with`` closes the results file on the early return.
    """
    address = "frog.zoo.cs.yale.edu"
    service_port = 9090
    lease_port = 9091
    num_blocks = 1
    chain_length = 1
    num_ops = 100000
    data_size = 64
    op_type_set = ["write", "read"]
    path = "/tmp"
    backing_path = "local://tmp"
    file_name = './file_bp.txt'
    with open(file_name, 'w+') as data:
        # Output all the configuration parameters:
        print("host: ", address, file=data)
        print("service-port: ", service_port, file=data)
        print("lease-port: ", lease_port, file=data)
        print("num-blocks: ", num_blocks, file=data)
        print("chain-length: ", chain_length, file=data)
        print("num-ops: ", num_ops, file=data)
        print("data-size: ", data_size, file=data)
        print("path: ", path, file=data)
        print("backing-path: ", backing_path, file=data)
        for op_type in op_type_set:
            count = 1
            while count <= 1:
                loading = 0
                num_clients = count
                cache_size = 200
                client = JiffyClient(address, service_port, lease_port)
                ht_clients = [None] * num_clients
                for cache_block_size in range(1000, 32001, 1000):
                    for prefetch_size in range(5, 51, 5):
                        for i in range(num_clients):
                            ht_clients[i] = client.open_or_create_file(path, backing_path, num_blocks, chain_length, cache_size, cache_block_size, prefetch_size)
                        if op_type == "write":
                            benchmark = WriteBenchmark(ht_clients, data_size, num_clients, num_ops)
                        elif op_type == "read":
                            benchmark = ReadBenchmark(ht_clients, data_size, num_clients, num_ops)
                        else:
                            print("Incorrect operation type for file: ", op_type, file=data)
                            return 0
                        benchmark.run()
                        result = benchmark.wait()
                        client.remove(path)
                        print("===== ", "Block Size= ", cache_block_size, "Prefetch Size= ", prefetch_size, " ======", file=data)
                        print("\t", num_ops, " requests completed in ", (float(num_ops) / result[0]), " s", file=data)
                        print("\t", num_clients, " parallel clients", file=data)
                        print("\t", data_size, " payload", file=data)
                        print("\tAverage write latency: ", result[1], "us", file=data)
                        print("\tAverage read latency: ", result[2], "us", file=data)
                        print("\tAverage total latency: ", result[1] + result[2], "us", file=data)
                        print("\tThroughput: ", result[0], " bytes per second", file=data)
                        print("\tHit_rate: ", round(result[3], 4), "%", file=data)
                        print("\n", file=data)
                        loading += 1
                        print("Loading -- ", round(float(loading * 100 / 320), 1), "%")
                count *= 2
    return 0
def file_cb_zipf():
    """Benchmark sweep of cache size (100..2100) against cache block size
    (1000..20000) at a fixed prefetch size, written to ./file_cb_zipf.txt;
    returns 0.

    Fixes: Python-2 ``print >> data`` -> ``print(..., file=data)``;
    ``if read`` -> ``elif`` (the old second ``if`` aborted "write" runs);
    ``with`` closes the results file on the early return.
    """
    address = "frog.zoo.cs.yale.edu"
    service_port = 9090
    lease_port = 9091
    num_blocks = 1
    chain_length = 1
    num_ops = 100000
    data_size = 64
    op_type_set = ["write", "read"]
    path = "/tmp"
    backing_path = "local://tmp"
    file_name = './file_cb_zipf.txt'
    with open(file_name, 'w+') as data:
        # Output all the configuration parameters:
        print("host: ", address, file=data)
        print("service-port: ", service_port, file=data)
        print("lease-port: ", lease_port, file=data)
        print("num-blocks: ", num_blocks, file=data)
        print("chain-length: ", chain_length, file=data)
        print("num-ops: ", num_ops, file=data)
        print("data-size: ", data_size, file=data)
        print("path: ", path, file=data)
        print("backing-path: ", backing_path, file=data)
        for op_type in op_type_set:
            count = 1
            while count <= 1:
                loading = 0
                num_clients = count
                prefetch_size = 20
                client = JiffyClient(address, service_port, lease_port)
                ht_clients = [None] * num_clients
                for cache_size in range(100, 2101, 200):
                    for cache_block_size in range(1000, 20001, 1000):
                        for i in range(num_clients):
                            ht_clients[i] = client.open_or_create_file(path, backing_path, num_blocks, chain_length, cache_size, cache_block_size, prefetch_size)
                        if op_type == "write":
                            benchmark = WriteBenchmark(ht_clients, data_size, num_clients, num_ops)
                        elif op_type == "read":
                            benchmark = ReadBenchmark(ht_clients, data_size, num_clients, num_ops)
                        else:
                            print("Incorrect operation type for file: ", op_type, file=data)
                            return 0
                        benchmark.run()
                        result = benchmark.wait()
                        client.remove(path)
                        print("===== ", "Cache_Size= ", cache_size, "Block Size= ", cache_block_size, " ======", file=data)
                        print("\t", num_ops, " requests completed in ", (float(num_ops) / result[0]), " s", file=data)
                        print("\t", num_clients, " parallel clients", file=data)
                        print("\t", data_size, " payload", file=data)
                        print("\tAverage write latency: ", result[1], "us", file=data)
                        print("\tAverage read latency: ", result[2], "us", file=data)
                        print("\tAverage total latency: ", result[1] + result[2], "us", file=data)
                        print("\tThroughput: ", result[0], " bytes per second", file=data)
                        print("\tHit_rate: ", round(result[3], 4), "%", file=data)
                        print("\n", file=data)
                        loading += 1
                        print("Loading -- ", round(float(loading * 100 / 220), 1), "%")
                count *= 2
    return 0
def file_cb():
    """Same sweep as file_cb_zipf (cache size x cache block size at a
    fixed prefetch size), written to ./file_cb.txt; returns 0.

    Fixes: Python-2 ``print >> data`` -> ``print(..., file=data)``;
    ``if read`` -> ``elif`` (the old second ``if`` aborted "write" runs);
    ``with`` closes the results file on the early return.
    """
    address = "frog.zoo.cs.yale.edu"
    service_port = 9090
    lease_port = 9091
    num_blocks = 1
    chain_length = 1
    num_ops = 100000
    data_size = 64
    op_type_set = ["write", "read"]
    path = "/tmp"
    backing_path = "local://tmp"
    file_name = './file_cb.txt'
    with open(file_name, 'w+') as data:
        # Output all the configuration parameters:
        print("host: ", address, file=data)
        print("service-port: ", service_port, file=data)
        print("lease-port: ", lease_port, file=data)
        print("num-blocks: ", num_blocks, file=data)
        print("chain-length: ", chain_length, file=data)
        print("num-ops: ", num_ops, file=data)
        print("data-size: ", data_size, file=data)
        print("path: ", path, file=data)
        print("backing-path: ", backing_path, file=data)
        for op_type in op_type_set:
            count = 1
            while count <= 1:
                loading = 0
                num_clients = count
                prefetch_size = 20
                client = JiffyClient(address, service_port, lease_port)
                ht_clients = [None] * num_clients
                for cache_size in range(100, 2101, 200):
                    for cache_block_size in range(1000, 20001, 1000):
                        for i in range(num_clients):
                            ht_clients[i] = client.open_or_create_file(path, backing_path, num_blocks, chain_length, cache_size, cache_block_size, prefetch_size)
                        if op_type == "write":
                            benchmark = WriteBenchmark(ht_clients, data_size, num_clients, num_ops)
                        elif op_type == "read":
                            benchmark = ReadBenchmark(ht_clients, data_size, num_clients, num_ops)
                        else:
                            print("Incorrect operation type for file: ", op_type, file=data)
                            return 0
                        benchmark.run()
                        result = benchmark.wait()
                        client.remove(path)
                        print("===== ", "Cache_Size= ", cache_size, "Block Size= ", cache_block_size, " ======", file=data)
                        print("\t", num_ops, " requests completed in ", (float(num_ops) / result[0]), " s", file=data)
                        print("\t", num_clients, " parallel clients", file=data)
                        print("\t", data_size, " payload", file=data)
                        print("\tAverage write latency: ", result[1], "us", file=data)
                        print("\tAverage read latency: ", result[2], "us", file=data)
                        print("\tAverage total latency: ", result[1] + result[2], "us", file=data)
                        print("\tThroughput: ", result[0], " bytes per second", file=data)
                        print("\tHit_rate: ", round(result[3], 4), "%", file=data)
                        print("\n", file=data)
                        loading += 1
                        print("Loading -- ", round(float(loading * 100 / 220), 1), "%")
                count *= 2
    return 0
def file_cp_zipf():
    """Benchmark sweep of cache size (100..2100) against prefetch size
    (5..50) at a fixed cache block size, written to ./file_cp_zipf.txt;
    returns 0.

    Fixes: Python-2 ``print >> data`` -> ``print(..., file=data)``;
    ``if read`` -> ``elif`` (the old second ``if`` aborted "write" runs);
    ``with`` closes the results file on the early return.
    """
    address = "frog.zoo.cs.yale.edu"
    service_port = 9090
    lease_port = 9091
    num_blocks = 1
    chain_length = 1
    num_ops = 100000
    data_size = 64
    op_type_set = ["write", "read"]
    path = "/tmp"
    backing_path = "local://tmp"
    file_name = './file_cp_zipf.txt'
    with open(file_name, 'w+') as data:
        # Output all the configuration parameters:
        print("host: ", address, file=data)
        print("service-port: ", service_port, file=data)
        print("lease-port: ", lease_port, file=data)
        print("num-blocks: ", num_blocks, file=data)
        print("chain-length: ", chain_length, file=data)
        print("num-ops: ", num_ops, file=data)
        print("data-size: ", data_size, file=data)
        print("path: ", path, file=data)
        print("backing-path: ", backing_path, file=data)
        for op_type in op_type_set:
            count = 1
            while count <= 1:
                loading = 0
                num_clients = count
                cache_block_size = 2000
                client = JiffyClient(address, service_port, lease_port)
                ht_clients = [None] * num_clients
                for cache_size in range(100, 2101, 200):
                    for prefetch_size in range(5, 51, 5):
                        for i in range(num_clients):
                            ht_clients[i] = client.open_or_create_file(path, backing_path, num_blocks, chain_length, cache_size, cache_block_size, prefetch_size)
                        if op_type == "write":
                            benchmark = WriteBenchmark(ht_clients, data_size, num_clients, num_ops)
                        elif op_type == "read":
                            benchmark = ReadBenchmark(ht_clients, data_size, num_clients, num_ops)
                        else:
                            print("Incorrect operation type for file: ", op_type, file=data)
                            return 0
                        benchmark.run()
                        result = benchmark.wait()
                        client.remove(path)
                        print("===== ", "Cache_Size= ", cache_size, "Prefetch Size= ", prefetch_size, " ======", file=data)
                        print("\t", num_ops, " requests completed in ", (float(num_ops) / result[0]), " s", file=data)
                        print("\t", num_clients, " parallel clients", file=data)
                        print("\t", data_size, " payload", file=data)
                        print("\tAverage write latency: ", result[1], "us", file=data)
                        print("\tAverage read latency: ", result[2], "us", file=data)
                        print("\tAverage total latency: ", result[1] + result[2], "us", file=data)
                        print("\tThroughput: ", result[0], " bytes per second", file=data)
                        print("\tHit_rate: ", round(result[3], 4), "%", file=data)
                        print("\n", file=data)
                        loading += 1
                        print("Loading -- ", round(float(loading * 100 / 110), 1), "%")
                count *= 2
    return 0
def file_cp():
    """Sweep cache size and prefetch size for the Jiffy file benchmark.

    For each operation type ("write", then "read") this opens a Jiffy file
    client, runs the benchmark over an 11x10 grid of (cache_size,
    prefetch_size) combinations, and appends latency / throughput / hit-rate
    results to ./file_cp.txt.  Always returns 0.
    """
    address = "frog.zoo.cs.yale.edu"
    service_port = 9090
    lease_port = 9091
    num_blocks = 1
    chain_length = 1
    num_ops = 100000
    data_size = 64
    op_type_set = ["write", "read"]
    path = "/tmp"
    backing_path = "local://tmp"
    # Record all the configuration parameters up front.
    file_name = './file_cp.txt'
    data = open(file_name, 'w+')
    print >> data, "host: ", address
    print >> data, "service-port: ", service_port
    print >> data, "lease-port: ", lease_port
    print >> data, "num-blocks: ", num_blocks
    print >> data, "chain-length: ", chain_length
    print >> data, "num-ops: ", num_ops
    print >> data, "data-size: ", data_size
    print >> data, "path: ", path
    print >> data, "backing-path: ", backing_path
    for op_type in op_type_set:
        count = 1
        # Only a single client count is exercised for now; `count` doubles
        # each pass, so raising the bound sweeps 1, 2, 4, ... clients.
        while count <= 1:
            loading = 0
            num_clients = count
            cache_block_size = 2000
            client = JiffyClient(address, service_port, lease_port)
            ht_clients = [None] * num_clients
            for cache_size in range(100, 2101, 200):
                for prefetch_size in range(5, 51, 5):
                    for i in range(num_clients):
                        ht_clients[i] = client.open_or_create_file(path, backing_path, num_blocks, chain_length, cache_size, cache_block_size, prefetch_size)
                    # BUG FIX: the second test must be `elif`.  With two
                    # independent `if`s, the "write" case failed the second
                    # test and fell into the `else`, logging an error and
                    # returning early -- so the write benchmark never ran.
                    if op_type == "write":
                        benchmark = WriteBenchmark(ht_clients, data_size, num_clients, num_ops)
                    elif op_type == "read":
                        benchmark = ReadBenchmark(ht_clients, data_size, num_clients, num_ops)
                    else:
                        print >> data, "Incorrect operation type for file: ", op_type
                        data.close()  # don't leak the report handle on the error path
                        return 0
                    benchmark.run()
                    result = benchmark.wait()
                    client.remove(path)
                    print >> data, "===== ", "Cache_Size= ", cache_size, "Prefetch Size= ", prefetch_size, " ======"
                    print >> data, "\t", num_ops, " requests completed in ", (float(num_ops) / result[0]), " s"
                    print >> data, "\t", num_clients, " parallel clients"
                    print >> data, "\t", data_size, " payload"
                    print >> data, "\tAverage write latency: ", result[1], "us"
                    print >> data, "\tAverage read latency: ", result[2], "us"
                    print >> data, "\tAverage total latency: ", result[1]+result[2], "us"
                    print >> data, "\tThroughput: ", result[0], " bytes per second"
                    print >> data, "\tHit_rate: ", round(result[3], 4), "%"
                    print >> data, "\n"
                    loading += 1
                    # 110 = 11 cache sizes x 10 prefetch sizes per sweep.
                    print("Loading -- ", round(float(loading*100/110), 1), "%")
            count *= 2
    data.close()
    return 0
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('-m', "--mode", dest="mode", default="bp",
                        help="""
        Specify different variants. Use the following flags to specify: 'bp', 'bpz', 'cp', 'cpz', 'cb', cbz'.
        Notice here 'b' = block size, 'c' = cache size, 'p' = prefetch size, 'z' = block visited by zipf distribution.
        Default mode is bp. """)
    args = parser.parse_args()
    # Dispatch table: each mode flag maps to the benchmark variant it selects.
    variants = {
        "bp": file_bp,
        "bpz": file_bp_zipf,
        "cp": file_cp,
        "cpz": file_cp_zipf,
        "cb": file_cb,
        "cbz": file_cb_zipf,
    }
    runner = variants.get(args.mode)
    if runner is not None:
        runner()
    else:
        print("Wrong mode arguments!")
| 43.257944 | 157 | 0.530095 |
ea6bb250dae4a3c8beee6bd443ebf6a8e051d41f | 779 | py | Python | adv/valentines_melody.py | XenoXilus/dl | cdfce03835cd67aac553140d6d88bc4c5c5d60ff | [
"Apache-2.0"
] | null | null | null | adv/valentines_melody.py | XenoXilus/dl | cdfce03835cd67aac553140d6d88bc4c5c5d60ff | [
"Apache-2.0"
] | null | null | null | adv/valentines_melody.py | XenoXilus/dl | cdfce03835cd67aac553140d6d88bc4c5c5d60ff | [
"Apache-2.0"
] | null | null | null | from core.advbase import *
def module():
return Valentines_Melody
class Valentines_Melody(Adv):
conf = {}
conf['slots.a'] = [
'Summer_Paladyns',
'Flash_of_Genius',
'Kung_Fu_Masters',
'The_Plaguebringer',
'Chariot_Drift'
]
conf['slots.d'] = 'Ariel'
conf['acl'] = """
`dragon(c3-s-end), s=1
`s3, not buff(s3)
`s1
`s4
`s2, cancel
"""
conf['coabs'] = ['Blade','Eleonora','Dragonyule_Xainfried']
conf['share'] = ['Curran']
# def __init__(self, **kwargs):
# super().__init__(**kwargs)
# self.slots.c.coabs['Valentines_Melody'] = [None, 'axe2']
if __name__ == '__main__':
from core.simulate import test_with_argv
test_with_argv(None, *sys.argv)
| 22.911765 | 66 | 0.567394 |
172c36aa6f0f69d8190eb1a1f9945be053800ee5 | 404 | py | Python | datasources/migrations/0007_datasource_api_key.py | tiferrei/PEDASI | b819aee93de99c00a1aa3eb9d32102b89f72459e | [
"MIT"
] | null | null | null | datasources/migrations/0007_datasource_api_key.py | tiferrei/PEDASI | b819aee93de99c00a1aa3eb9d32102b89f72459e | [
"MIT"
] | 18 | 2019-02-27T12:39:27.000Z | 2021-03-24T16:32:47.000Z | datasources/migrations/0007_datasource_api_key.py | Southampton-RSG/PEDASI-IoT | 25a111ac7cf4b23fee50ad8eac6ea21564954859 | [
"MIT"
] | 1 | 2021-02-16T17:47:15.000Z | 2021-02-16T17:47:15.000Z | # Generated by Django 2.0.8 on 2018-10-10 12:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('datasources', '0006_one_to_one_group'),
]
operations = [
migrations.AddField(
model_name='datasource',
name='api_key',
field=models.CharField(blank=True, max_length=127),
),
]
| 21.263158 | 63 | 0.608911 |
ef6a9878f83acb73ca53ea0d24cf0f12a8718976 | 183 | py | Python | sound/basic-sounds/square-wave.py | martinmcbride/python-projects-for-gcse | cdf4696650b641657e116a3307d4271a114b80df | [
"MIT"
] | null | null | null | sound/basic-sounds/square-wave.py | martinmcbride/python-projects-for-gcse | cdf4696650b641657e116a3307d4271a114b80df | [
"MIT"
] | null | null | null | sound/basic-sounds/square-wave.py | martinmcbride/python-projects-for-gcse | cdf4696650b641657e116a3307d4271a114b80df | [
"MIT"
] | null | null | null | from pysound.wavetable import square_wave
from pysound.soundfile import write_wav
wave = square_wave(frequency=400, amplitude=0.9)
write_wav(source=wave, filename='square-wave.wav')
| 30.5 | 50 | 0.819672 |
9d0b6b009f679a030f4d8859615513523da6cfde | 20,897 | py | Python | src/command_modules/azure-cli-monitor/azure/cli/command_modules/monitor/_params.py | chpspiir/azure-cli | 70a9d7fda754267a20f8593adf9f2013ae18f74f | [
"MIT"
] | 1 | 2020-12-14T15:30:11.000Z | 2020-12-14T15:30:11.000Z | src/command_modules/azure-cli-monitor/azure/cli/command_modules/monitor/_params.py | chpspiir/azure-cli | 70a9d7fda754267a20f8593adf9f2013ae18f74f | [
"MIT"
] | 3 | 2021-03-26T00:41:56.000Z | 2022-03-29T22:04:08.000Z | src/command_modules/azure-cli-monitor/azure/cli/command_modules/monitor/_params.py | chpspiir/azure-cli | 70a9d7fda754267a20f8593adf9f2013ae18f74f | [
"MIT"
] | 1 | 2020-12-14T15:30:14.000Z | 2020-12-14T15:30:14.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core.util import get_json_object
from azure.cli.core.commands.parameters import (
get_location_type, tags_type, get_three_state_flag, get_enum_type, get_datetime_type, resource_group_name_type)
from azure.cli.core.commands.validators import get_default_location_from_resource_group
from azure.cli.command_modules.monitor.actions import (
AlertAddAction, AlertRemoveAction, ConditionAction, AutoscaleAddAction, AutoscaleRemoveAction,
AutoscaleScaleAction, AutoscaleConditionAction, get_period_type,
timezone_offset_type, timezone_name_type, MetricAlertConditionAction, MetricAlertAddAction)
from azure.cli.command_modules.monitor.util import get_operator_map, get_aggregation_map
from azure.cli.command_modules.monitor.validators import (
process_webhook_prop, validate_autoscale_recurrence, validate_autoscale_timegrain, get_action_group_validator,
get_action_group_id_validator, validate_metric_dimension)
from knack.arguments import CLIArgumentType
# pylint: disable=line-too-long, too-many-statements
def load_arguments(self, _):
from azure.mgmt.monitor.models import ConditionOperator, TimeAggregationOperator, EventData
name_arg_type = CLIArgumentType(options_list=['--name', '-n'], metavar='NAME')
webhook_prop_type = CLIArgumentType(validator=process_webhook_prop, nargs='*')
autoscale_name_type = CLIArgumentType(options_list=['--autoscale-name'], help='Name of the autoscale settings.', id_part='name')
autoscale_profile_name_type = CLIArgumentType(options_list=['--profile-name'], help='Name of the autoscale profile.')
autoscale_rule_name_type = CLIArgumentType(options_list=['--rule-name'], help='Name of the autoscale rule.')
with self.argument_context('monitor') as c:
c.argument('location', get_location_type(self.cli_ctx), validator=get_default_location_from_resource_group)
c.argument('tags', tags_type)
# region Alerts
with self.argument_context('monitor alert') as c:
c.argument('rule_name', name_arg_type, id_part='name', help='Name of the alert rule.')
with self.argument_context('monitor alert create') as c:
c.resource_parameter('target', arg_group='Target Resource', alias='target', preserve_resource_group_parameter=True)
c.argument('rule_name', name_arg_type, id_part='name', help='Name of the alert rule.')
c.argument('disabled', arg_type=get_three_state_flag())
c.argument('condition', action=ConditionAction, nargs='+')
with self.argument_context('monitor alert create', arg_group='Action') as c:
c.argument('custom_emails', nargs='+')
c.argument('email_service_owners', arg_type=get_three_state_flag())
c.argument('actions', options_list=['--action', '-a'], action=AlertAddAction, nargs='+')
with self.argument_context('monitor alert create', arg_group='Condition') as c:
c.argument('metric_name')
c.argument('operator', arg_type=get_enum_type(ConditionOperator))
c.argument('threshold')
c.argument('time_aggregation', arg_type=get_enum_type(TimeAggregationOperator))
c.argument('window_size')
with self.argument_context('monitor alert update') as c:
c.argument('rule_name', name_arg_type, id_part='name', help='Name of the alert rule.')
c.resource_parameter('target', arg_group='Target Resource', required=False, preserve_resource_group_parameter=True)
with self.argument_context('monitor alert update', arg_group='Action') as c:
c.argument('email_service_owners', arg_type=get_three_state_flag())
c.argument('add_actions', options_list=['--add-action', '-a'], nargs='+', action=AlertAddAction)
c.argument('remove_actions', options_list=['--remove-action', '-r'], nargs='+', action=AlertRemoveAction)
with self.argument_context('monitor alert update', arg_group='Condition') as c:
c.argument('condition', action=ConditionAction, nargs='+')
c.argument('metric')
c.argument('operator', arg_type=get_enum_type(get_operator_map().keys()))
c.argument('threshold')
c.argument('aggregation', arg_type=get_enum_type(get_aggregation_map().keys()))
c.argument('period', type=get_period_type())
for scope in ['monitor alert show-incident', 'monitor alert list-incidents']:
with self.argument_context(scope) as c:
c.argument('rule_name', options_list=['--rule-name'], id_part='name')
c.argument('incident_name', name_arg_type, id_part='child_name_1')
with self.argument_context('monitor alert list-incidents') as c:
c.argument('rule_name', options_list=['--rule-name'], id_part=None)
# endregion
# region Metrics
with self.argument_context('monitor metrics') as c:
c.argument('metricnamespace', options_list=['--namespace'], help='Namespace to query metric definitions for.')
with self.argument_context('monitor metrics list-definitions') as c:
c.resource_parameter('resource_uri', arg_group='Target Resource')
with self.argument_context('monitor metrics list') as c:
from azure.mgmt.monitor.models import AggregationType
c.resource_parameter('resource', arg_group='Target Resource')
c.argument('metadata', action='store_true')
c.argument('dimension', nargs='*', validator=validate_metric_dimension)
c.argument('aggregation', arg_type=get_enum_type(t for t in AggregationType if t.name != 'none'), nargs='*')
c.argument('metrics', nargs='+')
c.argument('orderby', help='Aggregation to use for sorting results and the direction of the sort. Only one order can be specificed. Examples: sum asc')
c.argument('top', help='Max number of records to retrieve. Valid only if --filter used.')
c.argument('filters', options_list='--filter')
c.argument('metric_namespace', options_list='--namespace')
with self.argument_context('monitor metrics list', arg_group='Time') as c:
c.argument('start_time', arg_type=get_datetime_type(help='Start time of the query.'))
c.argument('end_time', arg_type=get_datetime_type(help='End time of the query. Defaults to the current time.'))
c.argument('offset', type=get_period_type(as_timedelta=True))
c.argument('interval', arg_group='Time', type=get_period_type())
# endregion
# region MetricAlerts
with self.argument_context('monitor metrics alert') as c:
c.argument('rule_name', name_arg_type, id_part='name', help='Name of the alert rule.')
c.argument('severity', type=int, help='Severity of the alert from 0 (low) to 4 (high).')
c.argument('window_size', type=get_period_type(), help='Time over which to aggregate metrics in "##h##m##s" format.')
c.argument('evaluation_frequency', type=get_period_type(), help='Frequency with which to evaluate the rule in "##h##m##s" format.')
c.argument('auto_mitigate', arg_type=get_three_state_flag(), help='Automatically resolve the alert.')
c.argument('condition', options_list=['--condition'], action=MetricAlertConditionAction, nargs='+')
c.argument('description', help='Free-text description of the rule.')
c.argument('scopes', nargs='+', help='Space-separated list of scopes the rule applies to.')
c.argument('disabled', arg_type=get_three_state_flag())
c.argument('enabled', arg_type=get_three_state_flag(), help='Whether the metric alert rule is enabled.')
with self.argument_context('monitor metrics alert create', arg_group=None) as c:
c.argument('actions', options_list=['--action', '-a'], action=MetricAlertAddAction, nargs='+', validator=get_action_group_validator('actions'))
with self.argument_context('monitor metrics alert update', arg_group='Action') as c:
c.argument('add_actions', options_list='--add-action', action=MetricAlertAddAction, nargs='+', validator=get_action_group_validator('add_actions'))
c.argument('remove_actions', nargs='+', validator=get_action_group_id_validator('remove_actions'))
with self.argument_context('monitor metrics alert update', arg_group='Condition') as c:
c.argument('add_conditions', options_list='--add-condition', action=MetricAlertConditionAction, nargs='+')
c.argument('remove_conditions', nargs='+')
# endregion
# region Autoscale
with self.argument_context('monitor autoscale') as c:
c.argument('autoscale_name', arg_type=autoscale_name_type, options_list=['--name', '-n'])
c.argument('autoscale_setting_name', arg_type=autoscale_name_type, options_list=['--name', '-n'])
c.argument('profile_name', arg_type=autoscale_profile_name_type)
c.argument('rule_name', arg_type=autoscale_rule_name_type)
c.argument('enabled', arg_type=get_three_state_flag(), help='Autoscale settings enabled status.')
with self.argument_context('monitor autoscale', arg_group='Notification') as c:
c.argument('actions', options_list=['--action', '-a'], action=AutoscaleAddAction, nargs='+')
c.argument('add_actions', options_list=['--add-action', '-a'], action=AutoscaleAddAction, nargs='+')
c.argument('remove_actions', options_list=['--remove-action', '-r'], action=AutoscaleRemoveAction, nargs='+')
c.argument('email_administrator', arg_type=get_three_state_flag(), help='Send email to subscription administrator on scaling.')
c.argument('email_coadministrators', arg_type=get_three_state_flag(), help='Send email to subscription co-administrators on scaling.')
with self.argument_context('monitor autoscale create') as c:
c.resource_parameter('resource', arg_group='Target Resource')
c.argument('disabled', arg_type=get_three_state_flag(), help='Create the autoscale settings in a disabled state.')
with self.argument_context('monitor autoscale', arg_group='Instance Limit') as c:
c.argument('count', type=int, help='The numer of instances to use. If used with --min/max-count, the default number of instances to use.')
c.argument('min_count', type=int, help='The minimum number of instances.')
c.argument('max_count', type=int, help='The maximum number of instances.')
with self.argument_context('monitor autoscale profile') as c:
c.argument('autoscale_name', arg_type=autoscale_name_type, id_part=None)
c.argument('profile_name', arg_type=autoscale_profile_name_type, options_list=['--name', '-n'])
c.argument('copy_rules', help='Name of an existing schedule from which to copy the scaling rules for the new schedule.')
with self.argument_context('monitor autoscale profile list-timezones') as c:
c.argument('search_query', options_list=['--search-query', '-q'], help='Query text to find.')
c.argument('offset', help='Filter results based on UTC hour offset.', type=timezone_offset_type)
with self.argument_context('monitor autoscale profile', arg_group='Schedule') as c:
c.argument('timezone', type=timezone_name_type)
c.argument('start', arg_type=get_datetime_type(help='Start time.', timezone=False))
c.argument('end', arg_type=get_datetime_type(help='End time.', timezone=False))
c.argument('recurrence', options_list=['--recurrence', '-r'], nargs='+', validator=validate_autoscale_recurrence)
with self.argument_context('monitor autoscale rule') as c:
c.argument('autoscale_name', arg_type=autoscale_name_type, id_part=None)
c.argument('rule_name', arg_type=autoscale_rule_name_type, options_list=['--name', '-n'])
c.argument('scale', help='The direction and amount to scale.', action=AutoscaleScaleAction, nargs='+')
c.argument('condition', help='Condition on which to scale.', action=AutoscaleConditionAction, nargs='+')
c.argument('timegrain', validator=validate_autoscale_timegrain, nargs='+')
c.argument('cooldown', type=int, help='The number of minutes that must elapse before another scaling event can occur.')
with self.argument_context('monitor autoscale rule delete') as c:
c.argument('index', nargs='+', help="Space-separated list of rule indices to remove, or '*' to clear all rules.")
with self.argument_context('monitor autoscale rule copy') as c:
c.argument('index', nargs='+', help="Space-separated list of rule indices to copy, or '*' to copy all rules.")
c.argument('source_profile', options_list=['--source-schedule'], help='Name of the profile to copy rules from.')
c.argument('dest_profile', options_list=['--dest-schedule'], help='Name of the profile to copy rules to.')
with self.argument_context('monitor autoscale rule create') as c:
c.resource_parameter('source', arg_group='Source', required=False, preserve_resource_group_parameter=True)
# endregion
# region Autoscale (OLD)
with self.argument_context('monitor autoscale-settings') as c:
c.argument('name', options_list=['--azure-resource-name'])
c.argument('autoscale_setting_name', options_list=['--name', '-n'])
with self.argument_context('monitor autoscale-settings create') as c:
c.argument('parameters', type=get_json_object, help='JSON encoded parameters configuration. Use @{file} to load from a file. Use az autoscale-settings get-parameters-template to export json template.')
for scope in ['monitor autoscale-settings show', 'monitor autoscale-settings delete']:
with self.argument_context(scope) as c:
c.argument('autoscale_setting_name', id_part='name')
# https://github.com/Azure/azure-rest-api-specs/issues/1017
with self.argument_context('monitor autoscale-settings list') as c:
c.ignore('filter')
# endregion
# region Diagnostic
with self.argument_context('monitor diagnostic-settings') as c:
c.argument('name', options_list=('--name', '-n'))
with self.argument_context('monitor diagnostic-settings show') as c:
c.resource_parameter('resource_uri', required=True, arg_group='Target Resource')
with self.argument_context('monitor diagnostic-settings list') as c:
c.resource_parameter('resource_uri', required=True)
with self.argument_context('monitor diagnostic-settings delete') as c:
c.resource_parameter('resource_uri', required=True, arg_group='Target Resource')
with self.argument_context('monitor diagnostic-settings update') as c:
c.resource_parameter('resource_uri', required=True, arg_group='Target Resource')
with self.argument_context('monitor diagnostic-settings create') as c:
c.resource_parameter('resource_uri', required=True, arg_group='Target Resource', skip_validator=True)
c.argument('logs', type=get_json_object)
c.argument('metrics', type=get_json_object)
with self.argument_context('monitor diagnostic-settings categories list') as c:
c.resource_parameter('resource_uri', required=True)
with self.argument_context('monitor diagnostic-settings categories show') as c:
c.resource_parameter('resource_uri', required=True)
# endregion
# region LogProfiles
with self.argument_context('monitor log-profiles') as c:
c.argument('log_profile_name', options_list=['--name', '-n'])
with self.argument_context('monitor log-profiles create') as c:
c.argument('name', options_list=['--name', '-n'])
c.argument('categories', nargs='+')
c.argument('locations', nargs='+')
c.argument('days', type=int, arg_group='Retention Policy')
c.argument('enabled', arg_type=get_three_state_flag(), arg_group='Retention Policy')
# endregion
# region ActivityLog
with self.argument_context('monitor activity-log list') as c:
activity_log_props = [x['key'] for x in EventData()._attribute_map.values()] # pylint: disable=protected-access
c.argument('select', nargs='+', arg_type=get_enum_type(activity_log_props))
c.argument('max_events', type=int)
with self.argument_context('monitor activity-log list', arg_group='Time') as c:
c.argument('start_time', arg_type=get_datetime_type(help='Start time of the query.'))
c.argument('end_time', arg_type=get_datetime_type(help='End time of the query. Defaults to the current time.'))
c.argument('offset', type=get_period_type(as_timedelta=True))
with self.argument_context('monitor activity-log list', arg_group='Filter') as c:
c.argument('filters', deprecate_info=c.deprecate(target='--filters', hide=True, expiration='2.1.0'), help='OData filters. Will ignore other filter arguments.')
c.argument('correlation_id')
c.argument('resource_group', resource_group_name_type)
c.argument('resource_id')
c.argument('resource_provider', options_list=['--namespace', c.deprecate(target='--resource-provider', redirect='--namespace', hide=True, expiration='2.1.0')])
c.argument('caller')
c.argument('status')
# endregion
# region ActionGroup
with self.argument_context('monitor action-group') as c:
c.argument('action_group_name', options_list=['--name', '-n'], id_part='name')
with self.argument_context('monitor action-group create') as c:
from .validators import process_action_group_detail_for_creation
from .actions import ActionGroupReceiverParameterAction
c.extra('receivers', options_list=['--action', '-a'], nargs='+', arg_group='Actions', action=ActionGroupReceiverParameterAction, validator=process_action_group_detail_for_creation)
c.extra('short_name')
c.extra('tags')
c.ignore('action_group')
with self.argument_context('monitor action-group update', arg_group='Actions') as c:
c.extra('add_receivers', options_list=['--add-action', '-a'], nargs='+', action=ActionGroupReceiverParameterAction)
c.extra('remove_receivers', options_list=['--remove-action', '-r'], nargs='+')
c.ignore('action_group')
with self.argument_context('monitor action-group enable-receiver') as c:
c.argument('receiver_name', options_list=['--name', '-n'])
c.argument('action_group_name', options_list=['--action-group'])
# endregion
# region ActivityLog Alerts
with self.argument_context('monitor activity-log alert') as c:
c.argument('activity_log_alert_name', options_list=['--name', '-n'], id_part='name')
with self.argument_context('monitor activity-log alert create') as c:
from .operations.activity_log_alerts import process_condition_parameter
c.argument('disable', action='store_true')
c.argument('scopes', options_list=['--scope', '-s'], nargs='+')
c.argument('condition', options_list=['--condition', '-c'], nargs='+', validator=process_condition_parameter)
c.argument('action_groups', options_list=['--action-group', '-a'], nargs='+')
c.argument('webhook_properties', options_list=['--webhook-properties', '-w'], arg_type=webhook_prop_type)
with self.argument_context('monitor activity-log alert update-condition') as c:
c.argument('reset', action='store_true')
c.argument('add_conditions', options_list=['--add-condition', '-a'], nargs='+')
c.argument('remove_conditions', options_list=['--remove-condition', '-r'], nargs='+')
with self.argument_context('monitor activity-log alert update') as c:
from .operations.activity_log_alerts import process_condition_parameter
c.argument('condition', options_list=['--condition', '-c'], nargs='+', validator=process_condition_parameter)
c.argument('enabled', arg_type=get_three_state_flag())
with self.argument_context('monitor activity-log alert action-group add') as c:
c.argument('reset', action='store_true')
c.argument('action_group_ids', options_list=['--action-group', '-a'], nargs='+')
c.argument('webhook_properties', options_list=['--webhook-properties', '-w'], arg_type=webhook_prop_type)
with self.argument_context('monitor activity-log alert action-group remove') as c:
c.argument('action_group_ids', options_list=['--action-group', '-a'], nargs='+')
with self.argument_context('monitor activity-log alert scope add') as c:
c.argument('scopes', options_list=['--scope', '-s'], nargs='+')
c.argument('reset', action='store_true')
with self.argument_context('monitor activity-log alert scope remove') as c:
c.argument('scopes', options_list=['--scope', '-s'], nargs='+')
# endregion
| 62.565868 | 209 | 0.705077 |
d89f2c58e573682eed9e02a5ef058e37a657c170 | 3,696 | py | Python | utils/script/recorder.py | junkunyuan/CSAC | 70d918ed2fe65a0a503b56d66136032031cd67e4 | [
"MIT"
] | 3 | 2022-01-06T06:42:12.000Z | 2022-01-20T04:00:40.000Z | utils/script/recorder.py | junkunyuan/CSAC | 70d918ed2fe65a0a503b56d66136032031cd67e4 | [
"MIT"
] | null | null | null | utils/script/recorder.py | junkunyuan/CSAC | 70d918ed2fe65a0a503b56d66136032031cd67e4 | [
"MIT"
] | null | null | null | import json,os
import numpy as np
import os.path as osp
class AccRecorder(object):
"""Save acc on all domains."""
def __init__(self, name, decimal=4):
self.decimal = decimal
self.name = name
self.reset()
def reset(self):
self.accs = {}
self.iter_steps = {}
self.count = 0
def updata(self, kwages):
self.count+=1
for k, (step, acc) in kwages.items():
self.accs.setdefault(k,[])
self.iter_steps.setdefault(k,[])
if self.decimal:
self.accs[k].append(round(float(acc), self.decimal))
else:
self.accs[k].append(acc)
self.iter_steps[k].append(step)
def save(self, pth):
savefile = {"name": self.name,
"count": self.count,
"accs": self.accs,
"step": self.iter_steps}
if not osp.exists(osp.split(pth)[0]):
os.makedirs(osp.split(pth)[0])
if pth[-4:] == "json":
with open(pth, "w", encoding="utf-8") as f:
json.dump(savefile, f, indent=4)
elif pth[-4:] == ".npy":
np.save(pth, savefile)
def load(self,pth):
with open(pth,"r",encoding='utf-8') as f:
load_dict = json.load(f)
self.name = load_dict["name"]
self.count = load_dict["count"]
self.accs = load_dict["accs"]
self.iter_steps = load_dict["step"]
def merge(self,pth,name=None):
with open(pth, 'r') as f:
load_dict = json.load(f)
print(load_dict.keys(), self.accs.keys())
if name:
self.name = name
else:
self.name = self.name+"_" + load_dict['name']
self.count += load_dict['count']
for key in self.accs.keys():
self.accs[key] += load_dict['accs'][key]
new_steps = [item + self.iter_steps[-1] for item in load_dict['step']]
# print(new_steps)
self.iter_steps += new_steps
def merge_r(self,recorder):
self.count += recorder.count
for key in self.accs.keys():
self.accs[key] += recorder.accs[key]
new_steps = [item + self.iter_steps[-1] for item in recorder.iter_steps]
self.iter_steps += new_steps
def __str__(self):
fmtstr = 'name: {name}\ncount: {count}\naccs:{accs}\nsteps:{iter_steps}\n'
return fmtstr.format(**self.__dict__)
class ShareAccRecorder(AccRecorder):
def __init__(self, name, *domains, decimal=None):
self.domains = list(domains)
super().__init__(name, decimal)
def reset(self):
super().reset()
for domain in self.domains:
self.accs[domain] = []
self.iter_steps = []
def updata(self, iter_step, kwages):
self.count+=1
self.iter_steps.append(iter_step)
for k, v in kwages.items():
if self.decimal:
self.accs[k].append(round(float(v), self.decimal))
else:
self.accs[k].append(v)
assert len(self.accs[k]) == self.count
def pure(self):
accs = {}
for k,v in self.accs.items():
accs[k] = []
steps = []
for i,step in enumerate(self.iter_steps):
if step < 0:
continue
steps.append(step)
for k,v in self.accs.items():
accs[k].append(self.accs[k][i])
self.accs = accs
self.iter_steps = steps
def addkey(self,key):
self.domains.append(key)
self.accs[key] = []
| 32.13913 | 82 | 0.517045 |
02247f87c08dc1df7c3462512f8dae8f187e53f5 | 413 | py | Python | app/email.py | ibukamshindi/Pitches-App | 43406aa9e3f0710eb954dfb463c4d76fb525851f | [
"MIT"
] | null | null | null | app/email.py | ibukamshindi/Pitches-App | 43406aa9e3f0710eb954dfb463c4d76fb525851f | [
"MIT"
] | null | null | null | app/email.py | ibukamshindi/Pitches-App | 43406aa9e3f0710eb954dfb463c4d76fb525851f | [
"MIT"
] | null | null | null | from flask_mail import Message
from flask import render_template
from . import mail
# subject_pref = 'PITCH'
sender_email = 'patodev01@yahoo.com'
def mail_message(subject, template, to, **kwargs):
    """Render *template* (both .txt and .html variants) and email it to *to*.

    Any extra keyword arguments are passed straight through to the templates.
    (Stray trailing comma after **kwargs removed -- it is a SyntaxError on
    Python 2 and confusing either way.)
    """
    email = Message(subject, sender=sender_email, recipients=[to])
    email.body = render_template(template + ".txt", **kwargs)
    email.html = render_template(template + ".html", **kwargs)
    mail.send(email)
271432a4942c0a109d0372908def8b3d01837e4d | 1,295 | py | Python | tests/unit/modules/test_chef.py | nevins-b/salt | 56363bc41ca36e757103df3504d1bb07e3a7251b | [
"Apache-2.0"
] | null | null | null | tests/unit/modules/test_chef.py | nevins-b/salt | 56363bc41ca36e757103df3504d1bb07e3a7251b | [
"Apache-2.0"
] | null | null | null | tests/unit/modules/test_chef.py | nevins-b/salt | 56363bc41ca36e757103df3504d1bb07e3a7251b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
import salt.modules.chef as chef
@skipIf(NO_MOCK, NO_MOCK_REASON)
class ChefTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.modules.chef
'''
def setup_loader_modules(self):
return {chef: {}}
# 'client' function tests: 1
@patch('salt.modules.chef._exec_cmd', MagicMock(return_value={}))
@patch('salt.utils.which', MagicMock(return_value=True))
def test_client(self):
'''
Test if it execute a chef client run and return a dict
'''
self.assertDictEqual(chef.client(), {})
# 'solo' function tests: 1
@patch('salt.modules.chef._exec_cmd', MagicMock(return_value={}))
@patch('salt.utils.which', MagicMock(return_value=True))
def test_solo(self):
'''
Test if it execute a chef solo run and return a dict
'''
self.assertDictEqual(chef.solo('/dev/sda1'), {})
| 26.428571 | 69 | 0.666409 |
3c13578c7681177719d7ec63c1b8021c6050d90a | 11,646 | py | Python | src/python/wntr_vis.py | NREL-SIIP/WaterModelsAnalytics.jl | dff18c644546cb0087bf5ca2569845fe7b9a84f9 | [
"BSD-3-Clause"
] | 4 | 2020-10-15T15:40:31.000Z | 2021-02-19T19:44:03.000Z | src/python/wntr_vis.py | NREL-SIIP/WaterModelsAnalytics.jl | dff18c644546cb0087bf5ca2569845fe7b9a84f9 | [
"BSD-3-Clause"
] | 37 | 2020-08-16T23:08:43.000Z | 2022-03-03T18:07:03.000Z | src/python/wntr_vis.py | NREL-SIIP/WaterModelsAnalytics.jl | dff18c644546cb0087bf5ca2569845fe7b9a84f9 | [
"BSD-3-Clause"
] | 2 | 2020-08-16T23:01:14.000Z | 2021-01-28T16:49:06.000Z | """
Functions to create graph visualizations of WNTR networks
"""
## TODO:
# - [nothing at the moment, JJS 1/25/21]
import numpy as np
import wntr
import networkx as nx
import pandas as pd
import warnings
import matplotlib
from matplotlib import cm, colors, colorbar, pyplot
import PyPDF2
import os
def build_graph(wn, time=1, wnsol=None):
    """
    Build a pygraphviz graph from a WNTR network object parsed from an inp
    file.

    `time` is presumed to be integer hours.
    `wnsol`, if provided, should be a WNTR solution dict generated via
    `run.sim()`; heads and flow rates are then appended to the labels.
    """
    # networkx object first: it has more features and functions than pygraphviz
    network_graph = wn.get_graph()
    node_attrs(wn, network_graph, time)
    link_attrs(wn, network_graph)
    if wnsol is not None:
        add_solution(wn, network_graph, wnsol, time)
    # hand back the pygraphviz (AGraph) equivalent for layout/drawing
    return nx.nx_agraph.to_agraph(network_graph)
def node_attrs(wn, Gnx, time):
    """
    Add/change node attributes suitable for visualization.

    Parameters
    ----------
    wn : WNTR network object (assumed -- TODO confirm with callers)
    Gnx : networkx graph obtained from ``wn.get_graph()``; modified in place
    time : int
        1-based time index (hours) used to pick pattern multipliers
    """
    # node position attributes (coordinates in inp file)
    pos = nx.get_node_attributes(Gnx, "pos")
    # adjust and convert position tuple to comma-delimited string with
    # exclamation point; not used with default dot layout
    xpos = np.array([pos[i][0] for i in pos.keys()])
    ypos = np.array([pos[i][1] for i in pos.keys()])
    xmin = xpos.min()
    xmax = xpos.max()
    xspan = xmax - xmin
    ymin = ypos.min()
    ymax = ypos.max()
    yspan = ymax-ymin
    # scale to use to get "good" position results with graphviz output --
    # calculate this systematically from the number of nodes?
    scale = 20
    # scale position and convert to string
    for node in Gnx.nodes:
        # convert position tuple to string (so that graphviz can actually use it!)
        postup = Gnx.nodes[node]["pos"]
        # during conversion to strings, adjust scale so that positions are reasonable
        xval = scale*(postup[0] - xmin)/xspan
        yval = scale*(postup[1] - ymin)/yspan
        posstr = str(xval) + "," + str(yval)
        # add exclamation point to set fixed positions when using neato layout
        posstr += "!"
        Gnx.nodes[node]["pos"] = posstr
    # add label designations to reservoirs and tanks
    for rsvr in wn.reservoir_name_list: # usually just one, but could be more I suppose
        Gnx.nodes[rsvr]['label'] = "Rsvr\n" + rsvr
    for tank in wn.tank_name_list:
        Gnx.nodes[tank]['label'] = "Tank\n" + tank
    # highlight junctions with demand, using the time provided (or default of 1)
    based = wn.query_node_attribute('base_demand')
    # indices of junctions with non-negligible base demand
    idx = np.nonzero((based>1e-12).values)[0]
    demnodes = based.iloc[idx].index
    #demand = wn.query_node_attribute('base_demand')*1000 # L/s
    for nname in demnodes:
        node = wn.get_node(nname)
        # assumes the first demand pattern applies -- TODO confirm for
        # networks with multiple demand time series per junction
        pat = node.demand_timeseries_list.pattern_list()[0]
        demval = based[nname]*pat[time-1]
        Gnx.nodes[nname]['label'] = nname + "\nd = %2.2g" % demval
    ## add elevation information
    # presume that junctions and tanks have an "elevation" and reservoirs have a
    # "base head" (and not keys for both)
    elevs = wn.query_node_attribute('elevation')
    # use the time and the pattern and multiply by the base head
    res_elevs = wn.query_node_attribute('base_head')
    resnames = res_elevs.index.values
    for node in resnames:
        res = wn.get_node(node)
        if res.head_timeseries.pattern is None:
            res_elevs[node] = res.head_timeseries.base_value
        else:
            mults = res.head_timeseries.pattern.multipliers
            # "base_value" should be the same as what was already queried by
            # "base_head", so either should work
            res_elevs[node] = res.head_timeseries.base_value * mults[time-1]
    elevs = pd.concat((elevs, res_elevs))
    elmin = elevs.min()
    elmax = elevs.max()
    # store max and min elevations as graph attributes to use for creating a colorbar
    Gnx.graph["elmin"] = elmin
    Gnx.graph["elmax"] = elmax
    nodenames = elevs.index.values
    # normalize elevations to [0, 1] for colormap lookup
    elevsrel = (elevs - elmin)/(elmax - elmin)
    cmap = cm.get_cmap('viridis') # can make colormap a user option
    # colors are stored as HSV triplets (graphviz accepts "H S V" strings)
    elevclrs = pd.DataFrame(colors.rgb_to_hsv(cmap(elevsrel)[:,:3]), index=nodenames)
    for node in nodenames:
        Gnx.nodes[node]['style'] = "filled"
        clr = np.array(elevclrs.loc[node])
        clrstr = np.array_str(clr)
        Gnx.nodes[node]['fillcolor'] = clrstr[1:-1] # remove brackets from string
        # dark fills (low HSV "value" channel) get white text for contrast
        if clr[2] < 0.6:
            Gnx.nodes[node]["fontcolor"] = "white"
    return
def link_attrs(wn, Gnx):
    """
    Add/change link attributes suitable for visualization.

    Pumps are drawn red, special valves (PRVs etc.) purple; pipes with
    check valves (CV) or shutoff valves (SV) are tagged in their labels.
    `Gnx` is modified in place.
    """
    # loop over controls to find pipes with shutoff valves
    sv_name_list = []
    for control in wn.controls():
        # assumes the control's first action targets the controlled link
        # -- TODO confirm against the WNTR controls API
        link = control[1].actions()[0].target()[0]
        if link.link_type == "Pipe":
            if link.name not in sv_name_list:
                sv_name_list.append(link.name)
    # loop over links that have a closed status to find closed pipes; will
    # presume these could be controllable by shutoff valves, even if controls
    # are not specified in the inp file
    status = wn.query_link_attribute("status")
    closed = status[status == 0]
    for name in closed.index:
        link = wn.get_link(name)
        if link.link_type == "Pipe":
            if link.name not in sv_name_list:
                sv_name_list.append(link.name)
    # loop over links to set graphviz attributes
    # (note: `list(Gnx.edges)` provides a list of the edge keys)
    for edge in Gnx.edges:
        eatts = Gnx.edges.get(edge)
        # edge[2] is the networkx multigraph edge key, i.e. the link name
        link = wn.get_link(edge[2])
        if eatts['type'] == 'Pump':
            eatts['color'] = 'red'
            eatts['style'] = 'bold'
            eatts['label'] = "Pmp\n" + edge[2]
        elif eatts['type'] == 'Valve': # these are special-type valves, e.g., PRVs
            link = wn.get_link(edge[2])
            eatts['color'] = 'purple' # what is a good eye-catching color?
            eatts['style'] = 'bold'
            eatts['label'] = link.valve_type + "\n" + edge[2]
        elif edge[2] in wn._check_valves:
            length = "%2.2g m" % link.length
            eatts['label'] = "CV\n" + edge[2] + "\n" + length
        elif edge[2] in sv_name_list:
            length = "%2.2g m" % link.length
            eatts['label'] = "SV\n" + edge[2] + "\n" + length
        else:
            length = "%2.2g m" % link.length
            eatts["label"] = edge[2] + "\n" + length
    return
def add_solution(wn, Gnx, wnsol, time):
    """
    Add head and flowrates to the labels for nodes and links, respectively.
    """
    # node labels get the head value (pressure could be shown instead)
    head = wnsol.node["head"]
    for name in Gnx.nodes:
        attrs = Gnx.nodes.get(name)
        head_str = _val_string_cut(head[name].iloc[time], 1e-10)
        if "label" not in attrs:
            attrs["label"] = name + "\nh: " + head_str
        else:
            attrs["label"] += "\nh: " + head_str
    # link labels get the flow rate; links already carry a label
    flowrate = wnsol.link["flowrate"]
    for edge in Gnx.edges:
        flow_str = _val_string_cut(flowrate[edge[2]].iloc[time], 1e-10)
        Gnx.edges.get(edge)["label"] += "\nq: " + flow_str
    return
def _val_string_cut(val, cut):
if val < cut:
return "0"
else:
return "%2.2g" % val
def write_graph(G, filename, layout="dot"):
    """
    Use graphviz (via pygraphviz) to output a visualization to a file for a
    graph. The `layout` option equates to the layout functions of graphviz
    (dot, neato, etc.); if `layout` fails, fall back to dot with a warning.
    """
    if layout == "dot":
        # no fallback possible; let any real error propagate
        G.draw(filename, prog="dot")
        return
    try:
        G.draw(filename, prog=layout)
    except Exception:
        # note: a bare `except:` here would also swallow KeyboardInterrupt/
        # SystemExit; Exception keeps the fallback limited to draw failures
        G.draw(filename, prog="dot")
        warnings.warn("%s is not a supported layout; dot was used instead"%layout)
    return
def write_cbar(G, filename):
    """
    Make the colorbar for the elevations and save it to `filename`.

    Reads the `elmin`/`elmax` graph attributes stored by `node_attrs`.
    (see https://matplotlib.org/tutorials/colors/colorbar_only.html)
    """
    # G.graph_attr.keys() # to see the attribute keys
    # if user's matplotlib environment is interactive, turn off
    interactive = matplotlib.is_interactive()
    if interactive:
        pyplot.ioff()
    fig = pyplot.figure(figsize=(6,1))
    try:
        ax = pyplot.gca()
        cmap = cm.viridis
        norm = colors.Normalize(vmin=G.graph_attr["elmin"],
                                vmax=G.graph_attr["elmax"])
        cb1 = colorbar.ColorbarBase(ax, cmap=cmap, norm=norm,
                                    orientation='horizontal')
        cb1.set_label('Elevation [m]')
        pyplot.savefig(filename, bbox_inches='tight')
    finally:
        # always release the figure and restore the user's interactive
        # state, even when plotting or saving raises
        pyplot.close(fig)
        if interactive:
            pyplot.ion()
    return
def stack_cbar(graphfilename, cbfilename, outfilename, sep_page=False):
    """
    Stack the colorbar on top of the graph using PyPDF2. Use `sep_page=True` to
    have the colorbar on a separate page (faster processing for large graphs).
    """
    # use PyPDF2 to merge the colorbar; the input files must stay open until
    # the output is written because PdfFileReader reads page content lazily
    # (previously the handles were opened inline and never closed)
    with open(graphfilename, "rb") as graph_fobj, \
            open(cbfilename, "rb") as cbar_fobj:
        input1 = PyPDF2.PdfFileReader(graph_fobj)
        input2 = PyPDF2.PdfFileReader(cbar_fobj)
        output = PyPDF2.PdfFileWriter()
        page1 = input1.getPage(0)
        page2 = input2.getPage(0)
        if sep_page: # set colorbar to be first page
            output.addPage(page2)
            output.addPage(page1)
        else: # merge the colorbar above the graph on one taller page
            h1 = page1.mediaBox.getHeight()
            w1 = page1.mediaBox.getWidth()
            h2 = page2.mediaBox.getHeight()
            w2 = page2.mediaBox.getWidth()
            w = max(w1,w2)
            h = h1 + h2
            newpage = PyPDF2.pdf.PageObject.createBlankPage(None, w, h)
            # the coordinates are referenced to lower-left; center the
            # narrower page horizontally
            if w2>w1:
                newpage.mergeScaledTranslatedPage(page1, 1, (w2-w1)/2, 0)
                newpage.mergeScaledTranslatedPage(page2, 1, 0, h1)
            else:
                newpage.mergeScaledTranslatedPage(page1, 1, 0, 0)
                newpage.mergeScaledTranslatedPage(page2, 1, (w1-w2)/2, h1)
            output.addPage(newpage)
        with open(outfilename, "wb") as outfile:
            output.write(outfile)
    return
def collate_viz(filenames, outfilename):
    """
    Collate the pages of a multi-time visualization into one PDF.
    """
    output = PyPDF2.PdfFileWriter()
    # keep every input file open until the output has been written (PyPDF2
    # reads page content lazily), then close them all; previously the
    # handles were opened inline and leaked
    handles = []
    try:
        for filename in filenames:
            fobj = open(filename, "rb")
            handles.append(fobj)
            inpdf = PyPDF2.PdfFileReader(fobj)
            output.addPage(inpdf.getPage(0))
        with open(outfilename, "wb") as outfile:
            output.write(outfile)
    finally:
        for fobj in handles:
            fobj.close()
    return
def write_visualization(wn, basefilename, time=1, wnsol=None, layout="dot",
                        sep_page=False, del_files=True):
    """
    Write out to a file a visualization for an Epanet network dictionary
    parsed from an EPANET file.

    `basefilename` should not include an extension and will be appended with
    `_w_cb.pdf` in the final output file, which is a multi-page PDF.  The
    `layout` option equates to the layout functions of graphviz (dot, neato,
    etc.).  Use `sep_page=True` to have the colorbar on a separate page
    (faster processing for large graphs).  Use `del_files=False` to keep the
    intermediate files.

    `time` is presumed to be integer hours.
    `wnsol`, if provided, should be a WNTR solution dict generated via
    `run.sim()`.
    """
    graph_pdf = basefilename + "_graph.pdf"
    cbar_pdf = basefilename + "_cbar.pdf"
    final_pdf = basefilename + "_w_cb.pdf"
    graph = build_graph(wn, time, wnsol)
    write_graph(graph, graph_pdf, layout)
    write_cbar(graph, cbar_pdf)
    stack_cbar(graph_pdf, cbar_pdf, final_pdf, sep_page)
    if del_files:
        # only the combined PDF is kept
        for tmpfile in (graph_pdf, cbar_pdf):
            os.remove(tmpfile)
    return
| 35.290909 | 87 | 0.633866 |
bbea081bdac1eac18ee037150227c01c2aadf088 | 1,424 | py | Python | sentiment_model.py | justinli930/Public-Morale-Over-Covid | 36fc07e48fe5fcf11cc9c35856f3e2eedc4998ec | [
"Apache-2.0"
] | null | null | null | sentiment_model.py | justinli930/Public-Morale-Over-Covid | 36fc07e48fe5fcf11cc9c35856f3e2eedc4998ec | [
"Apache-2.0"
] | null | null | null | sentiment_model.py | justinli930/Public-Morale-Over-Covid | 36fc07e48fe5fcf11cc9c35856f3e2eedc4998ec | [
"Apache-2.0"
] | null | null | null | import os
import spacy
from spacy.tokenizer import Tokenizer
def predict_sentiment(txt: str, direc: str = 'models/sentiment/saved_models/model50') -> float:
    """
    Predict the sentiment of a string.

    Only use for testing, not good for large data, because the spaCy model
    is loaded from `direc` on every call (use `open_model` together with
    `model_predict_sentiment` for bulk scoring).

    Returns a value from -1 to 1:
    approaching -1 is a negative sentiment,
    approaching 1 is a positive sentiment.
    """
    scores = spacy.load(direc)(txt).cats
    if scores["pos"] > scores["neg"]:
        return scores["pos"]
    return -1 * scores["neg"]
def open_model(direc: str = 'models/sentiment/saved_models/model50'):
    """Load and return the spaCy model stored at `direc`."""
    model = spacy.load(direc)
    return model
def model_predict_sentiment(model, txt: str) -> float:
    """
    Score `txt` with an already-loaded spaCy `model`.

    Use for larger data because the model is not loaded on each call.

    Returns a value from -1 to 1:
    approaching -1 is a negative sentiment,
    approaching 1 is a positive sentiment.
    """
    scores = model(txt).cats
    if scores["pos"] > scores["neg"]:
        return scores["pos"]
    return -1 * scores["neg"]
# ad-hoc manual smoke test: loads the default model and (on the next line)
# prints one sentiment score for the sample text
if __name__ == "__main__":
    """Leo Test Area"""
    model = open_model()
    txt = """Hopefully this works it should print some output of a value between -1 and 1. Cross your fingers"""
print(model_predict_sentiment(model,txt)) | 35.6 | 112 | 0.698034 |
77a74d3571b72b946a5602bab7a4ad2f14081738 | 2,016 | py | Python | tablet/urls.py | FarsetLabs/farset-nadine | f0f5e81a9fbe98a4333f6318443fefbb5517c60f | [
"Apache-2.0"
] | null | null | null | tablet/urls.py | FarsetLabs/farset-nadine | f0f5e81a9fbe98a4333f6318443fefbb5517c60f | [
"Apache-2.0"
] | 4 | 2021-03-19T16:10:13.000Z | 2022-03-12T00:55:50.000Z | tablet/urls.py | FarsetLabs/farset-nadine | f0f5e81a9fbe98a4333f6318443fefbb5517c60f | [
"Apache-2.0"
] | null | null | null | from django.conf.urls import url
from django.views.generic import RedirectView
from . import views
# Tablet front-desk URL routes.  Per-user routes capture `username` (and a
# document type / signature file where applicable) from the URL path.
urlpatterns = [
    # member lists and search
    url(r'^$', views.members),
    url(r'^members/$', views.members, name='tablet_members'),
    url(r'^here_today/$', views.here_today, name='tablet_here_today'),
    url(r'^visitors/$', views.visitors, name='tablet_visitors'),
    url(r'^search/$', views.search, name='tablet_search'),
    # per-user pages
    url(r'^welcome/(?P<username>[^/]+)$', views.welcome, name='tablet_welcome'),
    url(r'^signin/(?P<username>[^/]+)/$', views.user_signin, name='tablet_user_signin'),
    url(r'^profile/(?P<username>[^/]+)/$', views.user_profile, name='tablet_profile'),
    url(r'^post_create/(?P<username>[^/]+)/$', views.post_create, name='tablet_post_create'),
    url(r'^(?P<username>[^/]+)/signin/$', views.signin_user, name='tablet_signin_user'),
    url(r'^(?P<username>[^/]+)/guestof/(?P<paid_by>[^/]+)$', views.signin_user_guest, name='tablet_signin_guest'),
    # member documents and signature capture/rendering
    url(r'^(?P<username>[^/]+)/documents/$', views.document_list, name='tablet_document_list'),
    url(r'^(?P<username>[^/]+)/document/(?P<doc_type>[^/]+)$', views.document_view, name='tablet_document_view'),
    url(r'^(?P<username>[^/]+)/signature/(?P<doc_type>[^/]+)/$', views.signature_capture, name='tablet_sig_capture'),
    url(r'^(?P<username>[^/]+)/signature/(?P<doc_type>[^/]+)/(?P<signature_file>[^/]+)$', views.signature_render, name='tablet_sig_render'),
]
# Copyright 2016 Office Nomads LLC (http://www.officenomads.com/) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
| 80.64 | 583 | 0.69494 |
cc98243ffeebc459d53201d7a316c93abf07da80 | 2,667 | py | Python | app/waterQual/30yr/sherlock/sh_rbWN5_hs.py | fkwai/geolearn | 30cb4353d22af5020a48100d07ab04f465a315b0 | [
"MIT"
] | null | null | null | app/waterQual/30yr/sherlock/sh_rbWN5_hs.py | fkwai/geolearn | 30cb4353d22af5020a48100d07ab04f465a315b0 | [
"MIT"
] | null | null | null | app/waterQual/30yr/sherlock/sh_rbWN5_hs.py | fkwai/geolearn | 30cb4353d22af5020a48100d07ab04f465a315b0 | [
"MIT"
] | 2 | 2021-04-04T02:45:59.000Z | 2022-03-19T09:41:39.000Z | from hydroDL import kPath
from hydroDL.app import waterQuality
from hydroDL.master import basins
from hydroDL.data import usgs, gageII, gridMET, ntn
import numpy as np
from hydroDL.master import slurm
import importlib
# for a test on training to resolve warnings
# NTN deposition species and (below) the matching USGS parameter codes;
# kept here for reference -- not used further down in this script
varNtnLst = ['ph', 'Conduc', 'Ca', 'Mg', 'K', 'Na', 'NH4', 'NO3', 'Cl', 'SO4']
varNtnUsgsLst = ['00400', '00095', '00915', '00925', '00935',
                 '00930', '71846', '00618', '00940', '00945']
# ntn variables
dataName = 'rbWN5'
# wqData = waterQuality.DataModelWQ(dataName)
codeLst = ['comb']
# labelLst = ['QFP_C', 'QF_C', 'FP_C', 'QP_C', 'Q_C', 'FP_QC']
# labelLst = ['QT_C', 'QTFP_C', 'QFP_C', 'QTF_C']
# labelLst = ['FP_QC', 'FP_C']
labelLst = ['FP_QC', 'QFP_C']
# input-variable groups: F = gridMET forcing, Q = runoff, P = NTN precip
# chemistry, T = seasonal (sin/cos) time terms
varF = gridMET.varLst
varQ = ['runoff']
varP = ntn.varLst
varT = ['sinT', 'cosT']
caseLst = list()
# build one training case per (hidden size, code, label) combination;
# the label string encodes inputs_outputs (e.g. 'FP_QC': forcing+precip
# chemistry in, runoff+concentrations out)
for hs in [16, 32, 64, 128, 256, 512]:
    for code in codeLst:
        if code == 'comb':
            varC = usgs.newC
        else:
            varC = [code]
        for label in labelLst:
            # bugfix: reset varYC for every label -- it was previously set
            # once per code, so the 'FP_Q' branch's `varYC = None` leaked
            # into any labels processed after it
            varYC = varC
            if label == 'QFP_C':
                varX = varQ+varF+varP
                varY = None
            elif label == 'FP_QC':
                varX = varF+varP
                varY = varQ
            elif label == 'FP_Q':
                varX = varF+varP
                varY = varQ
                varYC = None
            elif label == 'F_QC':
                varX = varF
                varY = varQ
            elif label == 'QF_C':
                varX = varQ+varF
                varY = None
            elif label == 'FP_C':
                varX = varF+varP
                varY = None
            elif label == 'P_C':
                varX = varP
                varY = None
            elif label == 'Q_C':
                varX = varQ
                varY = None
            elif label == 'QT_C':
                varX = varQ+varT
                varY = None
            elif label == 'QTFP_C':
                varX = varQ+varT+varF+varP
                varY = None
            trainSet = '{}-B10'.format(code)
            saveName = '{}-{}-{}-{}-hs{}'.format(dataName,
                                                 code, label, trainSet, hs)
            caseName = basins.wrapMaster(
                dataName=dataName, trainName=trainSet, batchSize=[None, 500],
                outName=saveName, varX=varX, varY=varY, varYC=varYC, hiddenSize=hs)
            caseLst.append(caseName)
# submit one 24-hour GPU job per training case on the cluster
cmdP = 'python /home/users/kuaifang/GitHUB/geolearn/app/waterQual/model/cmdTrain.py -M {}'
for caseName in caseLst:
    slurm.submitJobGPU(caseName, cmdP.format(caseName), nH=24)
    # basins.trainModelTS(caseName)
| 33.3375 | 90 | 0.509186 |
d91b80d2017a70a51ec7867d27733d1c22579dfd | 9,963 | py | Python | config/settings/base.py | jisuhan3201/osc-bge | 125c441d23d7f1fdb2d9b8f42f859082e757e25a | [
"MIT"
] | null | null | null | config/settings/base.py | jisuhan3201/osc-bge | 125c441d23d7f1fdb2d9b8f42f859082e757e25a | [
"MIT"
] | 5 | 2020-06-05T19:49:47.000Z | 2021-09-08T00:50:55.000Z | config/settings/base.py | jisuhan3201/osc-bge | 125c441d23d7f1fdb2d9b8f42f859082e757e25a | [
"MIT"
] | null | null | null | """
Base settings to build other settings files upon.
"""
import environ
# django-environ path helpers: ROOT_DIR is the repository root, three
# levels above this settings module
ROOT_DIR = environ.Path(__file__) - 3 # (osc_bge/config/settings/base.py - 3 = osc_bge/)
APPS_DIR = ROOT_DIR.path('osc_bge')
# environment-variable reader, optionally seeded from a .env file when
# DJANGO_READ_DOT_ENV_FILE is set
env = environ.Env()
READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False)
if READ_DOT_ENV_FILE:
    # OS environment variables take precedence over variables from .env
    env.read_env(str(ROOT_DIR.path('.env')))
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
# NOTE(review): DEBUG defaults to True; production settings must override
# DJANGO_DEBUG -- confirm the production settings module does so.
DEBUG = env.bool('DJANGO_DEBUG', True)
# Local time zone. Choices are
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# though not all of them may be available with every OS.
# In Windows, this must be set to your system time zone.
# NOTE(review): POSIX 'Etc/GMT-4' means UTC+4 (the sign is inverted);
# confirm UTC+4 is the intended local time.
TIME_ZONE = 'Etc/GMT-4'
# https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = False
# DATABASES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
# NOTE(review): database credentials are hard-coded in version control;
# consider reading them from the environment via `env` instead.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'osc_bge',
        'USER': 'root',
        'PASSWORD': 'gkswlgk1',
        'HOST': 'localhost', # Or an IP Address that your DB is hosted on
        'PORT': '3306',
    }
}
# wrap every HTTP request in a database transaction
DATABASES['default']['ATOMIC_REQUESTS'] = True
# URLS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = 'config.urls'
# https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# APPS
# ------------------------------------------------------------------------------
DJANGO_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.humanize', # Handy template tags
    'django.contrib.admin',
]
THIRD_PARTY_APPS = [
    'crispy_forms',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'rest_framework',
    "django_countries",
    'django_seed',
    'bootstrap4',
    'bootstrap_datepicker_plus',
]
LOCAL_APPS = [
    'osc_bge.users.apps.UsersAppConfig',
    'osc_bge.student.apps.StudentConfig',
    'osc_bge.school.apps.SchoolConfig',
    'osc_bge.agent.apps.AgentConfig',
    'osc_bge.bge.apps.BgeConfig',
    'osc_bge.form.apps.FormConfig',
    'osc_bge.branch.apps.BranchConfig',
    # Your stuff: custom apps go here
    'common'
]
# https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIGRATIONS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#migration-modules
MIGRATION_MODULES = {
    'sites': 'osc_bge.contrib.sites.migrations'
}
# AUTHENTICATION
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#authentication-backends
AUTHENTICATION_BACKENDS = [
    'django.contrib.auth.backends.ModelBackend',
    'allauth.account.auth_backends.AuthenticationBackend',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-user-model
AUTH_USER_MODEL = 'users.User'
# https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/accounts/login'
# https://docs.djangoproject.com/en/dev/ref/settings/#login-url
LOGIN_URL = 'account_login'
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
PASSWORD_HASHERS = [
    # https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django
    'django.contrib.auth.hashers.Argon2PasswordHasher',
    'django.contrib.auth.hashers.PBKDF2PasswordHasher',
    'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
    'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
    'django.contrib.auth.hashers.BCryptPasswordHasher',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# MIDDLEWARE
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#middleware
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# STATIC
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = [
    str(APPS_DIR.path('static')),
]
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = [
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
    {
        # https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
        'DIRS': [
            str(APPS_DIR.path('templates')),
        ],
        'OPTIONS': {
            # https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
            'debug': DEBUG,
            # https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
            # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',
            ],
            # https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
# http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap4'
# FIXTURES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#fixture-dirs
FIXTURE_DIRS = (
    str(APPS_DIR.path('fixtures')),
)
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
# NOTE(review): the SMTP configuration below is disabled and contains a
# committed password; remove the secret or move it to the environment.
# EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# EMAIL_HOST = 'smtp.gmail.com'
# EMAIL_PORT = 587
# EMAIL_HOST_USER = 'testsite_app'
# EMAIL_HOST_PASSWORD = 'mys3cr3tp4ssw0rd'
# EMAIL_USE_TLS = True
# DEFAULT_FROM_EMAIL = 'TestSite Team <jisu.han3201@gmail.com>'
# ADMIN
# ------------------------------------------------------------------------------
# Django Admin URL.
ADMIN_URL = 'admin/'
# https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = [
    ("""Jisu Han""", 'jisu.han3201@gmail.com'),
]
# https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# django-allauth
# ------------------------------------------------------------------------------
ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True)
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_AUTHENTICATION_METHOD = 'username'
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_EMAIL_REQUIRED = True
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_ADAPTER = 'osc_bge.users.adapters.AccountAdapter'
# https://django-allauth.readthedocs.io/en/latest/configuration.html
SOCIALACCOUNT_ADAPTER = 'osc_bge.users.adapters.SocialAccountAdapter'
# Your stuff...
# ------------------------------------------------------------------------------
# django-bootstrap4 configuration
BOOTSTRAP4 = {
    'include_jquery': True,
}
d745b1e12887d8beb4bb725e0f01c8a6976c8a44 | 1,361 | py | Python | src/twitter_monitor/monitoring/twitter_monitor/cli.py | P6rguVyrst/pymon-tools | bcf88d29dc5df31c87df220ef02aeb8124d309a2 | [
"MIT"
] | null | null | null | src/twitter_monitor/monitoring/twitter_monitor/cli.py | P6rguVyrst/pymon-tools | bcf88d29dc5df31c87df220ef02aeb8124d309a2 | [
"MIT"
] | 2 | 2021-11-15T17:46:46.000Z | 2021-11-15T17:46:49.000Z | src/twitter_monitor/monitoring/twitter_monitor/cli.py | P6rguVyrst/pymon-tools | bcf88d29dc5df31c87df220ef02aeb8124d309a2 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Console script for twitter_monitor."""
from datetime import (
datetime,
timedelta,
)
import configparser
import click
from monitoring.twitter_monitor import Monitor
from monitoring.exceptions import MonitoringError
'''
@click.option('--host', default='localhost')
@click.option('--key', defaults='twitter.monitor.keyword.count')
@click.option('--zabbix', default='localhost')
'''
@click.command()
@click.option('--config', '-c', help='Configuration file location')
@click.option('--key', '-k', help='Keyword to monitor in Twitter')
@click.option('--operation', '-o')
@click.option('--date_from', '-df', help='YYYY-MM-DD')
@click.option('--date_to', '-dt', help='YYYY-MM-DD')
def main(**kwargs):
    """Console script for twitter_monitor.

    Counts occurrences of --key on Twitter between --date_from and
    --date_to (defaulting to the last 24 hours) and prints the count.
    Raises MonitoringError when --config or --key is missing.
    """
    if not kwargs.get('config'):
        raise MonitoringError('Missing configuration file')
    if not kwargs.get('key'):
        raise MonitoringError('Missing key to monitor')
    cp = configparser.ConfigParser()
    cp.read(kwargs['config'])
    app_config = cp['twitter']
    # bugfix: `today` was referenced but never defined, so omitting either
    # date option raised NameError; default to a [yesterday, today] window
    today = datetime.now().date()
    if not kwargs.get('date_from'):
        kwargs['date_from'] = today - timedelta(days=1)
    if not kwargs.get('date_to'):
        kwargs['date_to'] = today
    monitor = Monitor(app_config)
    count = monitor.run(**kwargs)
    print(count)


if __name__ == "__main__":
    main()
| 26.173077 | 68 | 0.65687 |
10692e7daa15c2c8ef05c8fef6f457212dbf10ff | 1,195 | py | Python | backend/unpp_api/apps/project/admin.py | unicef/un-partner-portal | 73afa193a5f6d626928cae0025c72a17f0ef8f61 | [
"Apache-2.0"
] | 6 | 2017-11-21T10:00:44.000Z | 2022-02-12T16:51:48.000Z | backend/unpp_api/apps/project/admin.py | unicef/un-partner-portal | 73afa193a5f6d626928cae0025c72a17f0ef8f61 | [
"Apache-2.0"
] | 995 | 2017-07-31T02:08:36.000Z | 2022-03-08T22:44:03.000Z | backend/unpp_api/apps/project/admin.py | unicef/un-partner-portal | 73afa193a5f6d626928cae0025c72a17f0ef8f61 | [
"Apache-2.0"
] | 1 | 2021-07-21T10:45:15.000Z | 2021-07-21T10:45:15.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from project.models import (
EOI,
ClarificationRequestQuestion,
ClarificationRequestAnswerFile,
EOIAttachment,
Pin,
Application,
ApplicationFeedback,
Assessment,
)
class ApplicationAdmin(admin.ModelAdmin):
    # admin changelist configuration for partner Applications:
    # searchable by partner name / EOI title, filterable by outcome flags
    search_fields = ('partner__legal_name', 'eoi__title')
    list_display = ('id', 'partner', 'eoi', 'agency', 'did_win', 'did_accept')
    list_filter = ('is_unsolicited', 'agency', 'status', 'did_win', 'did_accept', 'did_decline', 'did_withdraw')
class EOIAdmin(admin.ModelAdmin):
    # admin changelist configuration for Expressions of Interest (EOI)
    search_fields = ('displayID', 'title')
    list_display = ('displayID', 'display_type', 'title', 'agency')
    list_filter = ('display_type', 'agency', 'sent_for_publishing', 'is_published')
    # hide this field from the admin edit form
    exclude = (
        'preselected_partners',
    )
# register the project models with the Django admin site
admin.site.register(EOI, EOIAdmin)
admin.site.register(EOIAttachment)
admin.site.register(ClarificationRequestQuestion)
admin.site.register(ClarificationRequestAnswerFile)
admin.site.register(Pin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(ApplicationFeedback)
admin.site.register(Assessment)
| 30.641026 | 112 | 0.735565 |
ffa52ab333df987c1aa61d89fe98b3dfa5edf99a | 1,393 | py | Python | pymage/io.py | MickaelRigault/pymage | 8e2ec7ef876b0e9f51c465a12a6ad9e83688622c | [
"Apache-2.0"
] | null | null | null | pymage/io.py | MickaelRigault/pymage | 8e2ec7ef876b0e9f51c465a12a6ad9e83688622c | [
"Apache-2.0"
] | null | null | null | pymage/io.py | MickaelRigault/pymage | 8e2ec7ef876b0e9f51c465a12a6ad9e83688622c | [
"Apache-2.0"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import io
import warnings
# root directory for downloaded data, read from the DATAPATH environment
# variable; downloads are stored under $DATAPATH/{INSTRUNAME}/...
DATAPATH = os.getenv("DATAPATH","_notdefined_")
if DATAPATH == "_notdefined_":
    warnings.warn("You don't have a global variable named 'DATAPATH'. You need one to be able to download data. They will be stored in $DATAPATH/{INSTRUNAME}/bla")
def download_single_url(url, fileout=None, mkdir=True,
                        overwrite=False, verbose=True, chunk=1024, **kwargs):
    """ Download the url target using requests.

    Parameters
    ----------
    url : str
        target to download
    fileout : str or None
        - None: the raw `bytes` content is returned
        - "BytesIO" / "StringIO": the content wrapped in the matching
          io object is returned
        - otherwise: path of the file where the data is stored
    mkdir : bool
        create the parent directory of `fileout` if it does not exist
    overwrite : bool
        re-download even when `fileout` already exists
    verbose : bool
        print progress information
    chunk : int
        chunk size (bytes) used when streaming to `fileout`
    **kwargs
        passed to requests.get (or requests.post when 'data' is given)
    """
    import requests
    if fileout is not None and not overwrite and os.path.isfile( fileout ):
        if verbose:
            print("%s already exists: skipped"%fileout)
        return
    else:
        if verbose and fileout:
            print("downloading %s -> %s"%(url,fileout))

    # POST when a payload is provided, plain GET otherwise
    request_fnc = "get" if not "data" in kwargs else "post"
    response = getattr(requests,request_fnc)(url, **kwargs)
    if response.status_code != 200:
        print("Issue downloading")
        print("response.status_code: ", response.status_code)
        return
    if fileout is None:
        # bugfix: previously fell through to open(None) -> TypeError;
        # the docstring promises the data is returned in this case
        return response.content
    if fileout in ["BytesIO", "StringIO"]:
        return getattr(io, fileout)(response.content)
    if mkdir:
        # bugfix: the mkdir flag was accepted but never acted upon
        dirname = os.path.dirname(fileout)
        if dirname and not os.path.isdir(dirname):
            os.makedirs(dirname)
    with open(fileout, 'wb') as f:
        for data in response.iter_content(chunk):
            f.write(data)
82ff5d0e01423b6aa8e09017b5e960b9e3f26a20 | 1,296 | py | Python | analysis/model_get_spike_stats_active_inputs.py | danielmk/pyDentateeLife2020 | b4a9f2beaa0c74dbc9583e2cf228856612596f8a | [
"MIT"
] | null | null | null | analysis/model_get_spike_stats_active_inputs.py | danielmk/pyDentateeLife2020 | b4a9f2beaa0c74dbc9583e2cf228856612596f8a | [
"MIT"
] | null | null | null | analysis/model_get_spike_stats_active_inputs.py | danielmk/pyDentateeLife2020 | b4a9f2beaa0c74dbc9583e2cf228856612596f8a | [
"MIT"
] | 4 | 2020-02-18T09:25:20.000Z | 2021-11-20T23:52:29.000Z | # -*- coding: utf-8 -*-
"""
Created on Sun Mar 11 10:25:41 2018
@author: daniel
"""
import shelve
import numpy as np
import matplotlib.pyplot as plt
import os
import pdb
#Home PC
#directory = "C:\\Users\\daniel\\repos\\pyDentate\paradigm_pattern-separation_saves_2018-03-11\\"
#Office PC
#directory = "Y:\\DanielM\\023_Dentate Gyrus Model\\paradigm_spatial-inhibition\\"
#Dropbox
def get_spike_stats(data_path):
    """Collect per-pattern spike counts from every ``.npz`` file in *data_path*
    and write them to ``1_n_spikes.txt`` inside the same directory.

    Each ``.npz`` file is expected to hold a single 2D array under the default
    key ``arr_0`` with shape (patterns, time-bins).  For every pattern that
    fired at least once, the total spike count is computed; the mean over those
    active patterns is appended as the last column of the output row.

    Files whose names contain 'norm' or 'trifilt' are skipped (post-processed
    data rather than raw spike rasters).
    """
    data_files = [f for f in os.listdir(data_path)
                  if os.path.isfile(os.path.join(data_path, f))
                  and '.npz' in f and not 'norm' in f and not 'trifilt' in f]
    data_files.sort()
    n_spike_list = []
    for fname in data_files:
        print(fname)
        # Bugfix: the original concatenated data_path + fname here while the
        # isfile() filter above used os.path.join, so loading failed whenever
        # data_path lacked a trailing separator.  Join consistently instead.
        # NOTE(review): mmap_mode appears to be a no-op for zipped .npz
        # archives -- confirm against the numpy version in use.
        curr_data = np.load(os.path.join(data_path, fname), mmap_mode='r')['arr_0']
        # Row indices of patterns that spiked at least once.
        active_patterns = np.argwhere(np.any(curr_data, axis=1))[:, 0]
        n_spikes = curr_data[active_patterns, :].sum(axis=1)
        # Append the mean over active patterns as a summary column.
        n_spikes = np.append(n_spikes, n_spikes.mean())
        n_spike_list.append(n_spikes)
    np.savetxt(os.path.join(data_path, "1_n_spikes.txt"),
               np.array(n_spike_list), delimiter='\t')
if __name__ == '__main__':
    # NOTE(review): hard-coded Windows network-share path; adjust before
    # running on another machine.  The trailing backslash is required by the
    # string concatenation inside get_spike_stats.
    data_path = "Z:\\pyDentate\\pyDentateData\\pattern_separation_data_local_30Hz_input\\seed10006\\input_patterns\\"
    get_spike_stats(data_path)
| 33.230769 | 156 | 0.695216 |
3bc6fa5ec0c086333fc87c6d894830e397a36b5a | 3,212 | py | Python | app/app/settings.py | VishnuSuresh2000/recipe-api | e162511568e37a98186a129079a3605f48c4dd14 | [
"MIT"
] | null | null | null | app/app/settings.py | VishnuSuresh2000/recipe-api | e162511568e37a98186a129079a3605f48c4dd14 | [
"MIT"
] | null | null | null | app/app/settings.py | VishnuSuresh2000/recipe-api | e162511568e37a98186a129079a3605f48c4dd14 | [
"MIT"
] | null | null | null | """
Django settings for app project.
Generated by 'django-admin startproject' using Django 4.0.1.
For more information on this file, see
https://docs.djangoproject.com/en/4.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/4.0/ref/settings/
"""
from pathlib import Path

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; load it from an
# environment variable for any real deployment.
SECRET_KEY = 'django-insecure-+hqs0wp3#qmf^z0z(dm!(0h&@i%o^ygcvrpt!_e+)kbh@we23y'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'app.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'app.wsgi.application'


# Database
# https://docs.djangoproject.com/en/4.0/ref/settings/#databases

# Development default: a local SQLite file next to the project root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}


# Password validation
# https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/4.0/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.0/howto/static-files/

STATIC_URL = 'static/'

# Default primary key field type
# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field

DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| 25.903226 | 91 | 0.700187 |
9e87641dfc171dcd6c18826fcc20b42c3426a276 | 985 | py | Python | CloudDisk/urls.py | MartinMa28/Cloud-Disk-Server | 31d6dfda2c608bdaab351c9fe8c22a9da927cd1d | [
"MIT"
] | 1 | 2020-04-25T02:21:08.000Z | 2020-04-25T02:21:08.000Z | CloudDisk/urls.py | MartinMa28/Cloud-Disk-Server | 31d6dfda2c608bdaab351c9fe8c22a9da927cd1d | [
"MIT"
] | 2 | 2021-03-30T13:10:00.000Z | 2021-06-10T18:57:26.000Z | CloudDisk/urls.py | MartinMa28/Cloud-Disk-Server | 31d6dfda2c608bdaab351c9fe8c22a9da927cd1d | [
"MIT"
] | null | null | null | """CloudDisk URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
# Root URL map: everything except /admin/ is delegated to the
# file_receiver application's own urls module.
urlpatterns = [
    path('', include('file_receiver.urls')),
    path('admin/', admin.site.urls),
]
# if settings.DEBUG:
# urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | 36.481481 | 82 | 0.722843 |
bcb4c42969b6daf505596dada7c633225285148a | 1,952 | py | Python | wye/profiles/migrations/0001_initial.py | salmanulfarzy/wye | a52c15725f44688243c4b63ff7375553c7002d7b | [
"MIT"
] | 75 | 2015-08-27T04:16:17.000Z | 2022-01-05T13:59:46.000Z | wye/profiles/migrations/0001_initial.py | salmanulfarzy/wye | a52c15725f44688243c4b63ff7375553c7002d7b | [
"MIT"
] | 396 | 2015-09-13T04:50:58.000Z | 2022-03-11T23:25:50.000Z | wye/profiles/migrations/0001_initial.py | taranjeet/wye | ac4cc23d38cf2e72f87a0c1d26fff0316645c1ea | [
"MIT"
] | 112 | 2015-08-30T12:58:50.000Z | 2021-01-31T17:02:31.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for the profiles app (auto-generated by Django).

    Creates the Profile model (one-to-one with the user model, keyed on the
    user) and the UserType lookup table, then links them via a many-to-many
    field.  NOTE(review): 'active' uses BooleanField(default=1); Django
    coerces the int to True, but default=True would be clearer -- do not
    edit an applied migration, fix it in a follow-up one if desired.
    """

    dependencies = [
        ('auth', '0006_require_contenttypes_0002'),
        ('regions', '0001_initial'),
        ('workshops', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('user', models.OneToOneField(primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL, related_name='profile')),
                ('slug', models.CharField(max_length=100, unique=True)),
                ('mobile', models.CharField(max_length=10)),
                ('interested_locations', models.ManyToManyField(to='regions.Location')),
                ('interested_sections', models.ManyToManyField(to='workshops.WorkshopSections')),
            ],
            options={
                'db_table': 'user_profile',
                'verbose_name': 'UserProfile',
                'verbose_name_plural': 'UserProfiles',
            },
        ),
        migrations.CreateModel(
            name='UserType',
            fields=[
                ('id', models.AutoField(primary_key=True, auto_created=True, verbose_name='ID', serialize=False)),
                ('slug', models.CharField(max_length=100, verbose_name='slug')),
                ('display_name', models.CharField(max_length=300, verbose_name='Display Name')),
                ('active', models.BooleanField(default=1)),
            ],
            options={
                'db_table': 'users_type',
                'ordering': ('-id',),
                'verbose_name_plural': 'UserTypes',
                'verbose_name': 'UserType',
            },
        ),
        migrations.AddField(
            model_name='profile',
            name='usertype',
            field=models.ManyToManyField(to='profiles.UserType'),
        ),
    ]
| 36.830189 | 135 | 0.555328 |
fcc724d2bee395159786083d98a634c764ea9db0 | 12,879 | py | Python | models/mobilenetv2exfuse.py | headupinclouds/LightNet | 04af22501d644b30a93b30b28b32163d60ae2266 | [
"MIT"
] | 737 | 2018-03-21T12:28:04.000Z | 2021-07-07T16:03:09.000Z | models/mobilenetv2exfuse.py | headupinclouds/LightNet | 04af22501d644b30a93b30b28b32163d60ae2266 | [
"MIT"
] | 23 | 2018-03-22T01:19:53.000Z | 2021-03-26T15:08:26.000Z | models/mobilenetv2exfuse.py | headupinclouds/LightNet | 04af22501d644b30a93b30b28b32163d60ae2266 | [
"MIT"
] | 189 | 2018-03-22T08:55:43.000Z | 2021-07-01T12:14:08.000Z | import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from modules import SCSEBlock, InPlaceABN, InPlaceABNWrapper, RFBlock
from modules.misc import InvertedResidual, conv_bn
from modules.exfuse import SemanticSupervision
from collections import OrderedDict
from functools import partial
class MobileNetV2ExFuse(nn.Module):
    def __init__(self, n_class=19, in_size=(448, 896), width_mult=1.,
                 out_sec=256, norm_act=InPlaceABN, traval="train"):
        """
        MobileNetV2ExFuse: MobileNetV2-based semantic segmentation backbone
        with ExFuse-style semantic supervision heads.
        :param n_class:    (int) Number of classes (0 disables the decoder/heads)
        :param in_size:    (tuple) (H, W) of the input image; both must be multiples of 8
        :param width_mult: (float) Network width multiplier
        :param out_sec:    (int) Number of output channels of the RFBlock
        :param norm_act:   Normalization+activation layer factory (e.g. InPlaceABN)
        :param traval:     (str) "train" selects the auxiliary-supervision forward path
        """
        super(MobileNetV2ExFuse, self).__init__()

        self.n_class = n_class
        self.traval = traval

        # setting of inverted residual blocks
        # t = expansion ratio, c = output channels, n = repeats,
        # s = stride of the first repeat, d = dilation
        self.interverted_residual_setting = [
            # t, c, n, s, d
            [1, 16, 1, 1, 1],    # 1/2
            [6, 24, 2, 2, 1],    # 1/4
            [6, 32, 3, 2, 1],    # 1/8
            [6, 64, 4, 1, 2],    # 1/8
            [6, 96, 3, 1, 4],    # 1/8
            [6, 160, 3, 1, 8],   # 1/8
            [6, 320, 1, 1, 16],  # 1/8
        ]

        # building first layer
        assert in_size[0] % 8 == 0
        assert in_size[1] % 8 == 0

        self.input_size = in_size
        input_channel = int(32 * width_mult)
        self.mod1 = nn.Sequential(OrderedDict([("conv1", conv_bn(inp=3, oup=input_channel, stride=2))]))

        # building inverted residual blocks (registered as mod2..mod8)
        mod_id = 0
        for t, c, n, s, d in self.interverted_residual_setting:
            output_channel = int(c * width_mult)

            # Create blocks for module
            blocks = []
            for block_id in range(n):
                # Only the first repeat of a stage may downsample; the rest
                # keep stride 1 and apply the stage's dilation.
                if block_id == 0 and s == 2:
                    blocks.append(("block%d" % (block_id + 1), InvertedResidual(inp=input_channel,
                                                                                oup=output_channel,
                                                                                stride=s,
                                                                                dilate=1,
                                                                                expand_ratio=t)))
                else:
                    blocks.append(("block%d" % (block_id + 1), InvertedResidual(inp=input_channel,
                                                                                oup=output_channel,
                                                                                stride=1,
                                                                                dilate=d,
                                                                                expand_ratio=t)))
                input_channel = output_channel

            self.add_module("mod%d" % (mod_id + 2), nn.Sequential(OrderedDict(blocks)))
            mod_id += 1

        # building last several layers
        # Channel count of the concatenation of all seven stage outputs.
        org_last_chns = (self.interverted_residual_setting[0][1] +
                         self.interverted_residual_setting[1][1] +
                         self.interverted_residual_setting[2][1] +
                         self.interverted_residual_setting[3][1] +
                         self.interverted_residual_setting[4][1] +
                         self.interverted_residual_setting[5][1] +
                         self.interverted_residual_setting[6][1])

        self.last_channel = int(org_last_chns * width_mult) if width_mult > 1.0 else org_last_chns
        self.out_se = nn.Sequential(SCSEBlock(channel=self.last_channel, reduction=16))

        if self.n_class != 0:
            self.rfblock = nn.Sequential(RFBlock(in_chs=self.last_channel, out_chs=out_sec,
                                                 scale=1.0, feat_res=(int(in_size[0] / 8), int(in_size[1] / 8)),
                                                 up_ratio=2, norm_act=norm_act))

            # Channels contributed by the early stage-1/stage-2 skip features.
            in_stag2_up_chs = self.interverted_residual_setting[1][1] + self.interverted_residual_setting[0][1]
            self.score_se = nn.Sequential(SCSEBlock(channel=out_sec + in_stag2_up_chs, reduction=16))
            self.score = nn.Sequential(OrderedDict([("norm.1", norm_act(out_sec + in_stag2_up_chs)),
                                                    ("conv.1", nn.Conv2d(out_sec + in_stag2_up_chs,
                                                                         out_sec + in_stag2_up_chs,
                                                                         kernel_size=3, stride=1, padding=2,
                                                                         dilation=2, bias=False)),
                                                    ("norm.2", norm_act(out_sec + in_stag2_up_chs)),
                                                    ("conv.2", nn.Conv2d(out_sec + in_stag2_up_chs, self.n_class,
                                                                         kernel_size=1, stride=1, padding=0,
                                                                         bias=True)),
                                                    ("up1", nn.Upsample(size=in_size, mode='bilinear'))]))

            # One semantic-supervision head per encoder stage (ExFuse).
            self.sesuper1 = SemanticSupervision(in_chns=self.interverted_residual_setting[0][1],
                                                out_chns=self.n_class)
            self.sesuper2 = SemanticSupervision(in_chns=self.interverted_residual_setting[1][1],
                                                out_chns=self.n_class)
            self.sesuper3 = SemanticSupervision(in_chns=self.interverted_residual_setting[2][1],
                                                out_chns=self.n_class)
            self.sesuper4 = SemanticSupervision(in_chns=self.interverted_residual_setting[3][1],
                                                out_chns=self.n_class)
            self.sesuper5 = SemanticSupervision(in_chns=self.interverted_residual_setting[4][1],
                                                out_chns=self.n_class)
            self.sesuper6 = SemanticSupervision(in_chns=self.interverted_residual_setting[5][1],
                                                out_chns=self.n_class)
            self.sesuper7 = SemanticSupervision(in_chns=self.interverted_residual_setting[6][1],
                                                out_chns=self.n_class)

        self._initialize_weights()

    def _initialize_weights(self):
        """Kaiming-style init for convs, unit/zero init for batch norms,
        small-normal init for linears."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2.0 / n))
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                m.weight.data.normal_(0, 0.01)
                m.bias.data.zero_()

    # +++++++++++++++++++++++++++++++++++++++++++++++++++ #
    # channel_shuffle: shuffle channels in groups
    # +++++++++++++++++++++++++++++++++++++++++++++++++++ #
    @staticmethod
    def _channel_shuffle(x, groups):
        """
        Channel shuffle operation
        :param x: input tensor
        :param groups: split channels into groups
        :return: channel shuffled tensor
        """
        batch_size, num_channels, height, width = x.data.size()

        channels_per_group = num_channels // groups

        # reshape
        x = x.view(batch_size, groups, channels_per_group, height, width)

        # transpose
        # - contiguous() required if transpose() is used before view().
        #   See https://github.com/pytorch/pytorch/issues/764
        x = torch.transpose(x, 1, 2).contiguous().view(batch_size, -1, height, width)

        return x

    def forward(self, x):
        # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
        # 1. Encoder: feature extraction
        # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
        stg1 = self.mod1(x)     # (N, 32, 224, 448)  1/2
        stg1 = self.mod2(stg1)  # (N, 16, 224, 448)  1/2 -> 1/4 -> 1/8
        stg2 = self.mod3(stg1)  # (N, 24, 112, 224)  1/4 -> 1/8
        stg3 = self.mod4(stg2)  # (N, 32, 56, 112)   1/8
        stg4 = self.mod5(stg3)  # (N, 64, 56, 112)   1/8 dilation=2
        stg5 = self.mod6(stg4)  # (N, 96, 56, 112)   1/8 dilation=4
        stg6 = self.mod7(stg5)  # (N, 160, 56, 112)  1/8 dilation=8
        stg7 = self.mod8(stg6)  # (N, 320, 56, 112)  1/8 dilation=16

        # Downsample the early skip features to 1/8 resolution so they can
        # be concatenated with the deeper stages.
        stg1_1 = F.max_pool2d(input=stg1, kernel_size=3, stride=2, ceil_mode=True)    # 1/4
        stg1_2 = F.max_pool2d(input=stg1_1, kernel_size=3, stride=2, ceil_mode=True)  # 1/8
        stg2_1 = F.max_pool2d(input=stg2, kernel_size=3, stride=2, ceil_mode=True)    # 1/8

        # (N, 672, 56, 112) 1/8  (16+24+32+64+96+160+320)
        stg8 = self.out_se(torch.cat([stg3, stg4, stg5, stg6, stg7, stg1_2, stg2_1], dim=1))
        # stg8 = torch.cat([stg3, stg4, stg5, stg6, stg7, stg1_2, stg2_1], dim=1)

        # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
        # 2. Decoder: multi-scale feature fusion
        # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
        if self.traval == "train" and self.n_class != 0:
            # (N, 672, H/8, W/8) -> (N, 256, H/4, W/4)
            de_stg1 = self.rfblock(stg8)

            # (N, 256+24+16=296, H/4, W/4)
            de_stg1 = self.score_se(torch.cat([de_stg1, stg2, stg1_1], dim=1))

            # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
            # 3. Classifier: pixel-wise classification-segmentation
            # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
            net_out = self.score(de_stg1)

            # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
            # 4. Auxiliary supervision: semantic supervision
            # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
            enc1 = self.sesuper1(stg1)
            enc2 = self.sesuper2(stg2)
            enc3 = self.sesuper3(stg3)
            enc4 = self.sesuper4(stg4)
            enc5 = self.sesuper5(stg5)
            enc6 = self.sesuper6(stg6)
            enc7 = self.sesuper7(stg7)

            return enc1, enc2, enc3, enc4, enc5, enc6, enc7, net_out
        # NOTE(review): this condition is identical to the branch above, so
        # the following block is unreachable dead code.  Presumably it was
        # meant to handle evaluation mode (traval != "train" with
        # n_class != 0) -- confirm the intended condition before fixing.
        elif self.traval == "train" and self.n_class != 0:
            # (N, 672, H/8, W/8) -> (N, 256, H/4, W/4)
            de_stg1 = self.rfblock(stg8)

            # (N, 256+24+16=296, H/4, W/4)
            de_stg1 = self.score_se(torch.cat([de_stg1, stg2, stg1_1], dim=1))

            # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
            # 3. Classifier: pixel-wise classification-segmentation
            # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
            return self.score(de_stg1)
        else:
            return stg8
if __name__ == '__main__':
    # Smoke/benchmark loop: builds the model on GPU, loads pretrained
    # weights and repeatedly times forward/backward on random tensors.
    # Requires CUDA and the project's scripts.loss module; runs forever
    # by design (Ctrl-C to stop).
    import os
    import time
    from scripts.loss import *
    from torch.autograd import Variable

    net_h, net_w = 384, 768
    os.environ["CUDA_VISIBLE_DEVICES"] = "1,0"

    model = MobileNetV2ExFuse(n_class=19, in_size=(net_h, net_w), width_mult=1.0, out_sec=256,
                              norm_act=partial(InPlaceABNWrapper, activation="leaky_relu", slope=0.1),
                              traval="train")
    model = torch.nn.DataParallel(model, device_ids=[0]).cuda()

    # Load only the pretrained weights whose keys match this architecture.
    model_dict = model.state_dict()
    pre_weight = torch.load("/zfs/zhang/TrainLog/weights/cityscapes_mobilenetv2_gtfine_best_model.pkl")["model_state"]
    pretrained_dict = {k: v for k, v in pre_weight.items() if k in model_dict}
    model_dict.update(pretrained_dict)
    model.load_state_dict(model_dict)

    del pre_weight
    del model_dict
    del pretrained_dict

    optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.90, weight_decay=5e-4)
    loss_fn = bootstrapped_cross_entropy2d

    i = 0
    while True:
        i += 1
        print("iter :", i)
        model.train()

        dummy_input = Variable(torch.rand(1, 3, net_h, net_w).cuda(), requires_grad=True)
        dummy_target = Variable(torch.rand(1, net_h, net_w).cuda(), requires_grad=False).long()

        start_time = time.time()
        dummy_out = model(dummy_input)
        print("> Inference Time: {}".format(time.time() - start_time))

        optimizer.zero_grad()

        topk = 512 * 256
        loss = loss_fn(dummy_out, dummy_target, K=topk)
        # NOTE(review): loss.data[0] only works on 0.3-era PyTorch; on
        # modern versions this raises -- use loss.item() when upgrading.
        print("> Loss: {}".format(loss.data[0]))

        loss.backward()
        optimizer.step()
| 47.349265 | 118 | 0.487538 |
d43de291ccc21c1e26c87cfc2fe006547be7a462 | 245 | py | Python | Test Codes And Data/Detect_Handwrite_1.py | hmnk-1967/OCR-Python-Project-CS-BUIC | 28c72d9913a25655f6183a7b960e527a0432c8e1 | [
"MIT"
] | null | null | null | Test Codes And Data/Detect_Handwrite_1.py | hmnk-1967/OCR-Python-Project-CS-BUIC | 28c72d9913a25655f6183a7b960e527a0432c8e1 | [
"MIT"
] | null | null | null | Test Codes And Data/Detect_Handwrite_1.py | hmnk-1967/OCR-Python-Project-CS-BUIC | 28c72d9913a25655f6183a7b960e527a0432c8e1 | [
"MIT"
] | null | null | null | #For Test Image 1
import pytesseract
import cv2

# Load the handwriting sample; cv2.imread returns None (no exception) when
# the file is missing or unreadable, so fail loudly instead of letting the
# threshold call crash with a confusing error.
img = cv2.imread('/HandwritingTest.jpg')
if img is None:
    raise FileNotFoundError("Could not read image: /HandwritingTest.jpg")

# Binarize: pixels above 123 become white (255), the rest black.
thresh, binary = cv2.threshold(img, 123, 255, cv2.THRESH_BINARY)
# OCR with English language data, LSTM engine (--oem 1), single text line (--psm 7).
tess = pytesseract.image_to_string(binary, config='-l eng --oem 1 --psm 7')
print(tess)
| 22.272727 | 75 | 0.738776 |
6e68cecb7a8ae0d7ec14136c76dac75319df53f9 | 16,330 | py | Python | src/build/toolset.py | loongson-zn/build | d4bedebfa046b763c316e31c98b48ed2779741b9 | [
"BSL-1.0"
] | 215 | 2015-01-10T17:16:34.000Z | 2022-02-23T15:22:08.000Z | src/build/toolset.py | loongson-zn/build | d4bedebfa046b763c316e31c98b48ed2779741b9 | [
"BSL-1.0"
] | 594 | 2015-01-22T16:17:55.000Z | 2022-02-26T22:11:01.000Z | src/build/toolset.py | loongson-zn/build | d4bedebfa046b763c316e31c98b48ed2779741b9 | [
"BSL-1.0"
] | 302 | 2015-02-03T01:20:29.000Z | 2022-02-12T07:01:28.000Z | # Status: being ported by Vladimir Prus
# Base revision: 40958
#
# Copyright 2003 Dave Abrahams
# Copyright 2005 Rene Rivera
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
""" Support for toolset definition.
"""
import sys
import feature, property, generators, property_set
import b2.util.set
import bjam
from b2.util import cached, qualify_jam_action, is_iterable_typed, is_iterable
from b2.util.utility import *
from b2.util import bjam_signature, sequence
from b2.manager import get_manager
# Regex helpers for picking apart dotted rule names like 'gcc.compile.c++'.
__re_split_last_segment = re.compile (r'^(.+)\.([^\.])*')  # all-but-last segment / last segment
__re_two_ampersands = re.compile ('(&&)')                  # '&&'-separated value lists
__re_first_segment = re.compile ('([^.]*).*')              # leading segment (module name)
__re_first_group = re.compile (r'[^.]*\.(.*)')             # everything after the first dot
# NOTE(review): despite the name, this is True when the command-line flag is
# *absent*, i.e. when toolset requirements should be honoured -- see
# add_requirements(), which only extends the list when this is True.
_ignore_toolset_requirements = '--ignore-toolset-requirements' not in sys.argv
# Flag is a mechanism to set a value
# A single toolset flag. Specifies that when certain
# properties are in build property set, certain values
# should be appended to some variable.
#
# A flag applies to a specific action in specific module.
# The list of all flags for a module is stored, and each
# flag further contains the name of the rule it applies
# for,
class Flag:
    """A single toolset flag setting: when one of 'condition' (a list of
    PropertySet) is satisfied by the build properties, 'values' are appended
    to 'variable_name' on targets built by 'rule' (or by any rule of a
    module, when the flag was declared for a whole module)."""

    def __init__(self, variable_name, values, condition, rule = None):
        assert isinstance(variable_name, basestring)
        assert is_iterable(values) and all(
            isinstance(v, (basestring, type(None))) for v in values)
        assert is_iterable_typed(condition, property_set.PropertySet)
        assert isinstance(rule, (basestring, type(None)))
        self.variable_name = variable_name
        self.values = values
        self.condition = condition
        self.rule = rule

    def __str__(self):
        return("Flag(" + str(self.variable_name) + ", " + str(self.values) +\
               ", " + str(self.condition) + ", " + str(self.rule) + ")")
def reset ():
    """ Clear the module state. This is mainly for testing purposes.
    """
    global __module_flags, __flags, __stv

    # Mapping from module name to a list of all flags that apply
    # to either that module directly, or to any rule in that module.
    # Each element of the list is Flag instance.
    # So, for module named xxx this might contain flags for 'xxx',
    # for 'xxx.compile', for 'xxx.compile.c++', etc.
    __module_flags = {}

    # Mapping from specific rule or module name to a list of Flag instances
    # that apply to that name.
    # Say, it might contain flags for 'xxx.compile.c++'. If there are
    # entries for module name 'xxx', they are flags for 'xxx' itself,
    # not including any rules in that module.
    __flags = {}

    # A cache for variable settings. The key is generated from the rule name and the properties.
    __stv = {}

# Initialise the module state on import.
reset ()
# FIXME: --ignore-toolset-requirements
def using(toolset_module, *args):
    """Load the named toolset module and initialise it with *args*.

    'toolset_module' may be a plain string or a one-element list/tuple (as
    produced by bjam argument lists); only the first element is used in the
    latter case.
    """
    if isinstance(toolset_module, (list, tuple)):
        toolset_module = toolset_module[0]
    module = get_manager().projects().load_module(toolset_module, [os.getcwd()])
    module.init(*args)
# FIXME push-checking-for-flags-module ....
# FIXME: investigate existing uses of 'hack-hack' parameter
# in jam code.
@bjam_signature((["rule_or_module", "variable_name", "condition", "*"],
                 ["values", "*"]))
def flags(rule_or_module, variable_name, condition, values = []):
    """ Specifies the flags (variables) that must be set on targets under certain
        conditions, described by arguments.
        rule_or_module:   If contains dot, should be a rule name.
                          The flags will be applied when that rule is
                          used to set up build actions.

                          If does not contain dot, should be a module name.
                          The flags will be applied for all rules in that
                          module.

                          If module for rule is different from the calling
                          module, an error is issued.

         variable_name:   Variable that should be set on target

         condition        A condition when this flag should be applied.
                          Should be set of property sets. If one of
                          those property sets is contained in build
                          properties, the flag will be used.
                          Implied values are not allowed:
                          "<toolset>gcc" should be used, not just
                          "gcc". Subfeatures, like in "<toolset>gcc-3.2"
                          are allowed. If left empty, the flag will
                          always used.

                          Property sets may use value-less properties
                          ('<a>'  vs. '<a>value') to match absent
                          properties. This allows to separately match

                             <architecture>/<address-model>64
                             <architecture>ia64/<address-model>

                          Where both features are optional. Without this
                          syntax we'd be forced to define "default" value.

         values:          The value to add to variable. If <feature>
                          is specified, then the value of 'feature'
                          will be added.
    """
    assert isinstance(rule_or_module, basestring)
    assert isinstance(variable_name, basestring)
    assert is_iterable_typed(condition, basestring)
    # NOTE: the mutable [] default is safe here -- 'values' is only rebound
    # below, never mutated in place.
    assert is_iterable(values) and all(isinstance(v, (basestring, type(None))) for v in values)
    caller = bjam.caller()
    if not '.' in rule_or_module and caller and caller[:-1].startswith("Jamfile"):
        # Unqualified rule name, used inside Jamfile. Most likely used with
        # 'make' or 'notfile' rules. This prevents setting flags on the entire
        # Jamfile module (this will be considered as rule), but who cares?
        # Probably, 'flags' rule should be split into 'flags' and
        # 'flags-on-module'.
        rule_or_module = qualify_jam_action(rule_or_module, caller)
    else:
        # FIXME: revive checking that we don't set flags for a different
        # module unintentionally
        pass

    if condition and not replace_grist (condition, ''):
        # We have condition in the form '<feature>', that is, without
        # value. That's a previous syntax:
        #
        #   flags gcc.link RPATH <dll-path> ;
        # for compatibility, convert it to
        #   flags gcc.link RPATH : <dll-path> ;
        values = [ condition ]
        condition = None

    if condition:
        transformed = []
        for c in condition:
            # FIXME: 'split' might be a too raw tool here.
            pl = [property.create_from_string(s,False,True) for s in c.split('/')]
            pl = feature.expand_subfeatures(pl);
            transformed.append(property_set.create(pl))
        condition = transformed

        property.validate_property_sets(condition)

    __add_flag (rule_or_module, variable_name, condition, values)
def set_target_variables(manager, rule_or_module, targets, ps):
    """Set on each of 'targets' every variable implied for
    'rule_or_module' by the property set 'ps'."""
    assert isinstance(rule_or_module, basestring)
    assert is_iterable_typed(targets, basestring)
    assert isinstance(ps, property_set.PropertySet)
    # __set_target_variables_aux returns (variable, value) pairs; an empty
    # result simply means nothing gets set.
    for variable, value in __set_target_variables_aux(manager, rule_or_module, ps):
        for target in targets:
            manager.engine().set_target_variable(target, variable, value, True)
def find_satisfied_condition(conditions, ps):
    """Return the first property set in 'conditions' all of whose properties
    are satisfied by 'ps', or None when no condition matches.

    A property with a value (e.g. '<architecture>x86') is satisfied when that
    value is among ps' values for the feature.  A value-less property
    (e.g. '<architecture>') is satisfied when 'ps' carries *no* value for the
    feature, which lets a condition match the default of an optional feature:

        property set     properties     result
        <a>              <b>foo         <b>foo     match
        <a>              <b>foo         <a>foo <b>foo no match
        <a>foo           <b>foo         <b>foo     no match
        <a>foo           <b>foo         <a>foo <b>foo match
    """
    assert is_iterable_typed(conditions, property_set.PropertySet)
    assert isinstance(ps, property_set.PropertySet)

    def satisfied(prop):
        # '<f>value' requires the value to be present; bare '<f>' requires
        # the feature to be absent from ps.
        actual = ps.get(prop.feature)
        return (prop.value in actual) if prop.value else not actual

    for condition in conditions:
        if all(satisfied(p) for p in condition.all()):
            return condition
    return None
def register (toolset):
    """ Registers a new toolset, making it a valid value of the
        <toolset> feature.
    """
    assert isinstance(toolset, basestring)
    feature.extend('toolset', [toolset])
def inherit_generators (toolset, properties, base, generators_to_ignore = []):
    """Clone every generator of the 'base' toolset into 'toolset', except
    those whose ids appear in 'generators_to_ignore'.  When 'properties' is
    empty, the clones are required to match <toolset>TOOLSET."""
    assert isinstance(toolset, basestring)
    assert is_iterable_typed(properties, basestring)
    assert isinstance(base, basestring)
    assert is_iterable_typed(generators_to_ignore, basestring)
    if not properties:
        properties = [replace_grist (toolset, '<toolset>')]

    base_generators = generators.generators_for_toolset(base)

    for g in base_generators:
        id = g.id()

        if not id in generators_to_ignore:
            # Some generator names have multiple periods in their name, so
            # $(id:B=$(toolset)) doesn't generate the right new_id name.
            # e.g. if id = gcc.compile.c++, $(id:B=darwin) = darwin.c++,
            # which is not what we want. Manually parse the base and suffix
            # (if there's a better way to do this, I'd love to see it.)
            # See also register in module generators.
            # NOTE(review): this rebinds the 'base' parameter inside the
            # loop; harmless since the parameter isn't used afterwards,
            # but a different local name would be clearer.
            (base, suffix) = split_action_id(id)

            new_id = toolset + '.' + suffix

            generators.register(g.clone(new_id, properties))
def inherit_flags(toolset, base, prohibited_properties = []):
    """Brings all flag definitions from the 'base' toolset into the 'toolset'
    toolset. Flag definitions whose conditions make use of properties in
    'prohibited-properties' are ignored. Don't confuse property and feature, for
    example <debug-symbols>on and <debug-symbols>off, so blocking one of them does
    not block the other one.

    The flag conditions are not altered at all, so if a condition includes a name,
    or version of a base toolset, it won't ever match the inheriting toolset. When
    such flag settings must be inherited, define a rule in base toolset module and
    call it as needed."""
    assert isinstance(toolset, basestring)
    assert isinstance(base, basestring)
    assert is_iterable_typed(prohibited_properties, basestring)
    for f in __module_flags.get(base, []):

        if not f.condition or b2.util.set.difference(f.condition, prohibited_properties):
            # Re-target the flag: 'base.rule' becomes 'toolset.rule', and a
            # module-level flag ('base') becomes a flag on 'toolset' itself.
            match = __re_first_group.match(f.rule)
            rule_ = None
            if match:
                rule_ = match.group(1)

            new_rule_or_module = ''

            if rule_:
                new_rule_or_module = toolset + '.' + rule_
            else:
                new_rule_or_module = toolset

            __add_flag (new_rule_or_module, f.variable_name, f.condition, f.values)
def inherit_rules(toolset, base):
    """Re-register every action of the 'base' toolset under 'toolset'.

    Each engine action named '<base>.<id>' is copied to '<toolset>.<id>',
    unless an action with that name has already been declared.
    """
    engine = get_manager().engine()

    inherited = {}
    for action_name, action in engine.actions.iteritems():
        module, id = split_action_id(action_name)
        if module != base:
            continue
        candidate = toolset + '.' + id
        # Make sure not to override any existing actions that may have
        # been declared already.
        if candidate not in engine.actions:
            inherited[candidate] = action

    engine.actions.update(inherited)
######################################################################################
# Private functions
@cached
def __set_target_variables_aux (manager, rule_or_module, ps):
    """ Given a rule name and a property set, returns a list of tuples of
        variables names and values, which must be set on targets for that
        rule/properties combination.
    """
    assert isinstance(rule_or_module, basestring)
    assert isinstance(ps, property_set.PropertySet)
    result = []

    for f in __flags.get(rule_or_module, []):

        if not f.condition or find_satisfied_condition (f.condition, ps):
            processed = []
            for v in f.values:
                # The value might be <feature-name> so needs special
                # treatment.
                processed += __handle_flag_value (manager, v, ps)

            for r in processed:
                result.append ((f.variable_name, r))

    # strip away last dot separated part and recurse.
    # This makes flags set on module 'xxx' (or rule 'xxx.compile') also
    # apply to rule 'xxx.compile.c++', etc.
    next = __re_split_last_segment.match(rule_or_module)

    if next:
        result.extend(__set_target_variables_aux(
            manager, next.group(1), ps))

    return result
def __handle_flag_value (manager, value, ps):
    """Expand a single flag value: a gristed value like '<feature>' is
    replaced by the values of that feature in 'ps' (with dependency features
    actualized and '&&'-joined path/free values split); a plain value is
    passed through.  Returns a list with duplicates removed, order kept."""
    assert isinstance(value, basestring)
    assert isinstance(ps, property_set.PropertySet)
    result = []

    if get_grist (value):
        f = feature.get(value)
        values = ps.get(f)

        for value in values:

            if f.dependency:
                # the value of a dependency feature is a target
                # and must be actualized
                result.append(value.actualize())

            elif f.path or f.free:

                # Treat features with && in the value
                # specially -- each &&-separated element is considered
                # separate value. This is needed to handle searched
                # libraries, which must be in specific order.
                if not __re_two_ampersands.search(value):
                    result.append(value)

                else:
                    result.extend(value.split ('&&'))
            else:
                result.append (value)
    else:
        result.append (value)

    return sequence.unique(result, stable=True)
def __add_flag (rule_or_module, variable_name, condition, values):
    """ Adds a new flag setting with the specified values.
        Does no checking.  The flag is recorded both under the full
        rule/module name and under the owning module's name.
    """
    assert isinstance(rule_or_module, basestring)
    assert isinstance(variable_name, basestring)
    assert is_iterable_typed(condition, property_set.PropertySet)
    assert is_iterable(values) and all(
        isinstance(v, (basestring, type(None))) for v in values)
    f = Flag(variable_name, values, condition, rule_or_module)

    # Grab the name of the module
    m = __re_first_segment.match (rule_or_module)
    assert m
    module = m.group(1)

    __module_flags.setdefault(module, []).append(f)
    __flags.setdefault(rule_or_module, []).append(f)
__requirements = []
def requirements():
"""Return the list of global 'toolset requirements'.
Those requirements will be automatically added to the requirements of any main target."""
return __requirements
def add_requirements(requirements):
    """Adds elements to the list of global 'toolset requirements'. The requirements
    will be automatically added to the requirements for all main targets, as if
    they were specified literally. For best results, all requirements added should
    be conditional or indirect conditional."""
    assert is_iterable_typed(requirements, basestring)

    # NOTE(review): this guard reads as "add only when ignoring toolset
    # requirements", which contradicts the docstring.  `_ignore_toolset_requirements`
    # is defined elsewhere in this module -- confirm whether its sense is
    # inverted at the definition site or whether a `not` is missing here.
    if _ignore_toolset_requirements:
        __requirements.extend(requirements)
# Make toolset 'toolset', defined in a module of the same name,
# inherit from 'base'
# 1. The 'init' rule from 'base' is imported into 'toolset' with full
# name. Another 'init' is called, which forwards to the base one.
# 2. All generators from 'base' are cloned. The ids are adjusted and
# <toolset> property in requires is adjusted too
# 3. All flags are inherited
# 4. All rules are imported.
def inherit(toolset, base):
    """Make toolset `toolset` inherit generators, flags and rules from `base`."""
    assert isinstance(toolset, basestring)
    assert isinstance(base, basestring)

    # Ensure the base toolset's module is loaded before copying from it.
    get_manager().projects().load_module(base, ['.'])

    inherit_generators(toolset, [], base)
    inherit_flags(toolset, base)
    inherit_rules(toolset, base)
| 39.066986 | 96 | 0.633068 |
468fe69081f4d470947517a6c3b3df7413ac0b32 | 56 | py | Python | tests/unit/test_loader.py | sinofseven/honmaru | 2148f3334f5afd243612e11ccfb3361496a7a843 | [
"MIT"
] | null | null | null | tests/unit/test_loader.py | sinofseven/honmaru | 2148f3334f5afd243612e11ccfb3361496a7a843 | [
"MIT"
] | null | null | null | tests/unit/test_loader.py | sinofseven/honmaru | 2148f3334f5afd243612e11ccfb3361496a7a843 | [
"MIT"
] | null | null | null | import loader
def test_tmp():
    # Smoke test: loader.tmp() must report success (truthy result).
    result = loader.tmp()
    assert result
| 9.333333 | 23 | 0.678571 |
56f500183e34c73d962ee71be63eb1c833d33e56 | 1,580 | py | Python | tests/test_geometry.py | phi-wol/3d-object-detection.pytorch | 9437e289ba878da2dbf03e7e7d4d7ae1eb9da486 | [
"MIT"
] | 6 | 2021-06-10T11:53:24.000Z | 2022-03-31T19:34:59.000Z | tests/test_geometry.py | phi-wol/3d-object-detection.pytorch | 9437e289ba878da2dbf03e7e7d4d7ae1eb9da486 | [
"MIT"
] | 6 | 2021-03-15T11:01:27.000Z | 2021-09-25T16:58:16.000Z | tests/test_geometry.py | phi-wol/3d-object-detection.pytorch | 9437e289ba878da2dbf03e7e7d4d7ae1eb9da486 | [
"MIT"
] | 2 | 2021-07-29T08:05:54.000Z | 2022-02-22T16:14:06.000Z | import numpy as np
from torchdet3d.utils import (lift_2d, get_default_camera_matrix,
convert_camera_matrix_2_ndc, project_3d_points,
convert_2d_to_ndc)
from objectron.dataset import iou
from objectron.dataset import box
class TestCasesGeometry:
    # Nine 2D keypoints in normalised image coordinates -- presumably the
    # projected box centre followed by the 8 box corners (objectron layout);
    # TODO confirm against torchdet3d's keypoint ordering.
    test_kps = np.array([[0.47714591, 0.47491544],
                         [0.73884577, 0.39749265],
                         [0.18508956, 0.40002537],
                         [0.74114597, 0.48664019],
                         [0.18273196, 0.48833901 ],
                         [0.64639187, 0.46719882],
                         [0.32766378, 0.46827659],
                         [0.64726073, 0.51853681],
                         [0.32699507, 0.51933688]])
    # Reprojection error tolerance (in NDC units).
    EPS = 1e-5
    # Minimum acceptable 3D IoU between clean and perturbed boxes.
    IOU_THR = 0.5

    def test_reprojection_error(self):
        # Lift 2D keypoints to 3D, project them back with the default camera,
        # and check the round trip reproduces the NDC inputs.
        # NOTE(review): np.any passes if a *single* point reprojects well;
        # np.all would assert the whole set -- confirm intent.
        kps_3d = lift_2d([self.test_kps], portrait=True)[0]
        reprojected_kps = project_3d_points(kps_3d, convert_camera_matrix_2_ndc(get_default_camera_matrix()))
        test_kps_ndc = convert_2d_to_ndc(self.test_kps, portrait=True)
        assert np.any(np.linalg.norm(test_kps_ndc - reprojected_kps, axis=1) < self.EPS)

    def test_3d_iou_stability(self):
        # Small 2D noise (<= 0.01, fixed seed) must not move the lifted 3D
        # box much: its IoU with the clean box stays above IOU_THR.
        np.random.seed(10)
        noisy_kps = np.clip(self.test_kps + 0.01*np.random.rand(*self.test_kps.shape), 0, 1)
        lifted_3d_sets = lift_2d([self.test_kps, noisy_kps], portrait=True)
        b1 = box.Box(vertices=lifted_3d_sets[0])
        b2 = box.Box(vertices=lifted_3d_sets[1])
        loss = iou.IoU(b1, b2)
        assert loss.iou() > self.IOU_THR
381795407791c526888e9e0e8279687404d703b3 | 1,504 | py | Python | utils.py | IMEplusplus/tinyctf-platform | 37759c08bd82e39b45a7182d525848db09a42c6e | [
"MIT"
] | 4 | 2016-03-04T22:34:08.000Z | 2016-04-23T18:21:49.000Z | utils.py | IMESec/imectf-platform | 37759c08bd82e39b45a7182d525848db09a42c6e | [
"MIT"
] | 5 | 2016-03-22T16:25:45.000Z | 2016-03-29T19:05:35.000Z | utils.py | IMEplusplus/tinyctf-platform | 37759c08bd82e39b45a7182d525848db09a42c6e | [
"MIT"
] | null | null | null | """
Low level deployment operations.
"""
from random import randint, Random
from os import path, makedirs
from spur import LocalShell
from time import time
from signal import SIGKILL
from crypt import crypt
class TimeoutError(Exception):
    """
    Exception dealing with executed commands that timeout.
    """
    # NOTE: shadows the builtin TimeoutError.  Raised by execute() with
    # (cmd, timeout) as arguments.
    pass
def execute(cmd, timeout=60, **kwargs):
    """
    Executes the given shell command.

    Args:
        cmd: List of command arguments, or a single string which is run
            through ``bash -c``
        timeout: maximum alloted time (seconds) for the command
        **kwargs: passes to LocalShell.spawn

    Returns:
        An execution result.

    Raises:
        TimeoutError: if the command exceeds ``timeout`` seconds
        NoSuchCommandError, RunProcessError, FileNotFoundError
    """
    from time import sleep  # local import: module only imports time()

    shell = LocalShell()
    # A plain string is almost certainly meant as a whole shell command
    # line (given how spur works), so wrap it in a bash invocation.
    if isinstance(cmd, str):
        cmd = ["bash", "-c"] + [cmd]
    process = shell.spawn(cmd, store_pid=True, **kwargs)
    start_time = time()
    while process.is_running():
        if time() - start_time > timeout:
            process.send_signal(SIGKILL)
            raise TimeoutError(cmd, timeout)
        # Avoid a 100% CPU busy-wait while polling the child process.
        sleep(0.05)
    return process.wait_for_result()
def create_user(username, password):
    """
    Creates a user account with the given username.

    Args:
        username: the username to create
        password: plaintext password, stored crypt(3)-hashed with salt "42"
    """
    home_dir = "/home/" + username
    useradd_cmd = [
        "useradd",
        "-s", "/bin/bash",
        "-g", "competitors",
        "-m",
        "-p", crypt(password, "42"),
        username,
    ]
    execute(useradd_cmd)
    # Restrict the home directory to its owner only.
    execute(["chmod", "700", home_dir])
0da277a2e97af30a73ddf920187b4ce45e7bd6d3 | 1,709 | py | Python | simplified_scrapy/request.py | yiyedata/simplified-scrapy | ccfdc686c53b2da3dac733892d4f184f6293f002 | [
"Apache-2.0"
] | 7 | 2019-08-11T10:31:03.000Z | 2021-03-08T10:07:52.000Z | simplified_scrapy/request.py | yiyedata/simplified-scrapy | ccfdc686c53b2da3dac733892d4f184f6293f002 | [
"Apache-2.0"
] | 1 | 2020-12-29T02:30:18.000Z | 2021-01-25T02:49:37.000Z | simplified_scrapy/request.py | yiyedata/simplified-scrapy | ccfdc686c53b2da3dac733892d4f184f6293f002 | [
"Apache-2.0"
] | 4 | 2019-10-22T02:14:35.000Z | 2021-05-13T07:01:56.000Z | #!/usr/bin/python
#coding=utf-8
from simplified_scrapy.core.request_helper import requestGet as _get, requestPost as _post, requestRaw as _raw
from simplified_scrapy.spider import Spider
from simplified_scrapy.core.mem_cookiestore import MemCookieStore as _store
class _MemSpider(Spider):
    # Minimal internal Spider so Request works without a user-supplied one;
    # cookies are kept in memory only.
    def __init__(self, name=None):
        # NOTE(review): the base Spider.__init__ is not called and `name`
        # is ignored -- confirm this is intentional for Spider's API.
        self.cookie_store = _store()
class Request():
    """Convenience HTTP client delegating to the request helpers, with
    cookie persistence handled by a spider (an in-memory one by default).
    """
    def __init__(self, ssp=None):
        # Fall back to a private in-memory spider when none is supplied.
        self._ssp_ = ssp if ssp else _MemSpider()

    def _cookie_spider(self, saveCookie):
        # Spider used for cookie persistence, or None to disable it.
        return self._ssp_ if saveCookie else None

    def get(self, url, header=None, timeout=30, useIp=False, saveCookie=True):
        return _get(url, header, useIp, self._cookie_spider(saveCookie),
                    timeout, True)

    def post(self,
             url,
             data=None,
             header=None,
             timeout=30,
             useIp=False,
             saveCookie=True,
             method=None):
        return _post(url, data, header, useIp, self._cookie_spider(saveCookie),
                     timeout, True, method)

    def raw(self,
            url,
            data=None,
            header=None,
            timeout=30,
            useIp=False,
            saveCookie=True,
            method=None):
        return _raw(url, data, header, useIp, self._cookie_spider(saveCookie),
                    timeout, True, method)

    def setEncoding(self, encodings):
        self._ssp_.encodings = encodings

    def getCookie(self, url):
        return self._ssp_.getCookie(url)

    def setCookie(self, url, cookie):
        self._ssp_.setCookie(url, cookie)

    def setCookieStore(self, cookieStore):
        self._ssp_.cookie_store = cookieStore
req = Request() | 27.564516 | 110 | 0.598596 |
3cd1e88012c510d344885c83651dedb0c4fcd7e9 | 880 | py | Python | scorum/utils/files.py | scorum/pyscorum | b78832a1994357db726333fac151c32d059d7ed7 | [
"MIT"
] | null | null | null | scorum/utils/files.py | scorum/pyscorum | b78832a1994357db726333fac151c32d059d7ed7 | [
"MIT"
] | 1 | 2019-02-22T14:43:22.000Z | 2019-02-22T14:43:22.000Z | scorum/utils/files.py | scorum/pyscorum | b78832a1994357db726333fac151c32d059d7ed7 | [
"MIT"
] | 1 | 2019-02-01T11:56:51.000Z | 2019-02-01T11:56:51.000Z | import os
import shutil
import tempfile
from contextlib import contextmanager
def create_dir(path, rewrite=False):
    """Create ``path``; if it already exists, wipe and recreate it only
    when ``rewrite`` is True."""
    try:
        os.mkdir(path)
    except FileExistsError:
        if not rewrite:
            return
        remove_dir_tree(path)
        os.mkdir(path)
def create_temp_dir(path, prefix=""):
    """Create and return a uniquely-named temp directory inside ``path``."""
    return tempfile.mkdtemp(dir=path, prefix=prefix)
def remove_dir_tree(path):
    """Recursively delete ``path``; a missing directory is not an error."""
    try:
        shutil.rmtree(path)
    except FileNotFoundError:
        pass
def remove_file(path):
    """Delete the file at ``path`` (raises if it does not exist)."""
    os.unlink(path)
def which(file):
    """Search $PATH for ``file``; return its full path, or '' when absent.

    Note: only existence is checked, not executability.
    """
    directories = os.environ["PATH"].split(os.pathsep)
    candidates = (os.path.join(directory, file) for directory in directories)
    return next((c for c in candidates if os.path.exists(c)), '')
@contextmanager
def write_to_tempfile(content):
    """
    Context manager yielding the path of a temporary file that contains
    ``content``.  The file is removed on exit.

    The previous implementation opened the NamedTemporaryFile a second
    time by name, which fails on Windows; writing through the original
    file descriptor avoids that.
    """
    fd, name = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'w') as f:
            f.write(content)
        yield name
    finally:
        try:
            os.remove(name)
        except FileNotFoundError:
            pass
824cf27ad96aa03875039c07b3fa1be75be54259 | 190 | py | Python | src/olympia/applications/api_urls.py | osamamagdy/addons-server | f7326c94d1d40c71eca991242288edf799146182 | [
"BSD-3-Clause"
] | 843 | 2016-02-09T13:00:37.000Z | 2022-03-20T19:17:06.000Z | src/olympia/applications/api_urls.py | osamamagdy/addons-server | f7326c94d1d40c71eca991242288edf799146182 | [
"BSD-3-Clause"
] | 10,187 | 2016-02-05T23:51:05.000Z | 2022-03-31T15:24:44.000Z | src/olympia/applications/api_urls.py | osamamagdy/addons-server | f7326c94d1d40c71eca991242288edf799146182 | [
"BSD-3-Clause"
] | 551 | 2016-02-08T20:32:16.000Z | 2022-03-15T16:49:24.000Z | from django.urls import path
from .views import AppVersionView
urlpatterns = [
path(
'<str:application>/<str:version>/', AppVersionView.as_view(), name='appversions'
),
]
| 17.272727 | 88 | 0.673684 |
f81bf24b81686c7b57d3c82c59fa96de7ae61def | 3,071 | py | Python | jcdb/jcdb/settings.py | vdslab/JCDb-scraper | 476b8142c189c8030dc436a9580fa751e0e60c0c | [
"MIT"
] | null | null | null | jcdb/jcdb/settings.py | vdslab/JCDb-scraper | 476b8142c189c8030dc436a9580fa751e0e60c0c | [
"MIT"
] | 1 | 2021-05-26T23:29:23.000Z | 2021-05-26T23:29:23.000Z | jcdb/jcdb/settings.py | vdslab/JCDb-scraper | 476b8142c189c8030dc436a9580fa751e0e60c0c | [
"MIT"
] | null | null | null | # Scrapy settings for jcdb project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://docs.scrapy.org/en/latest/topics/settings.html
# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
# Project identity and spider discovery for the jcdb scraper.
BOT_NAME = 'jcdb'

SPIDER_MODULES = ['jcdb.spiders']
NEWSPIDER_MODULE = 'jcdb.spiders'

# Emit feed exports as raw UTF-8 instead of ASCII-escaped sequences.
FEED_EXPORT_ENCODING = 'utf-8'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'jcdb (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
DOWNLOAD_DELAY = 1
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
# DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
# }
# Enable or disable spider middlewares
# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
# SPIDER_MIDDLEWARES = {
# 'jcdb.middlewares.JcdbSpiderMiddleware': 543,
# }
# Enable or disable downloader middlewares
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# DOWNLOADER_MIDDLEWARES = {
# 'jcdb.middlewares.JcdbDownloaderMiddleware': 543,
# }
# Enable or disable extensions
# See https://docs.scrapy.org/en/latest/topics/extensions.html
# EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
# }
# Configure item pipelines
# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# ITEM_PIPELINES = {
# 'jcdb.pipelines.JcdbPipeline': 300,
# }
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
HTTPCACHE_ENABLED = True
HTTPCACHE_EXPIRATION_SECS = 0
HTTPCACHE_DIR = 'httpcache'
HTTPCACHE_IGNORE_HTTP_CODES = []
HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| 33.747253 | 103 | 0.773364 |
3cb449c0502afe82cc8be1d3615ac32b0f707f8f | 425 | py | Python | A1_T1_6.py | DavidTF85/assigment1 | 2ca18c366425f7e1b9741db26a1bf8e66465d0c8 | [
"BSD-3-Clause"
] | null | null | null | A1_T1_6.py | DavidTF85/assigment1 | 2ca18c366425f7e1b9741db26a1bf8e66465d0c8 | [
"BSD-3-Clause"
] | null | null | null | A1_T1_6.py | DavidTF85/assigment1 | 2ca18c366425f7e1b9741db26a1bf8e66465d0c8 | [
"BSD-3-Clause"
] | null | null | null | # if formula is T(°F) = T(K) × 9/5 - 459.67
#check here URL: https://www.rapidtables.com/convert/temperature/
k = 300.00  # input temperature in Kelvin


def k_2_f(k):
    """Convert a temperature from Kelvin to degrees Fahrenheit.

    Formula: T(F) = T(K) * 9/5 - 459.67
    """
    return (k * 9 / 5) - 459.67


c = k_2_f(k)
b = int(c)
# Bug fix: the converted value is in Fahrenheit, not Celsius as the
# original message claimed.
print("Kelvin of " + str(k) + " is " + str(b) + " in Fahrenheit. ")
# The script prints the integer value so it is easier to read;
# to show the decimal value, print str(c) instead of str(b) above.
| 26.5625 | 68 | 0.611765 |
dc7cd0bce56f3b6f8d1f44a74f711eebeefaeed2 | 6,202 | py | Python | python/pyspark/conf.py | MiguelPeralvo/spark | 979a73f86f77e7ae294979b7962b8ae30d38f1ff | [
"Apache-2.0"
] | 73 | 2018-07-06T07:41:46.000Z | 2022-01-06T02:32:22.000Z | python/pyspark/conf.py | MiguelPeralvo/spark | 979a73f86f77e7ae294979b7962b8ae30d38f1ff | [
"Apache-2.0"
] | 6 | 2019-11-13T07:48:07.000Z | 2022-01-21T23:24:20.000Z | python/pyspark/conf.py | MiguelPeralvo/spark | 979a73f86f77e7ae294979b7962b8ae30d38f1ff | [
"Apache-2.0"
] | 44 | 2018-07-09T12:42:44.000Z | 2021-10-17T06:46:01.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
>>> from pyspark.conf import SparkConf
>>> from pyspark.context import SparkContext
>>> conf = SparkConf()
>>> conf.setMaster("local").setAppName("My app")
<pyspark.conf.SparkConf object at ...>
>>> conf.get("spark.master")
u'local'
>>> conf.get("spark.app.name")
u'My app'
>>> sc = SparkContext(conf=conf)
>>> sc.master
u'local'
>>> sc.appName
u'My app'
>>> sc.sparkHome is None
True
>>> conf = SparkConf(loadDefaults=False)
>>> conf.setSparkHome("/path")
<pyspark.conf.SparkConf object at ...>
>>> conf.get("spark.home")
u'/path'
>>> conf.setExecutorEnv("VAR1", "value1")
<pyspark.conf.SparkConf object at ...>
>>> conf.setExecutorEnv(pairs = [("VAR3", "value3"), ("VAR4", "value4")])
<pyspark.conf.SparkConf object at ...>
>>> conf.get("spark.executorEnv.VAR1")
u'value1'
>>> print conf.toDebugString()
spark.executorEnv.VAR1=value1
spark.executorEnv.VAR3=value3
spark.executorEnv.VAR4=value4
spark.home=/path
>>> sorted(conf.getAll(), key=lambda p: p[0])
[(u'spark.executorEnv.VAR1', u'value1'), (u'spark.executorEnv.VAR3', u'value3'), \
(u'spark.executorEnv.VAR4', u'value4'), (u'spark.home', u'/path')]
"""
__all__ = ['SparkConf']
class SparkConf(object):

    """
    Configuration for a Spark application. Used to set various Spark
    parameters as key-value pairs.

    Most of the time, you would create a SparkConf object with
    C{SparkConf()}, which will load values from C{spark.*} Java system
    properties as well. In this case, any parameters you set directly on
    the C{SparkConf} object take priority over system properties.

    For unit tests, you can also call C{SparkConf(false)} to skip
    loading external settings and get the same configuration no matter
    what the system properties are.

    All setter methods in this class support chaining. For example,
    you can write C{conf.setMaster("local").setAppName("My app")}.

    Note that once a SparkConf object is passed to Spark, it is cloned
    and can no longer be modified by the user.
    """

    def __init__(self, loadDefaults=True, _jvm=None, _jconf=None):
        """
        Create a new Spark configuration.

        :param loadDefaults: whether to load values from Java system
               properties (True by default)
        :param _jvm: internal parameter used to pass a handle to the
               Java VM; does not need to be set by users
        :param _jconf: Optionally pass in an existing SparkConf handle
               to use its parameters
        """
        if _jconf:
            # Wrap an existing JVM-side SparkConf as-is.
            self._jconf = _jconf
        else:
            # Import here to avoid a circular import with pyspark.context.
            from pyspark.context import SparkContext
            SparkContext._ensure_initialized()
            _jvm = _jvm or SparkContext._jvm
            self._jconf = _jvm.SparkConf(loadDefaults)

    def set(self, key, value):
        """Set a configuration property."""
        # Values are coerced to unicode before crossing into the JVM.
        self._jconf.set(key, unicode(value))
        return self

    def setIfMissing(self, key, value):
        """Set a configuration property, if not already set."""
        if self.get(key) is None:
            self.set(key, value)
        return self

    def setMaster(self, value):
        """Set master URL to connect to."""
        self._jconf.setMaster(value)
        return self

    def setAppName(self, value):
        """Set application name."""
        self._jconf.setAppName(value)
        return self

    def setSparkHome(self, value):
        """Set path where Spark is installed on worker nodes."""
        self._jconf.setSparkHome(value)
        return self

    def setExecutorEnv(self, key=None, value=None, pairs=None):
        """Set an environment variable to be passed to executors.

        Accepts either a single key/value pair or a list of pairs,
        but not both.
        """
        if (key is not None and pairs is not None) or (key is None and pairs is None):
            raise Exception("Either pass one key-value pair or a list of pairs")
        elif key is not None:
            self._jconf.setExecutorEnv(key, value)
        elif pairs is not None:
            for (k, v) in pairs:
                self._jconf.setExecutorEnv(k, v)
        return self

    def setAll(self, pairs):
        """
        Set multiple parameters, passed as a list of key-value pairs.

        :param pairs: list of key-value pairs to set
        """
        for (k, v) in pairs:
            self._jconf.set(k, v)
        return self

    def get(self, key, defaultValue=None):
        """Get the configured value for some key, or return a default otherwise."""
        if defaultValue is None:   # Py4J doesn't call the right get() if we pass None
            if not self._jconf.contains(key):
                return None
            return self._jconf.get(key)
        else:
            return self._jconf.get(key, defaultValue)

    def getAll(self):
        """Get all values as a list of key-value pairs."""
        pairs = []
        # Each element is a Scala Tuple2; _1()/_2() are its key and value.
        for elem in self._jconf.getAll():
            pairs.append((elem._1(), elem._2()))
        return pairs

    def contains(self, key):
        """Does this configuration contain a given key?"""
        return self._jconf.contains(key)

    def toDebugString(self):
        """
        Returns a printable version of the configuration, as a list of
        key=value pairs, one per line.
        """
        return self._jconf.toDebugString()
def _test():
import doctest
(failure_count, test_count) = doctest.testmod(optionflags=doctest.ELLIPSIS)
if failure_count:
exit(-1)
if __name__ == "__main__":
_test()
| 33.524324 | 86 | 0.650113 |
5d0ade2dc3e7c66710a7110d14baff18ae32e261 | 1,535 | py | Python | examples/mountain_car.py | kngwyu/rlpy | 329166de28d311d8f87358a62c38f40a7318fe07 | [
"BSD-3-Clause"
] | 3 | 2019-12-07T13:34:02.000Z | 2021-03-29T10:20:05.000Z | examples/mountain_car.py | kngwyu/rlpy | 329166de28d311d8f87358a62c38f40a7318fe07 | [
"BSD-3-Clause"
] | 14 | 2019-09-29T03:09:09.000Z | 2022-01-13T03:17:48.000Z | examples/mountain_car.py | kngwyu/rlpy3 | 329166de28d311d8f87358a62c38f40a7318fe07 | [
"BSD-3-Clause"
] | null | null | null | from rlpy.domains import MountainCar
from rlpy.tools.cli import run_experiment
import methods
def select_domain(noise=0.0):
    # Build the MountainCar domain with the given transition noise.
    return MountainCar(noise=noise)
def select_agent(name, domain, max_steps, seed, **kwargs):
    """Build the learning agent identified by ``name``.

    ``None`` selects the default "ifdd-q" agent; an unknown name raises
    NotImplementedError.
    """
    factories = {
        "ifdd-q": lambda: methods.ifdd_q(
            domain,
            discretization=47,
            threshold=77.0,
            lambda_=0.9,
            initial_learn_rate=0.05,
            boyan_N0=11,
            ifddplus=True,
        ),
        "kifdd-q": lambda: methods.kifdd_q(
            domain,
            kernel_resolution=13.14,
            threshold=0.21,
            lambda_=0.9,
            initial_learn_rate=0.07,
            boyan_N0=37.0,
            kernel="gaussian_kernel",
        ),
        "tabular-q": lambda: methods.tabular_q(
            domain,
            lambda_=0.9,
            initial_learn_rate=0.26,
            boyan_N0=119,
            incremental=True,
        ),
        "rbf-q": lambda: methods.rbf_q(
            domain,
            seed,
            num_rbfs=5000,
            resolution=8,
            initial_learn_rate=0.26,
            lambda_=0.9,
            boyan_N0=2120,
        ),
    }
    key = "ifdd-q" if name is None else name
    try:
        factory = factories[key]
    except KeyError:
        raise NotImplementedError("Method {} is not supported".format(name))
    return factory()
if __name__ == "__main__":
run_experiment(
select_domain,
select_agent,
default_max_steps=30000,
default_num_policy_checks=10,
default_checks_per_policy=50,
)
| 24.758065 | 76 | 0.543322 |
d8fc07428741203e9116587338df74db0bc57169 | 5,088 | py | Python | tests/test_max7219.py | r1val/123 | 65c8455a3f6982f743261d41eb4bd43eaf788b47 | [
"MIT"
] | null | null | null | tests/test_max7219.py | r1val/123 | 65c8455a3f6982f743261d41eb4bd43eaf788b47 | [
"MIT"
] | null | null | null | tests/test_max7219.py | r1val/123 | 65c8455a3f6982f743261d41eb4bd43eaf788b47 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014-17 Richard Hull and contributors
# See LICENSE.rst for details.
import pytest
import warnings
from luma.led_matrix.device import max7219
from luma.core.render import canvas
from helpers import setup_function, serial, call, assert_invalid_dimensions # noqa: F401
def test_init_cascaded():
    # Four cascaded 8x8 blocks form a 32x8 display.
    device = max7219(serial, cascaded=4)
    assert (device.width, device.height) == (32, 8)
def test_init_8x8():
    device = max7219(serial)
    assert device.cascaded == 1
    # Expected MAX7219 init writes (register, value): scan-limit=7,
    # decode-mode=0, display-test=0, intensity=7, digit rows 1-8 cleared,
    # then shutdown register set to 1 (normal operation / display on).
    serial.data.assert_has_calls([
        call([11, 7]),
        call([9, 0]),
        call([15, 0]),
        call([10, 7]),
        call([1, 0]),
        call([2, 0]),
        call([3, 0]),
        call([4, 0]),
        call([5, 0]),
        call([6, 0]),
        call([7, 0]),
        call([8, 0]),
        call([12, 1])
    ])
def test_init_16x8():
device = max7219(serial, width=16, height=8)
assert device.cascaded == 2
serial.data.assert_has_calls([
call([11, 7, 11, 7]),
call([9, 0, 9, 0]),
call([15, 0, 15, 0]),
call([10, 7, 10, 7]),
call([1, 0, 1, 0]),
call([2, 0, 2, 0]),
call([3, 0, 3, 0]),
call([4, 0, 4, 0]),
call([5, 0, 5, 0]),
call([6, 0, 6, 0]),
call([7, 0, 7, 0]),
call([8, 0, 8, 0]),
call([12, 1, 12, 1])
])
def test_init_invalid_dimensions():
    # Constructor must reject width/height that are not multiples of 8x8 blocks.
    assert_invalid_dimensions(max7219, serial, 59, 22)
def test_hide():
    # hide() writes shutdown register 12 with 0, once per cascaded device.
    dev = max7219(serial, cascaded=5)
    serial.reset_mock()
    dev.hide()
    expected = [12, 0] * 5
    serial.data.assert_called_once_with(expected)
def test_show():
    # show() writes shutdown register 12 with 1, once per cascaded device.
    dev = max7219(serial, cascaded=3)
    serial.reset_mock()
    dev.show()
    expected = [12, 1] * 3
    serial.data.assert_called_once_with(expected)
def test_contrast():
    # contrast(0x6B) scales to intensity 6, written once per cascaded device.
    dev = max7219(serial, cascaded=6)
    serial.reset_mock()
    dev.contrast(0x6B)
    expected = [10, 6] * 6
    serial.data.assert_called_once_with(expected)
def test_display_16x8():
device = max7219(serial, cascaded=2)
serial.reset_mock()
with canvas(device) as draw:
draw.rectangle(device.bounding_box, outline="white")
serial.data.assert_has_calls([
call([1, 0x81, 1, 0xFF]),
call([2, 0x81, 2, 0x81]),
call([3, 0x81, 3, 0x81]),
call([4, 0x81, 4, 0x81]),
call([5, 0x81, 5, 0x81]),
call([6, 0x81, 6, 0x81]),
call([7, 0x81, 7, 0x81]),
call([8, 0xFF, 8, 0x81])
])
def test_display_16x16():
device = max7219(serial, width=16, height=16)
serial.reset_mock()
with canvas(device) as draw:
draw.rectangle(device.bounding_box, outline="white")
serial.data.assert_has_calls([
call([1, 0x80, 1, 0xFF, 1, 0x01, 1, 0xFF]),
call([2, 0x80, 2, 0x80, 2, 0x01, 2, 0x01]),
call([3, 0x80, 3, 0x80, 3, 0x01, 3, 0x01]),
call([4, 0x80, 4, 0x80, 4, 0x01, 4, 0x01]),
call([5, 0x80, 5, 0x80, 5, 0x01, 5, 0x01]),
call([6, 0x80, 6, 0x80, 6, 0x01, 6, 0x01]),
call([7, 0x80, 7, 0x80, 7, 0x01, 7, 0x01]),
call([8, 0xFF, 8, 0x80, 8, 0xFF, 8, 0x01])
])
def test_normal_alignment():
device = max7219(serial, cascaded=2, block_orientation=0)
serial.reset_mock()
with canvas(device) as draw:
draw.rectangle((0, 0, 15, 3), outline="white")
serial.data.assert_has_calls([
call([1, 0x09, 1, 0x0F]),
call([2, 0x09, 2, 0x09]),
call([3, 0x09, 3, 0x09]),
call([4, 0x09, 4, 0x09]),
call([5, 0x09, 5, 0x09]),
call([6, 0x09, 6, 0x09]),
call([7, 0x09, 7, 0x09]),
call([8, 0x0F, 8, 0x09])
])
def test_block_realignment_minus90():
device = max7219(serial, cascaded=2, block_orientation=-90)
serial.reset_mock()
with canvas(device) as draw:
draw.rectangle((0, 0, 15, 3), outline="white")
serial.data.assert_has_calls([
call([1, 0x00, 1, 0x00]),
call([2, 0x00, 2, 0x00]),
call([3, 0x00, 3, 0x00]),
call([4, 0x00, 4, 0x00]),
call([5, 0xFF, 5, 0xFF]),
call([6, 0x80, 6, 0x01]),
call([7, 0x80, 7, 0x01]),
call([8, 0xFF, 8, 0xFF])
])
def test_block_realignment_plus90():
device = max7219(serial, cascaded=2, block_orientation=90)
serial.reset_mock()
with canvas(device) as draw:
draw.rectangle((0, 0, 15, 3), outline="white")
serial.data.assert_has_calls([
call([1, 0xFF, 1, 0xFF]),
call([2, 0x01, 2, 0x80]),
call([3, 0x01, 3, 0x80]),
call([4, 0xFF, 4, 0xFF]),
call([5, 0x00, 5, 0x00]),
call([6, 0x00, 6, 0x00]),
call([7, 0x00, 7, 0x00]),
call([8, 0x00, 8, 0x00])
])
def test_unknown_block_orientation():
    # A nonsensical orientation value must be rejected at construction time.
    with pytest.raises(AssertionError):
        max7219(serial, cascaded=2, block_orientation="sausages")
def test_deprecated_block_orientation(recwarn):
    # Legacy string orientations still work, but each constructor call
    # emits a warning (captured by pytest's recwarn fixture).
    warnings.simplefilter('always')
    max7219(serial, cascaded=2, block_orientation="vertical")
    max7219(serial, cascaded=2, block_orientation="horizontal")
    assert len(recwarn) == 2
| 26.638743 | 89 | 0.564269 |
aa0af0c309e2cca8de42a71f029125f91e841d13 | 1,164 | py | Python | setup.py | Donearm/data-structures-and-algorithms | f33ced7d24ee60336d5ab6f850cdbed84a344d9a | [
"MIT"
] | 1 | 2019-10-21T10:07:04.000Z | 2019-10-21T10:07:04.000Z | setup.py | Donearm/data-structures-and-algorithms | f33ced7d24ee60336d5ab6f850cdbed84a344d9a | [
"MIT"
] | null | null | null | setup.py | Donearm/data-structures-and-algorithms | f33ced7d24ee60336d5ab6f850cdbed84a344d9a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (c) 2019, Gianluca Fiore
#
###############################################################################
__author__ = "Gianluca Fiore"
import os
import io
import re
from setuptools import find_packages, setup
def long_description():
    """Return the package's README.md contents (UTF-8)."""
    with io.open('README.md', 'r', encoding='utf-8') as f:
        return f.read()
setup(name='data-structures-and-algorithms',
version='0.0.1',
description='Data Structures and Algorithms',
long_description=long_description(),
long_description_content_type="text/markdown",
url='https://github.com/Donearm/data-structures-and-algorithms',
author='Gianluca Fiore',
author_email="gianlucafiore@papersounds.eu",
license='MIT',
packages=find_packages(),
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
zip_safe=False)
| 28.390244 | 79 | 0.549828 |
05ff3531fea855632f8f50db0dd3e2f331d03f87 | 1,622 | py | Python | mac/google-cloud-sdk/lib/surface/secrets/list.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
] | null | null | null | mac/google-cloud-sdk/lib/surface/secrets/list.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
] | null | null | null | mac/google-cloud-sdk/lib/surface/secrets/list.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""List all secret names."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.secrets import api as secrets_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.secrets import args as secrets_args
from googlecloudsdk.command_lib.secrets import fmt as secrets_fmt
class List(base.ListCommand):
    r"""List all secret names.

    List all secret names. This command only returns the secret's names, not
    their secret data. To retrieve the secret's data, run `$ {parent_command}
    access SECRET`.

    ## EXAMPLES

    List secret names.

    $ {command}
    """

    @staticmethod
    def Args(parser):
        # Register the --project flag, the secrets table output format,
        # and a default page size of 100 results.
        secrets_args.AddProject(parser)
        secrets_fmt.UseSecretTable(parser)
        base.PAGE_SIZE_FLAG.SetDefault(parser, 100)

    def Run(self, args):
        # Resolve the project resource, then return a paged iterator over
        # its secrets (bounded by --limit).
        project_ref = args.CONCEPTS.project.Parse()
        return secrets_api.Secrets().ListWithPager(
            project_ref=project_ref, limit=args.limit)
| 31.803922 | 75 | 0.759556 |
de8558268ec5abf8d4f7f48e85040b9770d039a6 | 17,467 | py | Python | rltoolkit/rltoolkit/tensorboard_logger.py | MIMUW-RL/spp-rl | 86b96cdd220cc4eae86f7cfd26924c69b498dcc6 | [
"MIT"
] | 7 | 2020-06-15T12:25:53.000Z | 2021-11-03T01:08:47.000Z | rltoolkit/rltoolkit/tensorboard_logger.py | MIMUW-RL/spp-rl | 86b96cdd220cc4eae86f7cfd26924c69b498dcc6 | [
"MIT"
] | null | null | null | rltoolkit/rltoolkit/tensorboard_logger.py | MIMUW-RL/spp-rl | 86b96cdd220cc4eae86f7cfd26924c69b498dcc6 | [
"MIT"
] | 1 | 2020-12-21T11:21:22.000Z | 2020-12-21T11:21:22.000Z | import ctypes
import logging
import multiprocessing as mp
import numbers
from os import path
from typing import Any, Dict, Iterable, Tuple
import gym
import numpy as np
import torch
from pyvirtualdisplay import Display
from torch.nn import functional as F
from torch.utils.tensorboard import SummaryWriter
from rltoolkit.buffer import Memory, MemoryAcM
from rltoolkit.utils import measure_time
try:
import neptune
_has_neptune = True
except:
# no neptune installed
_has_neptune = False
logger = logging.getLogger(__name__)
class TensorboardWriter(SummaryWriter):
"""
Custom SummaryWriter class for RL purposes.
"""
    def __init__(
        self,
        env_name: str,
        log_dir: str,
        filename: str,
        render: bool = True,
        fps: int = 24,
        frames_count: int = 2000,
    ):
        """
        Arguments:
            env_name {str} -- name of env used for getting rendering resolution
            log_dir {str} -- directory to store tensorboard logs
            filename {str} -- filename of the experiment

        Keyword Arguments:
            render {bool} -- whether rollout videos will be recorded; when
                False all recording state below is skipped (default: {True})
            fps {int} -- number of frames per seconds. Used in recording rollout.
                (default: {24})
            frames_count {int} -- maximal length of recorded rollout (default: {2000})
        """
        self.render = render
        # Log to neptune only if the package is installed and a project
        # has been selected (neptune.project is not None).
        self.use_neptune = _has_neptune and neptune.project is not None
        if self.render:
            self.fps = fps
            # Handle of the background frame-collecting process (started later).
            self.recording_process = None
            self.rendering_video_index = 0
            # (frames, height, width, channels) of a recorded rollout.
            self.frames_shape = self.get_rendering_shape(env_name, frames_count)
            self.array_size = int(np.prod(self.frames_shape))
            # Shared byte buffer the recording process writes raw frames into.
            self.shared_video_frames = mp.Array(ctypes.c_uint8, self.array_size)
        log_dir = path.join(log_dir, filename)
        super().__init__(log_dir=log_dir)
def get_rendering_shape(
self, env_name: str, frames_count: int
) -> Tuple[int, int, int, int]:
"""
Create dummy environment instance to get rendering resolution.
Due to classical controll environments initialization it has to be done in the
separate process.
Arguments:
env_name {str} -- name of the environment
frames_count {int} -- maximal length of video in frames
Returns:
Tuple[int, int, int, int] -- frames_count, height, width, and chanels of
a video
"""
resolution = mp.Array(ctypes.c_uint16, 3)
ctx = mp.get_context("spawn")
p = ctx.Process(target=get_resolution_mp, args=(env_name, resolution))
p.start()
p.join()
height, width, chanels = np.frombuffer(resolution.get_obj(), dtype=np.uint16)
height, width, chanels = int(height), int(width), int(chanels)
return frames_count, height, width, chanels
@measure_time
def record_episode(self, a2c: Any, i: int, done: bool = False):
"""
At the first run time just start collecting frames in separete process.
At each next first join previously started process and send video to the
tensorboard, after that start new collecting frames process.
At last run (flag done=True) do above and wait for the last process and send
video to the tensorboard
Args:
a2c (A2C): a2c object with the agent.
i (int): iteration number - used to log this in the tensorboard
done (bool, optional): information if recording is done at the end of the
experiment. Defaults to False.
"""
if not self.render:
return
if self.recording_process is not None:
self._join_process_and_add_video()
self.rendering_video_index = i
self._start_collecting_rollout_frames(a2c)
if done:
self._join_process_and_add_video()
def _join_process_and_add_video(self):
"""
Join recording process and send generated frames to the tensorboard.
"""
self.recording_process.join()
frames = self._from_mp_array_to_tensor(self.shared_video_frames)
self.add_video("Episode", frames, self.rendering_video_index, fps=self.fps)
self.shared_video_frames = mp.Array(ctypes.c_uint8, self.array_size)
def _start_collecting_rollout_frames(self, a2c: Any):
"""
Start collecting frames from a rollout in the different process.
Args:
a2c (A2C): a2c object with the agent.
"""
args = (a2c, self.shared_video_frames, self.frames_shape)
self.recording_process = mp.Process(target=_record_episode_mp, args=args)
self.recording_process.start()
def _from_mp_array_to_tensor(self, mp_arr: mp.Array) -> torch.tensor:
"""
Convert multiprocessing.Array with recorded frames into torch.tensor.
Additionally remove last black frames and change chanel position.
Original shape: B x H x W x C
Output shape B x C x H x W
Args:
mp_arr (mp.Array): recorded frames in the shared memory.
Returns:
torch.tensor: tensor with data ready to use in tensorboard writer
"""
arr = np.frombuffer(mp_arr.get_obj(), dtype=np.uint8)
arr = arr.reshape(self.frames_shape)
arr = self._remove_black_frames(arr)
arr = torch.tensor(arr)
arr = torch.unsqueeze(arr.permute(0, 3, 1, 2), dim=0)
return arr
def _remove_black_frames(self, arr: np.array) -> np.array:
"""
Not always rollout will last for full buffer capacity so we need to remove all
black frames.
Args:
arr (np.array): array with frames.
Returns:
np.array: array with frames without black frames at the end.
"""
for frame in range(self.frames_shape[0]):
if (arr[frame] == 0).all():
sl = slice(frame, None)
arr = np.delete(arr, sl, 0)
break
return arr
def log_returns(self, i: int, buffer: Memory):
assert buffer.rewards and buffer.done
self.add_histogram("Return", buffer.returns_rollouts, i)
def log_kl_div_updates(
self, iterations: int, frames: int, rollouts: int, updates_no: float
):
self.add_scalar("PPO/KL_updates_mean/per_iterations", updates_no, iterations)
self.add_scalar("PPO/KL_updates_mean/per_frames", updates_no, frames)
self.add_scalar("PPO/KL_updates_mean/per_rollouts", updates_no, rollouts)
if self.use_neptune:
neptune.log_metric("PPO/KL_updates_mean/per_iterations",
iterations, updates_no)
neptune.log_metric("PPO/KL_updates_mean/per_frames",
frames, updates_no)
neptune.log_metric("PPO/KL_updates_mean/per_rollouts",
rollouts, updates_no)
def log_sac_alpha(self, iterations: int, alpha: float):
self.add_scalar("SAC/Alpha_per_iterations", alpha, iterations)
if self.use_neptune:
neptune.log_metric("SAC/Alpha_per_iterations", iterations, alpha)
def log_actions(self, i: int, buffer: Memory, denormalize: bool = False):
assert len(buffer.actions) > 0
actions = self._get_buffer_elem_tensor(buffer.actions)
if denormalize:
actions = buffer.denormalize(actions)
if len(actions.shape) == 1:
self._add_single_variable_histogram("Action", actions, i)
elif len(actions.shape) == 2:
self._add_multiple_variables_histograms("Action", actions, i)
else:
raise ValueError("2D actions are not supported")
@staticmethod
def _get_buffer_elem_tensor(buffer_list: Iterable) -> torch.tensor:
first_val = buffer_list[0]
if isinstance(first_val, numbers.Number):
output = torch.tensor(buffer_list)
elif isinstance(first_val, np.ndarray):
output = np.array(buffer_list)
elif isinstance(first_val, torch.Tensor):
output = torch.cat(buffer_list)
else:
raise TypeError("Unsupported action type.")
return output
def _add_single_variable_histogram(self, name: str, vector: torch.tensor, i: int):
self.add_histogram(f"{name}/0", vector, i)
def _add_multiple_variables_histograms(
self, name: str, matrix: torch.tensor, i: int
):
matrix_rows = matrix.shape[1]
for j in range(matrix_rows):
var_j = matrix[:, j]
self.add_histogram(f"{name}/{j}", var_j, i)
def log_observations(self, i: int, buffer: Memory):
assert len(buffer) > 0
if isinstance(buffer.obs[0], torch.Tensor):
obs = buffer.obs.squeeze()
elif isinstance(buffer.obs[0], np.ndarray):
obs = np.stack(buffer.obs, axis=0)
else:
raise TypeError("Unsupported observation type.")
if len(obs.shape) == 1:
self._add_single_variable_histogram("Observation", obs, i)
elif len(obs.shape) == 2:
self._add_multiple_variables_histograms("Observation", obs, i)
else:
raise ValueError("2D observations are not supported")
def log_running_return(
self, iterations: int, frames: int, rollouts: int, running_return: float
):
self.add_scalar("1_Running_return/per_iterations", running_return, iterations)
self.add_scalar("1_Running_return/per_frames", running_return, frames)
self.add_scalar("1_Running_return/per_rollouts", running_return, rollouts)
if self.use_neptune:
neptune.log_metric("1_Running_return/per_iterations", iterations, running_return)
neptune.log_metric("1_Running_return/per_frames", frames, running_return)
neptune.log_metric("1_Running_return/per_rollouts", rollouts, running_return)
def log_test_return(
self, iterations: int, frames: int, rollouts: int, test_return: float
):
self.add_scalar("1_Test_return/per_iterations", test_return, iterations)
self.add_scalar("1_Test_return/per_frames", test_return, frames)
self.add_scalar("1_Test_return/per_rollouts", test_return, rollouts)
if self.use_neptune:
neptune.log_metric("1_Test_return/per_iterations", iterations, test_return)
neptune.log_metric("1_Test_return/per_frames", frames, test_return)
neptune.log_metric("1_Test_return/per_rollouts", rollouts, test_return)
def log_loss(self, i: int, loss: Dict[str, int]):
for key, value in loss.items():
label = "Loss/" + key.capitalize()
self.add_scalar(label, value, i)
if self.use_neptune:
neptune.log_metric(label, i, value)
def log_success_rate(self, frames: int, success_rate: float):
self.add_scalar("success_rate", success_rate, frames)
if self.use_neptune:
neptune.log_metric("success_rate", frames, success_rate)
def log_acm_pretrain_loss(
self, train_loss: float, validation_loss: float, epoch: int
):
self.add_scalar("Loss/pretrain_acm_train", train_loss, epoch)
self.add_scalar("Loss/pretrain_acm_val", validation_loss, epoch)
if self.use_neptune:
neptune.log_metric("Loss/pretrain_acm_train", epoch, train_loss)
neptune.log_metric("Loss/pretrain_acm_val", epoch, validation_loss)
def log_custom_loss_param(self, iterations: int, custom_loss_param: torch.Tensor):
with torch.no_grad():
custom_loss_param_softplus = F.softplus(custom_loss_param)
if 0 == custom_loss_param.ndimension():
self.add_scalar("CL_param/val", custom_loss_param, iterations)
self.add_scalar("CL_param/softplus", custom_loss_param_softplus, iterations)
if self.use_neptune:
neptune.log_metric("CL_param/val", iterations, custom_loss_param)
neptune.log_metric("CL_param/softplus", iterations, custom_loss_param_softplus)
else:
for i in range(len(custom_loss_param)):
self.add_scalar(f"CL_param/{i}/val", custom_loss_param[i], iterations)
self.add_scalar(f"CL_param/{i}/softplus", custom_loss_param_softplus[i], iterations)
if self.use_neptune:
neptune.log_metric(f"CL_param/{i}/val", iterations, custom_loss_param[i])
neptune.log_metric(f"CL_param/{i}/softplus", iterations, custom_loss_param_softplus[i])
def log_hyperparameters(self, hparam_dict, metric_dict):
"""
[summary]
Args:
hyps (dict): [description]
metrics (dict): [description]
"""
self.add_hparams(hparam_dict, metric_dict)
def log_acm_action_histogram(self, i: int, buffer: MemoryAcM):
# TODO: refactor this and more :D
assert len(buffer.actions_acm) > 0
actions = self._get_buffer_elem_tensor(buffer.actions_acm)
if len(actions.shape) == 1:
self._add_single_variable_histogram("ActionACM", actions, i)
elif len(actions.shape) == 2:
self._add_multiple_variables_histograms("ActionACM", actions, i)
else:
raise ValueError("2D actions are not supported")
assert len(buffer) > 0
def log_action_mean_std(
self, iterations: int, buffer: MemoryAcM, denormalize: bool = False,
action_idx : list = None
):
if isinstance(buffer.actions[0], torch.Tensor):
actions = torch.cat(buffer.actions)
else:
actions = torch.tensor(buffer.actions)
if denormalize:
actions = buffer.denormalize(actions, action_idx)
actions_columns = actions.shape[1]
for j in range(actions_columns):
col_actions = actions[:, j]
mean = col_actions.mean()
std = col_actions.std()
self.add_scalar(f"Action/mean/{j}", mean, iterations)
self.add_scalar(f"Action/std/{j}", std, iterations)
if self.use_neptune:
neptune.log_metric(f"Action/mean/{j}", iterations, mean)
neptune.log_metric(f"Action/std/{j}", iterations, std)
def plot_dist_loss(self, i: int, actions: torch.Tensor, next_obs: torch.Tensor, on_policy: bool = False,
action_idx: list = None):
if action_idx is not None:
next_obs = next_obs[:, action_idx]
dist_loss_action_next_obs = F.mse_loss(actions, next_obs).item()
# MSE for whole matrices
self.add_scalar(
"ACM_distance_general/MSE_action_next_obs", dist_loss_action_next_obs, i
)
if self.use_neptune:
neptune.log_metric(
"ACM_distance_general/MSE_action_next_obs", i, dist_loss_action_next_obs
)
obs_columns = next_obs.shape[1]
for j in range(obs_columns):
col_next_obs = next_obs[:, j]
col_actions = actions[:, j]
col_dist_loss_action_next_obs = F.mse_loss(col_actions, col_next_obs).item()
self.add_scalar(
f"ACM_distance/MSE_action_next_obs/{j}",
col_dist_loss_action_next_obs,
i,
)
if self.use_neptune:
neptune.log_metric(
f"ACM_distance/MSE_action_next_obs/{j}",
i,
col_dist_loss_action_next_obs,
)
def log_obs_mean_std(self, iterations: int, mean: torch.tensor, std: torch.tensor):
"""
Log all observation means and standard deviations.
"""
for i in range(len(mean)):
self.add_scalar(f"Obs/mean/{i}", mean[i], iterations)
self.add_scalar(f"Obs/std/{i}", std[i], iterations)
if self.use_neptune:
neptune.log_metric(f"Obs/mean/{i}", iterations, mean[i])
neptune.log_metric(f"Obs/std/{i}", iterations, std[i])
def _record_episode_mp(
a2c: Any, mp_arr: mp.Array, frames_shape: Tuple[int, int, int, int]
):
frames_count = frames_shape[0]
with Display():
done = False
obs = a2c.env.reset()
obs = a2c.process_obs(obs)
i = 0
frames = np.frombuffer(mp_arr.get_obj(), dtype=np.uint8).reshape(frames_shape)
frames[i] = a2c.env.render(mode="rgb_array")
while not done:
if i == frames_count:
logger.warning("Too small frame count declared for full video.")
break
action, action_logprobs = a2c.actor.act(obs)
action_proc = a2c.process_action(action, obs)
obs, rew, done, _ = a2c.env.step(action_proc)
obs = a2c.process_obs(obs)
frames[i] = a2c.env.render(mode="rgb_array")
i += 1
return 0
def get_resolution_mp(env_name: str, mp_resolution: mp.Array):
with Display():
resolution = np.frombuffer(mp_resolution.get_obj(), dtype=np.uint16)
env = gym.make(env_name)
env.reset()
frame = env.render(mode="rgb_array")
env.close()
for i in range(len(frame.shape)):
resolution[i] = frame.shape[i]
| 38.137555 | 108 | 0.624148 |
b1238e655047727834ff13547625171c45924ebb | 1,872 | py | Python | pyta/hypothesis/extra/dateutil.py | AbChatt/Word-Search-Python | a08bcadf3e77b18541a1b5e8c7e98746978bb1fe | [
"MIT"
] | null | null | null | pyta/hypothesis/extra/dateutil.py | AbChatt/Word-Search-Python | a08bcadf3e77b18541a1b5e8c7e98746978bb1fe | [
"MIT"
] | null | null | null | pyta/hypothesis/extra/dateutil.py | AbChatt/Word-Search-Python | a08bcadf3e77b18541a1b5e8c7e98746978bb1fe | [
"MIT"
] | null | null | null | # coding=utf-8
#
# This file is part of Hypothesis, which may be found at
# https://github.com/HypothesisWorks/hypothesis-python
#
# Most of this work is copyright (C) 2013-2018 David R. MacIver
# (david@drmaciver.com), but it contains contributions by others. See
# CONTRIBUTING.rst for a full list of people who may hold copyright, and
# consult the git log if you need to determine who owns an individual
# contribution.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
#
# END HEADER
"""This module provides ``dateutil`` timezones.
You can use this strategy to make
:py:func:`hypothesis.strategies.datetimes` and
:py:func:`hypothesis.strategies.times` produce timezone-aware values.
"""
from __future__ import division, print_function, absolute_import
import datetime as dt
from dateutil import tz, zoneinfo # type: ignore
import hypothesis.strategies as st
__all__ = ['timezones']
@st.cacheable
@st.defines_strategy
def timezones():
    # type: () -> st.SearchStrategy[dt.tzinfo]
    """Strategy for any timezone shipped with :pypi:`dateutil`.

    Shrinks towards UTC, then towards the zone whose UTC offset on
    2000-01-01 is smallest in absolute value.  Intended for use with
    :py:func:`~hypothesis.strategies.datetimes`.

    Note that the set of available zones depends on the zonefile data on
    the current machine; see the dateutil documentation for details.
    """
    anchor = dt.datetime(2000, 1, 1)
    zone_names = zoneinfo.get_zonefile_instance().zones
    # UTC first so it is the minimal example; remaining zones ordered by
    # absolute offset from UTC at the anchor date.
    candidate_zones = [tz.UTC]  # type: ignore
    candidate_zones += sorted(
        [tz.gettz(name) for name in zone_names],
        key=lambda zone: abs(zone.utcoffset(anchor)),
    )
    return st.sampled_from(candidate_zones)
| 31.728814 | 79 | 0.732372 |
aacdc9d2c10facf5b7c46db785d3e64b988da251 | 2,074 | py | Python | test/test_utils.py | MateoLostanlen/Holocron | 65e570fc0b072226ae3299574b4926e2fa4e76b0 | [
"Apache-2.0"
] | 181 | 2019-09-05T10:48:15.000Z | 2022-03-29T06:53:15.000Z | test/test_utils.py | MateoLostanlen/Holocron | 65e570fc0b072226ae3299574b4926e2fa4e76b0 | [
"Apache-2.0"
] | 179 | 2019-08-29T09:15:49.000Z | 2022-02-13T21:49:20.000Z | test/test_utils.py | MateoLostanlen/Holocron | 65e570fc0b072226ae3299574b4926e2fa4e76b0 | [
"Apache-2.0"
] | 46 | 2019-07-25T03:36:27.000Z | 2022-03-29T06:53:19.000Z | # Copyright (C) 2019-2021, François-Guillaume Fernandez.
# This program is licensed under the Apache License version 2.
# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.
import torch
from torch.utils.data import DataLoader, Dataset
from holocron import utils
class MockDataset(Dataset):
    """Dataset stub: ``n`` items, each a random 32-dim sample with target 0."""

    def __init__(self, n):
        super().__init__()
        # Number of samples this dataset reports via __len__.
        self.n = n

    def __getitem__(self, idx):
        # The index is deliberately ignored -- content is irrelevant for tests.
        return torch.rand(32), 0

    def __len__(self):
        return self.n
def test_mixup():
    num_it = 10
    batch_size = 2

    # Loader whose collate function applies mixup augmentation per batch.
    dataset = MockDataset(num_it * batch_size)
    loader = DataLoader(dataset, batch_size=batch_size, collate_fn=utils.data.mixup_collate)
    inputs, targets_a, targets_b, lam = next(iter(loader))

    # Mixup must preserve the input shape and yield two equally-shaped
    # target tensors (one per mixed sample).
    assert inputs.shape == (batch_size, 32)
    assert targets_a.shape == targets_b.shape
def _train_one_batch(model, x, target, optimizer, criterion, device):
"""Mock batch training function"""
x, target = x.to(device), target.to(device)
output = model(x)
batch_loss = criterion(output, target)
optimizer.zero_grad()
batch_loss.backward()
optimizer.step()
return batch_loss.item()
def test_lr_finder():
    num_it = 10
    batch_size = 2
    start_lr, end_lr = 1e-7, 10

    # Minimal training setup on the mock data.
    model = torch.nn.Linear(32, 5)
    train_loader = DataLoader(MockDataset(num_it * batch_size), batch_size=batch_size)
    optimizer = torch.optim.Adam(model.parameters())
    criterion = torch.nn.CrossEntropyLoss()

    # Run the LR range test without early divergence stopping.
    lrs, losses = utils.misc.lr_finder(
        _train_one_batch, model, train_loader, optimizer, criterion,
        num_it=num_it, start_lr=start_lr, end_lr=end_lr, stop_div=False)

    # One (lr, loss) pair per iteration, spanning [start_lr, end_lr].
    assert isinstance(lrs, list) and isinstance(losses, list)
    assert len(lrs) == len(losses) == num_it
    assert lrs[0] == start_lr and abs(lrs[-1] - end_lr) / lrs[-1] < 1e-7
| 30.5 | 117 | 0.684667 |
03dbd680f6d5c4e401b9e7f2239db4676737c553 | 215 | py | Python | ahook/test_ahook.py | akun/ahook | 985c9e5652eb50ff777858bdeeaeac736ac99409 | [
"MIT"
] | 1 | 2021-03-30T16:21:56.000Z | 2021-03-30T16:21:56.000Z | ahook/test_ahook.py | akun/ahook | 985c9e5652eb50ff777858bdeeaeac736ac99409 | [
"MIT"
] | null | null | null | ahook/test_ahook.py | akun/ahook | 985c9e5652eb50ff777858bdeeaeac736ac99409 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from unittest import TestCase
from ahook.main import hello_world
class HelloWordTestCase(TestCase):
    """Checks the greeting produced by :func:`ahook.main.hello_world`."""

    def test_hello_world(self):
        # The greeting must match exactly, punctuation included.
        self.assertEqual('Hello World!', hello_world())
| 17.916667 | 55 | 0.744186 |
6b029f7bc7f034992cfb7fd3f97bddc6bc51aedc | 5,284 | py | Python | code/CNN/CNN_16s/predict.py | alexander-malafeev/microsleep-detection | a548382796ae0cb7f3aecec3a6bff315d25b5d29 | [
"MIT"
] | 10 | 2020-05-28T16:26:27.000Z | 2022-03-31T15:06:38.000Z | code/CNN/CNN_16s/predict.py | alexander-malafeev/microsleep-detection | a548382796ae0cb7f3aecec3a6bff315d25b5d29 | [
"MIT"
] | null | null | null | code/CNN/CNN_16s/predict.py | alexander-malafeev/microsleep-detection | a548382796ae0cb7f3aecec3a6bff315d25b5d29 | [
"MIT"
] | 1 | 2022-03-23T15:31:58.000Z | 2022-03-23T15:31:58.000Z | import os
import keras
from keras.layers import concatenate
from sklearn.metrics import cohen_kappa_score
import scipy.io
import math
import random
from keras import optimizers
import numpy as np
import scipy.io as spio
from sklearn.metrics import f1_score, accuracy_score
np.random.seed(0)
from keras.preprocessing import sequence
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers import Layer,Dense, Dropout, Input, Activation, TimeDistributed, Reshape
from keras.layers import GRU, Bidirectional
from keras.layers import Conv1D, Conv2D, MaxPooling2D, Flatten, BatchNormalization, LSTM, ZeroPadding2D
from loadData import load_data, classes_global, gen_data_seq, load_recording, gen_data
from keras.callbacks import History
from keras.models import Model
from keras.layers.noise import GaussianNoise
from collections import Counter
from sklearn.utils import class_weight
from myModel import build_model
from os import listdir
from os.path import isfile, join
import sys
sys.path.append("../..")
from loadData import *
from utils import *
data_dir = '../data/'
out_dir = './pred/'
files = [f for f in listdir(data_dir) if isfile(join(data_dir, f))]
batch_size = 200
n_ep = 4
fs = 200;
# half_size of the sliding window in samples
w_len = 8*fs;
data_dim = w_len*2
half_prec = 0.5
prec = 1
n_cl = 4
print("=====================")
print("Reading dataset to predict:")
( data_val, targets_val, N_samples_val) = load_data(data_dir,files, w_len)
ordering = 'tf';
keras.backend.common.set_image_dim_ordering(ordering)
sample_list_val = []
for i in range(len(targets_val)):
sample_list_val.append([])
for j in range(len(targets_val[i][0])):
mid = j*prec
# we add the padding size
mid += w_len
wnd_begin = mid-w_len
wnd_end = mid+w_len-1
sample_list_val[i].append([i,j,wnd_begin, wnd_end, 0 ])
print(len(sample_list_val))
for i in range(len(sample_list_val)):
print(len(sample_list_val[i]))
sample_list_val2 = []
for i in range(len(targets_val)):
sample_list_val2.append([])
for j in range(len(targets_val[i][1])):
mid = j*prec
# we add the padding size
mid += w_len
wnd_begin = mid-w_len
wnd_end = mid+w_len-1
sample_list_val2[i].append([i,j,wnd_begin, wnd_end, 1 ])
n_channels = 3
def my_generator(data_train, targets_train, sample_list, shuffle = True):
    """Yield batches of two-derivation EEG windows indefinitely.

    Each sample descriptor is ``[file, sample, begin, end, channel]``; the
    window from the labelled derivation is concatenated (on the last axis)
    with the corresponding window of derivation index 2, scaled from
    microvolts into [-1, 1], and the label is one-hot encoded.

    Arguments:
        data_train -- per-recording list of per-derivation signal arrays
        targets_train -- per-recording list of per-derivation label arrays
        sample_list -- window descriptors produced by the setup code
        shuffle -- shuffle the sample order once before the first epoch
            (NOTE(review): samples are NOT reshuffled between epochs)
    """
    if shuffle:
        random.shuffle(sample_list)
    while True:
        for batch in batch_generator(sample_list, batch_size):
            # Fixed: removed the unused `batch_data2` accumulator that was
            # allocated but never filled or yielded.
            batch_data1 = []
            batch_targets = []
            for sample in batch:
                [f, s, b, e, c] = sample
                sample_label = targets_train[f][c][s]
                # Window of the labelled derivation plus derivation 2,
                # stacked along the channel axis.
                sample_x1 = data_train[f][c][b:e+1]
                sample_x2 = data_train[f][2][b:e+1]
                sample_x = np.concatenate((sample_x1, sample_x2), axis=2)
                batch_data1.append(sample_x)
                batch_targets.append(sample_label)
            batch_data1 = np.stack(batch_data1, axis=0)
            batch_targets = np.array(batch_targets)
            batch_targets = np_utils.to_categorical(batch_targets, n_cl)
            # Scale into [-1, 1]; amplitudes beyond +/-100 uV are clipped.
            batch_data1 = (batch_data1) / 100
            batch_data1 = np.clip(batch_data1, -1, 1)
            yield [batch_data1], batch_targets
# file, sample, begin, end
def val_data_to_batch(data, targets):
    """Cut one padded recording into sliding windows plus one-hot labels.

    For label index ``j`` the window is centred at ``j*prec + w_len``
    (``w_len`` accounts for the padding added when the recording was
    loaded) and spans ``2*w_len`` samples.

    Returns:
        (batch_data, batch_targets) -- windows scaled/clipped to [-1, 1]
        and the labels one-hot encoded over ``n_cl`` classes.
    """
    batch_data = []
    batch_targets = []
    for j in range(len(targets)):
        mid = j*prec
        # we add the padding size
        mid += w_len
        wnd_begin = mid-w_len
        wnd_end = mid+w_len-1
        b = wnd_begin
        e = wnd_end
        sample_label = targets[j]
        sample_x = data[b:e+1]
        batch_data.append(sample_x)
        batch_targets.append(sample_label)
    batch_data = np.stack(batch_data, axis=0)
    batch_targets = np_utils.to_categorical(batch_targets, n_cl)
    batch_data = (batch_data) / 100
    # BUG FIX: np.clip returns a new array and is not in-place; the result
    # was previously discarded, so the data was never actually clipped
    # (unlike in my_generator, which does keep the clipped array).
    batch_data = np.clip(batch_data, -1, 1)
    return batch_data, batch_targets
[cnn_eeg, model] = build_model(data_dim, n_channels, n_cl)
Nadam = optimizers.Nadam( )
model.compile(optimizer=Nadam, loss='categorical_crossentropy', metrics=['accuracy'], sample_weight_mode=None)
model.load_weights('./model.h5')
y_ = []
y = []
O2y_ = []
O2y = []
y_p = []
O2y_p = []
f_list = files
for j in range(0, len(f_list)):
    f = f_list[j]
    print(f)

    # --- channel 0 windows (sample_list_val, labels targets_val[j][0]) ---
    generator_val = my_generator(data_val, targets_val, sample_list_val[j], shuffle=False)
    scores = model.evaluate_generator(generator_val, int(math.ceil((len(sample_list_val[j]) + 0.0) / batch_size)), workers=1)
    generator_val = my_generator(data_val, targets_val, sample_list_val[j], shuffle=False)
    y_pred = model.predict_generator(generator_val, int(math.ceil((len(sample_list_val[j]) + 0.0) / batch_size)), workers=1)
    print(y_pred.shape)
    y_ = np.argmax(y_pred, axis=1).flatten()
    y_p = scores
    y = targets_val[j][0]

    # --- channel 1 windows (sample_list_val2, labels targets_val[j][1]) ---
    generator_val = my_generator(data_val, targets_val, sample_list_val2[j], shuffle=False)
    scores2 = model.evaluate_generator(generator_val, int(math.ceil((len(sample_list_val2[j]) + 0.0) / batch_size)), workers=1)
    generator_val = my_generator(data_val, targets_val, sample_list_val2[j], shuffle=False)
    y_pred2 = model.predict_generator(generator_val, int(math.ceil((len(sample_list_val2[j]) + 0.0) / batch_size)), workers=1)
    # BUG FIX: the O2* outputs previously reused the first channel's results
    # (y_pred, scores, targets_val[j][0]) while y_pred2/scores2 were computed
    # and never used.  sample_list_val2 is built from targets_val[i][1], so
    # the second channel's predictions, scores and labels belong here.
    O2y_ = np.argmax(y_pred2, axis=1).flatten()
    O2y_p = scores2
    O2y = targets_val[j][1]

    scipy.io.savemat(out_dir + f + '.mat', mdict={'y_p': y_p, 'y_': y_, 'y': y, 'O2y_p': O2y_p, 'O2y_': O2y_, 'O2y': O2y})
| 27.237113 | 122 | 0.725587 |
95a34064708ea6df95b2025a89216841e896f40e | 8,823 | py | Python | src/core/db/backend/pg.py | KevinXuxuxu/datahub_lsems | 87ecaf877117b6747f2432fa4379243fdd3d87bd | [
"MIT"
] | null | null | null | src/core/db/backend/pg.py | KevinXuxuxu/datahub_lsems | 87ecaf877117b6747f2432fa4379243fdd3d87bd | [
"MIT"
] | null | null | null | src/core/db/backend/pg.py | KevinXuxuxu/datahub_lsems | 87ecaf877117b6747f2432fa4379243fdd3d87bd | [
"MIT"
] | null | null | null | import os
#import psycopg2
import pymongo
import asctime from time
import re
import shutil
import data
from config import settings
from inventory.models import *
'''
@author: anant bhardwaj
@date: Oct 3, 2013
DataHub internal APIs for postgres repo_base
'''
HOST = settings.DATABASES['default']['HOST']
PORT = 27017
if settings.DATABASES['default']['PORT'] != '':
try:
PORT = int(settings.DATABASES['default']['PORT'])
except:
pass
class MGBackend:
def __init__(self, user, password, host=HOST, port=PORT, repo_base=None):
self.user = user
self.password = password
self.host = host
self.port = port
self.repo_base = repo_base
self.__open_connection__()
def __open_connection__(self):
self.connection = pymongo.MongoClient(host=self.host, port=self.port)
def reset_connection(self, repo_base):
self.repo_base=repo_base
self.__open_connection__()
def close_connection(self):
self.connection.close()
    def create_repo(self, repo):
        # Create a Mongo database for the repo: record owner metadata and a
        # collaborator placeholder in 'repo_info', and grant the current
        # user access.  Returns a status dict via handle_return.
        try:
            Repo = self.connection[repo]
            Repo.create_collection('repo_info')
            # NOTE(review): 'asctime' relies on the malformed module-level
            # line "import asctime from time" -- confirm it actually
            # resolves; as written that import is a SyntaxError.
            Repo['repo_info'].insert({'owner': self.user, 'time_created': asctime()})
            Repo['repo_info'].insert({'tag':'collaborator'})
            Repo.add_user(self.user, password=self.password)
            return self.handle_return(True)
        except Exception as e:
            # NOTE(review): Python 2 print statement -- this file mixes
            # py2/py3 syntax.
            print e
            return self.handle_return(False)
def list_repos(self):
repo_list = self.connection.database_names()
rtn = []
for name in repo_list:
try:
if(self.connection[name].authenticate(self.name, password=self.password)):
rtn += [name]
except Exception as e:
pass
return self.handle_return(rtn)
def delete_repo(self, repo, force=False):
repo_dir = '/user_data/%s/%s' %(self.user, repo)
if os.path.exists(repo_dir):
shutil.rmtree(repo_dir)
self.connection.drop_database(repo)
return self.handle_return(True)
def add_collaborator(self, repo, username, privileges, auto_in_future=True):
password = User.objects.get(username=username).password
Repo = self.connection[repo]
Repo.add_user(username,password=password)
coll = Repo['repo_info'].find_one({'tag':'collaborator'})
if coll.has_key('collaborator'):
coll['collaborator'].append(username)
else:
coll['collaborator'] = [username]
Repo['repo_info'].replace_one({'tag':'collaborator'}, coll)
def delete_collaborator(self, repo, username):
self.connection[repo].remove_user(username)
def list_tables(self, repo):
res = self.list_repos()
all_repos = [t[0] for t in res['tuples']]
if repo not in all_repos:
raise LookupError('Invalid repository name: %s' %(repo))
rtn = self.connection[repo].collection_names()
return self.handle_return(rtn)
def list_views(self, repo):
res = self.list_repos()
all_repos = [t[0] for t in res['tuples']]
if repo not in all_repos:
raise LookupError('Invalid repository name: %s' %(repo))
query = ''' SELECT table_name FROM information_schema.tables
WHERE table_schema = '%s' AND table_type = 'VIEW'
''' %(repo)
return self.execute_sql(query)
def get_schema(self, table):
tokens = table.split('.')
if len(tokens) < 2:
raise NameError(
"Invalid name: '%s'.\n"
"HINT: use <repo-name>.<table-name> " %(table))
query = ''' SELECT column_name, data_type
FROM information_schem a.columns
WHERE table_name = '%s'
AND table_schema = '%s'
''' %(tokens[-1], tokens[-2])
res = self.execute_sql(query)
if res['row_count'] < 1:
raise NameError("Invalid reference: '%s'.\n" %(table))
return res
def handle_return(self, param):
result = {
'status': True,
'row_count': 0,
'tuples': [],
'fields': []
}
if type(param) == bool:
result['status'] = param
else if type(param) == list:
result['tuples'] = [[t,''] for t in param]
#
# conn = self.connection
# c = conn.cursor()
# c.execute(query.strip(), params)
#
# try:
# result['tuples'] = c.fetchall()
# except:
# pass
#
# result['status'] = True
# result['row_count'] = c.rowcount
# if c.description:
# result['fields'] = [
# {'name': col[0], 'type': col[1]} for col in c.description]
#
# tokens = query.strip().split(' ', 2)
# c.close()
return result
def create_user(self, username, password, create_db):
users = self.connection['users']
users.create_collection(username)
users[username].insert({'username':username, 'password':password})
# if not create_db:
# return
#
# query = ''' CREATE DATABASE %s ''' %(username)
# self.execute_sql(query)
#
# query = ''' ALTER DATABASE %s OWNER TO %s ''' %(username, username)
# return self.execute_sql(query)
return
def remove_user(self, username):
users = self.connection['users']
users.drop_collection(username)
return handle_return(True)
def change_password(self, username, password):
user = self.connection['users'][username]
user_info = user.find_one({'username':username})
user_info['password'] = password
user.replace_one({'username':username}, user_info)
def list_collaborators(self, repo_base, repo):
return handle_return(self.connection[repo]['repo_info'].find_one({'tag':'collaborator'})['collaborator'])
def has_base_privilege(self, login, privilege):
query = ''' SELECT has_database_privilege('%s', '%s')
''' %(login, privilege)
return self.execute_sql(query)
def has_repo_privilege(self, login, repo, privilege):
query = ''' SELECT has_schema_privilege('%s', '%s', '%s')
''' %(login, repo, privilege)
return self.execute_sql(query)
def has_table_privilege(self, login, table, privilege):
query = ''' SELECT has_table_privilege('%s', '%s', '%s')
''' %(login, table, privilege)
return self.execute_sql(query)
def has_column_privilege(self, login, table, column, privilege):
query = ''' SELECT has_column_privilege('%s', '%s', '%s')
''' %(login, table, column, privilege)
return self.execute_sql(query)
def export_table(self, table_name, file_path, file_format='CSV',
delimiter=',', header=True):
header_option = 'HEADER' if header else ''
return self.execute_sql(
''' COPY %s TO '%s'
WITH %s %s DELIMITER '%s';
''' %(table_name, file_path, file_format, header_option, delimiter))
def export_query(self, query, file_path, file_format='CSV',
delimiter=',', header=True):
header_option = 'HEADER' if header else ''
return self.execute_sql(
''' COPY (%s) TO '%s'
WITH %s %s DELIMITER '%s';
''' %(query, file_path, file_format, header_option, delimiter))
def import_file(self, table_name, file_path, file_format='CSV',
delimiter=',', header=True, encoding='ISO-8859-1', quote_character='"'):
try:
header_option = 'HEADER' if header else ''
if quote_character == "'":
quote_character = "''"
escape = ''
if delimiter.startswith('\\'):
escape = 'E'
return self.execute_sql(
''' COPY %s FROM '%s'
WITH %s %s DELIMITER %s'%s' ENCODING '%s' QUOTE '%s';
''' %(table_name, file_path, file_format,
header_option, escape, delimiter, encoding, quote_character))
except Exception, e:
self.execute_sql(
''' DROP TABLE IF EXISTS %s;
''' %(table_name))
raise ImportError(e);
"""
Try importing using dbtruck.
"""
# return self.import_file_w_dbtruck(table_name, file_path)
def import_file_w_dbtruck(self, table_name, file_path):
from dbtruck.dbtruck import import_datafiles
from dbtruck.util import get_logger
from dbtruck.exporters.pg import PGMethods
dbsettings = {
'dbname': self.repo_base,
'hostname': self.host,
'username': self.user,
'password': self.password,
'port': self.port,
}
create_new = True
errfile = None
return import_datafiles([file_path], create_new, table_name, errfile,
PGMethods, **dbsettings)
| 31.967391 | 113 | 0.594356 |
df6685687a2ab1a27761f0715814cb260cf3c105 | 2,049 | py | Python | sightings/views.py | flannerykj/urbanapplause | c9b6c0f9a2f65b869fe1e6fa921972e7236e4fe5 | [
"MIT"
] | null | null | null | sightings/views.py | flannerykj/urbanapplause | c9b6c0f9a2f65b869fe1e6fa921972e7236e4fe5 | [
"MIT"
] | null | null | null | sightings/views.py | flannerykj/urbanapplause | c9b6c0f9a2f65b869fe1e6fa921972e7236e4fe5 | [
"MIT"
] | null | null | null | from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from django.views.generic.edit import UpdateView, CreateView, DeleteView
from .models import Sighting
from django.core.urlresolvers import reverse_lazy
from .forms import SightingForm
from django.views.generic import ListView
# Create your views here.
class IndexView(ListView):
    """Front page: lists every Sighting, newest first."""

    model = Sighting
    template_name = "index.html"

    def get_context_data(self, **kwargs):
        """Extend the default context with the ordered queryset plus the
        template chrome (page title / model label)."""
        context = super(IndexView, self).get_context_data(**kwargs)
        context.update({
            'object_list': Sighting.objects.all().order_by('-datetime'),
            'page_title': "Sightings",
            'model_type': "Sighting",
        })
        return context
class DetailView(generic.DetailView):
    """Read-only detail page for a single Sighting (pk taken from the URL)."""
    model = Sighting
    template_name = 'detail.html'
class AddSighting(CreateView):
    """Creation form for a Sighting; stamps the logged-in user as author."""

    model = Sighting
    form_class = SightingForm
    template_name = 'edit.html'

    def get_form_kwargs(self):
        """Forward every URL-conf parameter into the form's __init__."""
        form_kwargs = super(AddSighting, self).get_form_kwargs()
        form_kwargs.update(self.kwargs)
        return form_kwargs

    def get_success_url(self):
        return '/sightings/'

    def form_valid(self, form):
        """Attach the current user before persisting, then redirect."""
        sighting = form.save(commit=False)
        sighting.author = self.request.user
        sighting.save()
        return HttpResponseRedirect(self.get_success_url())

    def get_context_data(self, **kwargs):
        """Add the template chrome used by the shared edit template."""
        context = super(AddSighting, self).get_context_data(**kwargs)
        context.update({
            'page_title': "Add Sighting",
            'model_type': "Sighting",
            'cancel_url': "/sightings",
            'submit_text': "Save",
        })
        return context
| 34.728814 | 78 | 0.691557 |
d77f223e45faa60e2aa9ca3bd0b6a47ab5f3bcc9 | 10,272 | py | Python | saleor/dashboard/reports/urls.py | glosoftgroup/glosoftgroup-django-pos | b489c402939b9ebabd164c449e7da38fe849d550 | [
"BSD-3-Clause"
] | 2 | 2017-07-11T12:40:59.000Z | 2017-10-18T18:02:46.000Z | saleor/dashboard/reports/urls.py | glosoftgroup/glosoftgroup-django-pos | b489c402939b9ebabd164c449e7da38fe849d550 | [
"BSD-3-Clause"
] | 12 | 2017-06-19T07:20:41.000Z | 2022-03-15T19:03:33.000Z | saleor/dashboard/reports/urls.py | glosoftgroup/glosoftgroup-django-pos | b489c402939b9ebabd164c449e7da38fe849d550 | [
"BSD-3-Clause"
] | null | null | null | from django.conf.urls import url
from django.contrib.auth.decorators import login_required, permission_required
from . import views,charts, pdfs, purchase, sales_profit, sales_tax, sales_margin2, product_sales
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
    # --- Sales reports ---
    url(r'^$', permission_required('reports.view_sale_reports', login_url='not_found')
        (views.sales_reports), name='sales_reports'),
    url(r'^sales/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (views.sales_list), name='sales_list'),
    url(r'^prs/sales/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (product_sales.sales_list), name='product_sales_list'),
    url(r'^sales/prs/paginate/$', product_sales.sales_paginate, name='product_sales_paginate'),
    url(r'^sales/prs/search/$', product_sales.sales_search, name='product_sales_search'),
    url(r'^sales/prs/pdf/$', product_sales.sales_list_pdf, name='product_sales_list_pdf'),
    url(r'^detail/(?P<pk>[0-9]+)/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (views.sales_detail), name='sale-detail'),
    url(r'^revert/(?P<pk>[0-9]+)/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (views.sales_revert), name='sale-revert'),
    # --- Sales Tax ---
    url(r'^tx/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (sales_tax.sales_reports), name='sales_tax_reports'),
    url(r'^tx/sales/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (sales_tax.sales_list), name='sales_tax_list'),
    url(r'^tx/detail/(?P<pk>[0-9]+)/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (sales_tax.sales_detail), name='sale_tax_detail'),
    url(r'^tx/sales_search/$', sales_tax.sales_search, name='sales_tax_search'),
    url(r'^tx/sales_paginate/$', sales_tax.sales_paginate, name='sales_tax_paginate'),
    url(r'^tx/pdf/detail/(?P<pk>[0-9]+)/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (sales_tax.pdf_sale_tax_detail), name='pdf-sale-tax-detail'),
    url(r'^tx/reports/sales/list/pdf/$', sales_tax.sales_list_tax_pdf, name='reports_sales_tax_list_pdf'),
    # --- Sales Margin ---
    url(r'^mrg/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (sales_margin2.sales_reports), name='sales_margin_list_reports'),
    url(r'^mrg/sales/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (sales_margin2.sales_list), name='sales_margin_list'),
    url(r'^mrg/detail/(?P<pk>[0-9]+)/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (sales_margin2.sales_detail), name='sale_margin_detail'),
    url(r'^mrg/sales_search/$', sales_margin2.sales_search, name='sales_margin_search'),
    url(r'^mrg/sales_paginate/$', sales_margin2.sales_paginate, name='sales_margin_paginate'),
    url(r'^mrg/pdf/detail/(?P<pk>[0-9]+)/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (sales_margin2.pdf_sale_tax_detail), name='pdf-sale-margin-detail'),
    url(r'^mrg/sales/list/pdf/$', sales_margin2.sales_list_tax_pdf, name='reports_sales_margin_list_pdf'),
    url(r'^mrg/sls/itms/paginate/$', sales_margin2.sales_items_paginate, name='sales_margin_items_paginate'),
    url(r'^mrg/sls/itms/search/$', sales_margin2.sales_items_search, name='sales_margin_items_search'),
    url(r'^mrg/sales/list/items/pdf/$', sales_margin2.sales_list_margin_items_pdf, name='reports_sales_margin_items_pdf'),
    # --- PDF / CSV exports ---
    url(r'^reports/sales/list/pdf/$', pdfs.sales_list_pdf, name='reports_sales_list_pdf'),
    url(r'^reports/category/pdf/$', pdfs.sales_category, name='reports_sales_category_pdf'),
    url(r'^reports/items/pdf/$', pdfs.sales_items, name='reports_sales_items_pdf'),
    url(r'^reports/discount/pdf/$', pdfs.discount_items, name='reports_discount_items_pdf'),
    url(r'^reports/user/pdf/$', pdfs.sales_user, name='reports_sales_user_pdf'),
    url(r'^reports/till/pdf/$', pdfs.sales_tills, name='reports_sales_tills_pdf'),
    url(r'^pdf/detail/(?P<pk>[0-9]+)/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (pdfs.sales_detail), name='pdf-sale-detail'),
    url(r'^reports/sales/list/export_csv/$', views.sales_list_export_csv, name='reports_sales_list_export_csv'),
    # --- Products ---
    url(r'^product/$', permission_required('reports.view_products_reports', login_url='not_found')
        (views.product_reports), name='products_reports'),
    url(r'^product/search/$', views.products_search, name='products_search'),
    url(r'^products/paginate/$', views.products_paginate, name='products_paginate'),
    url(r'^prd/$', permission_required('reports.view_products_reports', login_url='not_found')
        (views.product_reorder), name='products_reorder'),
    url(r'^prs/$', views.products_reorder_search, name='products_reorder_search'),
    url(r'^prp/$', views.products_reorder_paginate, name='products_reorder_paginate'),
    url(r'^reports/prs/pdf/$', views.reorder_pdf, name='reports_reorder_pdf'),
    url(r'^reports/prs/export_csv/$', views.reorder_export_csv, name='reports_reorder_export_csv'),
    url(r'^reports/products/pdf/$', views.products_pdf, name='reports_products_pdf'),
    url(r'^reports/products/export_csv/$', views.products_export_csv, name='reports_products_export_csv'),
    # --- Purchases ---
    url(r'^purchases/$', permission_required('reports.view_purchase_reports', login_url='not_found')
        (purchase.purchase_reports), name='purchases_reports'),
    url(r'^purchases/paginate$', purchase.purchase_paginate, name='purchase_reports_paginate'),
    url(r'^purchases/search$', purchase.purchase_search, name='purchase_reports_search'),
    url(r'^reports/purchases/pdf/$', purchase.purchase_pdf, name='reports_purchase_pdf'),
    url(r'^reports/purchases/export_csv/$', purchase.purchase_export_csv, name='reports_purchases_export_csv'),
    # --- Balance sheet / charts ---
    url(r'^balancesheet_reports/$', permission_required('reports.view_balancesheet', login_url='not_found')
        (views.balancesheet_reports), name='balancesheet_reports'),
    url(r'^chart/$', views.get_dashboard_data, name='chart'),
    url(r'^sales_search/$', views.sales_search, name='sales_search'),
    url(r'^sales_paginate/$', views.sales_paginate, name='sales_paginate'),
    url(r'^cpdf/(?P<image>.+)/$', pdfs.chart_pdf, name='chart_pdf'),
    url(r'^csv/(?P<image>.+)/$', pdfs.sales_export_csv, name='chart_csv'),
    url(r'^summary/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (charts.sales_date_chart), name='sales_date_chart'),
    # url( r'^summary/image/(?P<image>.+)/$', charts.sales_date_chart, name = 'sales_date_chart' ),
    url(r'^productchart/$', permission_required('reports.view_products_reports', login_url='not_found')
        (charts.sales_product_chart), name='sales_product_chart'),
    url(r'^productchart/pnt/$', permission_required('reports.view_products_reports', login_url='not_found')
        (charts.sales_product_chart_paginate), name='sales_product_chart_paginate'),
    url(r'^discountchart/$', permission_required('reports.view_products_reports', login_url='not_found')
        (charts.sales_discount_chart), name='sales_discount_chart'),
    url(r'^discountchart/pnt/$', permission_required('reports.view_products_reports', login_url='not_found')
        (charts.sales_discount_chart_paginate), name='sales_discount_chart_paginate'),
    url(r'^ptd/$', charts.get_product_sale_details, name='get_product_sale_details'),
    url(r'^category/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (charts.sales_category_chart), name='sales_category_chart'),
    url(r'^category/pnt/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (charts.sales_category_chart_paginate), name='sales_category_chart_paginate'),
    url(r'^catimage/(?P<image>.+)/$', charts.sales_category_chart, name='sales_category_chart'),
    url(r'^catd/$', charts.get_category_sale_details, name='get_category_sale_details'),
    url(r'^userchart/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (charts.sales_user_chart), name='sales_user_chart'),
    # Fix: this pattern was missing the leading '^' anchor (r'user/tchart/pnt/$'),
    # so it matched any URL *ending* with that path, unlike every sibling route.
    url(r'^user/tchart/pnt/$', permission_required('reports.view_products_reports', login_url='not_found')
        (charts.sales_user_chart_paginate), name='sales_user_chart_paginate'),
    url(r'^utd/$', charts.get_user_sale_details, name='get_user_sale_details'),
    url(r'^till/chart/$', permission_required('reports.view_sale_reports', login_url='not_found')
        (charts.sales_terminal_chart), name='sales_terminal_chart'),
    url(r'^till/tchart/pnt/$', permission_required('reports.view_products_reports', login_url='not_found')
        (charts.sales_till_chart_paginate), name='sales_till_chart_paginate'),
    url(r'^ttd/$', charts.get_terminal_sale_details, name='get_terminal_sale_details'),
    url(r'^weekfilter/$', charts.get_sales_by_week, name='get_sales_by_week'),
    url(r'^sales/profit/$', sales_profit.sales_profit, name='sales_profit'),
    url(r'^sales/tax/report/$', sales_profit.sales_tax, name='sales_tax'),
]
if settings.DEBUG:
    # Serve static assets directly from Django in development only.
    # urlpatterns += [ url(r'^static/(?P<path>.*)$', serve)] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | 75.529412 | 127 | 0.687208 |
39fe604a79c5d1be144840d1fa0379f4cdbc6f3c | 9,225 | py | Python | src/inst_CARM_VIS.py | hlruh/serval | f2b43737aa865f7b5f9fb1fdb5d25483f37b068c | [
"MIT"
] | 28 | 2017-10-31T22:26:05.000Z | 2022-03-15T18:18:18.000Z | src/inst_CARM_VIS.py | hlruh/serval | f2b43737aa865f7b5f9fb1fdb5d25483f37b068c | [
"MIT"
] | 50 | 2018-01-11T13:47:03.000Z | 2022-03-11T08:57:58.000Z | src/inst_CARM_VIS.py | hlruh/serval | f2b43737aa865f7b5f9fb1fdb5d25483f37b068c | [
"MIT"
] | 8 | 2019-04-24T14:56:33.000Z | 2022-01-18T08:28:15.000Z | from read_spec import *
#from read_spec import Inst
# Instrument parameters
name = 'CARM_VIS'      # instrument identifier (CARMENES visual channel)
obsname = 'ca'         # observatory key (Calar Alto)
obsloc = dict(lat=37.2236, lon= -2.5463, elevation=2168.)  # site coordinates [deg, deg, m]
iomax = 61 # NAXIS2 -- number of spectral orders (matches the (61, 4096) bad-pixel map in data())
snmax = 500            # presumably an upper S/N clip -- confirm semantics in read_spec
oset = '10:52'         # default order subset to process (slice notation)
maskfile = 'telluric_mask_carm_short.dat'  # default telluric mask file
def scan(self, s, pfits=True):
   """
   Read one CARMENES-VIS CARACAL product header and populate `self`.

   Returns
   -------
   namedtuple('spectrum', 'w f berv bjd blaze drift timeid sn55 ')
   w - wavelength
   f - flux
   berv - Barycentric Earth Radial Velocity
   bjd - Barycentric Julian Day
   blaze - Blaze filename
   drift - Used RV Drift
   sn55 - S_N order center55
   Example
   -------
   >>> read_carm_vis(filename)
   """
   HIERARCH = 'HIERARCH '
   hdulist = self.hdulist = pyfits.open(s) # slow 30 ms
   if 1:
      self.header = hdr = hdulist[0].header
      # Drop this keyword so it cannot leak into downstream header copies.
      if 'HIERARCH CARACAL DRIFT FP REF' in hdr: del hdr['HIERARCH CARACAL DRIFT FP REF']
      self.instname = hdr['INSTRUME'][0:4] + '_VIS'
      self.drsberv = hdr.get('HIERARCH CARACAL BERV', np.nan)
      # BJD for stars, JD for for calibration products
      self.drsbjd = hdr.get('HIERARCH CARACAL BJD', np.nan) + 2400000
      #if isinstance(self.drsbjd, str):
      #self.drsbjd = 0.0 # workaround for MJD-OBS bugs (empty or missing fractional digits) @2016-Jan
      self.dateobs = hdr['DATE-OBS']
      # dateobs is used by MH, but date-obs seems more reliable from FILENAME
      # CARACAL computes mjd-obs also from FILENAME
      # Rebuild the timestamp from FILENAME (car-YYYYMMDDThhhmmmss...).
      self.dateobs = hdr['FILENAME'].replace("h",":").replace("m",":")
      self.dateobs = self.dateobs[4:8]+"-"+self.dateobs[8:10]+"-"+self.dateobs[10:21]
      self.mjd = hdr.get('HIERARCH CARACAL MJD-OBS')
      if not self.mjd:
         import warnings
         warnings.warn("Warning: keyword HIERARCH CARACAL MJD-OBS not found! This was implemented in CARACAL v2.00."+
                       "Please use lastest products.")
      # Prefer the SERVAL-computed FP drift, fall back to the CARACAL one.
      self.drift = hdr.get(HIERARCH+'CARACAL SERVAL FP RV', hdr.get(HIERARCH+'CARACAL DRIFT FP RV', np.nan))
      self.e_drift = hdr.get(HIERARCH+'CARACAL SERVAL FP E_RV', hdr.get(HIERARCH+'CARACAL DRIFT FP E_RV', np.nan))
      # Extraction method: FOX (optimal) keywords present, else LXT.
      self.fox = HIERARCH+'CARACAL FOX XWD' in hdr
      self.sn55 = hdr.get(HIERARCH+'CARACAL '+('FOX' if self.fox else 'LXT')+' SNR 36', np.nan) # @ 746nm
      sn25 = hdr.get(HIERARCH+'CARACAL FOX SNR 25', np.nan)
      sn30 = hdr.get(HIERARCH+'CARACAL FOX SNR 30', np.nan)
      # Abnormally high S/N in order 25 relative to order 30 -> flag as LED.
      if sn25 > 70 and sn25 > 10*sn30: # hig
         self.flag |= sflag.led
      #print(sn25, sn30, self.sn55, )
      self.fileid = hdr.get('FILENAME', 0)
      self.timeid = self.fileid
      # Second assignment wins: calibration mode is taken from CARACAL FIB.
      self.calmode = hdr.get('SOURCE', '') #.split("_")[3] #fileid[fileid.index('(')+1:fileid.index(')')]
      self.calmode = hdr.get(HIERARCH+'CARACAL FIB', '')
      #calmodedict = {'objcal':'OBJ,CAL','objsky':'OBJ,SKY'}
      #if calmode in calmodedict: calmode = calmodedict[calmode]
      self.ccf.rvc = hdr.get(HIERARCH+'CARACAL SERVAL RV', np.nan)
      self.ccf.err_rvc = hdr.get(HIERARCH+'CARACAL SERVAL E_RV', np.nan)
      #self.ccf.rvc = hdr.get(HIERARCH+'CARACAL CCF RV', np.nan)
      #self.ccf.err_rvc = hdr.get(HIERARCH+'CARACAL CCF E_RV', np.nan)
      self.ra = hdr['RA']
      self.de = hdr['DEC']
      self.airmass = hdr.get('AIRMASS', np.nan)
      self.exptime = hdr['EXPTIME']
      self.tmmean = hdr.get(HIERARCH+'CARACAL TMEAN', 0.0)
      if self.exptime: self.tmmean /= self.exptime # normalise
      # Missing/zero flux-weighted mean time: assume mid exposure.
      if self.tmmean == 0: self.tmmean = 0.5
def data(self, orders, pfits=True):
   """Return (w, f, e, bpmap) for the requested `orders` slice.

   w/f/e are wavelength, flux and error read from the FITS extensions;
   bpmap is a per-pixel bad-pixel bitmap combining NaN flags, a static
   hand-curated map of detector defects, and saturation/negative checks.
   """
   hdulist = self.hdulist
   if 1:  # read order data
      f = hdulist['SPEC'].section[orders]
      w = hdulist['WAVE'].section[orders]
      e = hdulist['SIG'].section[orders]
      bpmap = np.isnan(f).astype(np.uint64)   # flag 1 for nan
      # Static bad-pixel map for the full (order, pixel) frame; the comments
      # name the exposure where each blemish was identified.
      bpmap0 = np.zeros((61,4096), dtype=np.uint64)
      bpmap0[14:38,[2453-3, 2453-2, 2453-1, 2453, 2453+1, 2453+2, 2453+3]] |= 1
      bpmap0[14:38,1643] |= 1 # ghost of hotspot tail
      bpmap0[14:38,2459] |= 1 # spikes of hotspot satellite (bug not correct due to bug in v2.00)
      bpmap0[15:41,3374] |= 1 # displaced column; ignore by marking as nan
      bpmap0[28,3395:3400] |= flag.sky # car-20160701T00h49m36s-sci-gtoc-vis.fits
      bpmap0[34,838:850] |= flag.sky # car-20160803T22h46m41s-sci-gtoc-vis.fits
      bpmap0[34,2035:2044] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[34,3150:3161] |= flag.sky # car-20160803T22h46m41s-sci-gtoc-vis.fits
      bpmap0[35,403:410] |= flag.sky # car-20160803T22h46m41s-sci-gtoc-vis
      bpmap0[35,754:759] |= flag.sky # car-20170419T03h27m48s-sci-gtoc-vis
      bpmap0[35,1083:1093] |= flag.sky # car-20160803T22h46m41s-sci-gtoc-vis
      bpmap0[35,1944:1956] |= flag.sky # car-20160803T22h46m41s-sci-gtoc-vis
      bpmap0[35,2710:2715] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[35,3050:3070] |= flag.sky # car-20160803T22h46m41s-sci-gtoc-vis
      bpmap0[35,3706:3717] |= flag.sky # car-20160803T22h46m41s-sci-gtoc-vis
      bpmap0[35,3706:3717] |= flag.sky # car-20160803T22h46m41s-sci-gtoc-vis (duplicate line; harmless, |= is idempotent)
      bpmap0[36,303:308] |= flag.sky # car-20170419T03h27m48s-sci-gtoc-vis
      bpmap0[36,312:317] |= flag.sky # car-20170419T03h27m48s-sci-gtoc-vis
      bpmap0[36,1311:1315] |= flag.sky # car-20170419T03h27m48s-sci-gtoc-vis
      bpmap0[36,1325:1329] |= flag.sky # car-20170419T03h27m48s-sci-gtoc-vis
      bpmap0[37,1326:1343] |= flag.sky # car-20170419T03h27m48s-sci-gtoc-vis
      bpmap0[39,1076:1082] |= flag.sky # car-20170626T02h00m17s-sci-gtoc-vis
      bpmap0[39,1204:1212] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[39,1236:1243] |= flag.sky # car-20170419T03h27m48s-sci-gtoc-vis
      bpmap0[39,1463:1468] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[39,2196:2203] |= flag.sky # car-20160520T03h10m13s-sci-gtoc-vis.fits
      bpmap0[39,2493:2504] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[39,3705:3717] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[40,2765:2773] |= flag.sky # car-20170419T03h27m48s-sci-gtoc-vis
      bpmap0[40,3146:3153] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[40,3556:3564] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[41,486:491] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[41,495:501] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[41,1305:1315] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[42,480:490] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[42,1316:1330] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[42,2363:2368] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[42,2375:2382] |= flag.sky # car-20170509T03h05m21s-sci-gtoc-vis
      bpmap0[44,3355:3361] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[46,311:321] |= flag.sky # car-20160701T00h49m36s-sci-gtoc-vis.fits
      bpmap0[46,835:845] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[46,1156:1171] |= flag.sky # car-20160701T00h49m36s-sci-gtoc-vis.fits
      bpmap0[46,1895:1905] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[46,2212:2232] |= flag.sky # car-20160701T00h49m36s-sci-gtoc-vis.fits
      bpmap0[47,2127:2133] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[47,2218:2223] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[47,2260:2266] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[47,2313:2319] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[47,3111:3116] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[47,3267:3272] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[47,3316:3321] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[47,3432:3438] |= flag.sky # car-20170509T03h05m21s-sci-gtoc-vis
      bpmap0[47,3480:3488] |= flag.sky # car-20160714T00h18m29s-sci-gtoc-vis
      bpmap0[47,3658:3665] |= flag.sky # car-20170509T03h05m21s-sci-gtoc-vis
      bpmap0[49,1008:1017] |= flag.sky # car-20160701T00h49m36s-sci-gtoc-vis.fits
      bpmap0[49,2532:2544] |= flag.sky # car-20160701T00h49m36s-sci-gtoc-vis.fits
      bpmap0[49,3046:3056] |= flag.sky # car-20160701T00h49m36s-sci-gtoc-vis.fits
      bpmap0[49,3574:3588] |= flag.sky # car-20160701T00h49m36s-sci-gtoc-vis.fits
      # interpolate bad columns, they mess up a lot the creation of the template
      # We do this only when we read all order (preRVs), but not for coadding (single orders)
      if orders == np.s_[:]:
         # the hotspot
         #f[14:38,2453-3: 2453+4] = f[14:38,2453-4][:,np.newaxis] + (f[14:38,2453+4]-f[14:38,2453-4]).reshape(24,1)*np.arange(1,8).reshape(1,7)/8.
         #f[15:41,3374] = np.nan # 0.5*(f[15:41,3374-1]+f[15:41,3374+1])
         pass
      bpmap |= bpmap0[orders]
      bpmap = bpmap.astype(int)
      with np.errstate(invalid='ignore'):
         # arrgh, in newer numpy version comparison with nan raises a warning
         if self.fox:
            # FOX extraction: rescale flux and error by a factor of 10.
            e = e * 10.
            f = f * 10.
         else:
            # LXT extraction has no error array; build a rough photon+readout estimate.
            e = np.sqrt(5.*10 + np.abs(f))
         bpmap[f>300000] |= flag.sat
         bpmap[f < -3*e] |= flag.neg
         bpmap[e==0] |= flag.nan
      return w, f, e, bpmap
| 52.414773 | 147 | 0.64878 |
99a9e97ca069bb6e4786fed4581b4c53f919d128 | 20,987 | py | Python | test/lib/base.py | jhserrano/gobgp | 6bcfe1186d6347fc8d59f566fa36231c82aa49f8 | [
"Apache-2.0"
] | null | null | null | test/lib/base.py | jhserrano/gobgp | 6bcfe1186d6347fc8d59f566fa36231c82aa49f8 | [
"Apache-2.0"
] | null | null | null | test/lib/base.py | jhserrano/gobgp | 6bcfe1186d6347fc8d59f566fa36231c82aa49f8 | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2015 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import time
import itertools
from fabric.api import local, lcd
from fabric import colors
from fabric.state import env, output
try:
from docker import Client
except ImportError:
from docker import APIClient as Client
import netaddr
# Per-run namespacing for container/bridge names and scratch directories.
DEFAULT_TEST_PREFIX = ''
DEFAULT_TEST_BASE_DIR = '/tmp/gobgp'
TEST_PREFIX = DEFAULT_TEST_PREFIX
TEST_BASE_DIR = DEFAULT_TEST_BASE_DIR

# BGP finite-state-machine state names as reported by the daemons.
BGP_FSM_IDLE = 'idle'
BGP_FSM_ACTIVE = 'active'
BGP_FSM_ESTABLISHED = 'established'

# BGP path-attribute type codes (RFC 4271 and extensions).
BGP_ATTR_TYPE_ORIGIN = 1
BGP_ATTR_TYPE_AS_PATH = 2
BGP_ATTR_TYPE_NEXT_HOP = 3
BGP_ATTR_TYPE_MULTI_EXIT_DISC = 4
BGP_ATTR_TYPE_LOCAL_PREF = 5
BGP_ATTR_TYPE_COMMUNITIES = 8
BGP_ATTR_TYPE_ORIGINATOR_ID = 9
BGP_ATTR_TYPE_CLUSTER_LIST = 10
BGP_ATTR_TYPE_MP_REACH_NLRI = 14
BGP_ATTR_TYPE_EXTENDED_COMMUNITIES = 16

# Restart timers (seconds) used by graceful-restart scenario tests.
GRACEFUL_RESTART_TIME = 30
LONG_LIVED_GRACEFUL_RESTART_TIME = 30

# Flow-spec component names mapped to their numeric component types.
FLOWSPEC_NAME_TO_TYPE = {
    "destination": 1,
    "source": 2,
    "protocol": 3,
    "port": 4,
    "destination-port": 5,
    "source-port": 6,
    "icmp-type": 7,
    "icmp-code": 8,
    "tcp-flags": 9,
    "packet-length": 10,
    "dscp": 11,
    "fragment": 12,
    "label": 13,
    "ether-type": 14,
    "source-mac": 15,
    "destination-mac": 16,
    "llc-dsap": 17,
    "llc-ssap": 18,
    "llc-control": 19,
    "snap": 20,
    "vid": 21,
    "cos": 22,
    "inner-vid": 23,
    "inner-cos": 24,
}

# with this label, we can do filtering in `docker ps` and `docker network prune`
TEST_CONTAINER_LABEL = 'gobgp-test'
TEST_NETWORK_LABEL = TEST_CONTAINER_LABEL

# Make fabric raise instead of aborting the process, and quiet its stderr.
env.abort_exception = RuntimeError
output.stderr = False
def community_str(i):
    """Render an integer as a colon-separated list of 16-bit decimal fields,
    i.e. the textual BGP Community / Large Community form.

    For example, 13107300 == (200 << 16) | 100 becomes "200:100".
    A non-positive input yields the empty string.
    """
    chunks = []
    value = i
    while value > 0:
        value, low16 = value >> 16, value & 0xffff
        chunks.insert(0, str(low16))
    return ':'.join(chunks)
def wait_for_completion(f, timeout=120):
    """Poll *f* once per second until it returns truthy.

    Raises Exception('timeout') once *timeout* seconds of polling have
    elapsed without success.
    """
    waited = 0
    while not f():
        time.sleep(1)
        waited += 1
        if waited >= timeout:
            raise Exception('timeout')
def try_several_times(f, t=3, s=1):
    """Call *f* up to *t* times, sleeping *s* seconds after each failure.

    Returns f()'s value on the first success.  Only RuntimeError counts as a
    retryable failure; anything else propagates immediately.  If every
    attempt fails, the last RuntimeError is re-raised.
    """
    last_error = RuntimeError('try_several_times: no attempt succeeded')
    for _ in range(t):
        try:
            r = f()
        except RuntimeError as e:
            # Python 3 unbinds the "as" target at the end of the except
            # clause, so the original `raise e` after the loop would hit an
            # unbound name; keep our own reference for the final re-raise.
            last_error = e
            time.sleep(s)
        else:
            return r
    raise last_error
def assert_several_times(f, t=30, s=1):
    """Call *f* up to *t* times until it stops raising AssertionError,
    sleeping *s* seconds between attempts.

    Returns None on the first clean run.  Any non-assertion exception
    propagates immediately.  If every attempt asserts, the last
    AssertionError is re-raised.
    """
    last_error = AssertionError('assert_several_times: no attempt succeeded')
    for _ in range(t):
        try:
            f()
        except AssertionError as e:
            # Python 3 unbinds the "as" target when the except clause ends,
            # so the original `raise e` after the loop would hit an unbound
            # name; keep our own reference for the final re-raise.
            last_error = e
            time.sleep(s)
        else:
            return
    raise last_error
def get_bridges():
    """Names of all docker networks currently known to the daemon."""
    listing = try_several_times(
        lambda: local("docker network ls | awk 'NR > 1{print $2}'", capture=True))
    return listing.split('\n')
def get_containers():
    """Names of all docker containers (running or not) on this host."""
    listing = try_several_times(
        lambda: local("docker ps -a | awk 'NR > 1 {print $NF}'", capture=True))
    return listing.split('\n')
class CmdBuffer(list):
    """A list of text fragments joined with *delim* when rendered.

    Fragments are appended with the ``<<`` operator:

        c = CmdBuffer()
        c << 'line one'
        c << 'line two'
        str(c)  # -> the two lines joined by the delimiter
    """

    def __init__(self, delim='\n'):
        super(CmdBuffer, self).__init__()
        self.delim = delim  # separator used by __str__

    def __lshift__(self, value):
        """Append *value*; returns self so ``c << a << b`` chains.

        (Previously returned None, which silently broke chained use.)
        """
        self.append(value)
        return self

    def __str__(self):
        return self.delim.join(self)
def make_gobgp_ctn(tag='gobgp', local_gobgp_path='', from_image='osrg/quagga'):
    """Build a docker image *tag* containing gobgpd/gobgp compiled from the
    checkout at *local_gobgp_path* (default: the current directory), layered
    on top of *from_image*.

    :raises Exception: if *local_gobgp_path* does not contain 'gobgp'.
    """
    if local_gobgp_path == '':
        local_gobgp_path = os.getcwd()

    c = CmdBuffer()
    c << 'FROM {0}'.format(from_image)
    c << 'RUN go get -u github.com/golang/dep/cmd/dep'
    c << 'RUN mkdir -p /go/src/github.com/osrg/'
    c << 'ADD gobgp /go/src/github.com/osrg/gobgp/'
    c << 'RUN cd /go/src/github.com/osrg/gobgp && dep ensure && go install ./gobgpd ./gobgp'

    # Fix: str.rindex() raises ValueError when 'gobgp' is absent, so the
    # original `if rindex < 0` guard could never fire.  rfind() returns -1
    # instead, letting the intended, more helpful error be raised.
    rindex = local_gobgp_path.rfind('gobgp')
    if rindex < 0:
        raise Exception('{0} seems not gobgp dir'.format(local_gobgp_path))

    workdir = local_gobgp_path[:rindex]
    with lcd(workdir):
        # Write a throwaway Dockerfile next to the gobgp checkout, build, clean up.
        local('echo \'{0}\' > Dockerfile'.format(str(c)))
        local('docker build -t {0} .'.format(tag))
        local('rm Dockerfile')
class Bridge(object):
    """A docker bridge network plus bookkeeping to hand out host addresses.

    When *with_ip* is set, the bridge owns an IPv4/IPv6 subnet and serves
    consecutive host addresses through next_ip_address().
    NOTE: this module is Python 2 (generator ``.next()`` below).
    """
    def __init__(self, name, subnet='', with_ip=True, self_ip=False):
        self.name = name
        if TEST_PREFIX != '':
            # Namespace the bridge per test run.
            self.name = '{0}_{1}'.format(TEST_PREFIX, name)
        self.with_ip = with_ip
        if with_ip:
            self.subnet = netaddr.IPNetwork(subnet)

            def _f():
                # Lazily iterate every address of the subnet in order.
                for host in self.subnet:
                    yield host
            self._ip_generator = _f()
            # throw away first network address
            self.next_ip_address()

        def f():
            # (Re)create the docker network; drop any stale one first.
            # NOTE(review): reads self.subnet even when with_ip is False --
            # presumably callers always pass a subnet; confirm.
            if self.name in get_bridges():
                self.delete()
            v6 = ''
            if self.subnet.version == 6:
                v6 = '--ipv6'
            self.id = local('docker network create --driver bridge {0} --subnet {1} --label {2} {3}'.format(v6, subnet, TEST_NETWORK_LABEL, self.name), capture=True)
        try_several_times(f)

        self.self_ip = self_ip
        if self_ip:
            # Also assign an address to the bridge device on the host itself.
            self.ip_addr = self.next_ip_address()
            try_several_times(lambda: local("ip addr add {0} dev {1}".format(self.ip_addr, self.name)))
        self.ctns = []

        # Note: Here removes routes from the container host to prevent traffic
        # from going through the container host's routing table.
        if with_ip:
            local('ip route del {0}; echo $?'.format(subnet),
                  capture=True)
            # When IPv6, 2 routes will be installed to the container host's
            # routing table.
            if self.subnet.version == 6:
                local('ip -6 route del {0}; echo $?'.format(subnet),
                      capture=True)

    def next_ip_address(self):
        # Python 2 generator protocol; would be next(...) on Python 3.
        return "{0}/{1}".format(self._ip_generator.next(),
                                self.subnet.prefixlen)

    def addif(self, ctn):
        """Connect container *ctn* to this bridge and record the address
        docker assigned to it."""
        _name = ctn.next_if_name()
        self.ctns.append(ctn)
        local("docker network connect {0} {1}".format(self.name, ctn.docker_name()))
        # Look up the address docker gave this container on our network.
        i = [x for x in Client(timeout=60, version='auto').inspect_network(self.id)['Containers'].values() if x['Name'] == ctn.docker_name()][0]
        if self.subnet.version == 4:
            addr = i['IPv4Address']
        else:
            addr = i['IPv6Address']
        ctn.ip_addrs.append(('eth1', addr, self.name))

    def delete(self):
        """Remove the underlying docker network."""
        try_several_times(lambda: local("docker network rm {0}".format(self.name)))
class Container(object):
    """Thin wrapper around one docker container used by the test harness.

    Tracks the interface/address tuples assigned to the container and offers
    helpers to start/stop it and run commands inside it.
    NOTE: this module is Python 2 (print statement in pipework()).
    """
    def __init__(self, name, image):
        self.name = name
        self.image = image
        self.shared_volumes = []   # list of (host_dir, container_dir) bind mounts
        self.ip_addrs = []         # list of (ifname, cidr_addr, bridge_name)
        self.ip6_addrs = []
        self.is_running = False
        self.eths = []             # interface names handed out so far
        self.tcpdump_running = False
        # Remove any stale container left over from a previous run.
        if self.docker_name() in get_containers():
            self.remove()

    def docker_name(self):
        # Prefix the container name per test run unless using the default prefix.
        if TEST_PREFIX == DEFAULT_TEST_PREFIX:
            return '{0}'.format(self.name)
        return '{0}_{1}'.format(TEST_PREFIX, self.name)

    def next_if_name(self):
        """Allocate the next interface name (eth1, eth2, ...)."""
        name = 'eth{0}'.format(len(self.eths) + 1)
        self.eths.append(name)
        return name

    def run(self):
        """docker-run this container (privileged, detached) and record the
        addresses docker assigned on eth0."""
        c = CmdBuffer(' ')
        c << "docker run --privileged=true"
        for sv in self.shared_volumes:
            c << "-v {0}:{1}".format(sv[0], sv[1])
        c << "--name {0} -l {1} -id {2}".format(self.docker_name(), TEST_CONTAINER_LABEL, self.image)
        self.id = try_several_times(lambda: local(str(c), capture=True))
        self.is_running = True
        self.local("ip li set up dev lo")
        # Parse `ip a` output to learn the eth0 addresses.
        for line in self.local("ip a show dev eth0", capture=True).split('\n'):
            if line.strip().startswith("inet "):
                elems = [e.strip() for e in line.strip().split(' ')]
                self.ip_addrs.append(('eth0', elems[1], 'docker0'))
            elif line.strip().startswith("inet6 "):
                elems = [e.strip() for e in line.strip().split(' ')]
                self.ip6_addrs.append(('eth0', elems[1], 'docker0'))
        return 0

    def stop(self):
        """Stop the container immediately (no grace period)."""
        ret = try_several_times(lambda: local("docker stop -t 0 " + self.docker_name(), capture=True))
        self.is_running = False
        return ret

    def remove(self):
        """Force-remove the container."""
        ret = try_several_times(lambda: local("docker rm -f " + self.docker_name(), capture=True))
        self.is_running = False
        return ret

    def pipework(self, bridge, ip_addr, intf_name=""):
        """Attach this running container to *bridge* with *ip_addr* using the
        external `pipework` helper; defaults the interface name to eth1."""
        if not self.is_running:
            print colors.yellow('call run() before pipeworking')
            return
        c = CmdBuffer(' ')
        c << "pipework {0}".format(bridge.name)
        if intf_name != "":
            c << "-i {0}".format(intf_name)
        else:
            intf_name = "eth1"
        c << "{0} {1}".format(self.docker_name(), ip_addr)
        self.ip_addrs.append((intf_name, ip_addr, bridge.name))
        try_several_times(lambda: local(str(c)))

    def local(self, cmd, capture=False, stream=False, detach=False, tty=True):
        """Run *cmd* inside the container.

        stream=True uses the docker API exec and returns its (possibly
        streaming) output; otherwise shells out to `docker exec` via the
        module-level fabric `local` (which this method name shadows).
        """
        if stream:
            dckr = Client(timeout=120, version='auto')
            i = dckr.exec_create(container=self.docker_name(), cmd=cmd)
            return dckr.exec_start(i['Id'], tty=tty, stream=stream, detach=detach)
        else:
            flag = '-d' if detach else ''
            return local('docker exec {0} {1} {2}'.format(flag, self.docker_name(), cmd), capture)

    def get_pid(self):
        """Host PID of the container's init process, or -1 if not running."""
        if self.is_running:
            cmd = "docker inspect -f '{{.State.Pid}}' " + self.docker_name()
            return int(local(cmd, capture=True))
        return -1

    def start_tcpdump(self, interface=None, filename=None, expr='tcp port 179'):
        """Start a detached tcpdump inside the container, writing into the
        first shared volume; returns the host-side path of the dump file."""
        if self.tcpdump_running:
            raise Exception('tcpdump already running')
        self.tcpdump_running = True
        if not interface:
            interface = "eth0"
        if not filename:
            filename = '{0}.dump'.format(interface)
        # Writes to the container-side mount; the return value is the
        # corresponding host-side path.
        self.local("tcpdump -U -i {0} -w {1}/{2} {3}".format(interface, self.shared_volumes[0][1], filename, expr), detach=True)
        return '{0}/{1}'.format(self.shared_volumes[0][0], filename)

    def stop_tcpdump(self):
        self.local("pkill tcpdump")
        self.tcpdump_running = False
class BGPContainer(Container):
WAIT_FOR_BOOT = 1
RETRY_INTERVAL = 5
    def __init__(self, name, asn, router_id, ctn_image_name):
        """Create the per-router scratch directory and record ASN/router-id.

        The directory under TEST_BASE_DIR/TEST_PREFIX is wiped and recreated
        (mode 777 so the container user can write into the bind mount).
        """
        self.config_dir = '/'.join((TEST_BASE_DIR, TEST_PREFIX, name))
        local('if [ -e {0} ]; then rm -rf {0}; fi'.format(self.config_dir))
        local('mkdir -p {0}'.format(self.config_dir))
        local('chmod 777 {0}'.format(self.config_dir))
        self.asn = asn
        self.router_id = router_id
        self.peers = {}    # peer container -> peering options dict (see add_peer)
        self.routes = {}   # prefix -> list of path dicts (see add_route)
        self.policies = {}
        super(BGPContainer, self).__init__(name, ctn_image_name)
    def __repr__(self):
        # Only the identifying fields; peers/routes would be too noisy.
        return str({'name': self.name, 'asn': self.asn, 'router_id': self.router_id})
    def run(self):
        """Write the daemon config, start the container, and return the
        number of seconds callers should wait for boot."""
        self.create_config()
        super(BGPContainer, self).run()
        return self.WAIT_FOR_BOOT
def peer_name(self, peer):
if peer not in self.peers:
raise Exception('not found peer {0}'.format(peer.router_id))
name = self.peers[peer]['interface']
if name == '':
name = self.peers[peer]['neigh_addr'].split('/')[0]
return name
    def update_peer(self, peer, **kwargs):
        """Re-register an existing peer with new options (see add_peer)."""
        if peer not in self.peers:
            raise Exception('peer not exists')
        self.add_peer(peer, **kwargs)
    def add_peer(self, peer, passwd=None, vpn=False, is_rs_client=False,
                 policies=None, passive=False,
                 is_rr_client=False, cluster_id=None,
                 flowspec=False, bridge='', reload_config=True, as2=False,
                 graceful_restart=None, local_as=None, prefix_limit=None,
                 v6=False, llgr=None, vrf='', interface='', allow_as_in=0,
                 remove_private_as=None, replace_peer_as=False, addpath=False,
                 treat_as_withdraw=False, remote_as=None):
        """Register a BGP peering towards *peer* and (optionally) reload.

        Unless an unnumbered *interface* is given, the neighbor/local
        addresses are discovered by pairing this container's addresses with
        the peer's and picking the first pair on the same bridge.  All the
        remaining keyword options are stored verbatim in self.peers[peer]
        for the config generator of the concrete daemon subclass.
        """
        neigh_addr = ''
        local_addr = ''
        # Candidate (ours, theirs) address pairs; the third tuple element is
        # the bridge name, used to check mutual reachability.
        it = itertools.product(self.ip_addrs, peer.ip_addrs)
        if v6:
            it = itertools.product(self.ip6_addrs, peer.ip6_addrs)
        if interface == '':
            for me, you in it:
                if bridge != '' and bridge != me[2]:
                    continue
                if me[2] == you[2]:
                    neigh_addr = you[1]
                    local_addr = me[1]
                    if v6:
                        # Scope the IPv6 address with our interface name.
                        addr, mask = local_addr.split('/')
                        local_addr = "{0}%{1}/{2}".format(addr, me[0], mask)
                    break
            if neigh_addr == '':
                raise Exception('peer {0} seems not ip reachable'.format(peer))
        if not policies:
            policies = {}
        self.peers[peer] = {'neigh_addr': neigh_addr,
                            'interface': interface,
                            'passwd': passwd,
                            'vpn': vpn,
                            'flowspec': flowspec,
                            'is_rs_client': is_rs_client,
                            'is_rr_client': is_rr_client,
                            'cluster_id': cluster_id,
                            'policies': policies,
                            'passive': passive,
                            'local_addr': local_addr,
                            'as2': as2,
                            'graceful_restart': graceful_restart,
                            'local_as': local_as,
                            'prefix_limit': prefix_limit,
                            'llgr': llgr,
                            'vrf': vrf,
                            'allow_as_in': allow_as_in,
                            'remove_private_as': remove_private_as,
                            'replace_peer_as': replace_peer_as,
                            'addpath': addpath,
                            'treat_as_withdraw': treat_as_withdraw,
                            'remote_as': remote_as or peer.asn}
        if self.is_running and reload_config:
            self.create_config()
            self.reload_config()
def del_peer(self, peer, reload_config=True):
    """Forget *peer*; reconfigure the running daemon when requested.

    Raises KeyError when *peer* was never added.
    """
    self.peers.pop(peer)
    if self.is_running and reload_config:
        self.create_config()
        self.reload_config()
def disable_peer(self, peer):
    """Abstract hook: administratively shut down the session with *peer*.

    Subclasses must override.  NotImplementedError (a subclass of
    Exception, so existing broad handlers still catch it) matches the
    convention already used by OSPFContainer.create_config.
    """
    raise NotImplementedError('implement disable_peer() method')
def enable_peer(self, peer):
    """Abstract hook: re-enable the session with *peer*.

    Subclasses must override.  NotImplementedError is a subclass of
    Exception, so callers catching Exception are unaffected.
    """
    raise NotImplementedError('implement enable_peer() method')
def log(self):
    """Concatenate and return every log file under the config directory."""
    cmd = 'cat {0}/*.log'.format(self.config_dir)
    return local(cmd, capture=True)
def _extract_routes(self, families):
routes = {}
for prefix, paths in self.routes.items():
if paths and paths[0]['rf'] in families:
routes[prefix] = paths
return routes
def add_route(self, route, rf='ipv4', attribute=None, aspath=None,
              community=None, med=None, extendedcommunity=None,
              nexthop=None, matchs=None, thens=None,
              local_pref=None, identifier=None, reload_config=True):
    """Record a path for *route* and reconfigure the daemon if running.

    For flowspec families the stored prefix is the rendered match clause
    ('match <conditions>') rather than the route key itself.
    """
    if 'flowspec' in rf:
        prefix = ' '.join(['match'] + matchs)
    else:
        prefix = route
    entry = {
        'prefix': prefix,
        'rf': rf,
        'attr': attribute,
        'next-hop': nexthop,
        'as-path': aspath,
        'community': community,
        'med': med,
        'local-pref': local_pref,
        'extended-community': extendedcommunity,
        'identifier': identifier,
        'matchs': matchs,
        'thens': thens,
    }
    self.routes.setdefault(route, []).append(entry)
    if self.is_running and reload_config:
        self.create_config()
        self.reload_config()
def del_route(self, route, identifier=None, reload_config=True):
    """Drop the paths of *route* whose identifier equals *identifier*.

    Unknown routes are silently ignored; the route key itself is kept
    (possibly with an empty path list), matching historical behaviour.
    """
    try:
        paths = self.routes[route]
    except KeyError:
        return
    self.routes[route] = [p for p in paths if p['identifier'] != identifier]
    if self.is_running and reload_config:
        self.create_config()
        self.reload_config()
def add_policy(self, policy, peer, typ, default='accept', reload_config=True):
    """Define *policy* and attach it to *peer* for direction *typ*.

    *default* ('accept' or 'reject') is the action applied to routes the
    policy does not match.  The three steps must run in this order: the
    default must exist before the policy is assigned.  Running daemons
    are reconfigured when *reload_config* is true.
    """
    self.set_default_policy(peer, typ, default)
    self.define_policy(policy)
    self.assign_policy(peer, policy, typ)
    if self.is_running and reload_config:
        self.create_config()
        self.reload_config()
def set_default_policy(self, peer, typ, default):
    """Record the default action ('accept'/'reject') for *peer* in
    direction *typ* ('in', 'out', 'import' or 'export').

    Raises Exception for an unknown direction or action.
    """
    if typ not in ('in', 'out', 'import', 'export') or default not in ('reject', 'accept'):
        raise Exception('wrong type or default')
    self.peers[peer].setdefault('default-policy', {})[typ] = default
def define_policy(self, policy):
    """Register *policy* (a dict carrying a 'name' key) in the policy table."""
    name = policy['name']
    self.policies[name] = policy
def assign_policy(self, peer, policy, typ):
    """Bind an already-defined *policy* to *peer* for direction *typ*.

    Raises Exception when the peer is unknown or the policy has not been
    registered via define_policy().
    """
    try:
        peer_info = self.peers[peer]
    except KeyError:
        raise Exception('peer {0} not found'.format(peer.name))
    name = policy['name']
    if name not in self.policies:
        raise Exception('policy {0} not found'.format(name))
    peer_info['policies'][typ] = policy
def get_local_rib(self, peer, rf):
    """Abstract hook: return *peer*'s local RIB for family *rf*.

    Subclasses must override.  NotImplementedError is a subclass of
    Exception, so existing broad handlers still catch it.
    """
    raise NotImplementedError('implement get_local_rib() method')
def get_global_rib(self, rf):
    """Abstract hook: return the global RIB for family *rf*.

    Subclasses must override.
    """
    raise NotImplementedError('implement get_global_rib() method')
def get_neighbor_state(self, peer_id):
    """Abstract hook: return the BGP session state for *peer_id*.

    Subclasses must override.  Note the original message said
    'get_neighbor()' — kept the method's real name for accuracy.
    """
    raise NotImplementedError('implement get_neighbor_state() method')
def get_reachability(self, prefix, timeout=20):
    """Ping the address part of *prefix* from inside the container until
    it answers, polling once a second.

    Returns True on success; raises Exception('timeout') after *timeout*
    seconds and Exception for address families other than v4/v6.
    """
    version = netaddr.IPNetwork(prefix).version
    addr = prefix.split('/')[0]
    if version == 4:
        ping_cmd = 'ping'
    elif version == 6:
        ping_cmd = 'ping6'
    else:
        raise Exception('unsupported route family: {0}'.format(version))
    cmd = '/bin/bash -c "/bin/{0} -c 1 -w 1 {1} | xargs echo"'.format(ping_cmd, addr)
    interval = 1
    count = 0
    while True:
        res = self.local(cmd, capture=True)
        print(colors.yellow(res))
        # BUG FIX: the original tested `... and '0% packet loss'`, i.e. the
        # truthiness of a literal, so packet loss was never actually checked.
        # The leading space stops ' 0% packet loss' from also matching
        # '100% packet loss' in the (xargs-collapsed) ping summary line.
        if '1 packets received' in res and ' 0% packet loss' in res:
            break
        time.sleep(interval)
        count += interval
        if count >= timeout:
            raise Exception('timeout')
    return True
def wait_for(self, expected_state, peer, timeout=120):
    """Poll get_neighbor_state(*peer*) once a second until it reports
    *expected_state*; raise Exception('timeout') after *timeout* seconds.

    Uses the function-call form of print, valid on both Python 2 and 3
    (the original used a py2-only print statement).
    """
    interval = 1
    count = 0
    while True:
        state = self.get_neighbor_state(peer)
        y = colors.yellow
        print(y("{0}'s peer {1} state: {2}".format(self.router_id,
                                                   peer.router_id,
                                                   state)))
        if state == expected_state:
            return
        time.sleep(interval)
        count += interval
        if count >= timeout:
            raise Exception('timeout')
def add_static_route(self, network, next_hop):
    """Install a kernel static route to *network* via *next_hop* inside
    the container."""
    self.local('/sbin/ip route add {0} via {1}'.format(network, next_hop),
               capture=True)
def set_ipv6_forward(self):
    """Enable IPv6 packet forwarding inside the container via sysctl."""
    self.local('sysctl -w net.ipv6.conf.all.forwarding=1', capture=True)
def create_config(self):
    """Abstract hook: write the daemon's configuration files.

    Subclasses must override.
    """
    raise NotImplementedError('implement create_config() method')
def reload_config(self):
    """Abstract hook: make the running daemon pick up new configuration.

    Subclasses must override.
    """
    raise NotImplementedError('implement reload_config() method')
class OSPFContainer(Container):
    """Base class for containers running an OSPF daemon.

    Concrete subclasses implement create_config().  `networks` maps an
    advertised prefix to its OSPF area, e.g.::

        networks = {'192.168.1.0/24': '0.0.0.0'}  # <network>: <area>
    """
    WAIT_FOR_BOOT = 1

    def __init__(self, name, ctn_image_name):
        self.config_dir = '/'.join((TEST_BASE_DIR, TEST_PREFIX, name))
        # Recreate the config directory world-writable so the daemon in
        # the container can write to the bind-mounted path.
        for cmd in ('if [ -e {0} ]; then rm -rf {0}; fi',
                    'mkdir -p {0}',
                    'chmod 777 {0}'):
            local(cmd.format(self.config_dir))
        self.networks = {}
        super(OSPFContainer, self).__init__(name, ctn_image_name)

    def __repr__(self):
        return str({'name': self.name, 'networks': self.networks})

    def run(self):
        self.create_config()
        super(OSPFContainer, self).run()
        return self.WAIT_FOR_BOOT

    def create_config(self):
        raise NotImplementedError
| 34.236542 | 165 | 0.570877 |
9d94bfd0fcf9489b2dfb3c4314a93c00e5a19949 | 79,677 | py | Python | tests/test_grain.py | bbc/rd-apmm-python-lib-mediagrains | 84c9de511cc53418c277867eaf143f2cc8730d02 | [
"ECL-2.0",
"Apache-2.0"
] | 6 | 2018-03-26T23:49:34.000Z | 2021-12-23T10:06:09.000Z | tests/test_grain.py | bbc/rd-apmm-python-lib-mediagrains | 84c9de511cc53418c277867eaf143f2cc8730d02 | [
"ECL-2.0",
"Apache-2.0"
] | 34 | 2018-03-21T16:45:10.000Z | 2022-03-28T13:27:34.000Z | tests/test_grain.py | bbc/rd-apmm-python-lib-mediagrains | 84c9de511cc53418c277867eaf143f2cc8730d02 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | #
# Copyright 2018 British Broadcasting Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from asynctest import TestCase
import uuid
from mediagrains import Grain, VideoGrain, AudioGrain, CodedVideoGrain, CodedAudioGrain, EventGrain
from mediagrains.cogenums import CogFrameFormat, CogFrameLayout, CogAudioFormat
from mediatimestamp.immutable import Timestamp, TimeOffset, TimeRange
from asynctest import mock
from fractions import Fraction
import json
from copy import copy, deepcopy
# Fixed identifiers and TAI timestamps shared by every test case below.
src_id = uuid.UUID("f18ee944-0841-11e8-b0b0-17cef04bd429")
flow_id = uuid.UUID("f79ce4da-0841-11e8-9a5b-dfedb11bafeb")
cts = Timestamp.from_tai_sec_nsec("417798915:0")   # creation timestamp
ots = Timestamp.from_tai_sec_nsec("417798915:5")   # origin timestamp
sts = Timestamp.from_tai_sec_nsec("417798915:10")  # sync timestamp
# Canonical metadata for a 1920x1080 10-bit 4:2:2 planar video grain, reused
# by the Grain()/VideoGrain() factory tests below.  The components describe a
# Y plane at 4096 bytes/line followed by two half-width chroma planes at
# 2048 bytes/line (total payload 8192*1080 bytes).
VIDEOGRAIN_TEST_METADATA = {
    "@_ns": "urn:x-ipstudio:ns:0.1",
    "grain": {
        "grain_type": "video",
        "source_id": str(src_id),
        "flow_id": str(flow_id),
        "origin_timestamp": str(ots),
        "sync_timestamp": str(sts),
        "creation_timestamp": str(cts),
        "rate": {
            "numerator": 25,
            "denominator": 1,
        },
        "duration": {
            "numerator": 1,
            "denominator": 25,
        },
        "cog_frame": {
            "format": CogFrameFormat.S16_422_10BIT,
            "width": 1920,
            "height": 1080,
            "layout": CogFrameLayout.FULL_FRAME,
            "extension": 0,
            "components": [
                {
                    'stride': 4096,
                    'width': 1920,
                    'height': 1080,
                    'offset': 0,
                    'length': 4096*1080
                },
                {
                    'stride': 2048,
                    'width': 960,
                    'height': 1080,
                    'offset': 4096*1080,
                    'length': 2048*1080
                },
                {
                    'stride': 2048,
                    'width': 960,
                    'height': 1080,
                    'offset': 4096*1080 + 2048*1080,
                    'length': 2048*1080
                }
            ]
        }
    },
}
class ConvertsToTimestamp (object):
    """Minimal object implementing the __mediatimestamp__ conversion protocol.

    Used to verify that grain constructors and setters accept anything
    convertible to a Timestamp, not just Timestamp instances.  (Annotations
    are quoted so the class is importable without mediatimestamp.)
    """
    def __init__(self, ts: 'Timestamp'):
        # Held verbatim and handed back unchanged on conversion.
        self.ts = ts

    def __mediatimestamp__(self) -> 'Timestamp':
        return self.ts
class TestGrain (TestCase):
    def test_empty_grain_creation(self):
        """With only ids supplied, every timestamp defaults to the mocked creation time."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(src_id, flow_id)

        self.assertEqual(grain.grain_type, "empty")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, cts)
        self.assertEqual(grain.final_origin_timestamp(), cts)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(cts))
        self.assertEqual(grain.sync_timestamp, cts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(0, 1))
        self.assertIsNone(grain.media_rate)
        self.assertEqual(grain.duration, Fraction(0, 1))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain, (grain.meta, grain.data))
        self.assertIsNone(grain.data)
        self.assertEqual(grain.length, 0)
        self.assertEqual(grain.expected_length, 0)
    def test_empty_grain_creation_with_missing_data(self):
        """An empty metadata dict still yields an empty grain stamped with creation time."""
        cts = Timestamp.from_tai_sec_nsec("417798915:0")
        meta = {}

        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(meta)

        self.assertEqual(grain.grain_type, "empty")
        self.assertEqual(grain.creation_timestamp, cts)
    def test_empty_grain_creation_with_odd_data(self):
        """Metadata values may be rich objects (UUID/Timestamp/Fraction), not just strings."""
        meta = {
            "grain": {
                "source_id": src_id,
                "flow_id": flow_id,
                "origin_timestamp": ots,
                "sync_timestamp": sts,
                "creation_timestamp": cts,
                "rate": Fraction(25, 1),
                "duration": Fraction(1, 25),
                "length": 23
            }
        }

        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(meta)

        self.assertEqual(grain.grain_type, "empty")
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.sync_timestamp, sts)
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        # An empty grain carries no payload, but "length" in the metadata
        # is still surfaced as expected_length.
        self.assertEqual(grain.length, 0)
        self.assertEqual(grain.expected_length, 23)
    def test_empty_grain_creation_with_ots(self):
        """When only an origin timestamp is given, the sync timestamp copies it."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(src_id, flow_id, origin_timestamp=ots)

        self.assertEqual(grain.grain_type, "empty")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.sync_timestamp, ots)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(0, 1))
        self.assertEqual(grain.duration, Fraction(0, 1))
        self.assertEqual(grain.timelabels, [])
    def test_empty_grain_creation_with_convertable_ots(self):
        """Any object with __mediatimestamp__ is accepted as an origin timestamp."""
        converts_to_ots = ConvertsToTimestamp(ots)

        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(src_id, flow_id, origin_timestamp=converts_to_ots)

        self.assertEqual(grain.grain_type, "empty")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.sync_timestamp, ots)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(0, 1))
        self.assertEqual(grain.duration, Fraction(0, 1))
        self.assertEqual(grain.timelabels, [])
    def test_empty_grain_creation_with_ots_and_sts(self):
        """Explicit origin and sync timestamps are both honoured independently."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts)

        self.assertEqual(grain.grain_type, "empty")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.sync_timestamp, sts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(0, 1))
        self.assertEqual(grain.duration, Fraction(0, 1))
        self.assertEqual(grain.timelabels, [])
    def test_empty_grain_castable_to_tuple(self):
        """A grain behaves as a (meta, data) pair under len() and indexing."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts)

        self.assertEqual(len(grain), 2)
        self.assertIsInstance(grain[0], dict)
        self.assertIsNone(grain[1])

        with self.assertRaises(IndexError):
            grain[2]

        self.assertIsInstance(tuple(grain[0]), tuple)
    def test_empty_grain_with_meta(self):
        """A fully-populated metadata dict (including timelabels) round-trips through Grain."""
        meta = {
            "@_ns": "urn:x-ipstudio:ns:0.1",
            "grain": {
                "grain_type": "empty",
                "source_id": str(src_id),
                "flow_id": str(flow_id),
                "origin_timestamp": str(ots),
                "sync_timestamp": str(sts),
                "creation_timestamp": str(cts),
                "rate": {
                    "numerator": 25,
                    "denominator": 1,
                },
                "duration": {
                    "numerator": 1,
                    "denominator": 25,
                },
                "timelabels": [{
                    "tag": "timelabel1",
                    "timelabel": {
                        "frames_since_midnight": 0,
                        "frame_rate_numerator": 25,
                        "frame_rate_denominator": 1,
                        "drop_frame": False
                    }
                }]
            }
        }

        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(meta)

        self.assertEqual(grain.grain_type, "empty")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.sync_timestamp, sts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [
            {
                "tag": "timelabel1",
                "timelabel": {
                    "frames_since_midnight": 0,
                    "frame_rate_numerator": 25,
                    "frame_rate_denominator": 1,
                    "drop_frame": False
                }
            }
        ])
        self.assertEqual(repr(grain), "Grain({!r})".format(meta))
    def test_empty_grain_setters(self):
        """Every writable grain attribute (ids, timestamps, rate, data, timelabels) can be mutated."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts)

        new_src_id = uuid.UUID("18d1a52e-0a67-11e8-ba57-776dc8ceabcb")
        new_flow_id = uuid.UUID("1ed4cfb4-0a67-11e8-b803-733e0764879a")
        new_cts = Timestamp.from_tai_sec_nsec("417798915:15")
        new_ots = Timestamp.from_tai_sec_nsec("417798915:20")
        new_sts = Timestamp.from_tai_sec_nsec("417798915:25")
        new_grain_type = "potato"
        converts_to_ots = ConvertsToTimestamp(ots)

        grain.grain_type = new_grain_type
        self.assertEqual(grain.grain_type, new_grain_type)

        grain.source_id = new_src_id
        self.assertEqual(grain.source_id, new_src_id)

        grain.flow_id = new_flow_id
        self.assertEqual(grain.flow_id, new_flow_id)

        grain.origin_timestamp = new_ots
        self.assertEqual(grain.origin_timestamp, new_ots)
        self.assertEqual(grain.final_origin_timestamp(), new_ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(new_ots))

        grain.sync_timestamp = new_sts
        self.assertEqual(grain.sync_timestamp, new_sts)

        grain.creation_timestamp = new_cts
        self.assertEqual(grain.creation_timestamp, new_cts)

        # Setters also accept objects implementing __mediatimestamp__.
        grain.origin_timestamp = converts_to_ots
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))

        # Numeric assignments are coerced to Fraction.
        grain.rate = 50
        self.assertEqual(grain.rate, Fraction(50, 1))

        grain.duration = 0.25
        self.assertEqual(grain.duration, Fraction(1, 4))

        grain.data = bytearray(10)
        self.assertEqual(len(grain.data), 10)
        self.assertEqual(grain.length, 10)

        # Timelabels: add, mutate, replace wholesale, and delete.
        self.assertEqual(grain.timelabels, [])
        grain.add_timelabel('test', 1, 25)
        self.assertEqual(len(grain.timelabels), 1)
        self.assertEqual(grain.timelabels[0].tag, 'test')
        self.assertEqual(grain.timelabels[0].count, 1)
        self.assertEqual(grain.timelabels[0].rate, Fraction(25, 1))
        self.assertFalse(grain.timelabels[0].drop_frame)

        grain.timelabels[0]['tag'] = 'potato'
        self.assertEqual(grain.timelabels[0].tag, 'potato')

        # Only the 'tag' and 'timelabel' keys are valid on a timelabel.
        with self.assertRaises(KeyError):
            grain.timelabels[0]['potato'] = 3

        self.assertEqual(len(grain.timelabels[0]), 2)

        grain.timelabels[0] = {
            'tag': 'other_tag',
            'timelabel': {
                'frames_since_midnight': 7,
                'frame_rate_numerator': 30000,
                'frame_rate_denominator': 1001,
                'drop_frame': True
            }
        }
        self.assertEqual(len(grain.timelabels), 1)
        self.assertEqual(grain.timelabels[0].tag, 'other_tag')
        self.assertEqual(grain.timelabels[0].count, 7)
        self.assertEqual(grain.timelabels[0].rate, Fraction(30000, 1001))
        self.assertTrue(grain.timelabels[0].drop_frame)

        del grain.timelabels[0]
        self.assertEqual(len(grain.timelabels), 0)

        with self.assertRaises(IndexError):
            del grain.timelabels[0]

        with self.assertRaises(IndexError):
            grain.timelabels[0] = {
                'tag': 'other_tag',
                'timelabel': {
                    'frames_since_midnight': 7,
                    'frame_rate_numerator': 30000,
                    'frame_rate_denominator': 1001,
                    'drop_frame': True
                }
            }
    def test_video_grain_create_YUV422_10bit(self):
        """VideoGrain builds correct plane strides/offsets and payload for 1080p S16 4:2:2."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = VideoGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
                               cog_frame_format=CogFrameFormat.S16_422_10BIT,
                               width=1920, height=1080, cog_frame_layout=CogFrameLayout.FULL_FRAME)

        self.assertEqual(grain.grain_type, "video")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.sync_timestamp, sts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.media_rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.format, CogFrameFormat.S16_422_10BIT)
        self.assertEqual(grain.width, 1920)
        self.assertEqual(grain.height, 1080)
        self.assertEqual(grain.layout, CogFrameLayout.FULL_FRAME)
        self.assertEqual(grain.extension, 0)
        self.assertIsNone(grain.source_aspect_ratio)
        self.assertIsNone(grain.pixel_aspect_ratio)

        # Y plane: 2 bytes/sample full width; Cb/Cr: half width.
        self.assertEqual(len(grain.components), 3)
        self.assertEqual(grain.components[0].stride, 1920*2)
        self.assertEqual(grain.components[0].width, 1920)
        self.assertEqual(grain.components[0].height, 1080)
        self.assertEqual(grain.components[0].offset, 0)
        self.assertEqual(grain.components[0].length, 1920*1080*2)
        self.assertEqual(len(grain.components[0]), 5)

        self.assertEqual(grain.components[1].stride, 1920)
        self.assertEqual(grain.components[1].width, 1920/2)
        self.assertEqual(grain.components[1].height, 1080)
        self.assertEqual(grain.components[1].offset, 1920*1080*2)
        self.assertEqual(grain.components[1].length, 1920*1080)
        self.assertEqual(len(grain.components[1]), 5)

        self.assertEqual(grain.components[2].stride, 1920)
        self.assertEqual(grain.components[2].width, 1920/2)
        self.assertEqual(grain.components[2].height, 1080)
        self.assertEqual(grain.components[2].offset, 1920*1080*2 + 1920*1080)
        self.assertEqual(grain.components[2].length, 1920*1080)
        self.assertEqual(len(grain.components[2]), 5)

        self.assertIsInstance(grain.data, bytearray)
        self.assertEqual(len(grain.data), 1920*1080*2*2)

        self.assertEqual(repr(grain), "VideoGrain({!r},< binary data of length {} >)".format(grain.meta, len(grain.data)))

        # Components also compare equal to plain dicts.
        self.assertEqual(grain.components, [{'stride': 1920*2,
                                             'width': 1920,
                                             'height': 1080,
                                             'offset': 0,
                                             'length': 1920*1080*2},
                                            {'stride': 1920,
                                             'width': 1920/2,
                                             'height': 1080,
                                             'offset': 1920*1080*2,
                                             'length': 1920*1080},
                                            {'stride': 1920,
                                             'width': 1920/2,
                                             'height': 1080,
                                             'offset': 1920*1080*3,
                                             'length': 1920*1080}])

        self.assertEqual(grain.expected_length, 1920*1080*4)
def test_video_grain_create_sizes(self):
for (fmt, complens) in [
(CogFrameFormat.S32_444, (1920*1080*4, 1920*1080*4, 1920*1080*4)),
(CogFrameFormat.S32_422, (1920*1080*4, 1920*1080*2, 1920*1080*2)),
(CogFrameFormat.S32_420, (1920*1080*4, 1920*1080, 1920*1080)),
(CogFrameFormat.S16_444_10BIT, (1920*1080*2, 1920*1080*2, 1920*1080*2)),
(CogFrameFormat.S16_422_10BIT, (1920*1080*2, 1920*1080, 1920*1080)),
(CogFrameFormat.S16_420_10BIT, (1920*1080*2, 1920*1080/2, 1920*1080/2)),
(CogFrameFormat.U8_444, (1920*1080, 1920*1080, 1920*1080)),
(CogFrameFormat.U8_422, (1920*1080, 1920*1080/2, 1920*1080/2)),
(CogFrameFormat.U8_420, (1920*1080, 1920*1080/4, 1920*1080/4)),
(CogFrameFormat.UYVY, (1920*1080*2,)),
(CogFrameFormat.RGB, (1920*1080*3,)),
(CogFrameFormat.RGBA, (1920*1080*4,)),
(CogFrameFormat.v210, (40*128*1080,)),
(CogFrameFormat.v216, (1920*1080*4,)),
(CogFrameFormat.UNKNOWN, ()),
]:
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = VideoGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
cog_frame_format=fmt,
width=1920, height=1080, cog_frame_layout=CogFrameLayout.FULL_FRAME)
self.assertEqual(len(grain.components), len(complens))
offset = 0
for (complen, comp) in zip(complens, grain.components):
self.assertEqual(complen, comp.length)
self.assertEqual(offset, comp.offset)
offset += complen
self.assertEqual(len(grain.data), offset)
    def test_video_component_setters(self):
        """Component entries support attribute/item mutation, append, replace and delete."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = VideoGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
                               cog_frame_format=CogFrameFormat.S16_422_10BIT,
                               width=1920, height=1080, cog_frame_layout=CogFrameLayout.FULL_FRAME)

        grain.components[0].stride = 23
        self.assertEqual(grain.components[0].stride, 23)
        grain.components[0].width = 23
        self.assertEqual(grain.components[0].width, 23)
        grain.components[0].height = 23
        self.assertEqual(grain.components[0].height, 23)
        grain.components[0].offset = 23
        self.assertEqual(grain.components[0].offset, 23)
        grain.components[0].length = 23
        self.assertEqual(grain.components[0].length, 23)

        # Item-style access writes through to the same fields.
        grain.components[0]['length'] = 17
        self.assertEqual(grain.components[0].length, 17)

        grain.components.append({'stride': 1920,
                                 'width': 1920,
                                 'height': 1080,
                                 'offset': 1920*1080*2*2,
                                 'length': 1920*1080})
        self.assertEqual(grain.components[3].stride, 1920)
        self.assertEqual(grain.components[3].width, 1920)
        self.assertEqual(grain.components[3].height, 1080)
        self.assertEqual(grain.components[3].offset, 1920*1080*2*2)
        self.assertEqual(grain.components[3].length, 1920*1080)

        self.assertEqual(len(grain.components), 4)

        del grain.components[3]
        self.assertEqual(len(grain.components), 3)

        grain.components[0] = {'stride': 1920,
                               'width': 1920,
                               'height': 1080,
                               'offset': 1920*1080*2*2,
                               'length': 1920*1080}
        self.assertEqual(grain.components[0].stride, 1920)
        self.assertEqual(grain.components[0].width, 1920)
        self.assertEqual(grain.components[0].height, 1080)
        self.assertEqual(grain.components[0].offset, 1920*1080*2*2)
        self.assertEqual(grain.components[0].length, 1920*1080)
    def test_video_grain_with_sparse_meta(self):
        """Metadata lacking a cog_frame section yields zeroed/UNKNOWN frame attributes."""
        meta = {
            "@_ns": "urn:x-ipstudio:ns:0.1",
            "grain": {
                "grain_type": "video",
                "source_id": str(src_id),
                "flow_id": str(flow_id),
                "origin_timestamp": str(ots),
                "sync_timestamp": str(sts),
                "creation_timestamp": str(cts),
                "rate": {
                    "numerator": 25,
                    "denominator": 1,
                },
                "duration": {
                    "numerator": 1,
                    "denominator": 25,
                }
            },
        }

        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = VideoGrain(meta)

        self.assertEqual(grain.format, CogFrameFormat.UNKNOWN)
        self.assertEqual(grain.width, 0)
        self.assertEqual(grain.height, 0)
        self.assertEqual(grain.layout, CogFrameLayout.UNKNOWN)
        self.assertEqual(grain.extension, 0)
        self.assertEqual(len(grain.components), 0)
    def test_video_grain_with_numeric_identifiers(self):
        """Raw integer values for format/layout are coerced to the matching Cog enums."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            # 0x2805 == CogFrameFormat.S16_422_10BIT, 0 == FULL_FRAME.
            grain = VideoGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
                               cog_frame_format=0x2805,
                               width=1920, height=1080,
                               cog_frame_layout=0)

        self.assertEqual(grain.grain_type, "video")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.sync_timestamp, sts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.format, CogFrameFormat.S16_422_10BIT)
        self.assertEqual(grain.width, 1920)
        self.assertEqual(grain.height, 1080)
        self.assertEqual(grain.layout, CogFrameLayout.FULL_FRAME)
        self.assertEqual(grain.extension, 0)
        self.assertIsNone(grain.source_aspect_ratio)
        self.assertIsNone(grain.pixel_aspect_ratio)

        self.assertEqual(len(grain.components), 3)
        self.assertEqual(grain.components[0].stride, 1920*2)
        self.assertEqual(grain.components[0].width, 1920)
        self.assertEqual(grain.components[0].height, 1080)
        self.assertEqual(grain.components[0].offset, 0)
        self.assertEqual(grain.components[0].length, 1920*1080*2)

        self.assertEqual(grain.components[1].stride, 1920)
        self.assertEqual(grain.components[1].width, 1920/2)
        self.assertEqual(grain.components[1].height, 1080)
        self.assertEqual(grain.components[1].offset, 1920*1080*2)
        self.assertEqual(grain.components[1].length, 1920*1080)

        self.assertEqual(grain.components[2].stride, 1920)
        self.assertEqual(grain.components[2].width, 1920/2)
        self.assertEqual(grain.components[2].height, 1080)
        self.assertEqual(grain.components[2].offset, 1920*1080*2 + 1920*1080)
        self.assertEqual(grain.components[2].length, 1920*1080)

        self.assertIsInstance(grain.data, bytearray)
        self.assertEqual(len(grain.data), 1920*1080*2*2)

        self.assertEqual(repr(grain), "VideoGrain({!r},< binary data of length {} >)".format(grain.meta, len(grain.data)))

        self.assertEqual(dict(grain.components[0]), {'stride': 1920*2,
                                                     'width': 1920,
                                                     'height': 1080,
                                                     'offset': 0,
                                                     'length': 1920*1080*2})
    def test_video_grain_setters(self):
        """Video-specific attributes accept both enum members and raw numeric values."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = VideoGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
                               cog_frame_format=CogFrameFormat.S16_422_10BIT,
                               width=1920, height=1080, cog_frame_layout=CogFrameLayout.FULL_FRAME)

        grain.format = CogFrameFormat.S16_444
        self.assertEqual(grain.format, CogFrameFormat.S16_444)
        grain.format = 0x0207
        self.assertEqual(grain.format, CogFrameFormat.VC2)

        grain.width = 2
        self.assertEqual(grain.width, 2)

        grain.height = 13
        self.assertEqual(grain.height, 13)

        grain.layout = CogFrameLayout.SEPARATE_FIELDS
        self.assertEqual(grain.layout, CogFrameLayout.SEPARATE_FIELDS)
        grain.layout = 0x02
        self.assertEqual(grain.layout, CogFrameLayout.SINGLE_FIELD)

        grain.extension = 1
        self.assertEqual(grain.extension, 1)

        # Aspect ratios are coerced to Fraction.
        grain.source_aspect_ratio = 50
        self.assertEqual(grain.source_aspect_ratio, Fraction(50, 1))

        grain.pixel_aspect_ratio = 0.25
        self.assertEqual(grain.pixel_aspect_ratio, Fraction(1, 4))
    def test_grain_fails_with_no_metadata(self):
        """Grain(None) must be rejected rather than silently accepted."""
        with self.assertRaises(AttributeError):
            Grain(None)
    def test_grain_fails_with_bad_src_id(self):
        """A non-UUID source id (here an int) must be rejected."""
        with self.assertRaises(AttributeError):
            Grain([], 0x44)
    def test_video_grain_fails_with_no_metadata(self):
        """VideoGrain(None) must be rejected rather than silently accepted."""
        with self.assertRaises(AttributeError):
            VideoGrain(None)
    def test_video_grain_create_with_ots_and_no_sts(self):
        """With no sync timestamp, it defaults to the supplied origin timestamp."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = VideoGrain(src_id, flow_id, origin_timestamp=ots,
                               cog_frame_format=CogFrameFormat.S16_422_10BIT,
                               width=1920, height=1080, cog_frame_layout=CogFrameLayout.FULL_FRAME)

        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.sync_timestamp, ots)
        self.assertEqual(grain.creation_timestamp, cts)
    def test_video_grain_create_with_no_ots_and_no_sts(self):
        """With neither timestamp supplied, both default to the creation time."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = VideoGrain(src_id, flow_id,
                               cog_frame_format=CogFrameFormat.S16_422_10BIT,
                               width=1920, height=1080, cog_frame_layout=CogFrameLayout.FULL_FRAME)

        self.assertEqual(grain.origin_timestamp, cts)
        self.assertEqual(grain.final_origin_timestamp(), cts)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(cts))
        self.assertEqual(grain.sync_timestamp, cts)
        self.assertEqual(grain.creation_timestamp, cts)
    def test_videograin_meta_is_json_serialisable(self):
        """grain.meta must survive a json dumps/loads round-trip unchanged."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = VideoGrain(src_id, flow_id, origin_timestamp=ots,
                               cog_frame_format=CogFrameFormat.S16_422_10BIT,
                               width=1920, height=1080, cog_frame_layout=CogFrameLayout.FULL_FRAME)

        self.assertEqual(json.loads(json.dumps(grain.meta)), grain.meta)
    def test_grain_makes_videograin(self):
        """The generic Grain() factory dispatches video metadata to a video grain."""
        meta = VIDEOGRAIN_TEST_METADATA
        data = bytearray(8192*1080)

        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(meta, data=data)

        self.assertEqual(grain.grain_type, "video")
        self.assertEqual(grain.format, CogFrameFormat.S16_422_10BIT)
        self.assertEqual(grain.meta, meta)
        self.assertEqual(grain.data, data)
    def test_grain_makes_videograin_without_data(self):
        """Without a payload, the video grain reports zero length but the metadata's expected size."""
        meta = VIDEOGRAIN_TEST_METADATA

        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(meta)

        self.assertEqual(grain.grain_type, "video")
        self.assertEqual(grain.format, CogFrameFormat.S16_422_10BIT)
        self.assertEqual(grain.meta, meta)
        self.assertEqual(grain.length, 0)
        self.assertEqual(grain.expected_length, 8192*1080)
    async def test_videograin_with_async_data(self):
        """A grain built from an awaitable exposes no data until it is awaited."""
        meta = VIDEOGRAIN_TEST_METADATA

        async def _get_data():
            _data = bytearray(8192*1080)
            for n in range(0, 16):
                _data[n] = n & 0xFF
            return _data

        data_awaitable = _get_data()
        expected_data = await _get_data()

        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(meta, data=data_awaitable)

        self.assertEqual(grain.grain_type, "video")
        self.assertEqual(grain.format, CogFrameFormat.S16_422_10BIT)
        self.assertEqual(grain.meta, meta)

        # Payload is unavailable until the grain itself is awaited.
        self.assertIsNone(grain.data)
        self.assertEqual((await grain)[:16], expected_data[:16])
        self.assertEqual(grain.data[:16], expected_data[:16])
    async def test_videograin_with_async_data_as_acm(self):
        """A grain with awaitable data can also be used as an async context manager."""
        meta = VIDEOGRAIN_TEST_METADATA

        async def _get_data():
            _data = bytearray(8192*1080)
            for n in range(0, 16):
                _data[n] = n & 0xFF
            return _data

        data_awaitable = _get_data()
        expected_data = await _get_data()

        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(meta, data=data_awaitable)

        self.assertEqual(grain.grain_type, "video")
        self.assertEqual(grain.format, CogFrameFormat.S16_422_10BIT)
        self.assertEqual(grain.meta, meta)

        self.assertIsNone(grain.data)
        # Entering the async context resolves the awaitable payload.
        async with grain as _grain:
            self.assertEqual(_grain.data[:16], expected_data[:16])
    def test_video_grain_normalise(self):
        """normalise_time snaps timestamps/timeranges to the grain's media rate."""
        with mock.patch.object(Timestamp, "get_time", return_value=ots):
            grain = VideoGrain(src_id, flow_id, origin_timestamp=ots,
                               rate=Fraction(25, 1),
                               cog_frame_format=CogFrameFormat.S16_422_10BIT,
                               width=1920, height=1080, cog_frame_layout=CogFrameLayout.FULL_FRAME)

        self.assertEqual(grain.origin_timestamp, ots)
        # ots (5ns offset) is not on a 25Hz tick, so normalisation changes it.
        self.assertNotEqual(grain.normalise_time(grain.origin_timestamp),
                            ots)
        self.assertEqual(grain.normalise_time(grain.origin_timestamp),
                         ots.normalise(25, 1))
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertNotEqual(grain.normalise_time(grain.origin_timerange()),
                            TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.normalise_time(grain.origin_timerange()),
                         TimeRange.from_single_timestamp(ots).normalise(25, 1))
    def test_audio_grain_create_S16_PLANES(self):
        """AudioGrain sizes its payload as samples*channels*2 bytes for S16 planes."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = AudioGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
                               cog_audio_format=CogAudioFormat.S16_PLANES,
                               channels=2, samples=1920, sample_rate=48000)

        self.assertEqual(grain.grain_type, "audio")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        # The grain spans 1920 samples, so its final origin timestamp is
        # 1919 sample periods after ots.
        self.assertEqual(grain.final_origin_timestamp(), ots + TimeOffset.from_count(1919, 48000, 1))
        self.assertEqual(grain.origin_timerange(), TimeRange(ots, ots + TimeOffset.from_count(1919, 48000, 1)))
        self.assertEqual(grain.sync_timestamp, sts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.media_rate, Fraction(48000, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.format, CogAudioFormat.S16_PLANES)
        self.assertEqual(grain.channels, 2)
        self.assertEqual(grain.samples, 1920)
        self.assertEqual(grain.sample_rate, 48000)

        self.assertIsInstance(grain.data, bytearray)
        self.assertEqual(len(grain.data), 1920*2*2)
        self.assertEqual(grain.expected_length, 1920*2*2)

        self.assertEqual(repr(grain), "AudioGrain({!r},< binary data of length {} >)".format(grain.meta, len(grain.data)))
    def test_audio_grain_create_fills_in_missing_sts(self):
        """When sync_timestamp is omitted it defaults to the origin timestamp."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = AudioGrain(src_id, flow_id, origin_timestamp=ots,
                               cog_audio_format=CogAudioFormat.S16_PLANES,
                               channels=2, samples=1920, sample_rate=48000)
        self.assertEqual(grain.grain_type, "audio")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots + TimeOffset.from_count(1919, 48000, 1))
        self.assertEqual(grain.origin_timerange(), TimeRange(ots, ots + TimeOffset.from_count(1919, 48000, 1)))
        # sync_timestamp falls back to origin_timestamp
        self.assertEqual(grain.sync_timestamp, ots)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.format, CogAudioFormat.S16_PLANES)
        self.assertEqual(grain.channels, 2)
        self.assertEqual(grain.samples, 1920)
        self.assertEqual(grain.sample_rate, 48000)
        self.assertIsInstance(grain.data, bytearray)
        self.assertEqual(len(grain.data), 1920*2*2)
        self.assertEqual(repr(grain), "AudioGrain({!r},< binary data of length {} >)".format(grain.meta, len(grain.data)))
    def test_audio_grain_create_fills_in_missing_ots(self):
        """When both origin and sync timestamps are omitted they default to the
        creation timestamp (taken from the mocked clock)."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = AudioGrain(src_id, flow_id,
                               cog_audio_format=CogAudioFormat.S16_PLANES,
                               channels=2, samples=1920, sample_rate=48000)
        self.assertEqual(grain.grain_type, "audio")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        # origin/sync both fall back to the creation timestamp
        self.assertEqual(grain.origin_timestamp, cts)
        self.assertEqual(grain.final_origin_timestamp(), cts + TimeOffset.from_count(1919, 48000, 1))
        self.assertEqual(grain.origin_timerange(), TimeRange(cts, cts + TimeOffset.from_count(1919, 48000, 1)))
        self.assertEqual(grain.sync_timestamp, cts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.format, CogAudioFormat.S16_PLANES)
        self.assertEqual(grain.channels, 2)
        self.assertEqual(grain.samples, 1920)
        self.assertEqual(grain.sample_rate, 48000)
        self.assertIsInstance(grain.data, bytearray)
        self.assertEqual(len(grain.data), 1920*2*2)
        self.assertEqual(repr(grain), "AudioGrain({!r},< binary data of length {} >)".format(grain.meta, len(grain.data)))
def test_audio_grain_create_fails_with_no_params(self):
with self.assertRaises(AttributeError):
AudioGrain(None)
def test_audio_grain_create_all_formats(self):
for (fmt, length) in [(CogAudioFormat.S16_PLANES, 1920*2*2),
(CogAudioFormat.S16_PAIRS, 1920*2*2),
(CogAudioFormat.S16_INTERLEAVED, 1920*2*2),
(CogAudioFormat.S24_PLANES, 1920*2*4),
(CogAudioFormat.S24_PAIRS, 1920*2*3),
(CogAudioFormat.S24_INTERLEAVED, 1920*2*3),
(CogAudioFormat.S32_PLANES, 1920*2*4),
(CogAudioFormat.S32_PAIRS, 1920*2*4),
(CogAudioFormat.S32_INTERLEAVED, 1920*2*4),
(CogAudioFormat.S64_INVALID, 1920*2*8),
(CogAudioFormat.FLOAT_PLANES, 1920*2*4),
(CogAudioFormat.FLOAT_PAIRS, 1920*2*4),
(CogAudioFormat.FLOAT_INTERLEAVED, 1920*2*4),
(CogAudioFormat.DOUBLE_PLANES, 1920*2*8),
(CogAudioFormat.DOUBLE_PAIRS, 1920*2*8),
(CogAudioFormat.DOUBLE_INTERLEAVED, 1920*2*8)]:
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = AudioGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
cog_audio_format=fmt,
channels=2, samples=1920, sample_rate=48000)
self.assertEqual(grain.grain_type, "audio")
self.assertEqual(grain.format, fmt)
self.assertIsInstance(grain.data, bytearray)
self.assertEqual(len(grain.data), length)
    def test_audio_grain_create_fills_in_missing_meta(self):
        """An empty metadata dict is populated with defaults: timestamps from
        the mocked clock, INVALID format and zeroed audio parameters."""
        meta = {}
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = AudioGrain(meta)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.origin_timestamp, cts)
        self.assertEqual(grain.sync_timestamp, cts)
        self.assertEqual(grain.format, CogAudioFormat.INVALID)
        self.assertEqual(grain.channels, 0)
        self.assertEqual(grain.samples, 0)
        self.assertEqual(grain.sample_rate, 0)
        # a zero sample rate yields no derivable media rate
        self.assertIsNone(grain.media_rate)
def test_audio_grain_setters(self):
meta = {}
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = AudioGrain(meta)
grain.format = CogAudioFormat.S16_PLANES
self.assertEqual(grain.format, CogAudioFormat.S16_PLANES)
grain.format = 0xA
self.assertEqual(grain.format, CogAudioFormat.S32_INTERLEAVED)
grain.channels = 2
self.assertEqual(grain.channels, 2)
grain.samples = 1920
self.assertEqual(grain.samples, 1920)
grain.sample_rate = 48000
self.assertEqual(grain.sample_rate, 48000)
def test_audiograin_meta_is_json_serialisable(self):
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = AudioGrain(src_id, flow_id,
cog_audio_format=CogAudioFormat.S16_PLANES,
channels=2, samples=1920, sample_rate=48000)
try:
self.assertEqual(json.loads(json.dumps(grain.meta)), grain.meta)
except ValueError:
self.fail(msg="Json serialisation produces: {} which is not json deserialisable".format(json.dumps(grain.meta)))
    def test_grain_makes_audiograin(self):
        """The Grain() factory dispatches on grain_type == "audio" and returns
        an AudioGrain with meta and data preserved untouched."""
        meta = {
            "@_ns": "urn:x-ipstudio:ns:0.1",
            "grain": {
                "grain_type": "audio",
                "source_id": str(src_id),
                "flow_id": str(flow_id),
                "origin_timestamp": str(ots),
                "sync_timestamp": str(sts),
                "creation_timestamp": str(cts),
                "rate": {
                    "numerator": 25,
                    "denominator": 1,
                },
                "duration": {
                    "numerator": 1,
                    "denominator": 25,
                },
                "cog_audio": {
                    "format": CogAudioFormat.S16_PLANES,
                    "samples": 1920,
                    "channels": 6,
                    "sample_rate": 48000
                }
            },
        }
        # 1920 samples * 6 channels * 2 bytes (S16)
        data = bytearray(1920*6*2)
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(meta, data=data)
        self.assertEqual(grain.grain_type, "audio")
        self.assertEqual(grain.format, CogAudioFormat.S16_PLANES)
        self.assertEqual(grain.meta, meta)
        self.assertEqual(grain.data, data)
    def test_audio_grain_normalise(self):
        """normalise_time() snaps timestamps and ranges onto the 48kHz
        media-rate grid (the fixture ots is chosen off-grid, hence the
        NotEqual checks)."""
        with mock.patch.object(Timestamp, "get_time", return_value=ots):
            grain = AudioGrain(src_id, flow_id,
                               cog_audio_format=CogAudioFormat.S16_PLANES,
                               channels=2, samples=1920, sample_rate=48000)
        final_ts = ots + TimeOffset.from_count(1920 - 1, 48000, 1)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertNotEqual(grain.normalise_time(grain.origin_timestamp),
                            ots)
        self.assertEqual(grain.normalise_time(grain.origin_timestamp),
                         ots.normalise(48000, 1))
        self.assertEqual(grain.final_origin_timestamp(), final_ts)
        self.assertNotEqual(grain.normalise_time(grain.origin_timerange()),
                            TimeRange(ots, final_ts))
        self.assertEqual(grain.normalise_time(grain.origin_timerange()),
                         TimeRange(ots, final_ts).normalise(48000, 1))
    def test_coded_video_grain_create_VC2(self):
        """A VC2 CodedVideoGrain stores the supplied parameters: coded
        dimensions default to origin dimensions, the buffer is allocated at the
        requested length, and unit_offsets are stored verbatim."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = CodedVideoGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
                                    cog_frame_format=CogFrameFormat.VC2,
                                    origin_width=1920, origin_height=1080,
                                    length=1296000, cog_frame_layout=CogFrameLayout.FULL_FRAME,
                                    unit_offsets=[3, 2])
        self.assertEqual(grain.grain_type, "coded_video")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.sync_timestamp, sts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.media_rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.format, CogFrameFormat.VC2)
        self.assertEqual(grain.origin_width, 1920)
        self.assertEqual(grain.origin_height, 1080)
        # coded dimensions were not supplied, so they mirror the origin dimensions
        self.assertEqual(grain.coded_width, 1920)
        self.assertEqual(grain.coded_height, 1080)
        self.assertEqual(grain.length, 1296000)
        self.assertEqual(grain.layout, CogFrameLayout.FULL_FRAME)
        self.assertEqual(grain.unit_offsets, [3, 2])
        self.assertEqual(repr(grain.unit_offsets), repr([3, 2]))
        self.assertIsInstance(grain.data, bytearray)
        self.assertEqual(len(grain.data), grain.length)
        self.assertEqual(repr(grain), "CodedVideoGrain({!r},< binary data of length {} >)".format(grain.meta, len(grain.data)))
    def test_coded_video_grain_create_fills_empty_meta(self):
        """An empty metadata dict is populated with zeroed/UNKNOWN defaults and
        clock-derived timestamps."""
        meta = {}
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = CodedVideoGrain(meta)
        self.assertEqual(grain.grain_type, "coded_video")
        self.assertEqual(grain.origin_timestamp, cts)
        self.assertEqual(grain.sync_timestamp, cts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(0, 1))
        self.assertEqual(grain.duration, Fraction(0, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.format, CogFrameFormat.UNKNOWN)
        self.assertEqual(grain.origin_width, 0)
        self.assertEqual(grain.origin_height, 0)
        self.assertEqual(grain.coded_width, 0)
        self.assertEqual(grain.coded_height, 0)
        self.assertEqual(grain.length, 0)
        self.assertEqual(grain.layout, CogFrameLayout.UNKNOWN)
        self.assertEqual(grain.unit_offsets, [])
    def test_coded_video_grain_create_corrects_numeric_data(self):
        """Raw integers in the metadata are coerced to their enum types
        (0x0200 -> CogFrameFormat.MJPEG, 0x04 -> CogFrameLayout.SEGMENTED_FRAME)."""
        meta = {
            'grain': {
                'cog_coded_frame': {
                    'format': 0x0200,
                    'layout': 0x04
                }
            }
        }
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = CodedVideoGrain(meta)
        self.assertEqual(grain.grain_type, "coded_video")
        self.assertEqual(grain.origin_timestamp, cts)
        self.assertEqual(grain.sync_timestamp, cts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(0, 1))
        self.assertEqual(grain.duration, Fraction(0, 25))
        self.assertEqual(grain.timelabels, [])
        # numeric values arrive back as proper enum members
        self.assertEqual(grain.format, CogFrameFormat.MJPEG)
        self.assertEqual(grain.origin_width, 0)
        self.assertEqual(grain.origin_height, 0)
        self.assertEqual(grain.coded_width, 0)
        self.assertEqual(grain.coded_height, 0)
        self.assertEqual(grain.length, 0)
        self.assertEqual(grain.layout, CogFrameLayout.SEGMENTED_FRAME)
        self.assertEqual(grain.unit_offsets, [])
def test_coded_video_grain_setters(self):
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = CodedVideoGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
cog_frame_format=CogFrameFormat.VC2,
origin_width=1920, origin_height=1080,
length=1296000, cog_frame_layout=CogFrameLayout.FULL_FRAME)
grain.format = CogFrameFormat.MJPEG
self.assertEqual(grain.format, CogFrameFormat.MJPEG)
grain.origin_width = 1
self.assertEqual(grain.origin_width, 1)
grain.origin_height = 2
self.assertEqual(grain.origin_height, 2)
grain.coded_width = 3
self.assertEqual(grain.coded_width, 3)
grain.coded_height = 4
self.assertEqual(grain.coded_height, 4)
grain.layout = CogFrameLayout.UNKNOWN
self.assertEqual(grain.layout, CogFrameLayout.UNKNOWN)
grain.temporal_offset = 75
self.assertEqual(grain.temporal_offset, 75)
grain.is_key_frame = True
self.assertTrue(grain.is_key_frame, 75)
self.assertNotIn('unit_offsets', grain.meta['grain']['cog_coded_frame'])
self.assertEqual(grain.unit_offsets, [])
grain.unit_offsets = [1, 2, 3]
self.assertEqual(grain.unit_offsets, grain.meta['grain']['cog_coded_frame']['unit_offsets'])
self.assertEqual(grain.unit_offsets, [1, 2, 3])
grain.unit_offsets.append(4)
self.assertEqual(grain.unit_offsets, grain.meta['grain']['cog_coded_frame']['unit_offsets'])
self.assertEqual(grain.unit_offsets, [1, 2, 3, 4])
grain.unit_offsets[0] = 35
self.assertEqual(grain.unit_offsets, grain.meta['grain']['cog_coded_frame']['unit_offsets'])
self.assertEqual(grain.unit_offsets, [35, 2, 3, 4])
del grain.unit_offsets[3]
self.assertEqual(grain.unit_offsets, grain.meta['grain']['cog_coded_frame']['unit_offsets'])
self.assertEqual(grain.unit_offsets, [35, 2, 3])
del grain.unit_offsets[0]
del grain.unit_offsets[0]
del grain.unit_offsets[0]
self.assertNotIn('unit_offsets', grain.meta['grain']['cog_coded_frame'])
self.assertEqual(grain.unit_offsets, [])
with self.assertRaises(IndexError):
del grain.unit_offsets[0]
with self.assertRaises(IndexError):
grain.unit_offsets[0] = 1
grain.unit_offsets.append(1)
self.assertEqual(grain.unit_offsets, grain.meta['grain']['cog_coded_frame']['unit_offsets'])
self.assertEqual(grain.unit_offsets, [1])
grain.unit_offsets = []
self.assertNotIn('unit_offsets', grain.meta['grain']['cog_coded_frame'])
self.assertEqual(grain.unit_offsets, [])
def test_coded_video_grain_create_with_data(self):
data = bytearray(500)
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = CodedVideoGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
cog_frame_format=CogFrameFormat.VC2,
origin_width=1920, origin_height=1080,
cog_frame_layout=CogFrameLayout.FULL_FRAME,
data=data)
self.assertEqual(grain.data, data)
self.assertEqual(len(grain.data), grain.length)
def test_coded_video_grain_create_with_cts_and_ots(self):
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = CodedVideoGrain(src_id, flow_id, origin_timestamp=ots,
cog_frame_format=CogFrameFormat.VC2,
origin_width=1920, origin_height=1080,
cog_frame_layout=CogFrameLayout.FULL_FRAME)
self.assertEqual(grain.creation_timestamp, cts)
self.assertEqual(grain.origin_timestamp, ots)
self.assertEqual(grain.sync_timestamp, ots)
def test_coded_video_grain_create_with_cts(self):
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = CodedVideoGrain(src_id, flow_id,
cog_frame_format=CogFrameFormat.VC2,
origin_width=1920, origin_height=1080,
cog_frame_layout=CogFrameLayout.FULL_FRAME)
self.assertEqual(grain.creation_timestamp, cts)
self.assertEqual(grain.origin_timestamp, cts)
self.assertEqual(grain.sync_timestamp, cts)
def test_coded_video_grain_create_fails_with_empty(self):
with self.assertRaises(AttributeError):
CodedVideoGrain(None)
def test_coded_video_grain_meta_is_json_serialisable(self):
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = CodedVideoGrain(src_id, flow_id,
cog_frame_format=CogFrameFormat.VC2,
origin_width=1920, origin_height=1080,
cog_frame_layout=CogFrameLayout.FULL_FRAME)
self.assertEqual(json.loads(json.dumps(grain.meta)), grain.meta)
    def test_grain_makes_codedvideograin(self):
        """The Grain() factory dispatches on grain_type == "coded_video"
        (numeric format 0x0207 -> CogFrameFormat.VC2) and preserves meta and
        data untouched."""
        meta = {
            "@_ns": "urn:x-ipstudio:ns:0.1",
            "grain": {
                "grain_type": "coded_video",
                "source_id": str(src_id),
                "flow_id": str(flow_id),
                "origin_timestamp": str(ots),
                "sync_timestamp": str(sts),
                "creation_timestamp": str(cts),
                "rate": {
                    "numerator": 25,
                    "denominator": 1,
                },
                "duration": {
                    "numerator": 1,
                    "denominator": 25,
                },
                "cog_coded_frame": {
                    "format": 0x0207,
                    "origin_width": 1920,
                    "origin_height": 1080,
                    "coded_width": 1920,
                    "coded_height": 1088,
                    "layout": 0x00,
                    "length": 1296000
                }
            },
        }
        data = bytearray(1296000)
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(meta, data=data)
        self.assertEqual(grain.grain_type, "coded_video")
        self.assertEqual(grain.format, CogFrameFormat.VC2)
        self.assertEqual(grain.meta, meta)
        self.assertEqual(grain.data, data)
    def test_coded_video_grain_normalise(self):
        """normalise_time() snaps timestamps/ranges onto the 25Hz frame-rate
        grid (the fixture ots is off-grid, hence the NotEqual checks)."""
        with mock.patch.object(Timestamp, "get_time", return_value=ots):
            grain = CodedVideoGrain(src_id, flow_id, origin_timestamp=ots,
                                    rate=Fraction(25, 1),
                                    cog_frame_format=CogFrameFormat.VC2,
                                    origin_width=1920, origin_height=1080,
                                    cog_frame_layout=CogFrameLayout.FULL_FRAME)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertNotEqual(grain.normalise_time(grain.origin_timestamp),
                            ots)
        self.assertEqual(grain.normalise_time(grain.origin_timestamp),
                         ots.normalise(25, 1))
        # a single video frame is instantaneous in origin-timestamp terms
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertNotEqual(grain.normalise_time(grain.origin_timerange()),
                            TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.normalise_time(grain.origin_timerange()),
                         TimeRange.from_single_timestamp(ots).normalise(25, 1))
    def test_coded_audio_grain_create_MP1(self):
        """A CodedAudioGrain with MP1 format carries the supplied coding
        parameters and allocates a buffer of the requested length."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = CodedAudioGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
                                    cog_audio_format=CogAudioFormat.MP1,
                                    samples=1920,
                                    channels=6,
                                    priming=0,
                                    remainder=0,
                                    sample_rate=48000,
                                    length=15360)
        self.assertEqual(grain.grain_type, "coded_audio")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        # final sample starts 1919 sample periods (48kHz) after ots
        self.assertEqual(grain.final_origin_timestamp(), ots + TimeOffset.from_count(1919, 48000, 1))
        self.assertEqual(grain.origin_timerange(), TimeRange(ots, ots + TimeOffset.from_count(1919, 48000, 1)))
        self.assertEqual(grain.sync_timestamp, sts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.format, CogAudioFormat.MP1)
        self.assertEqual(grain.samples, 1920)
        self.assertEqual(grain.channels, 6)
        self.assertEqual(grain.priming, 0)
        self.assertEqual(grain.remainder, 0)
        self.assertEqual(grain.sample_rate, 48000)
        self.assertEqual(grain.media_rate, Fraction(48000, 1))
        self.assertEqual(grain.length, 15360)
        self.assertIsInstance(grain.data, bytearray)
        self.assertEqual(len(grain.data), grain.length)
        self.assertEqual(repr(grain), "CodedAudioGrain({!r},< binary data of length {} >)".format(grain.meta, len(grain.data)))
    def test_coded_audio_grain_create_without_sts(self):
        """When sync_timestamp is omitted it defaults to the origin timestamp."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = CodedAudioGrain(src_id, flow_id, origin_timestamp=ots,
                                    cog_audio_format=CogAudioFormat.MP1,
                                    samples=1920,
                                    channels=6,
                                    priming=0,
                                    remainder=0,
                                    sample_rate=48000,
                                    length=15360)
        self.assertEqual(grain.grain_type, "coded_audio")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots + TimeOffset.from_count(1919, 48000, 1))
        self.assertEqual(grain.origin_timerange(), TimeRange(ots, ots + TimeOffset.from_count(1919, 48000, 1)))
        # sync_timestamp falls back to origin_timestamp
        self.assertEqual(grain.sync_timestamp, ots)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.format, CogAudioFormat.MP1)
        self.assertEqual(grain.samples, 1920)
        self.assertEqual(grain.channels, 6)
        self.assertEqual(grain.priming, 0)
        self.assertEqual(grain.remainder, 0)
        self.assertEqual(grain.sample_rate, 48000)
        self.assertEqual(grain.length, 15360)
        self.assertIsInstance(grain.data, bytearray)
        self.assertEqual(len(grain.data), grain.length)
        self.assertEqual(repr(grain), "CodedAudioGrain({!r},< binary data of length {} >)".format(grain.meta, len(grain.data)))
    def test_coded_audio_grain_create_without_sts_or_ots(self):
        """When both origin and sync timestamps are omitted they default to the
        creation timestamp taken from the mocked clock."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = CodedAudioGrain(src_id, flow_id,
                                    cog_audio_format=CogAudioFormat.MP1,
                                    samples=1920,
                                    channels=6,
                                    priming=0,
                                    remainder=0,
                                    sample_rate=48000,
                                    length=15360)
        self.assertEqual(grain.grain_type, "coded_audio")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        # origin/sync both fall back to the creation timestamp
        self.assertEqual(grain.origin_timestamp, cts)
        self.assertEqual(grain.final_origin_timestamp(), cts + TimeOffset.from_count(1919, 48000, 1))
        self.assertEqual(grain.origin_timerange(), TimeRange(cts, cts + TimeOffset.from_count(1919, 48000, 1)))
        self.assertEqual(grain.sync_timestamp, cts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.format, CogAudioFormat.MP1)
        self.assertEqual(grain.samples, 1920)
        self.assertEqual(grain.channels, 6)
        self.assertEqual(grain.priming, 0)
        self.assertEqual(grain.remainder, 0)
        self.assertEqual(grain.sample_rate, 48000)
        self.assertEqual(grain.length, 15360)
        self.assertIsInstance(grain.data, bytearray)
        self.assertEqual(len(grain.data), grain.length)
        self.assertEqual(repr(grain), "CodedAudioGrain({!r},< binary data of length {} >)".format(grain.meta, len(grain.data)))
    def test_coded_audio_grain_create_fills_empty_meta(self):
        """An empty metadata dict gets defaults; note the sample rate defaults
        to 48000 here (unlike AudioGrain, which defaults it to 0)."""
        meta = {}
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = CodedAudioGrain(meta)
        self.assertEqual(grain.grain_type, "coded_audio")
        self.assertEqual(grain.origin_timestamp, cts)
        self.assertEqual(grain.sync_timestamp, cts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(0, 1))
        self.assertEqual(grain.duration, Fraction(0, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.format, CogAudioFormat.INVALID)
        self.assertEqual(grain.samples, 0)
        self.assertEqual(grain.channels, 0)
        self.assertEqual(grain.priming, 0)
        self.assertEqual(grain.remainder, 0)
        self.assertEqual(grain.sample_rate, 48000)
        self.assertEqual(grain.length, 0)
    def test_coded_audio_grain_create_corrects_numeric_data(self):
        """A raw integer format value in the metadata is coerced to its enum
        (0x200 -> CogAudioFormat.MP1)."""
        meta = {
            'grain': {
                'cog_coded_audio': {
                    'format': 0x200
                }
            }
        }
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = CodedAudioGrain(meta)
        self.assertEqual(grain.grain_type, "coded_audio")
        self.assertEqual(grain.origin_timestamp, cts)
        self.assertEqual(grain.sync_timestamp, cts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(0, 1))
        self.assertEqual(grain.duration, Fraction(0, 25))
        self.assertEqual(grain.timelabels, [])
        # numeric value arrives back as a proper enum member
        self.assertEqual(grain.format, CogAudioFormat.MP1)
        self.assertEqual(grain.samples, 0)
        self.assertEqual(grain.channels, 0)
        self.assertEqual(grain.priming, 0)
        self.assertEqual(grain.remainder, 0)
        self.assertEqual(grain.sample_rate, 48000)
        self.assertEqual(grain.length, 0)
def test_coded_audio_grain_setters(self):
meta = {}
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = CodedAudioGrain(meta)
grain.format = CogAudioFormat.MP1
self.assertEqual(grain.format, CogAudioFormat.MP1)
grain.format = 0x202
self.assertEqual(grain.format, CogAudioFormat.OPUS)
grain.channels = 2
self.assertEqual(grain.channels, 2)
grain.samples = 1920
self.assertEqual(grain.samples, 1920)
grain.priming = 12
self.assertEqual(grain.priming, 12)
grain.remainder = 105
self.assertEqual(grain.remainder, 105)
grain.sample_rate = 48000
self.assertEqual(grain.sample_rate, 48000)
def test_coded_audio_grain_with_data(self):
meta = {}
data = bytearray(15360)
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = CodedAudioGrain(meta, data)
self.assertEqual(grain.length, len(data))
self.assertEqual(grain.data, data)
def test_coded_audio_grain_raises_on_empty(self):
with self.assertRaises(AttributeError):
CodedAudioGrain(None)
def test_codedaudiograin_meta_is_json_serialisable(self):
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = CodedAudioGrain(src_id, flow_id,
cog_audio_format=CogAudioFormat.MP1,
samples=1920,
channels=6,
priming=0,
remainder=0,
sample_rate=48000,
length=15360)
try:
self.assertEqual(json.loads(json.dumps(grain.meta)), grain.meta)
except ValueError:
self.fail(msg="Json serialisation produces: {} which is not json deserialisable".format(json.dumps(grain.meta)))
    def test_grain_makes_codedaudiograin(self):
        """The Grain() factory dispatches on grain_type == "coded_audio" and
        preserves meta and data untouched."""
        meta = {
            "@_ns": "urn:x-ipstudio:ns:0.1",
            "grain": {
                "grain_type": "coded_audio",
                "source_id": str(src_id),
                "flow_id": str(flow_id),
                "origin_timestamp": str(ots),
                "sync_timestamp": str(sts),
                "creation_timestamp": str(cts),
                "rate": {
                    "numerator": 25,
                    "denominator": 1,
                },
                "duration": {
                    "numerator": 1,
                    "denominator": 25,
                },
                "cog_coded_audio": {
                    "format": CogAudioFormat.MP1,
                    "samples": 1920,
                    "channels": 6,
                    "priming": 12,
                    "remainder": 105,
                    "sample_rate": 48000
                }
            },
        }
        data = bytearray(15360)
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(meta, data=data)
        self.assertEqual(grain.grain_type, "coded_audio")
        self.assertEqual(grain.format, CogAudioFormat.MP1)
        self.assertEqual(grain.meta, meta)
        self.assertEqual(grain.data, data)
    def test_coded_audio_grain_normalise(self):
        """normalise_time() snaps timestamps/ranges onto the 48kHz sample-rate
        grid (the fixture ots is off-grid, hence the NotEqual checks)."""
        with mock.patch.object(Timestamp, "get_time", return_value=ots):
            grain = CodedAudioGrain(src_id, flow_id, origin_timestamp=ots,
                                    cog_audio_format=CogAudioFormat.MP1,
                                    samples=1920,
                                    channels=6,
                                    priming=0,
                                    remainder=0,
                                    sample_rate=48000,
                                    length=15360)
        final_ts = ots + TimeOffset.from_count(1920 - 1, 48000, 1)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertNotEqual(grain.normalise_time(grain.origin_timestamp),
                            ots)
        self.assertEqual(grain.normalise_time(grain.origin_timestamp),
                         ots.normalise(48000, 1))
        self.assertEqual(grain.final_origin_timestamp(), final_ts)
        self.assertNotEqual(grain.normalise_time(grain.origin_timerange()),
                            TimeRange(ots, final_ts))
        self.assertEqual(grain.normalise_time(grain.origin_timerange()),
                         TimeRange(ots, final_ts).normalise(48000, 1))
    def test_event_grain_create(self):
        """An EventGrain with explicit ots/sts stores the event fields and
        serialises .data to the canonical JSON event payload."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = EventGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
                               event_type='urn:x-ipstudio:format:event.query', topic='/dummy')
        self.assertEqual(grain.grain_type, "event")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        # events are instantaneous: final ts equals origin, range is a point
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.sync_timestamp, sts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        # event grains have no media rate
        self.assertIsNone(grain.media_rate)
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.event_type, "urn:x-ipstudio:format:event.query")
        self.assertEqual(grain.topic, "/dummy")
        self.assertEqual(grain.event_data, [])
        self.assertEqual(json.loads(grain.data.decode('utf-8')), {'type': "urn:x-ipstudio:format:event.query",
                                                                  'topic': "/dummy",
                                                                  'data': []})
        self.assertEqual(repr(grain), "EventGrain({!r})".format(grain.meta))
    def test_event_grain_create_without_sts(self):
        """When sync_timestamp is omitted it defaults to the origin timestamp."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = EventGrain(src_id, flow_id, origin_timestamp=ots,
                               event_type='urn:x-ipstudio:format:event.query', topic='/dummy')
        self.assertEqual(grain.grain_type, "event")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))
        # sync_timestamp falls back to origin_timestamp
        self.assertEqual(grain.sync_timestamp, ots)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.event_type, "urn:x-ipstudio:format:event.query")
        self.assertEqual(grain.topic, "/dummy")
        self.assertEqual(grain.event_data, [])
        self.assertEqual(repr(grain), "EventGrain({!r})".format(grain.meta))
    def test_event_grain_create_without_sts_or_ots(self):
        """When both ots and sts are omitted they default to the creation
        timestamp taken from the mocked clock."""
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = EventGrain(src_id, flow_id,
                               event_type='urn:x-ipstudio:format:event.query', topic='/dummy')
        self.assertEqual(grain.grain_type, "event")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        # origin/sync both fall back to the creation timestamp
        self.assertEqual(grain.origin_timestamp, cts)
        self.assertEqual(grain.final_origin_timestamp(), cts)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(cts))
        self.assertEqual(grain.sync_timestamp, cts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.event_type, "urn:x-ipstudio:format:event.query")
        self.assertEqual(grain.topic, "/dummy")
        self.assertEqual(grain.event_data, [])
        self.assertEqual(repr(grain), "EventGrain({!r})".format(grain.meta))
    def test_event_grain_create_fills_in_empty_meta(self):
        """An empty metadata dict is populated with clock-derived timestamps,
        zero rate/duration and empty event fields."""
        meta = {}
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = EventGrain(meta)
        self.assertEqual(grain.grain_type, "event")
        self.assertEqual(grain.origin_timestamp, cts)
        self.assertEqual(grain.final_origin_timestamp(), cts)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(cts))
        self.assertEqual(grain.sync_timestamp, cts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(0, 1))
        self.assertEqual(grain.duration, Fraction(0, 1))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.event_type, "")
        self.assertEqual(grain.topic, "")
        self.assertEqual(grain.event_data, [])
def test_event_grain_create_fails_on_None(self):
with self.assertRaises(AttributeError):
EventGrain(None)
    def test_event_grain_create_from_meta_and_data(self):
        """The Grain() factory parses an event payload out of the data buffer,
        exposing each change entry's path/pre/post (post is None when the key
        is absent)."""
        meta = {
            "@_ns": "urn:x-ipstudio:ns:0.1",
            "grain": {
                "grain_type": "event",
                "source_id": str(src_id),
                "flow_id": str(flow_id),
                "origin_timestamp": str(ots),
                "sync_timestamp": str(sts),
                "creation_timestamp": str(cts),
                "rate": {
                    "numerator": 25,
                    "denominator": 1,
                },
                "duration": {
                    "numerator": 1,
                    "denominator": 25,
                }
            }
        }
        data = json.dumps({
            'type': 'urn:x-ipstudio:format:event.notify',
            'topic': '/foo',
            'data': [
                {
                    'path': '/bar',
                    'pre': 'baz'
                },
                {
                    'path': '/beep',
                    'pre': 'boop',
                    'post': 'bong'
                }
            ]
        })
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = Grain(meta, data.encode('utf-8'))
        self.assertEqual(grain.grain_type, "event")
        self.assertEqual(grain.source_id, src_id)
        self.assertEqual(grain.flow_id, flow_id)
        self.assertEqual(grain.origin_timestamp, ots)
        self.assertEqual(grain.final_origin_timestamp(), ots)
        self.assertEqual(grain.origin_timerange(), TimeRange.from_single_timestamp(ots))
        self.assertEqual(grain.sync_timestamp, sts)
        self.assertEqual(grain.creation_timestamp, cts)
        self.assertEqual(grain.rate, Fraction(25, 1))
        self.assertEqual(grain.duration, Fraction(1, 25))
        self.assertEqual(grain.timelabels, [])
        self.assertEqual(grain.event_type, "urn:x-ipstudio:format:event.notify")
        self.assertEqual(grain.topic, "/foo")
        self.assertEqual(len(grain.event_data), 2)
        # first entry lacks 'post': len() is 2 and .post reads as None
        self.assertEqual(len(grain.event_data[0]), 2)
        self.assertEqual(grain.event_data[0].path, '/bar')
        self.assertEqual(grain.event_data[0].pre, 'baz')
        self.assertIsNone(grain.event_data[0].post)
        self.assertEqual(len(grain.event_data[1]), 3)
        self.assertEqual(grain.event_data[1].path, '/beep')
        self.assertEqual(grain.event_data[1].pre, 'boop')
        self.assertEqual(grain.event_data[1].post, 'bong')
    def test_event_grain_setters(self):
        """EventGrain setters update event_type/topic, append() adds change
        entries exposing path/pre/post attributes, and .data serialises the
        whole event payload to and from JSON."""
        meta = {}
        with mock.patch.object(Timestamp, "get_time", return_value=cts):
            grain = EventGrain(meta)
        grain.event_type = "urn:x-ipstudio:format:event.potato"
        self.assertEqual(grain.event_type, "urn:x-ipstudio:format:event.potato")
        grain.topic = "/important/data"
        self.assertEqual(grain.topic, "/important/data")
        # append(path, pre, post) adds one change entry
        grain.append('/sub/path', 'was', 'is')
        self.assertEqual(len(grain.event_data), 1)
        self.assertEqual(grain.event_data[0], {'path': '/sub/path',
                                               'pre': 'was',
                                               'post': 'is'})
        self.assertEqual(grain.event_data[0].path, '/sub/path')
        self.assertEqual(grain.event_data[0].pre, 'was')
        self.assertEqual(grain.event_data[0].post, 'is')
        grain.event_data[0].path = '/location'
        grain.event_data[0].pre = 'now'
        grain.event_data[0].post = 'next'
        self.assertEqual(grain.event_data[0], {'path': '/location',
                                               'pre': 'now',
                                               'post': 'next'})
        self.assertEqual(json.loads(grain.data.decode('utf-8')), {'type': "urn:x-ipstudio:format:event.potato",
                                                                  'topic': "/important/data",
                                                                  'data': [{'path': '/location',
                                                                            'pre': 'now',
                                                                            'post': 'next'}]})
        # deleting the dict key or assigning None both leave the attribute reading as None
        grain.event_data[0]['post'] = 'never'
        del grain.event_data[0]['post']
        self.assertIsNone(grain.event_data[0].post)
        grain.event_data[0].pre = None
        grain.event_data[0].post = 'never_was'
        grain.event_data[0].post = None
        # assigning None twice is deliberate: the second assignment is a no-op
        grain.event_data[0].post = None
        self.assertNotIn('pre', grain.event_data[0])
        self.assertIsNone(grain.event_data[0].pre)
        grain.event_data = []
        self.assertEqual(len(grain.event_data), 0)
        self.assertEqual(json.loads(grain.data.decode('utf-8')), {'type': "urn:x-ipstudio:format:event.potato",
                                                                  'topic': "/important/data",
                                                                  'data': []})
        # assigning a JSON payload to .data repopulates type/topic/data
        grain.data = json.dumps({'type': "urn:x-ipstudio:format:event.potato",
                                 'topic': "/important/data",
                                 'data': [{'path': '/location',
                                           'pre': 'now',
                                           'post': 'next'}]})
        self.assertEqual(json.loads(grain.data.decode('utf-8')), {'type': "urn:x-ipstudio:format:event.potato",
                                                                  'topic': "/important/data",
                                                                  'data': [{'path': '/location',
                                                                            'pre': 'now',
                                                                            'post': 'next'}]})
        # data that is not a valid event payload is rejected
        with self.assertRaises(ValueError):
            grain.data = bytearray(json.dumps({'potato': "masher"}).encode('utf-8'))
def test_copy(self):
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = VideoGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
cog_frame_format=CogFrameFormat.S16_422_10BIT,
width=1920, height=1080, cog_frame_layout=CogFrameLayout.FULL_FRAME)
grain.data[0] = 0x1B
grain.data[1] = 0xBC
clone = copy(grain)
self.assertEqual(grain.data[0], clone.data[0])
self.assertEqual(grain.data[1], clone.data[1])
grain.data[0] = 0xCA
grain.data[1] = 0xFE
self.assertEqual(grain.data[0], clone.data[0])
self.assertEqual(grain.data[1], clone.data[1])
def test_deepcopy(self):
with mock.patch.object(Timestamp, "get_time", return_value=cts):
grain = VideoGrain(src_id, flow_id, origin_timestamp=ots, sync_timestamp=sts,
cog_frame_format=CogFrameFormat.S16_422_10BIT,
width=1920, height=1080, cog_frame_layout=CogFrameLayout.FULL_FRAME)
grain.data[0] = 0x1B
grain.data[1] = 0xBC
clone = deepcopy(grain)
self.assertEqual(grain.data[0], clone.data[0])
self.assertEqual(grain.data[1], clone.data[1])
grain.data[0] = 0xCA
grain.data[1] = 0xFE
self.assertNotEqual(grain.data[0], clone.data[0])
self.assertNotEqual(grain.data[1], clone.data[1])
| 45.374146 | 127 | 0.598742 |
bfeb6d27221f3c65d3736744a0128f0bb6b91eeb | 559 | py | Python | WebMirror/management/rss_parser_funcs/feed_parse_extractRadianttranslationsCom.py | fake-name/ReadableWebProxy | ed5c7abe38706acc2684a1e6cd80242a03c5f010 | [
"BSD-3-Clause"
] | 193 | 2016-08-02T22:04:35.000Z | 2022-03-09T20:45:41.000Z | WebMirror/management/rss_parser_funcs/feed_parse_extractRadianttranslationsCom.py | fake-name/ReadableWebProxy | ed5c7abe38706acc2684a1e6cd80242a03c5f010 | [
"BSD-3-Clause"
] | 533 | 2016-08-23T20:48:23.000Z | 2022-03-28T15:55:13.000Z | WebMirror/management/rss_parser_funcs/feed_parse_extractRadianttranslationsCom.py | rrosajp/ReadableWebProxy | ed5c7abe38706acc2684a1e6cd80242a03c5f010 | [
"BSD-3-Clause"
] | 19 | 2015-08-13T18:01:08.000Z | 2021-07-12T17:13:09.000Z |
def extractRadianttranslationsCom(item):
    '''
    Parser for 'radianttranslations.com'

    Returns a release message for recognised tags, None when no chapter or
    volume number could be extracted (or the post is a preview), and False
    when the tags are unrecognised.
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])

    # Previews and posts without any chapter/volume number are not releases.
    if not (chp or vol) or "preview" in item['title'].lower():
        return None

    # (tag to look for, series name, translation type)
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)

    return False
| 25.409091 | 104 | 0.640429 |
36d08b58835795de6198ceed4c15bbd7d5b9df34 | 4,913 | py | Python | Code/bases.py | t0ri/CS-1.3-Core-Data-Structures | 1aa0b226feda77d0c49215700e8b8700a26d93ff | [
"MIT"
] | null | null | null | Code/bases.py | t0ri/CS-1.3-Core-Data-Structures | 1aa0b226feda77d0c49215700e8b8700a26d93ff | [
"MIT"
] | 5 | 2019-06-14T04:59:55.000Z | 2019-08-13T03:01:14.000Z | Code/bases.py | t0ri/Core-Data-Structures | 1aa0b226feda77d0c49215700e8b8700a26d93ff | [
"MIT"
] | null | null | null | #!python
import string
# Hint: Use these string constants to encode/decode hexadecimal digits and more
# string.digits is '0123456789'
# string.hexdigits is '0123456789abcdefABCDEF'
# string.ascii_lowercase is 'abcdefghijklmnopqrstuvwxyz'
# string.ascii_uppercase is 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
# string.ascii_letters is ascii_lowercase + ascii_uppercase
# string.printable is digits + ascii_letters + punctuation + whitespace
def is_number(number):
    """Return True when ``number`` can be parsed by ``int()``, else False.

    Note: strings such as '3.5' return False because ``int`` rejects them,
    while actual floats return True (``int(3.5)`` succeeds by truncation).
    """
    try:
        int(number)
    except ValueError:
        return False
    return True
def old_decode(digits, base):
    """Superseded by :func:`decode`; kept only as reference. Always returns None.

    The commented-out bodies below are the original base-2 and base-16
    special cases that :func:`decode` replaced with a general algorithm.
    """
    pass
    # Decode digits from binary (base 2)
    # if base == 2:
    #     accumulator = 0
    #     bit_place = len(digit_list) - 1  # holds place in bits, counts backwards
    #     for num in digit_list:
    #         if num == '1':
    #             accumulator += 2 ** bit_place
    #         bit_place -= 1
    #     return accumulator
    # Decode digits from hexadecimal (base 16)
    # if base == 16:
    #     accumulator = 0
    #     for num in digit_list:
    #         if is_number(num):
    #             accumulator += int(num)
    #         else:
    #             values = {'A': 10, 'B': 11, 'C': 12, 'D': 13, 'E': 14, 'F': 15}
    #             accumulator += int(values.get(num.upper()))
    #     return accumulator
def decode(digits, base):
    """Decode given digits in given base to number in base 10.

    digits: str -- string representation of number (in given base);
        letters may be given in either case
    base: int -- base of given number
    return: int -- integer representation of number (in base 10)
    raises: ValueError -- if a character is not a valid digit for ``base``
    """
    # Handle up to base 36 [0-9a-z]
    assert 2 <= base <= 36, 'base is out of range: {}'.format(base)
    numbers = string.digits + string.ascii_lowercase
    result = 0
    # Horner's method: fold digits in most-significant-first, which avoids
    # the per-digit exponentiation of the original implementation.
    for char in digits.lower():
        value = numbers.index(char)  # raises ValueError for non-alphanumerics
        # BUG FIX: the original silently accepted digits that are invalid for
        # the given base (e.g. decode('9', 2) returned 9); reject them.
        if value >= base:
            raise ValueError(
                'invalid digit {!r} for base {}'.format(char, base))
        result = result * base + value
    return result
def old_encode(number, base):
    """Superseded by :func:`encode`; kept only as reference. Always returns None.

    The commented-out body below is the original fixed-width base-2 encoder
    (and a sketch of the general loop) that :func:`encode` replaced.
    """
    pass
    # Encode number in binary (base 2)
    # if base == 2:
    #     result = list()
    #     bit_place = 7
    #     current_num = number
    #     while len(result) <= 7:
    #         fit = 2 ** bit_place
    #         if current_num - fit >= 0:
    #             current_num -= fit
    #             result.append('1')
    #         else:
    #             result.append('0')
    #         bit_place -= 1
    #     return ''.join(result)
    # while number >= base:
    #     remainder = int(number % base)
    #     quotient = int(number / base)
    #     converted = numbers[remainder]
    #     result.append(converted)
    #     number = quotient
def encode(number, base):
    """Encode given number in base 10 to digits in given base.

    number: int -- integer representation of number (in base 10)
    base: int -- base to convert to
    return: str -- string representation of number (in given base)
    """
    # Handle up to base 36 [0-9a-z]
    assert 2 <= base <= 36, 'base is out of range: {}'.format(base)
    # Handle unsigned numbers only for now
    assert number >= 0, 'number is negative: {}'.format(number)
    # If base 10, do nothing
    if base == 10:
        return str(number)

    alphabet = string.digits + string.ascii_lowercase
    value = int(number)
    out = []
    # Repeatedly peel off the least significant digit; divmod yields the
    # quotient and remainder in one step.
    while value >= base:
        value, remainder = divmod(value, base)
        out.append(alphabet[remainder])
    out.append(alphabet[value])
    # Digits were collected least-significant first, so reverse on the way out.
    return ''.join(reversed(out))
def convert(digits, base1, base2):
    """Convert given digits in base1 to digits in base2.

    digits: str -- string representation of number (in base1)
    base1: int -- base of given number
    base2: int -- base to convert to
    return: str -- string representation of number (in base2)
    """
    # Handle up to base 36 [0-9a-z]
    assert 2 <= base1 <= 36, 'base1 is out of range: {}'.format(base1)
    assert 2 <= base2 <= 36, 'base2 is out of range: {}'.format(base2)
    # BUG FIX: the original tested ``base1 == '10'`` (int vs str, always
    # False) and that dead branch would have passed the *string* digits to
    # encode(), crashing on ``number >= 0``.  Decoding first is correct for
    # every base, including 10.
    return encode(decode(digits, base1), base2)
def main():
    """Read command-line arguments and convert given digits between bases.

    Expects exactly three arguments: digits base1 base2.  Prints the
    conversion result, or a usage/error message for bad input.
    """
    import sys
    args = sys.argv[1:]  # Ignore script file name
    if len(args) == 3:
        digits = args[0]
        # BUG FIX (robustness): non-numeric base arguments previously raised
        # an unhandled ValueError traceback; report them cleanly instead.
        try:
            base1 = int(args[1])
            base2 = int(args[2])
        except ValueError:
            print('base1 and base2 must be integers, got: {} {}'.format(args[1], args[2]))
            return
        # Convert given digits between bases
        result = convert(digits, base1, base2)
        print('{} in base {} is {} in base {}'.format(digits, base1, result, base2))
    else:
        print('Usage: {} digits base1 base2'.format(sys.argv[0]))
        print('Converts digits from base1 to base2')
# Script entry point; importing this module performs no conversion.
if __name__ == '__main__':
    main()
| 31.696774 | 86 | 0.588846 |
89301da4b8829677f0cd4950e929614351d7e8ee | 1,514 | py | Python | gbe/forms/summer_act_form.py | bethlakshmi/gbe-divio-djangocms-python2.7 | 6e9b2c894162524bbbaaf73dcbe927988707231d | [
"Apache-2.0"
] | 1 | 2021-03-14T11:56:47.000Z | 2021-03-14T11:56:47.000Z | gbe/forms/summer_act_form.py | bethlakshmi/gbe-divio-djangocms-python2.7 | 6e9b2c894162524bbbaaf73dcbe927988707231d | [
"Apache-2.0"
] | 180 | 2019-09-15T19:52:46.000Z | 2021-11-06T23:48:01.000Z | gbe/forms/summer_act_form.py | bethlakshmi/gbe-divio-djangocms-python2.7 | 6e9b2c894162524bbbaaf73dcbe927988707231d | [
"Apache-2.0"
] | null | null | null | from django.forms import (
CheckboxSelectMultiple,
DurationField,
HiddenInput,
MultipleChoiceField,
)
from gbe.forms import (
ActEditDraftForm,
ActEditForm,
)
from gbe_forms_text import (
act_help_texts,
act_bid_labels,
summer_bid_label,
summer_help_texts,
)
from gbetext import (
more_shows_options,
summer_other_perf_options,
)
from gbe.functions import get_current_conference
from gbe.models import Act
class SummerActDraftForm(ActEditDraftForm):
    """Draft-stage act submission form for the summer event.

    Overrides fields of ActEditDraftForm with summer-specific choices and
    labels; every override is optional (``required=False``) so an
    incomplete draft can still be saved.
    """
    # Which summer shows the performer wants to appear in (optional in drafts).
    shows_preferences = MultipleChoiceField(
        widget=CheckboxSelectMultiple,
        choices=more_shows_options,
        label=summer_bid_label,
        help_text=summer_help_texts['shows_preferences'],
        required=False
    )
    # Other (non-show) performance opportunities, using summer-specific choices.
    other_performance = MultipleChoiceField(
        widget=CheckboxSelectMultiple,
        choices=summer_other_perf_options,
        label=act_bid_labels['other_performance'],
        help_text=act_help_texts['other_performance'],
        required=False
    )
    # Length of the act; optional here, made mandatory in SummerActForm.
    act_duration = DurationField(
        required=False,
        help_text=summer_help_texts['act_duration']
    )
class SummerActForm(SummerActDraftForm):
    """Final (submission) variant of the summer act form.

    Tightens the draft form: show preferences become mandatory (no
    ``required=False``) and the act duration is explicitly required.
    """
    # Mandatory on final submission; uses the standard act-bid label text.
    shows_preferences = MultipleChoiceField(
        widget=CheckboxSelectMultiple,
        choices=more_shows_options,
        label=act_bid_labels['summer_shows_preferences'],
        help_text=act_help_texts['shows_preferences']
    )
    # Duration must be supplied when actually submitting the act.
    act_duration = DurationField(
        required=True,
        help_text=summer_help_texts['act_duration']
    )
| 26.103448 | 57 | 0.720608 |
f9ae44085aad4c16f6592f43de3a099254f05d59 | 47 | py | Python | conjur/util/__init__.py | mbjahnoon/conjur-api-python3 | ec1f62bb1baf2bdcd34d2fb92c97db724f761020 | [
"Apache-2.0"
] | 16 | 2019-05-17T15:34:59.000Z | 2021-11-08T10:30:21.000Z | conjur/util/__init__.py | mbjahnoon/conjur-api-python3 | ec1f62bb1baf2bdcd34d2fb92c97db724f761020 | [
"Apache-2.0"
] | 301 | 2019-05-07T18:27:10.000Z | 2022-01-26T13:03:49.000Z | conjur/util/__init__.py | cyberark/cyberark-conjur-cli | 2507e8769808643d89efa7e2496cfc14f505bd7e | [
"Apache-2.0"
] | 10 | 2019-07-30T17:00:13.000Z | 2022-01-20T17:00:34.000Z | """
util
This package is for util modules
"""
| 7.833333 | 32 | 0.659574 |
67883520ca861f37157fe9459e625d4902633a1c | 16,572 | py | Python | machinevisiontoolbox/base/graphics.py | petercorke/machinevision-toolbox-python | d7e465575ad3c512387e9486b3b556dc9faa43cf | [
"MIT"
] | 38 | 2020-07-19T15:42:43.000Z | 2022-03-31T05:48:44.000Z | machinevisiontoolbox/base/graphics.py | petercorke/machinevision-toolbox-python | d7e465575ad3c512387e9486b3b556dc9faa43cf | [
"MIT"
] | 1 | 2021-01-03T19:40:21.000Z | 2021-01-03T19:40:21.000Z | machinevisiontoolbox/base/graphics.py | petercorke/machinevision-toolbox-python | d7e465575ad3c512387e9486b3b556dc9faa43cf | [
"MIT"
] | 9 | 2020-10-17T13:46:32.000Z | 2021-12-30T08:30:35.000Z | import cv2 as cv
from spatialmath import base
from ansitable import ANSITable, Column
from machinevisiontoolbox.base import color_bgr
import matplotlib.pyplot as plt
import numpy as np
from spatialmath import base
def plot_box(ax=None,
         bbox=None, bl=None, tl=None, br=None, tr=None, wh=None, centre=None,
         color=None, fillcolor=None, alpha=None, thickness=None, **kwargs):
    """
    Plot a box using matplotlib

    :param ax: the axes to draw on, defaults to ``gca()``
    :type ax: Axis, optional
    :param bbox: bounding box matrix, defaults to None
    :type bbox: ndarray(2,2), optional
    :param bl: bottom-left corner, defaults to None
    :type bl: array_like(2), optional
    :param tl: top-left corner, defaults to None
    :type tl: [array_like(2), optional
    :param br: bottom-right corner, defaults to None
    :type br: array_like(2), optional
    :param tr: top-right corner, defaults to None
    :type tr: array_like(2), optional
    :param wh: width and height, defaults to None
    :type wh: array_like(2), optional
    :param centre: box centre, defaults to None
    :type centre: array_like(2), optional
    :param color: box outline color
    :type color: array_like(3) or str
    :param fillcolor: box fill color
    :type fillcolor: array_like(3) or str
    :param alpha: transparency, defaults to 1
    :type alpha: float, optional
    :param thickness: line thickness, defaults to None
    :type thickness: float, optional
    :raises ValueError: if the supplied arguments do not match any of the
        supported corner/size combinations below
    :return: the matplotlib object
    :rtype: Patch.Rectangle

    Plots a box on the specified axes using matplotlib

    The box can be specified in many ways:

    - bounding box which is a 2x2 matrix [xmin, xmax; ymin, ymax]
    - centre and width+height
    - bottom-left and top-right corners
    - bottom-left corner and width+height
    - top-right corner and width+height
    - top-left corner and width+height
    """
    if bbox is not None:
        xy = bbox[:,0]
        w = bbox[0,1] - bbox[0,0]
        h = bbox[1,1] - bbox[1,0]
    elif bl is not None and tl is None and tr is None and wh is not None and centre is None:
        # bl + wh
        xy = bl
        w, h = wh
    elif bl is not None and tl is None and tr is not None and wh is None and centre is None:
        # bl + tr
        # BUG FIX: this branch previously read ``br``, which is always None
        # here (the condition tests ``tr``), so it crashed with a TypeError.
        xy = bl
        w = tr[0] - bl[0]
        h = tr[1] - bl[1]
    elif bl is None and tl is None and tr is None and wh is not None and centre is not None:
        # centre + wh
        w, h = wh
        xy = (centre[0] - w / 2, centre[1] - h / 2)
    elif bl is None and tl is None and tr is not None and wh is not None and centre is None:
        # tr + wh
        w, h = wh
        xy = (tr[0] - wh[0], tr[1] - wh[1])
    elif bl is None and tl is not None and tr is None and wh is not None and centre is None:
        # tl + wh
        w, h = wh
        xy = (tl[0], tl[1] - h)
    else:
        # Previously fell through and raised UnboundLocalError; be explicit.
        raise ValueError('cannot determine box from the given arguments')

    if ax is None:
        ax = plt.gca()

    fill = fillcolor is not None
    rect = plt.Rectangle(xy, w, h, edgecolor=color, facecolor=fillcolor, fill=fill,
                         alpha=alpha, linewidth=thickness, clip_on=True)
    ax.add_patch(rect)
    plt.draw()

    return rect
def draw_box(image,
         bbox=None, bl=None, tl=None, br=None, tr=None, wh=None, centre=None,
         color=None, fillcolor=None, alpha=None, thickness=1, **kwargs):
    """
    Draw a box in an image using OpenCV

    :param image: image to draw into, modified in place
    :type image: ndarray(h,w) or ndarray(h,w,nc)
    :param bbox: bounding box matrix, defaults to None
    :type bbox: ndarray(2,2), optional
    :param bl: bottom-left corner, defaults to None
    :type bl: array_like(2), optional
    :param tl: top-left corner, defaults to None
    :type tl: [array_like(2), optional
    :param br: bottom-right corner, defaults to None
    :type br: array_like(2), optional
    :param tr: top-right corner, defaults to None
    :type tr: array_like(2), optional
    :param wh: width and height, defaults to None
    :type wh: array_like(2), optional
    :param centre: box centre, defaults to None
    :type centre: array_like(2), optional
    :param color: box outline color
    :type color: array_like(3) or str
    :param fillcolor: box fill color; when given the box is drawn filled and
        ``thickness`` is ignored
    :type fillcolor: array_like(3) or str
    :param thickness: line thickness in pixels, defaults to 1
    :type thickness: int, optional
    :return: bottom-left and top-right corners as integer pixel coordinates
    :rtype: [type]

    Draws a box into the specified image using OpenCV.  The input ``image``
    is modified.

    The box can be specified in many ways:

    - bounding box which is a 2x2 matrix [xmin, xmax; ymin, ymax]
    - centre and width+height
    - bottom-left and top-right corners
    - bottom-left corner and width+height
    - top-right corner and width+height
    - top-left corner and width+height

    where bottom-left is (xmin, ymin), top-right is (xmax, ymax)
    """
    if not isinstance(color, int) and len(image.shape) == 2:
        raise TypeError("can't draw color into a greyscale image")

    # Normalize every supported argument combination to the (bl, tr) pair
    # that cv.rectangle expects.
    if bbox is not None:
        bl = tuple(bbox[:,0])
        tr = tuple(bbox[:,1])
    elif bl is not None and tl is None and tr is None and wh is not None and centre is None:
        # bl + wh
        bl = tuple(bl)
        w, h = wh
        tr = (bl[0] + w, bl[1] + h)
    elif bl is not None and tl is None and tr is not None and wh is None and centre is None:
        # bl + tr
        bl = tuple(bl)
        tr = tuple(tr)
    elif bl is None and tl is None and tr is None and wh is not None and centre is not None:
        # centre + wh
        w, h = wh
        bl = (centre[0] - w / 2, centre[1] - h / 2)
        tr = (centre[0] + w / 2, centre[1] + h / 2)
    elif bl is None and tl is None and tr is not None and wh is not None and centre is None:
        # tr + wh
        tr = tuple(tr)
        w, h = wh
        bl = (tr[0] - w, tr[1] - h)
    elif bl is None and tl is not None and tr is None and wh is not None and centre is None:
        # tl + wh
        # NOTE(review): this treats "top" as having the larger y value, like
        # the matplotlib variant, even though image y grows downward — confirm.
        w, h = wh
        bl = (tl[0], tl[1] - h)
        tr = (tl[0] + w, tl[1])

    # A fill request overrides the outline colour; thickness -1 tells OpenCV
    # to draw a filled rectangle.
    if fillcolor is not None:
        color = fillcolor
        thickness = -1

    if isinstance(color, str):
        color = color_bgr(color)
    # Reverse the channel order for OpenCV's BGR convention — presumably
    # callers pass RGB triplets; verify against call sites.
    if color is not None and len(color) == 3:
        color = color[::-1]

    # cv.rectangle requires integer pixel coordinates.
    bl = tuple([int(x) for x in bl])
    tr = tuple([int(x) for x in tr])
    cv.rectangle(image, bl, tr, color, thickness)

    return bl, tr
def plot_labelbox(text, textcolor=None, **kwargs):
    """
    Plot a labelled box using matplotlib

    :param text: text label
    :type text: str
    :param textcolor: text color, defaults to None
    :type textcolor: str or array_like(3), optional

    The position of the box is specified using the same arguments as for
    ``plot_box``.

    The label font is specified using the same arguments as for ``plot_text``.

    :seealso: :func:`plot_box`, :func:`plot_text`
    """
    rect = plot_box(**kwargs)

    bbox = rect.get_bbox()

    # BUG FIX (robustness): ``kwargs['color']`` raised KeyError when no box
    # colour was given; fall back to None (matplotlib's default) instead.
    # The label background reuses the box outline colour so the banner
    # visually matches the box.
    facecolor = kwargs.get('color')
    plot_text((bbox.xmin, bbox.ymin), text, color=textcolor, verticalalignment='bottom',
              bbox=dict(facecolor=facecolor, linewidth=0, edgecolor=None))
def draw_labelbox(image, text, textcolor='black',
        font=cv.FONT_HERSHEY_SIMPLEX, fontsize=0.9, fontthickness=2, **kwargs):
    """
    Draw a labelled box in the image using OpenCV

    :param image: image to draw into, modified in place
    :type image: ndarray(h,w) or ndarray(h,w,nc)
    :param text: text label
    :type text: str
    :param textcolor: text color, defaults to black
    :type textcolor: str or array_like(3), optional
    :param font: OpenCV font, defaults to cv.FONT_HERSHEY_SIMPLEX
    :type font: str, optional
    :param fontsize: OpenCV font scale, defaults to 0.9
    :type fontsize: float, optional
    :param fontthickness: font thickness in pixels, defaults to 2
    :type fontthickness: int, optional

    The position of the box is specified using the same arguments as for
    ``draw_box`` (via ``**kwargs``); the box colour is taken from
    ``kwargs['color']``.

    The label font is specified using the same arguments as for ``draw_text``.

    :seealso: :func:`draw_box`, :func:`draw_text`
    """
    # BUG FIX: the original referenced an undefined local ``color`` here,
    # raising NameError on every call — the box colour arrives via kwargs.
    boxcolor = kwargs.get('color')
    if not isinstance(boxcolor, int) and len(image.shape) == 2:
        raise TypeError("can't draw color into a greyscale image")

    # get size of text: ((w,h), baseline)
    twh = cv.getTextSize(text, font, fontsize, fontthickness)

    # draw the box
    bl, tr = draw_box(image, **kwargs)

    # a bit of margin, 1/2 the text height
    h = round(twh[0][1] / 2)
    h2 = round(twh[0][1] / 4)

    # draw background of the label in the box colour
    draw_box(image, tl=bl, wh=(twh[0][0] + h, twh[0][1] + h), fillcolor=boxcolor)

    # draw the text over the background
    draw_text(image, (bl[0] + h2, bl[1] - h2), text, color=textcolor,
              font=font, fontsize=fontsize, fontthickness=fontthickness)
def plot_text(pos, text=None, ax=None, color=None, **kwargs):
    """
    Plot text using matplotlib

    :param pos: position of text
    :type pos: array_like(2)
    :param text: text
    :type text: str
    :param ax: axes to draw in, defaults to ``gca()``
    :type ax: Axis, optional
    :param color: text color, defaults to None
    :type color: str or array_like(3), optional
    :param kwargs: additional arguments passed to ``pyplot.text()``
    """
    # Fill in alignment defaults without clobbering caller-supplied values.
    kwargs.setdefault('horizontalalignment', 'left')
    kwargs.setdefault('verticalalignment', 'center')

    if ax is None:
        ax = plt.gca()

    plt.text(pos[0], pos[1], text, color=color, **kwargs)
def draw_text(image, pos, text=None, color=None, font=cv.FONT_HERSHEY_SIMPLEX, fontsize=0.3, fontthickness=2):
    """
    Draw text in image using OpenCV

    :param image: image to draw into, modified in place
    :type image: ndarray(h,w) or ndarray(h,w,nc)
    :param pos: position of text; OpenCV expects integer pixel coordinates —
        presumably callers pass ints, verify at call sites
    :type pos: array_like(2)
    :param text: text
    :type text: str
    :param color: color of text
    :type color: scalar, array_like(3), str
    :param font: OpenCV font, defaults to cv.FONT_HERSHEY_SIMPLEX
    :type font: str, optional
    :param fontsize: OpenCV font scale, defaults to 0.3
    :type fontsize: float, optional
    :param fontthickness: font thickness in pixels, defaults to 2
    :type fontthickness: int, optional

    The position corresponds to the bottom-left corner of the text box as seen
    in the image.
    """
    # Drawing a colour triplet into a single-channel image is an error.
    if not isinstance(color, int) and len(image.shape) == 2:
        raise TypeError("can't draw color into a greyscale image")

    # Accept colour names; color_bgr returns OpenCV's BGR channel order.
    if isinstance(color, str):
        color = color_bgr(color)

    cv.putText(image, text, pos, font, fontsize, color, fontthickness)
def plot_point(pos, marker='bs', text=None, ax=None, color=None, **kwargs):
    """
    Plot a point using matplotlib

    :param pos: position of marker
    :type pos: array_like(2), ndarray(2,n), list of 2-tuples
    :param marker: matplotlub marker style, defaults to 'bs'
    :type marker: str or list of str, optional
    :param text: text label, defaults to None
    :type text: str, optional
    :param ax: axes to plot in, defaults to ``gca()````
    :type ax: Axis, optional
    :param color: text color, defaults to None
    :type color: str or array_like(3), optional

    The color of the marker can be different to the color of the text,
    the marker color is specified by a single letter in the marker string.

    A point can multiple markers which will be overlaid, for instance ``["rx",
    "ro"]`` will give a ⨂ symbol.

    The optional text label is placed to the right of the marker, and vertically
    aligned.

    Multiple points can be marked if ``pos`` is a 2xn array or a list of
    coordinate pairs. If a label is provided every point will have the same
    label. However, the text is processed with ``format`` and is provided with a
    single argument, the point index (starting at zero).
    """
    # Normalize ``pos`` into parallel x and y sequences (or scalars).
    if isinstance(pos, np.ndarray) and pos.shape[0] == 2:
        x = pos[0,:]
        y = pos[1,:]
    elif isinstance(pos, (tuple, list)):
        # [x, y]
        # [(x,y), (x,y), ...]
        # [xlist, ylist]
        # [xarray, yarray]
        if base.islistof(pos, (tuple, list)):
            x = [z[0] for z in pos]
            y = [z[1] for z in pos]
        elif base.islistof(pos, np.ndarray):
            x = pos[0]
            y = pos[1]
        else:
            x = pos[0]
            y = pos[1]

    if ax is None:
        ax = plt.gca()

    if isinstance(marker, (list, tuple)):
        for m in marker:
            plt.plot(x, y, m, **kwargs)
    else:
        plt.plot(x, y, marker)

    if text:
        try:
            # Sequence case: one label per point, formatted with its index.
            for i, xy in enumerate(zip(x, y)):
                plt.text(xy[0], xy[1], ' ' + text.format(i), horizontalalignment='left', verticalalignment='center', color=color, **kwargs)
        except TypeError:
            # BUG FIX: was a bare ``except:`` that swallowed every error.
            # Only a TypeError (scalar x/y is not iterable by zip) should
            # fall through to the single-point case.
            plt.text(x, y, ' ' + text, horizontalalignment='left', verticalalignment='center', color=color, **kwargs)
def draw_point(image, pos, marker='+', text=None, color=None, font=cv.FONT_HERSHEY_SIMPLEX, fontsize=0.3, fontthickness=2):
    """
    Draw marker in image using OpenCV

    :param image: image to draw into, modified in place
    :type image: ndarray(h,w) or ndarray(h,w,nc)
    :param pos: position of marker
    :type pos: array_like(2), ndarray(2,n), list of 2-tuples
    :param marker: marker character, defaults to '+'
    :type marker: str, optional
    :param text: text label, defaults to None
    :type text: str, optional
    :param color: text color, defaults to None
    :type color: str or array_like(3), optional
    :param font: OpenCV font, defaults to cv.FONT_HERSHEY_SIMPLEX
    :type font: str, optional
    :param fontsize: OpenCV font scale, defaults to 0.3
    :type fontsize: float, optional
    :param fontthickness: font thickness in pixels, defaults to 2
    :type fontthickness: int, optional

    The text label is placed to the right of the marker, and vertically centred.
    The color of the marker can be different to the color of the text, the
    marker color is specified by a single letter in the marker string.

    Multiple points can be marked if ``pos`` is a 2xn array or a list of
    coordinate pairs. If a label is provided every point will have the same
    label. However, the text is processed with ``format`` and is provided with
    a single argument, the point index (starting at zero).
    """
    # Drawing a colour triplet into a single-channel image is an error.
    if not isinstance(color, int) and len(image.shape) == 2:
        raise TypeError("can't draw color into a greyscale image")

    # Normalize ``pos`` into parallel x and y sequences.
    # NOTE(review): a bare (x, y) scalar pair leaves x/y as scalars, which
    # the zip() below will reject with TypeError — confirm intended inputs.
    if isinstance(pos, np.ndarray) and pos.shape[0] == 2:
        x = pos[0,:]
        y = pos[1,:]
    elif isinstance(pos, (tuple, list)):
        if base.islistof(pos, (tuple, list)):
            x = [z[0] for z in pos]
            y = [z[1] for z in pos]
        else:
            x = pos[0]
            y = pos[1]

    # Accept colour names; color_bgr returns OpenCV's BGR channel order.
    if isinstance(color, str):
        color = color_bgr(color)

    # Render each point as its marker character plus the optional
    # index-formatted label.  cv.putText expects integer coordinates —
    # presumably callers pass ints; verify at call sites.
    for i, xy in enumerate(zip(x, y)):
        s = marker
        if text:
            s += ' ' + text.format(i)
        cv.putText(image, s, xy, font, fontsize, color, fontthickness)
def plot_histogram(c, n, clip=False, ax=None, block=False, xlabel=None, ylabel=None, grid=False, **kwargs):
    """
    Plot a histogram as a bar chart using matplotlib.

    :param c: bin (class) values, the x-coordinate of each bar
    :param n: count per bin, the height of each bar
    :param clip: trim leading and trailing empty bins, defaults to False
    :type clip: bool, optional
    :param ax: axes to draw in; a new figure is created when None
    :type ax: Axis, optional
    :param block: passed to ``plt.show``, defaults to False
    :type block: bool, optional
    :param xlabel: x-axis label, defaults to None
    :type xlabel: str, optional
    :param ylabel: y-axis label, defaults to None
    :type ylabel: str, optional
    :param grid: enable axes grid, defaults to False
    :type grid: bool, optional
    :param kwargs: additional arguments passed to ``Axes.bar``
    """
    if ax is None:
        plt.figure()
        ax = plt.gca()

    # n = hist.h  # number of pixels per class
    # c = hist.x  # class value

    if clip:
        # BUG FIX: ``nz, _ = np.where(n > 0)`` only unpacks for 2-D input and
        # raised ValueError for a 1-D count vector.  flatnonzero handles both
        # 1-D vectors and (n,1) column vectors.
        nz = np.flatnonzero(n > 0)
        start = nz[0]
        end = nz[-1] + 1
        n = n[start:end]
        c = c[start:end]

    ax.bar(c, n, **kwargs)
    if xlabel is not None:
        ax.set_xlabel(xlabel)
    if ylabel is not None:
        ax.set_ylabel(ylabel)
    ax.grid(grid)

    plt.show(block=block)
if __name__ == "__main__":
# from machinevisiontoolbox import iread, idisp
# im, file = iread('flowers1.png')
# idisp(im, darken=2)
# plot_box(centre=(300,200), wh=(40,40), fillcolor='red', alpha=0.5)
# plot_point([(200,200), (300, 300), (400,400)], marker='r*', color='blue', text="bob {}")
# plot_labelbox("hello", color='red', textcolor='white', centre=(300,300), wh=(60,40))
# plt.show()
import numpy as np
from machinevisiontoolbox import idisp, iread, Image
im = np.zeros((100,100,3), 'uint8')
# im, file = iread('flowers1.png')
# draw_box(im, color=(255,0,0), centre=(50,50), wh=(20,20))
# draw_point(im, [(200,200), (300, 300), (400,400)], color='blue')
# draw_labelbox(im, "box", thickness=3, centre=(100,100), wh=(100,30), color='red', textcolor='white')
idisp(im)
x = np.random.randint(0, 100, size=(10,))
y = np.random.randint(0, 100, size=(10,))
plot_point((x,y), 'w+')
plt.draw()
plt.show(block=True)
im = Image('penguins.png')
h = im.hist()
| 33.959016 | 139 | 0.620203 |
b9cbf3830b414d320cd27d33b46adb99ba2339a3 | 5,317 | py | Python | modules/nn/modules.py | notani/ntua-slp-semeval2018 | 2d6e03d37210a91fbc0ff99f44a4449abc59210c | [
"MIT"
] | null | null | null | modules/nn/modules.py | notani/ntua-slp-semeval2018 | 2d6e03d37210a91fbc0ff99f44a4449abc59210c | [
"MIT"
] | null | null | null | modules/nn/modules.py | notani/ntua-slp-semeval2018 | 2d6e03d37210a91fbc0ff99f44a4449abc59210c | [
"MIT"
] | null | null | null | from torch import nn, torch
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
from modules.nn.regularization import GaussianNoise
class RNNEncoder(nn.Module):
    def __init__(self, input_size, rnn_size, num_layers,
                 bidirectional, dropout):
        """
        A simple RNN Encoder.

        Args:
            input_size (int): the size of the input features
            rnn_size (int): hidden size of the LSTM
            num_layers (int): number of stacked LSTM layers
            bidirectional (bool): run the LSTM in both directions
            dropout (float): dropout probability (between LSTM layers and on
                the returned last outputs)

        Returns: outputs, last_outputs
        - **outputs** of shape `(batch, seq_len, hidden_size)`:
          tensor containing the output features `(h_t)`
          from the last layer of the LSTM, for each t.
        - **last_outputs** of shape `(batch, hidden_size)`:
          tensor containing the last output features
          from the last layer of the LSTM, for each t=seq_len.
        """
        super(RNNEncoder, self).__init__()

        self.rnn = nn.LSTM(input_size=input_size,
                           hidden_size=rnn_size,
                           num_layers=num_layers,
                           bidirectional=bidirectional,
                           dropout=dropout,
                           batch_first=True)

        # the dropout "layer" for the output of the RNN
        self.drop_rnn = nn.Dropout(dropout)

        # define output feature size (doubled when both directions are
        # concatenated)
        self.feature_size = rnn_size
        if bidirectional:
            self.feature_size *= 2

    @staticmethod
    def last_by_index(outputs, lengths):
        """Gather, per sequence, the output at its last valid timestep."""
        # Index of the last output for each sequence.
        idx = (lengths - 1).view(-1, 1).expand(outputs.size(0),
                                               outputs.size(2)).unsqueeze(1)
        # BUG FIX: squeeze only the gathered time dimension.  The previous
        # bare ``.squeeze()`` also removed the batch dimension when
        # batch == 1, returning a 1-D tensor and breaking the bidirectional
        # ``torch.cat`` below.
        return outputs.gather(1, idx).squeeze(1)

    @staticmethod
    def split_directions(outputs):
        """Split a bidirectional output tensor into (forward, backward) halves."""
        direction_size = int(outputs.size(-1) / 2)
        forward = outputs[:, :, :direction_size]
        backward = outputs[:, :, direction_size:]
        return forward, backward

    def last_timestep(self, outputs, lengths, bi=False):
        """Return the last relevant output per sequence.

        For a bidirectional RNN this concatenates the forward output at the
        last valid step with the backward output at step 0 (which has seen
        the whole sequence in reverse).
        """
        if bi:
            forward, backward = self.split_directions(outputs)
            last_forward = self.last_by_index(forward, lengths)
            last_backward = backward[:, 0, :]
            return torch.cat((last_forward, last_backward), dim=-1)
        else:
            return self.last_by_index(outputs, lengths)

    def forward(self, embs, lengths):
        """
        This is the heart of the model. This function, defines how the data
        passes through the network.

        Args:
            embs (): word embeddings, shape `(batch, seq_len, input_size)`
            lengths (): the lengths of each sentence, sorted descending
                (pack_padded_sequence requires sorted input by default)

        Returns: outputs, last_outputs (see class docstring)
        """
        # pack the batch so the LSTM skips padded positions
        packed = pack_padded_sequence(embs, list(lengths.data),
                                      batch_first=True).float()

        out_packed, _ = self.rnn(packed)

        # unpack output - no need if we are going to use only the last outputs
        outputs, _ = pad_packed_sequence(out_packed, batch_first=True)

        # get the outputs from the last *non-masked* timestep for each sentence
        last_outputs = self.last_timestep(outputs, lengths,
                                          self.rnn.bidirectional)

        # apply dropout to the outputs of the RNN
        last_outputs = self.drop_rnn(last_outputs)

        return outputs, last_outputs
class Embed(nn.Module):
    def __init__(self,
                 num_embeddings,
                 embedding_dim,
                 embeddings=None,
                 noise=.0,
                 dropout=.0,
                 trainable=False):
        """
        Define the layer of the model and perform the initializations
        of the layers (wherever it is necessary)

        Args:
            num_embeddings (int): vocabulary size
            embedding_dim (int): size of each embedding vector
            embeddings (numpy.ndarray): the 2D ndarray with the word vectors
            noise (float): stddev of additive Gaussian noise on the embeddings
            dropout (float): dropout probability on the embeddings
            trainable (bool): whether pre-trained weights stay trainable
        """
        super(Embed, self).__init__()

        # define the embedding layer, with the corresponding dimensions
        self.embedding = nn.Embedding(num_embeddings=num_embeddings,
                                      embedding_dim=embedding_dim)

        if embeddings is not None:
            # NOTE(review): library code printing to stdout; consider logging.
            print("Initializing Embedding layer with pre-trained weights!")
            self.init_embeddings(embeddings, trainable)

        # the dropout "layer" for the word embeddings
        self.dropout = nn.Dropout(dropout)

        # the gaussian noise "layer" for the word embeddings
        self.noise = GaussianNoise(noise)

    def init_embeddings(self, weights, trainable):
        # Replace the randomly-initialized weight with the pre-trained matrix.
        # NOTE(review): torch.from_numpy keeps the numpy dtype — a float64
        # array yields a float64 parameter; confirm inputs are float32.
        self.embedding.weight = nn.Parameter(torch.from_numpy(weights),
                                             requires_grad=trainable)

    def forward(self, x):
        """
        This is the heart of the model. This function, defines how the data
        passes through the network.

        Args:
            x (): the input data (the sentences), integer token id tensor

        Returns: the embedded (and optionally noised/dropped-out) vectors
        """
        embeddings = self.embedding(x)

        # Noise and dropout are applied unconditionally by their modules in
        # train mode; the guards just skip the no-op calls when disabled.
        if self.noise.stddev > 0:
            embeddings = self.noise(embeddings)

        if self.dropout.p > 0:
            embeddings = self.dropout(embeddings)

        return embeddings
| 33.866242 | 79 | 0.579274 |
6db50d93680776fce3dfccd99cb4ea7052146458 | 70,022 | py | Python | src/spring-cloud/azext_spring_cloud/vendored_sdks/appplatform/v2019_05_01_preview/models/_models.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | [
"MIT"
] | 2 | 2021-06-05T17:51:26.000Z | 2021-11-17T11:17:56.000Z | src/spring-cloud/azext_spring_cloud/vendored_sdks/appplatform/v2019_05_01_preview/models/_models.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | [
"MIT"
] | 3 | 2020-05-27T20:16:26.000Z | 2020-07-23T19:46:49.000Z | src/spring-cloud/azext_spring_cloud/vendored_sdks/appplatform/v2019_05_01_preview/models/_models.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | [
"MIT"
] | 5 | 2020-05-09T17:47:09.000Z | 2020-10-01T19:52:06.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
from msrest.exceptions import HttpOperationError
class Resource(Model):
    """The core properties of ARM resources.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Fully qualified resource Id for the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    """

    # All three attributes are server-populated, hence marked read-only so
    # msrest omits them from serialized requests.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    # Maps Python attribute names to their wire (JSON) keys and types.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Resource, self).__init__(**kwargs)
        # Left unset locally; filled in when deserializing a server response.
        self.id = None
        self.name = None
        self.type = None
class ProxyResource(Resource):
    """The resource model definition for a ARM proxy resource. It will have
    everything other than required location and tags.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Fully qualified resource Id for the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    """

    # Identical to Resource; the subclass exists to mark the ARM "proxy
    # resource" category (no location/tags) in the type hierarchy.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ProxyResource, self).__init__(**kwargs)
class AppResource(ProxyResource):
    """App resource payload.

    The id/name/type fields are populated by the service and ignored
    on requests.

    :ivar id: Fully qualified ARM resource Id.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param properties: Properties of the App resource.
    :type properties:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.AppResourceProperties
    :param identity: Managed identity attached to the app resource.
    :type identity:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.ManagedIdentityProperties
    :param location: GEO location of the application; always matches the
     parent resource's location.
    :type location: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'AppResourceProperties'},
        'identity': {'key': 'identity', 'type': 'ManagedIdentityProperties'},
        'location': {'key': 'location', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AppResource, self).__init__(**kwargs)
        for key in ('properties', 'identity', 'location'):
            setattr(self, key, kwargs.get(key))
class AppResourceProperties(Model):
    """App resource properties payload.

    Read-only fields (url, provisioning_state, created_time) are populated
    by the service and ignored on requests.

    :param public: Whether the App exposes a public endpoint.
    :type public: bool
    :ivar url: URL of the App.
    :vartype url: str
    :ivar provisioning_state: Provisioning state of the App. Possible values
     include: 'Succeeded', 'Failed', 'Creating', 'Updating', 'Deleting'
    :vartype provisioning_state: str or
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.AppResourceProvisioningState
    :param active_deployment_name: Name of the App's active deployment.
    :type active_deployment_name: str
    :param fqdn: Fully qualified DNS name.
    :type fqdn: str
    :param https_only: Whether only https traffic is allowed.
    :type https_only: bool
    :ivar created_time: Creation timestamp of the resource.
    :vartype created_time: datetime
    :param temporary_disk: Temporary disk settings.
    :type temporary_disk:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.TemporaryDisk
    :param persistent_disk: Persistent disk settings.
    :type persistent_disk:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.PersistentDisk
    """

    _validation = {
        'url': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'created_time': {'readonly': True},
    }

    _attribute_map = {
        'public': {'key': 'public', 'type': 'bool'},
        'url': {'key': 'url', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'active_deployment_name': {'key': 'activeDeploymentName', 'type': 'str'},
        'fqdn': {'key': 'fqdn', 'type': 'str'},
        'https_only': {'key': 'httpsOnly', 'type': 'bool'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'temporary_disk': {'key': 'temporaryDisk', 'type': 'TemporaryDisk'},
        'persistent_disk': {'key': 'persistentDisk', 'type': 'PersistentDisk'},
    }

    def __init__(self, **kwargs):
        super(AppResourceProperties, self).__init__(**kwargs)
        # Caller-settable fields come from kwargs; read-only ones stay None.
        for key in ('public', 'active_deployment_name', 'fqdn', 'https_only',
                    'temporary_disk', 'persistent_disk'):
            setattr(self, key, kwargs.get(key))
        for attr in ('url', 'provisioning_state', 'created_time'):
            setattr(self, attr, None)
class AvailableRuntimeVersions(Model):
    """Collection of runtime versions supported by the service.

    The value field is populated by the server and ignored on requests.

    :ivar value: All supported runtime versions.
    :vartype value:
     list[~azure.mgmt.appplatform.v2019_05_01_preview.models.SupportedRuntimeVersion]
    """

    _validation = {
        'value': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[SupportedRuntimeVersion]'},
    }

    def __init__(self, **kwargs):
        super(AvailableRuntimeVersions, self).__init__(**kwargs)
        # Read-only; the service fills this in.
        self.value = None
class BindingResource(ProxyResource):
    """Binding resource payload.

    The id/name/type fields are populated by the service and ignored
    on requests.

    :ivar id: Fully qualified ARM resource Id.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param properties: Properties of the Binding resource.
    :type properties:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.BindingResourceProperties
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'BindingResourceProperties'},
    }

    def __init__(self, **kwargs):
        super(BindingResource, self).__init__(**kwargs)
        self.properties = kwargs.get('properties')
class BindingResourceProperties(Model):
    """Binding resource properties payload.

    Read-only fields are populated by the service and ignored on requests.

    :ivar resource_name: Name of the bound resource.
    :vartype resource_name: str
    :ivar resource_type: Standard Azure resource type of the bound resource.
    :vartype resource_type: str
    :param resource_id: Azure resource id of the bound resource.
    :type resource_id: str
    :param key: Key of the bound resource.
    :type key: str
    :param binding_parameters: Binding parameters of the Binding resource.
    :type binding_parameters: dict[str, object]
    :ivar generated_properties: Generated Spring Boot property file for this
     binding, with the secret redacted.
    :vartype generated_properties: str
    :ivar created_at: Creation time of the Binding resource.
    :vartype created_at: str
    :ivar updated_at: Last update time of the Binding resource.
    :vartype updated_at: str
    """

    _validation = {
        'resource_name': {'readonly': True},
        'resource_type': {'readonly': True},
        'generated_properties': {'readonly': True},
        'created_at': {'readonly': True},
        'updated_at': {'readonly': True},
    }

    _attribute_map = {
        'resource_name': {'key': 'resourceName', 'type': 'str'},
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'key': {'key': 'key', 'type': 'str'},
        'binding_parameters': {'key': 'bindingParameters', 'type': '{object}'},
        'generated_properties': {'key': 'generatedProperties', 'type': 'str'},
        'created_at': {'key': 'createdAt', 'type': 'str'},
        'updated_at': {'key': 'updatedAt', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(BindingResourceProperties, self).__init__(**kwargs)
        # Server-owned fields are left unset; the rest come from kwargs.
        for attr in ('resource_name', 'resource_type', 'generated_properties',
                     'created_at', 'updated_at'):
            setattr(self, attr, None)
        for key in ('resource_id', 'key', 'binding_parameters'):
            setattr(self, key, kwargs.get(key))
class CertificateProperties(Model):
    """Certificate resource payload.

    Read-only fields are populated by the service and ignored on requests.
    All required parameters must be supplied to send to Azure.

    :ivar thumbprint: Thumbprint of the certificate.
    :vartype thumbprint: str
    :param vault_uri: Required. Vault uri of the user key vault.
    :type vault_uri: str
    :param key_vault_cert_name: Required. Certificate name in the key vault.
    :type key_vault_cert_name: str
    :param cert_version: Certificate version in the key vault.
    :type cert_version: str
    :ivar issuer: Issuer of the certificate.
    :vartype issuer: str
    :ivar issued_date: Issue date of the certificate.
    :vartype issued_date: str
    :ivar expiration_date: Expiration date of the certificate.
    :vartype expiration_date: str
    :ivar activate_date: Activation date of the certificate.
    :vartype activate_date: str
    :ivar subject_name: Subject name of the certificate.
    :vartype subject_name: str
    :ivar dns_names: Domain list covered by the certificate.
    :vartype dns_names: list[str]
    """

    _validation = {
        'thumbprint': {'readonly': True},
        'vault_uri': {'required': True},
        'key_vault_cert_name': {'required': True},
        'issuer': {'readonly': True},
        'issued_date': {'readonly': True},
        'expiration_date': {'readonly': True},
        'activate_date': {'readonly': True},
        'subject_name': {'readonly': True},
        'dns_names': {'readonly': True},
    }

    _attribute_map = {
        'thumbprint': {'key': 'thumbprint', 'type': 'str'},
        'vault_uri': {'key': 'vaultUri', 'type': 'str'},
        'key_vault_cert_name': {'key': 'keyVaultCertName', 'type': 'str'},
        'cert_version': {'key': 'certVersion', 'type': 'str'},
        'issuer': {'key': 'issuer', 'type': 'str'},
        'issued_date': {'key': 'issuedDate', 'type': 'str'},
        'expiration_date': {'key': 'expirationDate', 'type': 'str'},
        'activate_date': {'key': 'activateDate', 'type': 'str'},
        'subject_name': {'key': 'subjectName', 'type': 'str'},
        'dns_names': {'key': 'dnsNames', 'type': '[str]'},
    }

    def __init__(self, **kwargs):
        super(CertificateProperties, self).__init__(**kwargs)
        # Only the key-vault pointers are caller-settable; everything else
        # is derived from the certificate by the service.
        for key in ('vault_uri', 'key_vault_cert_name', 'cert_version'):
            setattr(self, key, kwargs.get(key))
        for attr in ('thumbprint', 'issuer', 'issued_date', 'expiration_date',
                     'activate_date', 'subject_name', 'dns_names'):
            setattr(self, attr, None)
class CertificateResource(ProxyResource):
    """Certificate resource payload.

    The id/name/type fields are populated by the service and ignored
    on requests.

    :ivar id: Fully qualified ARM resource Id.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param properties: Properties of the certificate resource payload.
    :type properties:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.CertificateProperties
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'CertificateProperties'},
    }

    def __init__(self, **kwargs):
        super(CertificateResource, self).__init__(**kwargs)
        self.properties = kwargs.get('properties')
class CloudError(Model):
    """Error response returned by the service.

    :param error: The error body.
    :type error:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.CloudErrorBody
    """

    _attribute_map = {
        'error': {'key': 'error', 'type': 'CloudErrorBody'},
    }

    def __init__(self, **kwargs):
        super(CloudError, self).__init__(**kwargs)
        self.error = kwargs.get('error')
class CloudErrorException(HttpOperationError):
    """Raised when the server responded with an error of type 'CloudError'.

    :param deserialize: A deserializer.
    :param response: Server response to be deserialized.
    """

    def __init__(self, deserialize, response, *args):
        # Pin the deserialization target to the CloudError model.
        super(CloudErrorException, self).__init__(
            deserialize, response, 'CloudError', *args)
class CloudErrorBody(Model):
    """Body of an error response from the service.

    :param code: Error identifier; invariant and intended for
     programmatic consumption.
    :type code: str
    :param message: Human-readable error description, suitable for
     display in a user interface.
    :type message: str
    :param target: Target of the error, e.g. the name of the property
     in error.
    :type target: str
    :param details: Additional details about the error.
    :type details:
     list[~azure.mgmt.appplatform.v2019_05_01_preview.models.CloudErrorBody]
    """

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[CloudErrorBody]'},
    }

    def __init__(self, **kwargs):
        super(CloudErrorBody, self).__init__(**kwargs)
        for key in ('code', 'message', 'target', 'details'):
            setattr(self, key, kwargs.get(key))
class ClusterResourceProperties(Model):
    """Service properties payload.

    Read-only fields are populated by the service and ignored on requests.

    :ivar provisioning_state: Provisioning state of the Service. Possible
     values include: 'Creating', 'Updating', 'Deleting', 'Deleted',
     'Succeeded', 'Failed', 'Moving', 'Moved', 'MoveFailed'
    :vartype provisioning_state: str or
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.ProvisioningState
    :param config_server_properties: Config server git properties of the
     Service.
    :type config_server_properties:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.ConfigServerProperties
    :param trace: Trace properties of the Service.
    :type trace:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.TraceProperties
    :param network_profile: Network profile of the Service.
    :type network_profile:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.NetworkProfile
    :ivar version: Version of the Service.
    :vartype version: int
    :ivar service_id: GUID uniquely identifying the created resource.
    :vartype service_id: str
    """

    _validation = {
        'provisioning_state': {'readonly': True},
        'version': {'readonly': True},
        'service_id': {'readonly': True},
    }

    _attribute_map = {
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'config_server_properties': {'key': 'configServerProperties', 'type': 'ConfigServerProperties'},
        'trace': {'key': 'trace', 'type': 'TraceProperties'},
        'network_profile': {'key': 'networkProfile', 'type': 'NetworkProfile'},
        'version': {'key': 'version', 'type': 'int'},
        'service_id': {'key': 'serviceId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ClusterResourceProperties, self).__init__(**kwargs)
        for attr in ('provisioning_state', 'version', 'service_id'):
            setattr(self, attr, None)
        for key in ('config_server_properties', 'trace', 'network_profile'):
            setattr(self, key, kwargs.get(key))
class ConfigServerGitProperty(Model):
    """Git settings of the config server.

    All required parameters must be supplied to send to Azure.

    :param repositories: Repositories of git.
    :type repositories:
     list[~azure.mgmt.appplatform.v2019_05_01_preview.models.GitPatternRepository]
    :param uri: Required. URI of the repository.
    :type uri: str
    :param label: Label of the repository.
    :type label: str
    :param search_paths: Search paths of the repository.
    :type search_paths: list[str]
    :param username: Username for git basic auth.
    :type username: str
    :param password: Password for git basic auth.
    :type password: str
    :param host_key: Public sshKey of the git repository.
    :type host_key: str
    :param host_key_algorithm: SshKey algorithm of the git repository.
    :type host_key_algorithm: str
    :param private_key: Private sshKey of the git repository.
    :type private_key: str
    :param strict_host_key_checking: Whether strict host key checking is on.
    :type strict_host_key_checking: bool
    """

    _validation = {
        'uri': {'required': True},
    }

    _attribute_map = {
        'repositories': {'key': 'repositories', 'type': '[GitPatternRepository]'},
        'uri': {'key': 'uri', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'search_paths': {'key': 'searchPaths', 'type': '[str]'},
        'username': {'key': 'username', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'host_key': {'key': 'hostKey', 'type': 'str'},
        'host_key_algorithm': {'key': 'hostKeyAlgorithm', 'type': 'str'},
        'private_key': {'key': 'privateKey', 'type': 'str'},
        'strict_host_key_checking': {'key': 'strictHostKeyChecking', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(ConfigServerGitProperty, self).__init__(**kwargs)
        # Every serialized field is caller-settable; pull each from kwargs.
        for key in self._attribute_map:
            setattr(self, key, kwargs.get(key))
class ConfigServerProperties(Model):
    """Config server git properties payload.

    The state field is populated by the service and ignored on requests.

    :ivar state: State of the config server. Possible values include:
     'NotAvailable', 'Deleted', 'Failed', 'Succeeded', 'Updating'
    :vartype state: str or
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.ConfigServerState
    :param error: Error raised while applying config server settings.
    :type error: ~azure.mgmt.appplatform.v2019_05_01_preview.models.Error
    :param config_server: Settings of the config server.
    :type config_server:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.ConfigServerSettings
    """

    _validation = {
        'state': {'readonly': True},
    }

    _attribute_map = {
        'state': {'key': 'state', 'type': 'str'},
        'error': {'key': 'error', 'type': 'Error'},
        'config_server': {'key': 'configServer', 'type': 'ConfigServerSettings'},
    }

    def __init__(self, **kwargs):
        super(ConfigServerProperties, self).__init__(**kwargs)
        self.state = None  # server-populated
        self.error = kwargs.get('error')
        self.config_server = kwargs.get('config_server')
class ConfigServerSettings(Model):
    """Settings of the config server.

    :param git_property: Git environment property.
    :type git_property:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.ConfigServerGitProperty
    """

    _attribute_map = {
        'git_property': {'key': 'gitProperty', 'type': 'ConfigServerGitProperty'},
    }

    def __init__(self, **kwargs):
        super(ConfigServerSettings, self).__init__(**kwargs)
        self.git_property = kwargs.get('git_property')
class CustomDomainProperties(Model):
    """Custom domain properties of an app resource.

    The app_name field is populated by the service and ignored on requests.

    :param thumbprint: Thumbprint of the bound certificate.
    :type thumbprint: str
    :ivar app_name: App name the domain belongs to.
    :vartype app_name: str
    :param cert_name: Name of the certificate bound to the domain.
    :type cert_name: str
    """

    _validation = {
        'app_name': {'readonly': True},
    }

    _attribute_map = {
        'thumbprint': {'key': 'thumbprint', 'type': 'str'},
        'app_name': {'key': 'appName', 'type': 'str'},
        'cert_name': {'key': 'certName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(CustomDomainProperties, self).__init__(**kwargs)
        self.thumbprint = kwargs.get('thumbprint')
        self.app_name = None  # server-populated
        self.cert_name = kwargs.get('cert_name')
class CustomDomainResource(ProxyResource):
    """Custom domain resource payload.

    The id/name/type fields are populated by the service and ignored
    on requests.

    :ivar id: Fully qualified ARM resource Id.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param properties: Properties of the custom domain resource.
    :type properties:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.CustomDomainProperties
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'CustomDomainProperties'},
    }

    def __init__(self, **kwargs):
        super(CustomDomainResource, self).__init__(**kwargs)
        self.properties = kwargs.get('properties')
class CustomDomainValidatePayload(Model):
    """Payload for a custom domain validation request.

    All required parameters must be supplied to send to Azure.

    :param name: Required. Domain name to be validated.
    :type name: str
    """

    _validation = {
        'name': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(CustomDomainValidatePayload, self).__init__(**kwargs)
        self.name = kwargs.get('name')
class CustomDomainValidateResult(Model):
    """Result of validating a custom domain.

    :param is_valid: Whether the domain name is valid.
    :type is_valid: bool
    :param message: Reason why the domain name is invalid.
    :type message: str
    """

    _attribute_map = {
        'is_valid': {'key': 'isValid', 'type': 'bool'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(CustomDomainValidateResult, self).__init__(**kwargs)
        self.is_valid = kwargs.get('is_valid')
        self.message = kwargs.get('message')
class DeploymentInstance(Model):
    """Deployment instance payload.

    Every field is populated by the service and ignored on requests.

    :ivar name: Name of the deployment instance.
    :vartype name: str
    :ivar status: Status of the deployment instance.
    :vartype status: str
    :ivar reason: Failure reason of the deployment instance.
    :vartype reason: str
    :ivar discovery_status: Discovery status of the deployment instance.
    :vartype discovery_status: str
    :ivar start_time: Start time of the deployment instance.
    :vartype start_time: str
    """

    _validation = {
        'name': {'readonly': True},
        'status': {'readonly': True},
        'reason': {'readonly': True},
        'discovery_status': {'readonly': True},
        'start_time': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'reason': {'key': 'reason', 'type': 'str'},
        'discovery_status': {'key': 'discoveryStatus', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DeploymentInstance, self).__init__(**kwargs)
        # Entirely server-populated; nothing is read from kwargs.
        for attr in self._attribute_map:
            setattr(self, attr, None)
class DeploymentResource(ProxyResource):
    """Deployment resource payload.

    The id/name/type fields are populated by the service and ignored
    on requests.

    :ivar id: Fully qualified ARM resource Id.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param properties: Properties of the Deployment resource.
    :type properties:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.DeploymentResourceProperties
    :param sku: Sku of the Deployment resource.
    :type sku: ~azure.mgmt.appplatform.v2019_05_01_preview.models.Sku
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'DeploymentResourceProperties'},
        'sku': {'key': 'sku', 'type': 'Sku'},
    }

    def __init__(self, **kwargs):
        super(DeploymentResource, self).__init__(**kwargs)
        self.properties = kwargs.get('properties')
        self.sku = kwargs.get('sku')
class DeploymentResourceProperties(Model):
    """Deployment resource properties payload.

    Read-only fields are populated by the service and ignored on requests.

    :param source: Uploaded source information of the deployment.
    :type source:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.UserSourceInfo
    :ivar app_name: App name of the deployment.
    :vartype app_name: str
    :param deployment_settings: Settings of the Deployment.
    :type deployment_settings:
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.DeploymentSettings
    :ivar provisioning_state: Provisioning state of the Deployment. Possible
     values include: 'Creating', 'Updating', 'Succeeded', 'Failed', 'Deleting'
    :vartype provisioning_state: str or
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.DeploymentResourceProvisioningState
    :ivar status: Status of the Deployment. Possible values include:
     'Unknown', 'Stopped', 'Running', 'Failed', 'Allocating', 'Upgrading',
     'Compiling'
    :vartype status: str or
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.DeploymentResourceStatus
    :ivar active: Whether the Deployment is active.
    :vartype active: bool
    :ivar created_time: Creation timestamp of the resource.
    :vartype created_time: datetime
    :ivar instances: Instances belonging to the Deployment.
    :vartype instances:
     list[~azure.mgmt.appplatform.v2019_05_01_preview.models.DeploymentInstance]
    """

    _validation = {
        'app_name': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'status': {'readonly': True},
        'active': {'readonly': True},
        'created_time': {'readonly': True},
        'instances': {'readonly': True},
    }

    _attribute_map = {
        'source': {'key': 'source', 'type': 'UserSourceInfo'},
        'app_name': {'key': 'appName', 'type': 'str'},
        'deployment_settings': {'key': 'deploymentSettings', 'type': 'DeploymentSettings'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'active': {'key': 'active', 'type': 'bool'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'instances': {'key': 'instances', 'type': '[DeploymentInstance]'},
    }

    def __init__(self, **kwargs):
        super(DeploymentResourceProperties, self).__init__(**kwargs)
        # Only source and deployment_settings are caller-settable.
        self.source = kwargs.get('source')
        self.deployment_settings = kwargs.get('deployment_settings')
        for attr in ('app_name', 'provisioning_state', 'status', 'active',
                     'created_time', 'instances'):
            setattr(self, attr, None)
class DeploymentSettings(Model):
    """Deployment settings payload.

    :param cpu: Required CPU; basic tier must be 1, standard tier in range
     (1, 4). Default value: 1 .
    :type cpu: int
    :param memory_in_gb: Required memory size in GB; basic tier in range
     (1, 2), standard tier in range (1, 8). Default value: 1 .
    :type memory_in_gb: int
    :param jvm_options: JVM parameter.
    :type jvm_options: str
    :param net_core_main_entry_path: Path to the .NET executable relative
     to the zip root.
    :type net_core_main_entry_path: str
    :param instance_count: Instance count; basic tier in range (1, 25),
     standard tier in range (1, 500). Default value: 1 .
    :type instance_count: int
    :param environment_variables: Environment variables.
    :type environment_variables: dict[str, str]
    :param runtime_version: Runtime version. Possible values include:
     'Java_8', 'Java_11', 'NetCore_31'
    :type runtime_version: str or
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.RuntimeVersion
    """

    _attribute_map = {
        'cpu': {'key': 'cpu', 'type': 'int'},
        'memory_in_gb': {'key': 'memoryInGB', 'type': 'int'},
        'jvm_options': {'key': 'jvmOptions', 'type': 'str'},
        'net_core_main_entry_path': {'key': 'netCoreMainEntryPath', 'type': 'str'},
        'instance_count': {'key': 'instanceCount', 'type': 'int'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DeploymentSettings, self).__init__(**kwargs)
        # Sizing fields default to 1 when not supplied by the caller.
        self.cpu = kwargs.get('cpu', 1)
        self.memory_in_gb = kwargs.get('memory_in_gb', 1)
        self.instance_count = kwargs.get('instance_count', 1)
        for key in ('jvm_options', 'net_core_main_entry_path',
                    'environment_variables', 'runtime_version'):
            setattr(self, key, kwargs.get(key))
class Error(Model):
    """Error composed of a code and a message.

    :param code: The error code.
    :type code: str
    :param message: The error message.
    :type message: str
    """

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Error, self).__init__(**kwargs)
        self.code = kwargs.get('code')
        self.message = kwargs.get('message')
class GitPatternRepository(Model):
    """Git repository property payload.

    All required parameters must be supplied to send to Azure.

    :param name: Required. Name of the repository.
    :type name: str
    :param pattern: Patterns of the repository.
    :type pattern: list[str]
    :param uri: Required. URI of the repository.
    :type uri: str
    :param label: Label of the repository.
    :type label: str
    :param search_paths: Search paths of the repository.
    :type search_paths: list[str]
    :param username: Username for git basic auth.
    :type username: str
    :param password: Password for git basic auth.
    :type password: str
    :param host_key: Public sshKey of the git repository.
    :type host_key: str
    :param host_key_algorithm: SshKey algorithm of the git repository.
    :type host_key_algorithm: str
    :param private_key: Private sshKey of the git repository.
    :type private_key: str
    :param strict_host_key_checking: Whether strict host key checking is on.
    :type strict_host_key_checking: bool
    """

    _validation = {
        'name': {'required': True},
        'uri': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'pattern': {'key': 'pattern', 'type': '[str]'},
        'uri': {'key': 'uri', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'search_paths': {'key': 'searchPaths', 'type': '[str]'},
        'username': {'key': 'username', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'host_key': {'key': 'hostKey', 'type': 'str'},
        'host_key_algorithm': {'key': 'hostKeyAlgorithm', 'type': 'str'},
        'private_key': {'key': 'privateKey', 'type': 'str'},
        'strict_host_key_checking': {'key': 'strictHostKeyChecking', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(GitPatternRepository, self).__init__(**kwargs)
        # Every serialized field is caller-settable; pull each from kwargs.
        for key in self._attribute_map:
            setattr(self, key, kwargs.get(key))
class LogFileUrlResponse(Model):
    """Log file URL payload.

    All required parameters must be supplied to send to Azure.

    :param url: Required. URL of the log file.
    :type url: str
    """

    _validation = {
        'url': {'required': True},
    }

    _attribute_map = {
        'url': {'key': 'url', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(LogFileUrlResponse, self).__init__(**kwargs)
        self.url = kwargs.get('url')
class LogSpecification(Model):
    """Specification of a Log for Azure Monitoring.

    :param name: Name of the log.
    :type name: str
    :param display_name: Localized friendly display name of the log.
    :type display_name: str
    :param blob_duration: Blob duration of the log.
    :type blob_duration: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'blob_duration': {'key': 'blobDuration', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(LogSpecification, self).__init__(**kwargs)
        for key in ('name', 'display_name', 'blob_duration'):
            setattr(self, key, kwargs.get(key))
class ManagedIdentityProperties(Model):
    """Managed identity properties retrieved from ARM request headers.

    :param type: Possible values include: 'None', 'SystemAssigned',
     'UserAssigned', 'SystemAssigned,UserAssigned'
    :type type: str or
     ~azure.mgmt.appplatform.v2019_05_01_preview.models.ManagedIdentityType
    :param principal_id: Principal id of the identity.
    :type principal_id: str
    :param tenant_id: Tenant id of the identity.
    :type tenant_id: str
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ManagedIdentityProperties, self).__init__(**kwargs)
        for key in ('type', 'principal_id', 'tenant_id'):
            setattr(self, key, kwargs.get(key))
class MetricDimension(Model):
    """Specifications of the Dimension of metrics.
    :param name: Name of the dimension
    :type name: str
    :param display_name: Localized friendly display name of the dimension
    :type display_name: str
    :param to_be_exported_for_shoebox: Whether this dimension should be
    included for the Shoebox export scenario
    :type to_be_exported_for_shoebox: bool
    """
    # Python attribute -> wire (JSON) key/type map used by msrest.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'},
    }
    def __init__(self, **kwargs):
        super(MetricDimension, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.display_name = kwargs.get('display_name', None)
        self.to_be_exported_for_shoebox = kwargs.get('to_be_exported_for_shoebox', None)
class MetricSpecification(Model):
    """Specifications of the Metrics for Azure Monitoring.
    :param name: Name of the metric
    :type name: str
    :param display_name: Localized friendly display name of the metric
    :type display_name: str
    :param display_description: Localized friendly description of the metric
    :type display_description: str
    :param unit: Unit that makes sense for the metric
    :type unit: str
    :param category: Name of the metric category that the metric belongs to. A
    metric can only belong to a single category.
    :type category: str
    :param aggregation_type: Only provide one value for this field. Valid
    values: Average, Minimum, Maximum, Total, Count.
    :type aggregation_type: str
    :param supported_aggregation_types: Supported aggregation types
    :type supported_aggregation_types: list[str]
    :param supported_time_grain_types: Supported time grain types
    :type supported_time_grain_types: list[str]
    :param fill_gap_with_zero: Optional. If set to true, then zero will be
    returned for time duration where no metric is emitted/published.
    :type fill_gap_with_zero: bool
    :param dimensions: Dimensions of the metric
    :type dimensions:
    list[~azure.mgmt.appplatform.v2019_05_01_preview.models.MetricDimension]
    """
    # '[MetricDimension]' tells msrest to (de)serialize a list of that model.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'display_description': {'key': 'displayDescription', 'type': 'str'},
        'unit': {'key': 'unit', 'type': 'str'},
        'category': {'key': 'category', 'type': 'str'},
        'aggregation_type': {'key': 'aggregationType', 'type': 'str'},
        'supported_aggregation_types': {'key': 'supportedAggregationTypes', 'type': '[str]'},
        'supported_time_grain_types': {'key': 'supportedTimeGrainTypes', 'type': '[str]'},
        'fill_gap_with_zero': {'key': 'fillGapWithZero', 'type': 'bool'},
        'dimensions': {'key': 'dimensions', 'type': '[MetricDimension]'},
    }
    def __init__(self, **kwargs):
        super(MetricSpecification, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.display_name = kwargs.get('display_name', None)
        self.display_description = kwargs.get('display_description', None)
        self.unit = kwargs.get('unit', None)
        self.category = kwargs.get('category', None)
        self.aggregation_type = kwargs.get('aggregation_type', None)
        self.supported_aggregation_types = kwargs.get('supported_aggregation_types', None)
        self.supported_time_grain_types = kwargs.get('supported_time_grain_types', None)
        self.fill_gap_with_zero = kwargs.get('fill_gap_with_zero', None)
        self.dimensions = kwargs.get('dimensions', None)
class NameAvailability(Model):
    """Name availability result payload.
    :param name_available: Indicates whether the name is available
    :type name_available: bool
    :param reason: Reason why the name is not available
    :type reason: str
    :param message: Message why the name is not available
    :type message: str
    """
    # 'reason'/'message' are only meaningful when name_available is False.
    _attribute_map = {
        'name_available': {'key': 'nameAvailable', 'type': 'bool'},
        'reason': {'key': 'reason', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(NameAvailability, self).__init__(**kwargs)
        self.name_available = kwargs.get('name_available', None)
        self.reason = kwargs.get('reason', None)
        self.message = kwargs.get('message', None)
class NameAvailabilityParameters(Model):
    """Name availability parameters payload.
    All required parameters must be populated in order to send to Azure.
    :param type: Required. Type of the resource to check name availability
    :type type: str
    :param name: Required. Name to be checked
    :type name: str
    """
    # Both fields are mandatory; msrest validates before sending.
    _validation = {
        'type': {'required': True},
        'name': {'required': True},
    }
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(NameAvailabilityParameters, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.name = kwargs.get('name', None)
class NetworkProfile(Model):
    """Service network profile payload.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :param service_runtime_subnet_id: Fully qualified resource Id of the
    subnet to host Azure Spring Cloud Service Runtime
    :type service_runtime_subnet_id: str
    :param app_subnet_id: Fully qualified resource Id of the subnet to host
    Azure Spring Cloud Apps
    :type app_subnet_id: str
    :param service_cidr: Azure Spring Cloud service reserved CIDR
    :type service_cidr: str
    :param service_runtime_network_resource_group: Name of the resource group
    containing network resources of Azure Spring Cloud Service Runtime
    :type service_runtime_network_resource_group: str
    :param app_network_resource_group: Name of the resource group containing
    network resources of Azure Spring Cloud Apps
    :type app_network_resource_group: str
    :ivar outbound_ips: Desired outbound IP resources for Azure Spring Cloud
    instance.
    :vartype outbound_ips:
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.NetworkProfileOutboundIPs
    """
    # 'outbound_ips' is read-only: populated by the service, never sent.
    _validation = {
        'outbound_ips': {'readonly': True},
    }
    _attribute_map = {
        'service_runtime_subnet_id': {'key': 'serviceRuntimeSubnetId', 'type': 'str'},
        'app_subnet_id': {'key': 'appSubnetId', 'type': 'str'},
        'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
        'service_runtime_network_resource_group': {'key': 'serviceRuntimeNetworkResourceGroup', 'type': 'str'},
        'app_network_resource_group': {'key': 'appNetworkResourceGroup', 'type': 'str'},
        'outbound_ips': {'key': 'outboundIPs', 'type': 'NetworkProfileOutboundIPs'},
    }
    def __init__(self, **kwargs):
        super(NetworkProfile, self).__init__(**kwargs)
        self.service_runtime_subnet_id = kwargs.get('service_runtime_subnet_id', None)
        self.app_subnet_id = kwargs.get('app_subnet_id', None)
        self.service_cidr = kwargs.get('service_cidr', None)
        self.service_runtime_network_resource_group = kwargs.get('service_runtime_network_resource_group', None)
        self.app_network_resource_group = kwargs.get('app_network_resource_group', None)
        # read-only attribute: deliberately not settable via kwargs
        self.outbound_ips = None
class NetworkProfileOutboundIPs(Model):
    """Desired outbound IP resources for Azure Spring Cloud instance.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar public_ips: A list of public IP addresses.
    :vartype public_ips: list[str]
    """
    # Entirely server-populated payload; nothing is accepted from callers.
    _validation = {
        'public_ips': {'readonly': True},
    }
    _attribute_map = {
        'public_ips': {'key': 'publicIPs', 'type': '[str]'},
    }
    def __init__(self, **kwargs):
        super(NetworkProfileOutboundIPs, self).__init__(**kwargs)
        # read-only attribute: deliberately not settable via kwargs
        self.public_ips = None
class OperationDetail(Model):
    """Operation detail payload.
    :param name: Name of the operation
    :type name: str
    :param is_data_action: Indicates whether the operation is a data action
    :type is_data_action: bool
    :param display: Display of the operation
    :type display:
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.OperationDisplay
    :param origin: Origin of the operation
    :type origin: str
    :param properties: Properties of the operation
    :type properties:
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.OperationProperties
    """
    # Nested model types are referenced by class name in the msrest map.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'is_data_action': {'key': 'isDataAction', 'type': 'bool'},
        'display': {'key': 'display', 'type': 'OperationDisplay'},
        'origin': {'key': 'origin', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'OperationProperties'},
    }
    def __init__(self, **kwargs):
        super(OperationDetail, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.is_data_action = kwargs.get('is_data_action', None)
        self.display = kwargs.get('display', None)
        self.origin = kwargs.get('origin', None)
        self.properties = kwargs.get('properties', None)
class OperationDisplay(Model):
    """Operation display payload.
    :param provider: Resource provider of the operation
    :type provider: str
    :param resource: Resource of the operation
    :type resource: str
    :param operation: Localized friendly name for the operation
    :type operation: str
    :param description: Localized friendly description for the operation
    :type description: str
    """
    # Python attribute -> wire (JSON) key/type map used by msrest.
    _attribute_map = {
        'provider': {'key': 'provider', 'type': 'str'},
        'resource': {'key': 'resource', 'type': 'str'},
        'operation': {'key': 'operation', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(OperationDisplay, self).__init__(**kwargs)
        self.provider = kwargs.get('provider', None)
        self.resource = kwargs.get('resource', None)
        self.operation = kwargs.get('operation', None)
        self.description = kwargs.get('description', None)
class OperationProperties(Model):
    """Extra Operation properties.
    :param service_specification: Service specifications of the operation
    :type service_specification:
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.ServiceSpecification
    """
    # Single nested-model wrapper used by the Operations listing API.
    _attribute_map = {
        'service_specification': {'key': 'serviceSpecification', 'type': 'ServiceSpecification'},
    }
    def __init__(self, **kwargs):
        super(OperationProperties, self).__init__(**kwargs)
        self.service_specification = kwargs.get('service_specification', None)
class PersistentDisk(Model):
    """Persistent disk payload.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :param size_in_gb: Size of the persistent disk in GB
    :type size_in_gb: int
    :ivar used_in_gb: Size of the used persistent disk in GB
    :vartype used_in_gb: int
    :param mount_path: Mount path of the persistent disk
    :type mount_path: str
    """
    # size is constrained to 0..50 GB; 'used_in_gb' is server-reported only.
    _validation = {
        'size_in_gb': {'maximum': 50, 'minimum': 0},
        'used_in_gb': {'readonly': True, 'maximum': 50, 'minimum': 0},
    }
    _attribute_map = {
        'size_in_gb': {'key': 'sizeInGB', 'type': 'int'},
        'used_in_gb': {'key': 'usedInGB', 'type': 'int'},
        'mount_path': {'key': 'mountPath', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(PersistentDisk, self).__init__(**kwargs)
        self.size_in_gb = kwargs.get('size_in_gb', None)
        # read-only attribute: deliberately not settable via kwargs
        self.used_in_gb = None
        self.mount_path = kwargs.get('mount_path', None)
class RegenerateTestKeyRequestPayload(Model):
    """Regenerate test key request payload.
    All required parameters must be populated in order to send to Azure.
    :param key_type: Required. Type of the test key. Possible values include:
    'Primary', 'Secondary'
    :type key_type: str or
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.TestKeyType
    """
    # Selecting which of the two test keys to regenerate is mandatory.
    _validation = {
        'key_type': {'required': True},
    }
    _attribute_map = {
        'key_type': {'key': 'keyType', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(RegenerateTestKeyRequestPayload, self).__init__(**kwargs)
        self.key_type = kwargs.get('key_type', None)
class ResourceSku(Model):
    """Describes an available Azure Spring Cloud SKU.
    :param resource_type: Gets the type of resource the SKU applies to.
    :type resource_type: str
    :param name: Gets the name of SKU.
    :type name: str
    :param tier: Gets the tier of SKU.
    :type tier: str
    :param capacity: Gets the capacity of SKU.
    :type capacity:
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.SkuCapacity
    :param locations: Gets the set of locations that the SKU is available.
    :type locations: list[str]
    :param location_info: Gets a list of locations and availability zones in
    those locations where the SKU is available.
    :type location_info:
    list[~azure.mgmt.appplatform.v2019_05_01_preview.models.ResourceSkuLocationInfo]
    :param restrictions: Gets the restrictions because of which SKU cannot be
    used. This is
    empty if there are no restrictions.
    :type restrictions:
    list[~azure.mgmt.appplatform.v2019_05_01_preview.models.ResourceSkuRestrictions]
    """
    # Python attribute -> wire (JSON) key/type map used by msrest.
    _attribute_map = {
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'tier': {'key': 'tier', 'type': 'str'},
        'capacity': {'key': 'capacity', 'type': 'SkuCapacity'},
        'locations': {'key': 'locations', 'type': '[str]'},
        'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'},
        'restrictions': {'key': 'restrictions', 'type': '[ResourceSkuRestrictions]'},
    }
    def __init__(self, **kwargs):
        super(ResourceSku, self).__init__(**kwargs)
        self.resource_type = kwargs.get('resource_type', None)
        self.name = kwargs.get('name', None)
        self.tier = kwargs.get('tier', None)
        self.capacity = kwargs.get('capacity', None)
        self.locations = kwargs.get('locations', None)
        self.location_info = kwargs.get('location_info', None)
        self.restrictions = kwargs.get('restrictions', None)
class ResourceSkuCapabilities(Model):
    """ResourceSkuCapabilities.
    :param name: Gets an invariant to describe the feature.
    :type name: str
    :param value: Gets an invariant if the feature is measured by quantity.
    :type value: str
    """
    # Simple name/value capability pair reported by the SKU listing API.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(ResourceSkuCapabilities, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.value = kwargs.get('value', None)
class ResourceSkuLocationInfo(Model):
    """ResourceSkuLocationInfo.
    :param location: Gets location of the SKU
    :type location: str
    :param zones: Gets list of availability zones where the SKU is supported.
    :type zones: list[str]
    :param zone_details: Gets details of capabilities available to a SKU in
    specific zones.
    :type zone_details:
    list[~azure.mgmt.appplatform.v2019_05_01_preview.models.ResourceSkuZoneDetails]
    """
    # Python attribute -> wire (JSON) key/type map used by msrest.
    _attribute_map = {
        'location': {'key': 'location', 'type': 'str'},
        'zones': {'key': 'zones', 'type': '[str]'},
        'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'},
    }
    def __init__(self, **kwargs):
        super(ResourceSkuLocationInfo, self).__init__(**kwargs)
        self.location = kwargs.get('location', None)
        self.zones = kwargs.get('zones', None)
        self.zone_details = kwargs.get('zone_details', None)
class ResourceSkuRestrictionInfo(Model):
    """ResourceSkuRestrictionInfo.
    :param locations: Gets locations where the SKU is restricted
    :type locations: list[str]
    :param zones: Gets list of availability zones where the SKU is restricted.
    :type zones: list[str]
    """
    # Python attribute -> wire (JSON) key/type map used by msrest.
    _attribute_map = {
        'locations': {'key': 'locations', 'type': '[str]'},
        'zones': {'key': 'zones', 'type': '[str]'},
    }
    def __init__(self, **kwargs):
        super(ResourceSkuRestrictionInfo, self).__init__(**kwargs)
        self.locations = kwargs.get('locations', None)
        self.zones = kwargs.get('zones', None)
class ResourceSkuRestrictions(Model):
    """ResourceSkuRestrictions.
    :param type: Gets the type of restrictions. Possible values include:
    'Location', 'Zone'
    :type type: str or
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.ResourceSkuRestrictionsType
    :param values: Gets the value of restrictions. If the restriction type is
    set to
    location. This would be different locations where the SKU is restricted.
    :type values: list[str]
    :param restriction_info: Gets the information about the restriction where
    the SKU cannot be used.
    :type restriction_info:
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.ResourceSkuRestrictionInfo
    :param reason_code: Gets the reason for restriction. Possible values
    include: 'QuotaId', 'NotAvailableForSubscription'
    :type reason_code: str or
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.ResourceSkuRestrictionsReasonCode
    """
    # Enum-valued fields are serialized as plain strings on the wire.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'values': {'key': 'values', 'type': '[str]'},
        'restriction_info': {'key': 'restrictionInfo', 'type': 'ResourceSkuRestrictionInfo'},
        'reason_code': {'key': 'reasonCode', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(ResourceSkuRestrictions, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.values = kwargs.get('values', None)
        self.restriction_info = kwargs.get('restriction_info', None)
        self.reason_code = kwargs.get('reason_code', None)
class ResourceSkuZoneDetails(Model):
    """ResourceSkuZoneDetails.
    :param name: Gets the set of zones that the SKU is available in with the
    specified capabilities.
    :type name: list[str]
    :param capabilities: Gets a list of capabilities that are available for
    the SKU in the
    specified list of zones.
    :type capabilities:
    list[~azure.mgmt.appplatform.v2019_05_01_preview.models.ResourceSkuCapabilities]
    """
    # NOTE: 'name' is a *list* of zone names here, despite the singular name.
    _attribute_map = {
        'name': {'key': 'name', 'type': '[str]'},
        'capabilities': {'key': 'capabilities', 'type': '[ResourceSkuCapabilities]'},
    }
    def __init__(self, **kwargs):
        super(ResourceSkuZoneDetails, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.capabilities = kwargs.get('capabilities', None)
class ResourceUploadDefinition(Model):
    """Resource upload definition payload.
    :param relative_path: Source relative path
    :type relative_path: str
    :param upload_url: Upload URL
    :type upload_url: str
    """
    # Python attribute -> wire (JSON) key/type map used by msrest.
    _attribute_map = {
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'upload_url': {'key': 'uploadUrl', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(ResourceUploadDefinition, self).__init__(**kwargs)
        self.relative_path = kwargs.get('relative_path', None)
        self.upload_url = kwargs.get('upload_url', None)
class TrackedResource(Resource):
    """The resource model definition for a ARM tracked top level resource.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar id: Fully qualified resource Id for the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    :param location: The GEO location of the resource.
    :type location: str
    :param tags: Tags of the service which is a list of key value pairs that
    describe the resource.
    :type tags: dict[str, str]
    """
    # id/name/type are ARM-managed identifiers; clients never set them.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }
    def __init__(self, **kwargs):
        super(TrackedResource, self).__init__(**kwargs)
        self.location = kwargs.get('location', None)
        self.tags = kwargs.get('tags', None)
class ServiceResource(TrackedResource):
    """Service resource.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar id: Fully qualified resource Id for the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    :param location: The GEO location of the resource.
    :type location: str
    :param tags: Tags of the service which is a list of key value pairs that
    describe the resource.
    :type tags: dict[str, str]
    :param properties: Properties of the Service resource
    :type properties:
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.ClusterResourceProperties
    :param sku: Sku of the Service resource
    :type sku: ~azure.mgmt.appplatform.v2019_05_01_preview.models.Sku
    """
    # Inherited read-only ARM identifiers; maps are restated per msrest
    # convention (subclass maps include the parent's entries).
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'properties': {'key': 'properties', 'type': 'ClusterResourceProperties'},
        'sku': {'key': 'sku', 'type': 'Sku'},
    }
    def __init__(self, **kwargs):
        super(ServiceResource, self).__init__(**kwargs)
        self.properties = kwargs.get('properties', None)
        self.sku = kwargs.get('sku', None)
class ServiceSpecification(Model):
    """Service specification payload.
    :param log_specifications: Specifications of the Log for Azure Monitoring
    :type log_specifications:
    list[~azure.mgmt.appplatform.v2019_05_01_preview.models.LogSpecification]
    :param metric_specifications: Specifications of the Metrics for Azure
    Monitoring
    :type metric_specifications:
    list[~azure.mgmt.appplatform.v2019_05_01_preview.models.MetricSpecification]
    """
    # Python attribute -> wire (JSON) key/type map used by msrest.
    _attribute_map = {
        'log_specifications': {'key': 'logSpecifications', 'type': '[LogSpecification]'},
        'metric_specifications': {'key': 'metricSpecifications', 'type': '[MetricSpecification]'},
    }
    def __init__(self, **kwargs):
        super(ServiceSpecification, self).__init__(**kwargs)
        self.log_specifications = kwargs.get('log_specifications', None)
        self.metric_specifications = kwargs.get('metric_specifications', None)
class Sku(Model):
    """Sku of Azure Spring Cloud.
    :param name: Name of the Sku
    :type name: str
    :param tier: Tier of the Sku
    :type tier: str
    :param capacity: Current capacity of the target resource
    :type capacity: int
    """
    # Python attribute -> wire (JSON) key/type map used by msrest.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'tier': {'key': 'tier', 'type': 'str'},
        'capacity': {'key': 'capacity', 'type': 'int'},
    }
    def __init__(self, **kwargs):
        super(Sku, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.tier = kwargs.get('tier', None)
        self.capacity = kwargs.get('capacity', None)
class SkuCapacity(Model):
    """The SKU capacity.
    All required parameters must be populated in order to send to Azure.
    :param minimum: Required. Gets or sets the minimum.
    :type minimum: int
    :param maximum: Gets or sets the maximum.
    :type maximum: int
    :param default: Gets or sets the default.
    :type default: int
    :param scale_type: Gets or sets the type of the scale. Possible values
    include: 'None', 'Manual', 'Automatic'
    :type scale_type: str or
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.SkuScaleType
    """
    # Only 'minimum' is mandatory; the rest are optional scale hints.
    _validation = {
        'minimum': {'required': True},
    }
    _attribute_map = {
        'minimum': {'key': 'minimum', 'type': 'int'},
        'maximum': {'key': 'maximum', 'type': 'int'},
        'default': {'key': 'default', 'type': 'int'},
        'scale_type': {'key': 'scaleType', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(SkuCapacity, self).__init__(**kwargs)
        self.minimum = kwargs.get('minimum', None)
        self.maximum = kwargs.get('maximum', None)
        self.default = kwargs.get('default', None)
        self.scale_type = kwargs.get('scale_type', None)
class SupportedRuntimeVersion(Model):
    """Supported deployment runtime version descriptor.
    :param value: The raw value which could be passed to deployment CRUD
    operations. Possible values include: 'Java_8', 'Java_11', 'NetCore_31'
    :type value: str or
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.SupportedRuntimeValue
    :param platform: The platform of this runtime version (possible values:
    "Java" or ".NET"). Possible values include: 'Java', '.NET Core'
    :type platform: str or
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.SupportedRuntimePlatform
    :param version: The detailed version (major.minor) of the platform.
    :type version: str
    """
    # Python attribute -> wire (JSON) key/type map used by msrest.
    _attribute_map = {
        'value': {'key': 'value', 'type': 'str'},
        'platform': {'key': 'platform', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(SupportedRuntimeVersion, self).__init__(**kwargs)
        self.value = kwargs.get('value', None)
        self.platform = kwargs.get('platform', None)
        self.version = kwargs.get('version', None)
class TemporaryDisk(Model):
    """Temporary disk payload.
    :param size_in_gb: Size of the temporary disk in GB
    :type size_in_gb: int
    :param mount_path: Mount path of the temporary disk
    :type mount_path: str
    """
    # Temporary disks are limited to 0..5 GB (vs 50 GB for persistent disks).
    _validation = {
        'size_in_gb': {'maximum': 5, 'minimum': 0},
    }
    _attribute_map = {
        'size_in_gb': {'key': 'sizeInGB', 'type': 'int'},
        'mount_path': {'key': 'mountPath', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(TemporaryDisk, self).__init__(**kwargs)
        self.size_in_gb = kwargs.get('size_in_gb', None)
        self.mount_path = kwargs.get('mount_path', None)
class TestKeys(Model):
    """Test keys payload.
    :param primary_key: Primary key
    :type primary_key: str
    :param secondary_key: Secondary key
    :type secondary_key: str
    :param primary_test_endpoint: Primary test endpoint
    :type primary_test_endpoint: str
    :param secondary_test_endpoint: Secondary test endpoint
    :type secondary_test_endpoint: str
    :param enabled: Indicates whether the test endpoint feature enabled or not
    :type enabled: bool
    """
    # Python attribute -> wire (JSON) key/type map used by msrest.
    _attribute_map = {
        'primary_key': {'key': 'primaryKey', 'type': 'str'},
        'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
        'primary_test_endpoint': {'key': 'primaryTestEndpoint', 'type': 'str'},
        'secondary_test_endpoint': {'key': 'secondaryTestEndpoint', 'type': 'str'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
    }
    def __init__(self, **kwargs):
        super(TestKeys, self).__init__(**kwargs)
        self.primary_key = kwargs.get('primary_key', None)
        self.secondary_key = kwargs.get('secondary_key', None)
        self.primary_test_endpoint = kwargs.get('primary_test_endpoint', None)
        self.secondary_test_endpoint = kwargs.get('secondary_test_endpoint', None)
        self.enabled = kwargs.get('enabled', None)
class TraceProperties(Model):
    """Trace properties payload.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar state: State of the trace proxy. Possible values include:
    'NotAvailable', 'Failed', 'Succeeded', 'Updating'
    :vartype state: str or
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.TraceProxyState
    :param error: Error when apply trace proxy changes.
    :type error: ~azure.mgmt.appplatform.v2019_05_01_preview.models.Error
    :param enabled: Indicates whether enable the tracing functionality
    :type enabled: bool
    :param app_insight_instrumentation_key: Target application insight
    instrumentation key
    :type app_insight_instrumentation_key: str
    """
    # 'state' is reported by the service and never sent by clients.
    _validation = {
        'state': {'readonly': True},
    }
    _attribute_map = {
        'state': {'key': 'state', 'type': 'str'},
        'error': {'key': 'error', 'type': 'Error'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
        'app_insight_instrumentation_key': {'key': 'appInsightInstrumentationKey', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(TraceProperties, self).__init__(**kwargs)
        # read-only attribute: deliberately not settable via kwargs
        self.state = None
        self.error = kwargs.get('error', None)
        self.enabled = kwargs.get('enabled', None)
        self.app_insight_instrumentation_key = kwargs.get('app_insight_instrumentation_key', None)
class UserSourceInfo(Model):
    """Source information for a deployment.
    :param type: Type of the source uploaded. Possible values include: 'Jar',
    'NetCoreZip', 'Source'
    :type type: str or
    ~azure.mgmt.appplatform.v2019_05_01_preview.models.UserSourceType
    :param relative_path: Relative path of the storage which stores the source
    :type relative_path: str
    :param version: Version of the source
    :type version: str
    :param artifact_selector: Selector for the artifact to be used for the
    deployment for multi-module projects. This should be
    the relative path to the target module/project.
    :type artifact_selector: str
    """
    # Python attribute -> wire (JSON) key/type map used by msrest.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'artifact_selector': {'key': 'artifactSelector', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(UserSourceInfo, self).__init__(**kwargs)
        self.type = kwargs.get('type', None)
        self.relative_path = kwargs.get('relative_path', None)
        self.version = kwargs.get('version', None)
        self.artifact_selector = kwargs.get('artifact_selector', None)
| 36.168388 | 112 | 0.645083 |
e2bfbe70a9183325ede8ead9818ee8105c54154b | 3,549 | py | Python | openwisp_users/mixins.py | pandafy/openwisp-users | 747ac34ac59c7fee843f8ad250cc5d12292c722b | [
"BSD-3-Clause"
] | 176 | 2017-03-28T14:00:11.000Z | 2022-03-31T15:10:24.000Z | openwisp_users/mixins.py | pandafy/openwisp-users | 747ac34ac59c7fee843f8ad250cc5d12292c722b | [
"BSD-3-Clause"
] | 290 | 2017-05-12T18:27:15.000Z | 2022-03-19T13:07:33.000Z | openwisp_users/mixins.py | pandafy/openwisp-users | 747ac34ac59c7fee843f8ad250cc5d12292c722b | [
"BSD-3-Clause"
] | 73 | 2017-06-05T12:50:03.000Z | 2022-02-06T07:44:13.000Z | """
mixins used by other openwisp components to implement multi-tenancy
"""
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import ugettext_lazy as _
from swapper import get_model_name
class ValidateOrgMixin(object):
    """
    - implements ``_validate_org_relation`` method
    """

    def _validate_org_relation(self, rel, field_error='organization'):
        """
        if the relation is owned by a specific organization
        this object must be related to the same organization
        """
        # avoid exceptions caused by the relation not being set
        if not hasattr(self, rel):
            return
        rel = getattr(self, rel)
        if (
            rel
            and rel.organization_id
            and str(self.organization_id) != str(rel.organization_id)
        ):
            message = _(
                'Please ensure that the organization of this {object_label} '
                'and the organization of the related {related_object_label} match.'
            )
            message = message.format(
                object_label=self._meta.verbose_name,
                related_object_label=rel._meta.verbose_name,
            )
            raise ValidationError({field_error: message})

    def _validate_org_reverse_relation(self, rel_name, field_error='organization'):
        """
        prevents changing organization for existing objects
        which have relations specified by ``rel_name`` pointing to them,
        in order to prevent inconsistencies
        (relations belonging to different organizations)
        """
        # do nothing on new objects, because they
        # cannot have relations pointing to them
        if self._state.adding:
            return
        old_self = self.__class__.objects.get(pk=self.pk)
        old_org = old_self.organization
        # org hasn't been changed, everything ok
        if old_org == self.organization:
            return
        rel = getattr(self, rel_name)
        count = rel.count()
        if count:
            rel_meta = rel.model._meta
            related_label = (
                rel_meta.verbose_name if count == 1 else rel_meta.verbose_name_plural
            )
            verb = _('is') if count == 1 else _('are')
            # Translate the template first, then interpolate (same pattern as
            # ``_validate_org_relation`` above). Previously ``.format()`` was
            # applied *inside* ``_()``, so gettext received an already
            # interpolated string whose msgid could never match a catalog
            # entry, making the message effectively untranslatable.
            message = _(
                'The organization of this {object_label} cannot be changed '
                'because {count} {related_object_label} {verb} still '
                'related to it'
            ).format(
                count=count,
                object_label=self._meta.verbose_name,
                related_object_label=related_label,
                verb=verb,
            )
            raise ValidationError({field_error: message})
class OrgMixin(ValidateOrgMixin, models.Model):
    """
    - adds a ``ForeignKey`` field to the ``Organization`` model
    (the relation cannot be NULL)
    - implements ``_validate_org_relation`` method
    """
    # Resolved via swapper so downstream projects can substitute a custom
    # Organization model; NOT NULL by default (see ShareableOrgMixin for
    # the nullable variant).
    organization = models.ForeignKey(
        get_model_name('openwisp_users', 'Organization'),
        verbose_name=_('organization'),
        on_delete=models.CASCADE,
    )
    class Meta:
        abstract = True
class ShareableOrgMixin(OrgMixin):
    """
    like ``OrgMixin``, but the relation can be NULL, in which
    case it means that the object can be shared between multiple organizations
    """
    class Meta:
        abstract = True
# Django forbids re-declaring a field inherited from an abstract parent,
# so the inherited FK is mutated in place after class creation to make it
# optional (a NULL organization means "shared across organizations").
_org_field = ShareableOrgMixin._meta.get_field('organization')
_org_field.blank = True
_org_field.null = True
| 33.168224 | 85 | 0.614258 |
7c2b51d37a09bf68a104caadd54bf2380d7c8432 | 1,796 | py | Python | frechette_PD/tests.py | Charlotte-exp/Multichannel-Games | 83ebb452454ed5d1a8535b59dac49099a9509be4 | [
"MIT"
] | null | null | null | frechette_PD/tests.py | Charlotte-exp/Multichannel-Games | 83ebb452454ed5d1a8535b59dac49099a9509be4 | [
"MIT"
] | 1 | 2021-01-20T11:48:18.000Z | 2021-01-20T11:48:18.000Z | frechette_PD/tests.py | Charlotte-exp/Multichannel-Games | 83ebb452454ed5d1a8535b59dac49099a9509be4 | [
"MIT"
] | 2 | 2021-01-21T15:29:19.000Z | 2022-03-29T09:26:36.000Z | from otree.api import Currency as c, currency_range, expect
from . import pages
from ._builtin import Bot
from .models import Constants
class PlayerBot(Bot):
    """Automated oTree test bot: always cooperates (decision=1) during the
    supergame, then submits dummy exit-survey answers on the final round."""
    def play_round(self):
        # only submit a decision while the participant's supergame is running
        if self.round_number <= self.participant.vars['last_round']:
            yield pages.Decision, {"decision": 1}
        # NOTE(review): dead code below is left over from earlier high/low
        # subgroup variants of the experiment — consider deleting.
        # if self.round_number <= self.participant.vars['last_round']:
        #     if self.participant.vars['subgroup'] == 'high':
        #         yield pages.Decision, dict(decision_high=1)
        #     else:
        #         if self.participant.vars['subgroup'] == 'low':
        #             yield pages.Decision, dict(decision_low=1)
        # if self.round_number % 2 == 0:
        #     if self.participant.vars['subgroup'] == 'high':
        #         yield pages.Decision, dict(decision_high=1)
        #     else:
        #         if self.participant.vars['subgroup'] == 'low':
        #             yield pages.Decision, dict(decision_low=1)
        # else:
        #     if self.participant.vars['subgroup'] == 'high':
        #         yield pages.Decision, dict(decision_high=0)
        #     else:
        #         if self.participant.vars['subgroup'] == 'low':
        #             yield pages.Decision, dict(decision_low=0)
        yield pages.Results
        # yield pages.Previous
        # exit survey: shown once, after the last round of the supergame
        if self.round_number == self.participant.vars['last_round']:
            yield pages.End
            yield pages.Demographics, {"age": '22', "gender": 'Female', "income": '£10.000 - £29.999',
                                       "education": 'Postgraduate degree', "ethnicity": 'White'}
            yield pages.CommentBox, {"comment_box": 'n/a'}
            yield pages.Payment
            yield pages.ProlificLink
| 46.051282 | 102 | 0.541203 |
d23654ed62e00ad62d8bbed3b2a6ab56ae3f5da9 | 2,055 | py | Python | src/collectors/network/test/testnetwork.py | dreamhost/Diamond | 31034af8b1f4c7bf291078c2f0a80b787f4d1f36 | [
"MIT"
] | null | null | null | src/collectors/network/test/testnetwork.py | dreamhost/Diamond | 31034af8b1f4c7bf291078c2f0a80b787f4d1f36 | [
"MIT"
] | null | null | null | src/collectors/network/test/testnetwork.py | dreamhost/Diamond | 31034af8b1f4c7bf291078c2f0a80b787f4d1f36 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
StringIO # workaround for pyflakes issue #13
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from network import NetworkCollector
################################################################################
class TestNetworkCollector(CollectorTestCase):
    """Tests for the Diamond NetworkCollector (/proc/net/dev parsing)."""
    def setUp(self):
        # Minimal config; the handlers argument (None) is unused here.
        config = get_collector_config('NetworkCollector', {
            'interval': 10,
        })
        self.collector = NetworkCollector(config, None)
    @patch('__builtin__.open')
    @patch('os.access', Mock(return_value=True))
    @patch.object(Collector, 'publish')
    def test_should_open_proc_net_dev(self, publish_mock, open_mock):
        # collect() must read its counters from /proc/net/dev.
        open_mock.return_value = StringIO('')
        self.collector.collect()
        open_mock.assert_called_once_with('/proc/net/dev')
    @patch.object(Collector, 'publish')
    def test_should_work_with_real_data(self, publish_mock):
        # After the first fixture nothing is expected to be published.
        NetworkCollector.PROC = self.getFixturePath('proc_net_dev_1')
        self.collector.collect()
        self.assertPublishedMany(publish_mock, {})
        # After the second fixture the listed per-interface metrics
        # (value, precision) are expected.
        NetworkCollector.PROC = self.getFixturePath('proc_net_dev_2')
        self.collector.collect()
        metrics = {
            'eth0.rx_megabyte': (2.504, 2),
            'eth0.tx_megabyte': (4.707, 2),
            'eth1.rx_megabyte': (0.0, 2),
            'eth1.tx_megabyte': (0.0, 2)
        }
        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['path'])
        self.assertPublishedMany(publish_mock, metrics)
################################################################################
if __name__ == "__main__":
    unittest.main()
| 31.615385 | 80 | 0.589294 |
d6e11f1bdf3bfb60a153cbc22549e9b3f037f94d | 896 | py | Python | Python/LeetCode/Largest Number.py | honghaoz/Interview-Algorithm-in-Swift | b8895b4e05a510e2a5d13d744c084a768f042d0c | [
"MIT"
] | 1 | 2020-12-17T02:10:57.000Z | 2020-12-17T02:10:57.000Z | Python/LeetCode/Largest Number.py | honghaoz/DataStructure-Algorithm | b8895b4e05a510e2a5d13d744c084a768f042d0c | [
"MIT"
] | null | null | null | Python/LeetCode/Largest Number.py | honghaoz/DataStructure-Algorithm | b8895b4e05a510e2a5d13d744c084a768f042d0c | [
"MIT"
] | null | null | null | # # Largest Number
# # Given a list of non negative integers, arrange them such that they form the largest number.
# # For example, given [3, 30, 34, 5, 9], the largest formed number is 9534330.
# # Note: The result may be very large, so you need to return a string instead of an integer.
class Solution:
    # @param num, a list of non-negative integers
    # @return a string
    def largestNumber(self, num):
        """Arrange the numbers so their concatenation is maximal; return it as a string.

        Ported to Python 3: the original relied on the Python-2-only
        ``cmp=`` argument of ``list.sort`` and on ``map`` returning a list.
        """
        from functools import cmp_to_key

        def compare(a, b):
            # a sorts before b when the concatenation a+b is the larger one.
            if a + b > b + a:
                return -1
            if a + b < b + a:
                return 1
            return 0

        digits = [str(n) for n in num]
        digits.sort(key=cmp_to_key(compare))
        # int(...) collapses an all-zero result such as "00" down to "0".
        return str(int("".join(digits)))
def main():
    """Smoke-test ``Solution.largestNumber`` (prints Passed/Failed per case)."""
    s = Solution()
    # print() calls replace the Python-2-only ``print`` statements, so the
    # script runs under both Python 2 and Python 3.
    print("Passed" if "21" == s.largestNumber([1, 2]) else "Failed")
    print("Passed" if "4321" == s.largestNumber([1, 2, 43]) else "Failed")
    print("Passed" if "434021" == s.largestNumber([1, 2, 43, 40]) else "Failed")
    print("Passed" if "12121" == s.largestNumber([121, 12]) else "Failed")
main() | 37.333333 | 95 | 0.640625 |
b0cc467f6ad02c5cdb53fa8d910a69e261606420 | 1,601 | py | Python | ros2multicast/setup.py | Adlink-ROS/ros2cli | 025870f29da10877484c06a8b929ac3aaa591a44 | [
"Apache-2.0"
] | null | null | null | ros2multicast/setup.py | Adlink-ROS/ros2cli | 025870f29da10877484c06a8b929ac3aaa591a44 | [
"Apache-2.0"
] | null | null | null | ros2multicast/setup.py | Adlink-ROS/ros2cli | 025870f29da10877484c06a8b929ac3aaa591a44 | [
"Apache-2.0"
] | null | null | null | from setuptools import find_packages
from setuptools import setup
package_name = 'ros2multicast'
# Packaging metadata for the ros2multicast command-line package.
setup(
    name=package_name,
    version='0.9.7',
    packages=find_packages(exclude=['test']),
    # Install the package manifest and the ament resource index marker.
    data_files=[
        ('share/' + package_name, ['package.xml']),
        ('share/ament_index/resource_index/packages',
            ['resource/' + package_name]),
    ],
    install_requires=['ros2cli'],
    zip_safe=True,
    author='Dirk Thomas',
    author_email='dthomas@osrfoundation.org',
    maintainer='Dirk Thomas',
    maintainer_email='dthomas@osrfoundation.org',
    url='https://github.com/ros2/ros2cli/tree/master/ros2multicast',
    download_url='https://github.com/ros2/ros2cli/releases',
    keywords=[],
    classifiers=[
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python',
    ],
    description='The multicast command for ROS 2 command line tools.',
    long_description="""\
The package provides the multicast command for the ROS 2 command line tools.""",
    license='Apache License, Version 2.0',
    tests_require=['pytest'],
    # Plugin registration: the ``multicast`` command, its verb extension
    # point, and the two built-in verbs (receive/send).
    entry_points={
        'ros2cli.command': [
            'multicast = ros2multicast.command.multicast:MulticastCommand',
        ],
        'ros2cli.extension_point': [
            'ros2multicast.verb = ros2multicast.verb:VerbExtension',
        ],
        'ros2multicast.verb': [
            'receive = ros2multicast.verb.receive:ReceiveVerb',
            'send = ros2multicast.verb.send:SendVerb',
        ],
    },
)
| 33.354167 | 80 | 0.643348 |
a6277c6b5bb3035deaeee7d927ed2c51295d90f2 | 1,146 | py | Python | models/experiments/phiseg_rev_7_5_56.py | gigantenbein/UNet-Zoo | d157c22ef8041ed743aa7bbcf377f0f8ad85e755 | [
"Apache-2.0"
] | 20 | 2020-02-16T07:20:23.000Z | 2022-03-14T04:11:02.000Z | models/experiments/phiseg_rev_7_5_56.py | suyanzhou626/UNet-Zoo | 76d23952d90a45a01da1cc2926b4d3a24a1adb75 | [
"Apache-2.0"
] | 6 | 2021-06-08T21:03:07.000Z | 2022-03-17T13:28:33.000Z | models/experiments/phiseg_rev_7_5_56.py | suyanzhou626/UNet-Zoo | 76d23952d90a45a01da1cc2926b4d3a24a1adb75 | [
"Apache-2.0"
] | 5 | 2020-03-20T02:04:49.000Z | 2021-10-20T17:37:52.000Z | import torch
import torch.nn as nn
from models.phiseg import PHISeg
from utils import normalise_image
from data.lidc_data import lidc_data
# Experiment configuration for the reversible PHISeg variant on LIDC data.
experiment_name = 'PHISegRev_7_5_56'
log_dir_name = 'lidc'
data_loader = lidc_data
# number of filter for the latent levels, they will be applied in the order as loaded into the list
filter_channels = [32, 64, 128, 192, 192, 192, 192]
latent_levels = 5
latent_dim = 2
iterations = 5000000
n_classes = 2
num_labels_per_subject = 4
no_convs_fcomb = 4  # not used
beta = 10.0  # not used
# reversible blocks + exponentially weighted latent levels
use_reversible = True
exponential_weighting = True
# use 1 for grayscale, 3 for RGB images
input_channels = 1
epochs_to_train = 20
batch_size = 56
image_size = (1, 128, 128)
augmentation_options = {'do_flip_lr': True,
                        'do_flip_ud': True,
                        'do_rotations': True,
                        'do_scaleaug': True,
                        'nlabels': n_classes}
input_normalisation = normalise_image
validation_samples = 16
num_validation_images = 100
logging_frequency = 1000
validation_frequency = 1000
weight_decay = 10e-5  # NOTE(review): 10e-5 equals 1e-4 -- confirm intended value.
pretrained_model = None
# model
model = PHISeg
| 22.038462 | 99 | 0.707679 |
1ca780644097454a1dab1d52b948fff75d37f9cc | 86 | py | Python | output/models/ms_data/regex/re_q2_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 1 | 2021-08-14T17:59:21.000Z | 2021-08-14T17:59:21.000Z | output/models/ms_data/regex/re_q2_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 4 | 2020-02-12T21:30:44.000Z | 2020-04-15T20:06:46.000Z | output/models/ms_data/regex/re_q2_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | null | null | null | from output.models.ms_data.regex.re_q2_xsd.re_q2 import Doc
# Public re-export of the generated ``Doc`` binding.
__all__ = [
    "Doc",
]
| 14.333333 | 59 | 0.709302 |
87826be29a38365b6bbc11b31b1b53a8fc5def19 | 2,890 | py | Python | django/contrib/admin/actions.py | alex/django-old | 6f964c8f03e5d25c9e36898a001c8463f82fbb81 | [
"BSD-3-Clause"
] | 2 | 2015-11-05T06:07:13.000Z | 2019-01-04T07:35:59.000Z | django/contrib/admin/actions.py | alex/django-old | 6f964c8f03e5d25c9e36898a001c8463f82fbb81 | [
"BSD-3-Clause"
] | null | null | null | django/contrib/admin/actions.py | alex/django-old | 6f964c8f03e5d25c9e36898a001c8463f82fbb81 | [
"BSD-3-Clause"
] | null | null | null | """
Built-in, globally-available admin actions.
"""
from django import template
from django.core.exceptions import PermissionDenied
from django.contrib.admin import helpers
from django.contrib.admin.util import get_deleted_objects, model_ngettext
from django.shortcuts import render_to_response
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext_lazy, ugettext as _
def delete_selected(modeladmin, request, queryset):
    """
    Default action which deletes the selected objects.
    This action first displays a confirmation page which shows all the
    deletable objects, or, if the user has no permission on one of the related
    children (foreign keys), a "permission denied" message.
    Next, it deletes all selected objects and redirects back to the change list.
    """
    opts = modeladmin.model._meta
    app_label = opts.app_label
    # Check that the user has delete permission for the actual model
    if not modeladmin.has_delete_permission(request):
        raise PermissionDenied
    # Populate deletable_objects, a data structure of all related objects that
    # will also be deleted.
    deletable_objects, perms_needed = get_deleted_objects(queryset, opts, request.user, modeladmin.admin_site, levels_to_root=2)
    # The user has already confirmed the deletion (the confirmation form
    # posts back with a 'post' field).
    # Do the deletion and return a None to display the change list view again.
    if request.POST.get('post'):
        if perms_needed:
            raise PermissionDenied
        n = queryset.count()
        if n:
            for obj in queryset:
                # Log each deletion individually for the admin history.
                obj_display = force_unicode(obj)
                modeladmin.log_deletion(request, obj, obj_display)
            queryset.delete()
            modeladmin.message_user(request, _("Successfully deleted %(count)d %(items)s.") % {
                "count": n, "items": model_ngettext(modeladmin.opts, n)
            })
        # Return None to display the change list page again.
        return None
    # Template context for the confirmation page.
    context = {
        "title": _("Are you sure?"),
        "object_name": force_unicode(opts.verbose_name),
        "deletable_objects": [deletable_objects],
        'queryset': queryset,
        "perms_lacking": perms_needed,
        "opts": opts,
        "root_path": modeladmin.admin_site.root_path,
        "app_label": app_label,
        'action_checkbox_name': helpers.ACTION_CHECKBOX_NAME,
    }
    # Display the confirmation page (most specific template first).
    return render_to_response(modeladmin.delete_selected_confirmation_template or [
        "admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.object_name.lower()),
        "admin/%s/delete_selected_confirmation.html" % app_label,
        "admin/delete_selected_confirmation.html"
    ], context, context_instance=template.RequestContext(request))
delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
| 40.704225 | 128 | 0.708304 |
2116d0129e2aebf08aaf09b040d854a6e594e3df | 6,196 | py | Python | src/compas_rhino/artists/mixins/vertexartist.py | mpopescu/compas | 55f259607deea501f862cbaea79bd97d7e56ead6 | [
"MIT"
] | null | null | null | src/compas_rhino/artists/mixins/vertexartist.py | mpopescu/compas | 55f259607deea501f862cbaea79bd97d7e56ead6 | [
"MIT"
] | null | null | null | src/compas_rhino/artists/mixins/vertexartist.py | mpopescu/compas | 55f259607deea501f862cbaea79bd97d7e56ead6 | [
"MIT"
] | null | null | null | from compas.utilities import color_to_colordict
import compas_rhino
__all__ = ['VertexArtist']
class VertexArtist(object):
    """Mixin that draws and clears vertices (and vertex labels) in Rhino."""
    __module__ = "compas_rhino.artists.mixins"
    def clear_vertices(self, keys=None):
        """Clear all vertices previously drawn by the ``VertexArtist``.
        Parameters
        ----------
        keys : list, optional
            The keys of a specific set of vertices that should be cleared.
            Default is to clear all vertices.
        """
        if not keys:
            # No keys given: match every vertex object by its name pattern.
            name = '{}.vertex.*'.format(self.datastructure.name)
            guids = compas_rhino.get_objects(name=name)
        else:
            guids = []
            for key in keys:
                name = self.datastructure.vertex_name(key)
                guid = compas_rhino.get_object(name=name)
                guids.append(guid)
        compas_rhino.delete_objects(guids)
    def clear_vertexlabels(self, keys=None):
        """Clear all vertex labels previously drawn by the ``VertexArtist``.
        Parameters
        ----------
        keys : list, optional
            The keys of a specific set of vertex labels that should be cleared.
            Default is to clear all vertex labels.
        """
        if not keys:
            # No keys given: match every vertex label object by name pattern.
            name = '{}.vertex.label.*'.format(self.datastructure.name)
            guids = compas_rhino.get_objects(name=name)
        else:
            guids = []
            for key in keys:
                name = self.datastructure.vertex_label_name(key)
                guid = compas_rhino.get_object(name=name)
                guids.append(guid)
        compas_rhino.delete_objects(guids)
    def draw_vertices(self, keys=None, color=None):
        """Draw a selection of vertices.
        Parameters
        ----------
        keys : list
            A list of vertex keys identifying which vertices to draw.
            Default is ``None``, in which case all vertices are drawn.
        color : str, tuple, dict
            The color specification for the vertices.
            Colors should be specified in the form of a string (hex colors) or
            as a tuple of RGB components.
            To apply the same color to all vertices, provide a single color
            specification. Individual colors can be assigned using a dictionary
            of key-color pairs. Missing keys will be assigned the default vertex
            color (``self.defaults['color.vertex']``).
            The default is ``None``, in which case all vertices are assigned the
            default vertex color.
        Notes
        -----
        The vertices are named using the following template:
        ``"{}.vertex.{}".format(self.datastructure.name, key)``.
        This name is used afterwards to identify vertices in the Rhino model.
        """
        keys = keys or list(self.datastructure.vertices())
        # Expand the color spec into one RGB color per vertex key.
        colordict = color_to_colordict(color,
                                       keys,
                                       default=self.defaults.get('color.vertex'),
                                       colorformat='rgb',
                                       normalize=False)
        points = []
        for key in keys:
            points.append({
                'pos': self.datastructure.vertex_coordinates(key),
                'name': self.datastructure.vertex_name(key),
                'color': colordict[key],
                'layer': self.datastructure.get_vertex_attribute(key, 'layer', None)
            })
        return compas_rhino.draw_points(points, layer=self.layer, clear=False, redraw=False)
    def draw_vertexlabels(self, text=None, color=None):
        """Draw labels for a selection of vertices.
        Parameters
        ----------
        text : dict
            A dictionary of vertex labels as key-text pairs.
            The default value is ``None``, in which case every vertex will be labelled with its key.
        color : str, tuple, dict
            The color specification of the labels.
            String values are interpreted as hex colors (e.g. ``'#ff0000'`` for red).
            Tuples are interpreted as RGB component specifications (e.g. ``(255, 0, 0) for red``.
            If a dictionary of specifications is provided, the keys of the
            should refer to vertex keys and the values should be color
            specifications in the form of strings or tuples.
            The default value is ``None``, in which case the labels are assigned
            the default vertex color (``self.defaults['color.vertex']``).
        Notes
        -----
        All labels are assigned a name using the following template:
        ``"{}.vertex.label.{}".format(self.datastructure.name, key)``.
        """
        if text is None:
            textdict = {key: str(key) for key in self.datastructure.vertices()}
        elif isinstance(text, dict):
            textdict = text
        elif text == 'key':
            textdict = {key: str(key) for key in self.datastructure.vertices()}
        elif text == 'index':
            textdict = {key: str(index) for index, key in enumerate(self.datastructure.vertices())}
        else:
            raise NotImplementedError
        colordict = color_to_colordict(color,
                                       textdict.keys(),
                                       default=self.defaults.get('color.vertex'),
                                       colorformat='rgb',
                                       normalize=False)
        labels = []
        # NOTE(review): the loop variable ``text`` shadows the parameter;
        # harmless here because the parameter is no longer used below.
        for key, text in iter(textdict.items()):
            labels.append({
                'pos': self.datastructure.vertex_coordinates(key),
                'name': self.datastructure.vertex_label_name(key),
                'color': colordict[key],
                'text': textdict[key],
                'layer': self.datastructure.get_vertex_attribute(key, 'layer', None)
            })
        return compas_rhino.draw_labels(labels, layer=self.layer, clear=False, redraw=False)
# ==============================================================================
# Main
# ==============================================================================
if __name__ == "__main__":
    pass
| 39.464968 | 100 | 0.550839 |
c270029c2dfa44d47cd42344983d223b450ad937 | 1,448 | py | Python | pygcn/layers.py | Venray-std/pygcn | a4dc12605e011890fc06c78163fd761ce3a44fea | [
"MIT"
] | null | null | null | pygcn/layers.py | Venray-std/pygcn | a4dc12605e011890fc06c78163fd761ce3a44fea | [
"MIT"
] | null | null | null | pygcn/layers.py | Venray-std/pygcn | a4dc12605e011890fc06c78163fd761ce3a44fea | [
"MIT"
] | null | null | null | import math
import torch
from torch.nn.parameter import Parameter
from torch.nn.modules.module import Module
class GraphConvolution(Module):
    """A single graph-convolution layer (Kipf & Welling, arXiv:1609.02907).

    Computes ``adj @ (input @ weight) + bias``; ``adj`` may be a sparse
    adjacency matrix.
    """
    def __init__(self, in_features, out_features, bias=True):
        """Allocate the (in_features x out_features) weight and optional bias."""
        super(GraphConvolution, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.weight = Parameter(torch.FloatTensor(in_features, out_features))
        if bias:
            self.bias = Parameter(torch.FloatTensor(out_features))
        else:
            # Keep ``self.bias`` defined (as None) even when disabled.
            self.register_parameter('bias', None)
        self.reset_parameters()
    def reset_parameters(self):
        """Uniformly initialise parameters in [-1/sqrt(out), +1/sqrt(out)]."""
        bound = 1. / math.sqrt(self.weight.size(1))
        self.weight.data.uniform_(-bound, bound)
        if self.bias is not None:
            self.bias.data.uniform_(-bound, bound)
    def forward(self, input, adj):
        """Return ``adj @ (input @ weight)``, plus the bias when present."""
        support = torch.mm(input, self.weight)    # H * W
        aggregated = torch.spmm(adj, support)     # A * (H * W); adj may be sparse
        if self.bias is None:
            return aggregated
        return aggregated + self.bias
    def __repr__(self):
        """E.g. ``GraphConvolution (16 -> 7)``."""
        return '{} ({} -> {})'.format(
            self.__class__.__name__, self.in_features, self.out_features)
| 31.478261 | 91 | 0.617403 |
ab9b6ed335c761ca43f684c6a3162dd8bcd1857b | 222 | py | Python | problems/tests/test_p003.py | maximkir/project-euler | ef9d0c37732b65af0b3351307f9c190921218738 | [
"Apache-2.0"
] | null | null | null | problems/tests/test_p003.py | maximkir/project-euler | ef9d0c37732b65af0b3351307f9c190921218738 | [
"Apache-2.0"
] | null | null | null | problems/tests/test_p003.py | maximkir/project-euler | ef9d0c37732b65af0b3351307f9c190921218738 | [
"Apache-2.0"
] | null | null | null | from problems.p003 import largest_prime_factor
def test_largest_prime_factor():
    # Known answers, including the Project Euler problem 3 input.
    expected = {10: 5, 13195: 29, 600851475143: 6857}
    for number, factor in expected.items():
        assert largest_prime_factor(number) == factor
| 27.75 | 53 | 0.783784 |
a398a9838d58e6b7430640d6eab31d0c03c9625d | 1,609 | py | Python | setup.py | 3con/cointrader | abb3d13d1105e11db0070a9052c45cb8a87f168c | [
"MIT"
] | 103 | 2017-03-10T07:23:12.000Z | 2021-08-24T17:39:22.000Z | setup.py | altfund/cointrader-1 | abb3d13d1105e11db0070a9052c45cb8a87f168c | [
"MIT"
] | 91 | 2017-03-11T06:23:09.000Z | 2021-11-15T17:47:06.000Z | setup.py | fwolfst/cointrader | abb3d13d1105e11db0070a9052c45cb8a87f168c | [
"MIT"
] | 36 | 2017-03-23T17:48:08.000Z | 2020-02-21T23:42:03.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
# Read README + changelog; they form the long description shown on PyPI.
with open('README.rst') as readme_file:
    readme = readme_file.read()
with open('HISTORY.rst') as history_file:
    history = history_file.read()
# Runtime dependencies.
requirements = [
    'Click>=6.0',
    'requests',
    'sqlalchemy',
    'stockstats',
    'termcolor',
    'terminaltables'
    # TODO: put package requirements here
]
test_requirements = [
    # TODO: put package test requirements here
]
setup(
    name='cointrader',
    version='0.5.0',
    description="Cointrader is a trading application for crypto currencies.",
    long_description=readme + '\n\n' + history,
    author="Torsten Irländer",
    author_email='torsten.irlaender@googlemail.com',
    url='https://github.com/toirl/cointrader',
    packages=find_packages(),
    # Console entry point: installs the ``cointrader`` command.
    entry_points={
        'console_scripts': [
            'cointrader=cointrader.cli:main'
        ]
    },
    include_package_data=True,
    install_requires=requirements,
    license="MIT license",
    zip_safe=False,
    keywords='cointrader coins crypto currency trading bot exchange poloniex bitcoin dash digital cash',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
    ],
    test_suite='tests',
    tests_require=test_requirements
)
| 27.271186 | 104 | 0.646986 |
7c0d16661c470a4b5a8ede5da6f188c3b7e06172 | 2,795 | py | Python | tools/trn_thumos/extract_features.py | FedericoVasile1/TRN.pytorch | 0a51a93a624e9b5e0a1297885df37ea8a888495e | [
"MIT"
] | null | null | null | tools/trn_thumos/extract_features.py | FedericoVasile1/TRN.pytorch | 0a51a93a624e9b5e0a1297885df37ea8a888495e | [
"MIT"
] | null | null | null | tools/trn_thumos/extract_features.py | FedericoVasile1/TRN.pytorch | 0a51a93a624e9b5e0a1297885df37ea8a888495e | [
"MIT"
] | null | null | null | import _init_paths
import utils as utl
import os
import numpy as np
from torchvision import models, transforms
import torch
import torch.nn as nn
from PIL import Image
class Flatten(nn.Module):
    """Flatten every dimension after the batch dimension into a single one."""
    def __init__(self):
        super(Flatten, self).__init__()
    def forward(self, x):
        # Preserve the leading (batch) axis and collapse the remainder;
        # view() is used, so x must be contiguous (same as the original).
        batch_size = x.shape[0]
        return x.view(batch_size, -1)
def main():
    """Extract per-frame VGG-16 fc6 features for every THUMOS video folder.

    For each ``video*`` directory under ``data/THUMOS/video_frames_24fps``,
    the central frame of every 6-frame chunk is passed through VGG-16
    truncated after fc6, and the resulting (num_chunks x 4096) matrix is
    saved as ``vgg16-fc6/<video>.npy``. A zero matrix is saved in place of
    optical-flow features, and the per-frame labels are subsampled to match.
    """
    os.environ['CUDA_VISIBLE_DEVICES'] = str(1)
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = models.vgg16(pretrained=True)
    model.classifier = model.classifier[:2]  # take output of fc6 layer
    FEAT_VECT_DIM = model.classifier[0].out_features  # 4096
    model = model.to(device)
    model.train(False)  # inference mode
    transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ])
    SAMPLE_FRAMES = 6  # take only the central frame every six frames
    DATA_ROOT = 'data/THUMOS'
    VIDEO_FRAMES = 'video_frames_24fps'  # base folder where the video folders (containing the frames) are
    TARGET_FRAMES = 'target_frames_24fps'  # labels for the frames above
    VIDEO_FEATURES = 'vgg16-fc6'
    OPTIC_FEATURES = 'bn_inception'
    TARGET_FEATURES = 'target'
    with torch.set_grad_enabled(False):
        videos_dir = os.listdir(os.path.join(DATA_ROOT, VIDEO_FRAMES))
        videos_dir = [dir for dir in videos_dir if 'video' in dir]
        for dir in videos_dir:
            # Drop trailing frames so the count is a multiple of SAMPLE_FRAMES.
            num_frames = len(os.listdir(os.path.join(DATA_ROOT, VIDEO_FRAMES, dir)))
            num_frames = num_frames - (num_frames % SAMPLE_FRAMES)
            frames = torch.zeros(num_frames//SAMPLE_FRAMES, FEAT_VECT_DIM)
            junk = torch.zeros(num_frames//SAMPLE_FRAMES, 1024)  # optical flow will not be used
            count = 0
            for idx_frame in range(SAMPLE_FRAMES//2, num_frames, SAMPLE_FRAMES):
                # idx_frame+1 because frames start from 1. e.g. 1.jpg
                frame = Image.open(os.path.join(DATA_ROOT, VIDEO_FRAMES, dir, str(idx_frame+1)+'.jpg')).convert('RGB')
                frame = transform(frame).to(device)
                # forward pass
                feat_vect = model(frame.unsqueeze(0))  # TODO: load a batch instead of a single sample
                frames[count] = feat_vect.squeeze(0)
                count += 1
            np.save(os.path.join(DATA_ROOT, VIDEO_FEATURES, str(dir)+'.npy'), frames.numpy())
            np.save(os.path.join(DATA_ROOT, OPTIC_FEATURES, str(dir) +'.npy'), junk.numpy())
            # Keep one label per sampled (central) frame.
            target = np.load(os.path.join(DATA_ROOT, TARGET_FRAMES, dir+'.npy'))[:num_frames]
            target = target[SAMPLE_FRAMES//2::SAMPLE_FRAMES]
            np.save(os.path.join(DATA_ROOT, TARGET_FEATURES, str(dir)+'.npy'), target)
if __name__ == '__main__':
main() | 37.266667 | 118 | 0.646154 |
11e7ea31504c883274f1282262afe14bffe8e9c7 | 5,630 | py | Python | mindspore/python/mindspore/ops/_op_impl/aicpu/__init__.py | Aaron911/mindspore | 0868568453bf53bd2cdf072a639b4ff96dd527a4 | [
"Apache-2.0"
] | null | null | null | mindspore/python/mindspore/ops/_op_impl/aicpu/__init__.py | Aaron911/mindspore | 0868568453bf53bd2cdf072a639b4ff96dd527a4 | [
"Apache-2.0"
] | null | null | null | mindspore/python/mindspore/ops/_op_impl/aicpu/__init__.py | Aaron911/mindspore | 0868568453bf53bd2cdf072a639b4ff96dd527a4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020-2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""aicpu ops"""
from .hsv_to_rgb import _hsv_to_rgb_aicpu
from .unique import _unique_aicpu
from .lu_solve import _lu_solve_aicpu
from .cholesky_inverse import _cholesky_inverse_aicpu
from .no_repeat_ngram import _no_repeat_ngram_aicpu
from .init_data_set_queue import _init_data_set_queue_aicpu
from .embedding_lookup import _embedding_lookup_aicpu
from .padding import _padding_aicpu
from .gather import _gather_aicpu
from .gather_grad import _gather_grad_aicpu
from .scatter import _scatter_aicpu
from .identity import _identity_aicpu
from .edit_distance import _edit_distance_aicpu
from .unique_with_pad import _unique_with_pad_aicpu
from .add_n import _add_n_aicpu
from .sub_and_filter import _sub_and_filter_aicpu
from .pad_and_shift import _pad_and_shift_aicpu
from .dropout_genmask import _dropout_genmask_aicpu
from .dropout_genmask_v3 import _dropout_genmask_v3_aicpu
from .dropout2d import _dropout2d_aicpu
from .dropout3d import _dropout3d_aicpu
from .dynamic_stitch import _dynamic_stitch_aicpu
from .get_next import _get_next_aicpu
from .print_tensor import _print_aicpu
from .topk import _top_k_aicpu
from .logical_xor import _logical_xor_aicpu
from .asin import _asin_aicpu
from .asin_grad import _asin_grad_aicpu
from .is_finite import _is_finite_aicpu
from .is_inf import _is_inf_aicpu
from .is_nan import _is_nan_aicpu
from .reshape import _reshape_aicpu
from .flatten import _flatten_aicpu
from .sin import _sin_aicpu
from .cos import _cos_aicpu
from .sinh import _sinh_aicpu
from .cosh import _cosh_aicpu
from .squeeze import _squeeze_aicpu
from .acos import _acos_aicpu
from .acos_grad import _acos_grad_aicpu
from .expand_dims import _expand_dims_aicpu
from .randperm import _randperm_aicpu
from .random_choice_with_mask import _random_choice_with_mask_aicpu
from .rsqrt import _rsqrt_aicpu
from .rsqrt_grad import _rsqrt_grad_aicpu
from .search_sorted import _search_sorted_aicpu
from .stack import _stack_aicpu
from .uniform_candidate_sampler import _uniform_candidate_sampler_aicpu
from .log_uniform_candidate_sampler import _log_uniform_candidate_sampler_aicpu
from .compute_accidental_hits import _compute_accidental_hits_aicpu
from .ctcloss import _ctcloss_aicpu
from .reverse_sequence import _reverse_sequence_aicpu
from .matrix_inverse import _matrix_inverse_aicpu
from .matrix_determinant import _matrix_determinant_aicpu
from .log_matrix_determinant import _log_matrix_determinant_aicpu
from .lstsq import _lstsq_aicpu
from .crop_and_resize import _crop_and_resize_aicpu
from .acosh import _acosh_aicpu
from .acosh_grad import _acosh_grad_aicpu
from .rnnt_loss import _rnnt_loss_aicpu
from .random_categorical import _random_categorical_aicpu
from .cast import _cast_aicpu
from .coalesce import _coalesce_aicpu
from .mirror_pad import _mirror_pad_aicpu
from .masked_select import _masked_select_aicpu
from .masked_select_grad import _masked_select_grad_aicpu
from .mirror_pad_grad import _mirror_pad_grad_aicpu
from .standard_normal import _standard_normal_aicpu
from .gamma import _gamma_aicpu
from .poisson import _poisson_aicpu
from .update_cache import _update_cache_aicpu
from .cache_swap_table import _cache_swap_table_aicpu
from .uniform_int import _uniform_int_aicpu
from .uniform_real import _uniform_real_aicpu
from .standard_laplace import _standard_laplace_aicpu
from .strided_slice import _strided_slice_aicpu
from .neg import _neg_aicpu
from .strided_slice_grad import _strided_slice_grad_aicpu
from .end_of_sequence import _end_of_sequence_aicpu
from .fused_sparse_adam import _fused_sparse_adam_aicpu
from .fused_sparse_lazy_adam import _fused_sparse_lazy_adam_aicpu
from .fused_sparse_ftrl import _fused_sparse_ftrl_aicpu
from .fused_sparse_proximal_adagrad import _fused_sparse_proximal_adagrad_aicpu
from .meshgrid import _meshgrid_aicpu
from .trans_data import _trans_data_aicpu
from .stack_push_pop import _stack_init_aicpu
from .stack_push_pop import _stack_push_aicpu
from .stack_push_pop import _stack_pop_aicpu
from .asinh import _asinh_aicpu
from .asinh_grad import _asinh_grad_aicpu
from .stack_push_pop import _stack_destroy_aicpu
from .ctc_greedy_decoder import _ctc_greedy_decoder_aicpu
from .resize_bilinear import _resize_bilinear_aicpu
from .resize_bilinear_grad import _resize_bilinear_grad_aicpu
from .scatter_elements import _scatter_elements_aicpu
from .non_max_suppression import _non_max_suppression_aicpu
from .square import _square_aicpu
from .lower_bound import _lower_bound_aicpu
from .upper_bound import _upper_bound_aicpu
from .zeros_like import _zeros_like_aicpu
from .ones_like import _ones_like_aicpu
from .grid_sampler_3d import _grid_sampler_3d_aicpu
from .grid_sampler_3d_grad import _grid_sampler_3d_grad_aicpu
from .environ_create import _environ_create_aicpu
from .environ_set import _environ_set_aicpu
from .environ_get import _environ_get_aicpu
from .environ_destroy_all import _environ_destroy_all_aicpu
from .cross import _cross_aicpu
from .cummax import _cummax_aicpu
from .floor_div import _floor_div_aicpu
from .one_hot import _one_hot_aicpu
| 44.330709 | 79 | 0.875488 |
2aed167eea1315befd8468a253ab1a0e22aefaf9 | 4,716 | py | Python | tests/portfolio/test_snapshot_history.py | cmvandrevala/finance_scripts | dc256d2284bc3fa9cf35572b771e7c9538ad2309 | [
"MIT"
] | 2 | 2020-05-13T14:52:49.000Z | 2022-03-20T04:32:10.000Z | tests/portfolio/test_snapshot_history.py | cmvandrevala/finance_scripts | dc256d2284bc3fa9cf35572b771e7c9538ad2309 | [
"MIT"
] | 1 | 2021-10-09T16:21:42.000Z | 2021-10-09T16:21:42.000Z | tests/portfolio/test_snapshot_history.py | cmvandrevala/finance_scripts | dc256d2284bc3fa9cf35572b771e7c9538ad2309 | [
"MIT"
] | 1 | 2020-05-13T14:52:52.000Z | 2020-05-13T14:52:52.000Z | import unittest
from utilities.epoch_date_converter import EpochDateConverter
from portfolio.snapshot import Snapshot
from portfolio.snapshot_history import SnapshotHistory
class SnapshotHistoryTestCase(unittest.TestCase):
    def setUp(self):
        # Fresh, empty history plus a date<->epoch converter for each test.
        self.history = SnapshotHistory()
        self.converter = EpochDateConverter()
    def test_imports_a_snapshot(self):
        # A single imported snapshot should be returned by all().
        snapshot = Snapshot(self.converter.date_to_epoch(), 1000)
        self.history.import_snapshot(snapshot)
        self.assertEqual(self.history.all(), [snapshot])
def test_imports_two_snapshots(self):
snapshot1 = Snapshot(self.converter.date_to_epoch(), 1000)
snapshot2 = Snapshot(self.converter.date_to_epoch(), 100)
self.history.import_snapshot(snapshot1)
self.history.import_snapshot(snapshot2)
self.assertEqual(self.history.all(), [snapshot1, snapshot2])
def test_it_has_a_value_of_zero_if_there_are_no_snapshots(self):
self.assertEqual(self.history.value(), 0)
def test_it_returns_an_value_of_zero_when_queried_before_a_snapshot(self):
timestamp = self.converter.date_to_epoch()
query_time = timestamp - 20
self.history.import_snapshot(Snapshot(timestamp, 100))
value = self.history.value(query_time)
self.assertEqual(value, 0)
def test_it_returns_the_correct_value_when_queried_after_a_snapshot(self):
timestamp = self.converter.date_to_epoch()
query_time = timestamp + 20
self.history.import_snapshot(Snapshot(timestamp, 100))
value = self.history.value(query_time)
self.assertEqual(value, 100)
def test_it_returns_the_correct_value_when_queried_in_between_two_snapshots(self):
later_timestamp = self.converter.date_to_epoch()
earlier_timestamp = later_timestamp - 120
query_time = (earlier_timestamp + later_timestamp) / 2
self.history.import_snapshot(Snapshot(earlier_timestamp, 300))
self.history.import_snapshot(Snapshot(later_timestamp, 250))
value = self.history.value(query_time)
self.assertEqual(value, 300)
def test_it_updates_the_value_at_the_time_the_snapshot_is_recorded(self):
epoch = EpochDateConverter().date_to_epoch("2014-02-03")
self.history.import_snapshot(Snapshot(epoch, 3060))
value = self.history.value(epoch)
self.assertEqual(value, 3060)
def test_the_order_in_which_snapshots_are_imported_makes_no_difference(self):
timestamp1 = self.converter.date_to_epoch()
timestamp2 = timestamp1 - 1
timestamp3 = timestamp1 - 2
query_time = timestamp1 + 1
self.history.import_snapshot(Snapshot(timestamp2, 20))
self.history.import_snapshot(Snapshot(timestamp1, 10))
self.history.import_snapshot(Snapshot(timestamp3, 30))
value = self.history.value(query_time)
self.assertEqual(value, 10)
def test_it_defaults_to_the_current_epoch_if_no_argument_is_given(self):
timestamp = self.converter.date_to_epoch()
self.history.import_snapshot(Snapshot(timestamp - 5, 10))
self.history.import_snapshot(Snapshot(timestamp - 10, 20))
value = self.history.value()
self.assertEqual(value, 10)
def test_it_returns_the_latest_timestamp_for_one_snapshot(self):
current_epoch = self.converter.date_to_epoch()
formatted_date = EpochDateConverter().epoch_to_date(current_epoch)
snapshot = Snapshot(current_epoch, 1000)
self.history.import_snapshot(snapshot)
self.assertEqual(self.history.last_updated(), formatted_date)
def test_it_returns_the_latest_timestamp_for_two_snapshots(self):
current_epoch = self.converter.date_to_epoch()
formatted_date = EpochDateConverter().epoch_to_date(current_epoch)
snapshot = Snapshot(current_epoch, 1000)
self.history.import_snapshot(snapshot)
snapshot = Snapshot(current_epoch - 1000000, 2000)
self.history.import_snapshot(snapshot)
self.assertEqual(self.history.last_updated(), formatted_date)
def test_it_returns_the_latest_timestamp_for_three_snapshots(self):
current_epoch = self.converter.date_to_epoch()
formatted_date = EpochDateConverter().epoch_to_date(current_epoch)
snapshot = Snapshot(current_epoch, 1000)
self.history.import_snapshot(snapshot)
snapshot = Snapshot(current_epoch - 1000000, 2000)
self.history.import_snapshot(snapshot)
snapshot = Snapshot(current_epoch - 2000000, 2000)
self.history.import_snapshot(snapshot)
self.assertEqual(self.history.last_updated(), formatted_date)
if __name__ == '__main__':
    # Allow running this test module directly: python test_snapshot_history.py
    unittest.main()
| 44.914286 | 86 | 0.731552 |
6603973ee7774b608afb54bdc81fdc8399ee4756 | 4,919 | py | Python | ui_tests/caseworker/pages/case_page.py | django-doctor/lite-frontend | 330ff9575fd22d7c4c42698ac2d653244e6180d6 | [
"MIT"
] | 1 | 2021-10-16T16:36:58.000Z | 2021-10-16T16:36:58.000Z | ui_tests/caseworker/pages/case_page.py | django-doctor/lite-frontend | 330ff9575fd22d7c4c42698ac2d653244e6180d6 | [
"MIT"
] | 45 | 2020-08-11T14:37:46.000Z | 2022-03-29T17:03:02.000Z | ui_tests/caseworker/pages/case_page.py | django-doctor/lite-frontend | 330ff9575fd22d7c4c42698ac2d653244e6180d6 | [
"MIT"
] | 3 | 2021-02-01T06:26:19.000Z | 2022-02-21T23:02:46.000Z | import time
from ui_tests.caseworker.pages.shared import Shared
from ui_tests.caseworker.pages.BasePage import BasePage
from tests_common import selectors
from tests_common.tools.helpers import scroll_to_element_by_id
class CaseTabs:
    """Element-id suffixes for the tabs on the caseworker case detail page."""

    DETAILS = "details"
    USER_ADVICE = "user-advice"
    TEAM_ADVICE = "team-advice"
    FINAL_ADVICE = "final-advice"
    ADDITIONAL_CONTACTS = "additional-contacts"
    ECJU_QUERIES = "ecju-queries"
    DOCUMENTS = "documents"
    ACTIVITY = "activity"
    COMPLIANCE_LICENCES = "compliance-licences"
class CasePage(BasePage):
    """Page object for the caseworker case detail page.

    Exposes the page's links, buttons and tables as named actions so tests
    interact through intent-revealing methods instead of raw selectors.
    """

    # IDs of the main elements on the case detail page.
    TABLE_GOODS_ID = "table-goods"
    TABLE_DESTINATIONS_ID = "table-destinations"
    TABLE_DELETED_ENTITIES_ID = "table-inactive-entities"
    BUTTON_RERUN_ROUTING_RULES_ID = "button-rerun-routing-rules"
    BUTTON_SET_GOODS_FLAGS_ID = "button-edit-goods-flags"
    BUTTON_SET_DESTINATIONS_FLAGS_ID = "button-edit-destinations-flags"
    LINK_CHANGE_STATUS_ID = "link-change-status"
    LINK_CHANGE_CASE_FLAGS_ID = "link-change-flags"
    LINK_ASSIGN_CASE_OFFICER_ID = "link-change-case-officer"
    LINK_ASSIGN_USERS_ID = "link-change-assigned-users"
    LINK_SET_NEXT_REVIEW_DATE_ID = "link-change-review-date"
    NEXT_REVIEW_DATE_ID = "next-review-date"
    BANNER_REFERENCE_CODE_ID = "reference-code"

    def change_tab(self, tab: str):
        """Switch to *tab*; the advice tabs sit under a collapsible parent tab."""
        if tab == CaseTabs.USER_ADVICE or tab == CaseTabs.TEAM_ADVICE or tab == CaseTabs.FINAL_ADVICE:
            self.driver.find_element_by_id("tab-collection-advice").click()
        self.driver.find_element_by_id("tab-" + tab).click()

    def click_change_case_flags(self):
        self.driver.find_element_by_id(self.LINK_CHANGE_CASE_FLAGS_ID).click()

    def click_assign_case_officer(self):
        # Scroll the link into view before clicking.
        scroll_to_element_by_id(self.driver, self.LINK_ASSIGN_CASE_OFFICER_ID)
        self.driver.find_element_by_id(self.LINK_ASSIGN_CASE_OFFICER_ID).click()

    def click_set_next_review_date(self):
        scroll_to_element_by_id(self.driver, self.LINK_SET_NEXT_REVIEW_DATE_ID)
        self.driver.find_element_by_id(self.LINK_SET_NEXT_REVIEW_DATE_ID).click()

    def get_next_review_date(self):
        return self.driver.find_element_by_id(self.NEXT_REVIEW_DATE_ID).text

    def click_assign_users(self):
        scroll_to_element_by_id(self.driver, self.LINK_ASSIGN_USERS_ID)
        self.driver.find_element_by_id(self.LINK_ASSIGN_USERS_ID).click()

    def click_change_status(self):
        self.driver.find_element_by_id(self.LINK_CHANGE_STATUS_ID).click()

    def click_rerun_routing_rules(self):
        self.driver.find_element_by_id(self.BUTTON_RERUN_ROUTING_RULES_ID).click()

    def get_goods(self):
        """Return the row elements of the goods table."""
        return self.driver.find_elements_by_css_selector(f"#{self.TABLE_GOODS_ID} {Shared(self.driver).TABLE_ROW_CSS}")

    def select_first_good(self):
        scroll_to_element_by_id(self.driver, self.TABLE_GOODS_ID)
        self.driver.find_element_by_css_selector(f"#{self.TABLE_GOODS_ID} {selectors.CHECKBOX}").click()

    def get_goods_text(self):
        return self.driver.find_element_by_id(self.TABLE_GOODS_ID).text

    def get_destinations(self):
        """Return the row elements of the destinations table."""
        return self.driver.find_elements_by_css_selector(
            f"#{self.TABLE_DESTINATIONS_ID} {Shared(self.driver).TABLE_ROW_CSS}"
        )

    def get_destinations_text(self):
        return self.driver.find_element_by_id(self.TABLE_DESTINATIONS_ID).text

    def get_deleted_entities_text(self):
        return self.driver.find_element_by_id(self.TABLE_DELETED_ENTITIES_ID).text

    def select_destinations(self):
        """Tick every destination checkbox in the destinations table.

        Fix: the selector was previously built as
        ``self.TABLE_DESTINATIONS_ID + selectors.CHECKBOX``, which omits the
        ``#`` id prefix and the descendant-combinator space, producing an
        invalid selector that matches nothing. It is now built the same way
        as in select_first_good/select_destination.
        """
        for destination in self.driver.find_elements_by_css_selector(
            f"#{self.TABLE_DESTINATIONS_ID} {selectors.CHECKBOX}"
        ):
            destination.click()

    def is_flag_applied(self, flag_name):
        self.driver.find_element_by_id("candy-flags").click()
        # TODO Make this an implicit wait!
        time.sleep(0.5)
        return flag_name in self.driver.find_element_by_id("popup-flags").text

    def is_flag_in_applied_flags_list(self, flag_name):
        text = self.driver.find_element_by_id("checkbox-counter").text
        return flag_name in text

    def is_goods_flag_applied(self, flag_name):
        return flag_name in self.driver.find_element_by_id(self.TABLE_GOODS_ID).text

    def click_edit_goods_flags(self):
        self.driver.find_element_by_id(self.BUTTON_SET_GOODS_FLAGS_ID).click()

    def click_edit_destinations_flags(self):
        scroll_to_element_by_id(self.driver, self.BUTTON_SET_DESTINATIONS_FLAGS_ID)
        self.driver.find_element_by_id(self.BUTTON_SET_DESTINATIONS_FLAGS_ID).click()

    def select_destination(self, index):
        """Tick the destination checkbox at *index* (0-based)."""
        scroll_to_element_by_id(self.driver, self.TABLE_DESTINATIONS_ID)
        self.driver.find_elements_by_css_selector(f"#{self.TABLE_DESTINATIONS_ID} {selectors.CHECKBOX}")[index].click()

    def get_reference_code_text(self):
        return self.driver.find_element_by_id(self.BANNER_REFERENCE_CODE_ID).text
| 41.336134 | 119 | 0.756455 |
861e499cd36f931ea5704020908b3bdabb10b184 | 520 | py | Python | primes/coprimes.py | kwhjvdkamp/PythonTutotial | cbe52c83b0ff2b30f746977f698186dad055b1f4 | [
"MIT"
] | null | null | null | primes/coprimes.py | kwhjvdkamp/PythonTutotial | cbe52c83b0ff2b30f746977f698186dad055b1f4 | [
"MIT"
] | 5 | 2021-07-12T16:34:28.000Z | 2022-03-12T00:59:38.000Z | primes/coprimes.py | kwhjvdkamp/PythonTutotial | cbe52c83b0ff2b30f746977f698186dad055b1f4 | [
"MIT"
] | null | null | null | list1 = [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70]
# Multiples of 7: second operand pool for the coprime search below.
list2 = [7, 14, 21, 28, 35, 42, 49, 56, 63, 70, 77, 84, 91, 98]
def gcd(a, b):
    """Return the greatest common divisor of a and b (Euclid's algorithm)."""
    print("input:", a, b)
    # Repeatedly replace (a, b) with (b, a mod b); when b reaches 0, a holds the gcd.
    while b != 0:
        a, b = b, a % b
    print("output:", a, b)
    return a
# Create a list of tuples defining pairs of coprime numbers
# (pairs (i, j) whose greatest common divisor is 1).
coprimes = [(i,j) for i in list1 for j in list2 if gcd(i,j) == 1]
print(coprimes)
1102c3bae9a4638a8e99325debcf9b048107ac17 | 1,689 | py | Python | tests/plugins/mockserver/test_mockserver.py | okutane/yandex-taxi-testsuite | 7e2e3dd5a65869ecbf37bf3f79cba7bb4e782b0c | [
"MIT"
] | 128 | 2020-03-10T09:13:41.000Z | 2022-02-11T20:16:16.000Z | tests/plugins/mockserver/test_mockserver.py | okutane/yandex-taxi-testsuite | 7e2e3dd5a65869ecbf37bf3f79cba7bb4e782b0c | [
"MIT"
] | 3 | 2021-11-01T12:31:27.000Z | 2022-02-11T13:08:38.000Z | tests/plugins/mockserver/test_mockserver.py | okutane/yandex-taxi-testsuite | 7e2e3dd5a65869ecbf37bf3f79cba7bb4e782b0c | [
"MIT"
] | 22 | 2020-03-05T07:13:12.000Z | 2022-03-15T10:30:58.000Z | # pylint: disable=protected-access
import aiohttp
import pytest
class Client:
    """Minimal HTTP client that resolves every request path against a base URL."""

    def __init__(self, *, base_url, session):
        self._base_url = base_url
        self._session = session

    def get(self, path, **kwargs):
        """Issue a GET request for *path* relative to the base URL."""
        return self._request('GET', path, **kwargs)

    def _request(self, method, path, **kwargs):
        # Join base URL and path, then delegate to the underlying session.
        target = _build_url(self._base_url, path)
        return self._session.request(method, target, **kwargs)
def _build_url(base_url, path):
return '%s/%s' % (base_url.rstrip('/'), path.lstrip('/'))
@pytest.fixture
async def mockserver_client(mockserver):
    # Client bound to the mockserver's base URL; the aiohttp session is
    # closed automatically when the fixture is torn down.
    async with aiohttp.ClientSession() as session:
        yield Client(base_url=mockserver.base_url, session=session)
async def test_json_handler(mockserver, mockserver_client):
    # A synchronous handler returning a dict is served as a JSON body.
    @mockserver.json_handler('/foo')
    def _foo_handler(request):
        return {'msg': 'hello'}

    response = await mockserver_client.get('/foo')
    assert response.status == 200
    data = await response.json()
    assert data == {'msg': 'hello'}
async def test_async_json_handler(mockserver, mockserver_client):
    # Same as test_json_handler, but the handler is a coroutine function.
    @mockserver.json_handler('/foo')
    async def _foo_handler(request):
        return {'msg': 'hello'}

    response = await mockserver_client.get('/foo')
    assert response.status == 200
    data = await response.json()
    assert data == {'msg': 'hello'}
async def test_handler(mockserver, mockserver_client):
    # A handler may return a raw aiohttp Response; the body passes through
    # unchanged instead of being JSON-encoded.
    @mockserver.json_handler('/foo')
    def _foo_handler(request):
        return aiohttp.web.Response(body='hello')

    response = await mockserver_client.get('/foo')
    assert response.status == 200
    data = await response.content.read()
    assert data == b'hello'
| 26.809524 | 67 | 0.676732 |
e3676ecc2934e0eab10bf11fd07663cc227c835b | 705 | py | Python | LeetCodeSolutions/python/437_Path_Sum_III.py | ChuanleiGuo/AlgorithmsPlayground | 90b6287b742c8bfd3797540c408d679be2821a40 | [
"MIT"
] | 1 | 2017-03-27T13:38:37.000Z | 2017-03-27T13:38:37.000Z | LeetCodeSolutions/python/437_Path_Sum_III.py | ChuanleiGuo/AlgorithmsPlayground | 90b6287b742c8bfd3797540c408d679be2821a40 | [
"MIT"
] | null | null | null | LeetCodeSolutions/python/437_Path_Sum_III.py | ChuanleiGuo/AlgorithmsPlayground | 90b6287b742c8bfd3797540c408d679be2821a40 | [
"MIT"
class TreeNode(object):
    """Binary-tree node: a value plus optional left/right child links."""

    def __init__(self, x):
        self.val = x
        self.left = self.right = None
class Solution(object):
    """LeetCode 437 — Path Sum III."""

    def pathSum(self, root, s):
        """
        Count downward paths in the tree rooted at *root* whose values sum to *s*.

        Paths must go parent-to-child but need not start at the root or end at
        a leaf. Uses a prefix-sum map for a single O(n) traversal instead of
        the naive O(n^2) recursion (find_path at every node).

        :type root: TreeNode
        :type s: int
        :rtype: int
        """
        # prefix[x] = number of ancestors on the current root-to-node path
        # (plus the virtual empty prefix) whose running sum equals x.
        prefix = {0: 1}

        def dfs(node, running):
            if not node:
                return 0
            running += node.val
            # A path ending at this node sums to s iff some ancestor prefix
            # equals running - s.
            count = prefix.get(running - s, 0)
            prefix[running] = prefix.get(running, 0) + 1
            count += dfs(node.left, running) + dfs(node.right, running)
            prefix[running] -= 1  # backtrack before leaving this subtree
            return count

        return dfs(root, 0)

    def find_path(self, node, s):
        """Count downward paths starting at *node* that sum to *s*.

        Kept unchanged for backward compatibility with existing callers.
        """
        res = 0
        if not node:
            return res
        if s == node.val:
            res += 1
        res += self.find_path(node.left, s - node.val)
        res += self.find_path(node.right, s - node.val)
        return res
| 23.5 | 71 | 0.496454 |
205e5e7c64a9a20b43ed1f0316cb87551cfee84b | 159 | py | Python | welcomer/__init__.py | duanegtr/legendv3-cogs | ffde1452a75ad42b4f6511b612ce486e96fcd6de | [
"MIT"
] | 3 | 2020-09-03T13:39:30.000Z | 2021-05-21T01:37:00.000Z | welcomer/__init__.py | darcyle/tl-cogs | 6b13c4a6247115571c5a2bb6ea98ed1fe2d44d79 | [
"MIT"
] | null | null | null | welcomer/__init__.py | darcyle/tl-cogs | 6b13c4a6247115571c5a2bb6ea98ed1fe2d44d79 | [
"MIT"
] | 4 | 2020-09-24T04:24:02.000Z | 2021-04-25T03:48:44.000Z | from .welcomer import Welcomer
async def setup(bot):
    # Cog-loading entry point: build the Welcomer cog, run its async
    # initialisation hooks, then register it with the bot.
    cog = Welcomer(bot=bot)
    # NOTE(review): crtoken() presumably acquires an API token the cog needs
    # before use — confirm against the Welcomer cog's implementation.
    await cog.crtoken()
    await cog.load_menu_module()
    bot.add_cog(cog)
f2d4c053a116de1a16891f8244786c080e6a0114 | 206 | py | Python | main.py | Akhlak-Hossain-Jim/jimmi.py | 6fe6069058640541234beb7dc28d2e4981acbd6b | [
"Apache-2.0"
] | null | null | null | main.py | Akhlak-Hossain-Jim/jimmi.py | 6fe6069058640541234beb7dc28d2e4981acbd6b | [
"Apache-2.0"
] | null | null | null | main.py | Akhlak-Hossain-Jim/jimmi.py | 6fe6069058640541234beb7dc28d2e4981acbd6b | [
"Apache-2.0"
] | null | null | null | print("Hi there whats your name?")
# Read the user's name from stdin (the prompt was printed just above).
name = input()
print('hello again ' + name)
# Introductory disclaimer from the bot.
print("I\'m Jimmi bot and just started learning to being intelligent. So I can have some error to answering your questions.")
| 41.2 | 125 | 0.73301 |
cd81585b4d12cbd3550c29983ab40d46b7bf5e4d | 2,322 | py | Python | pyunity/window/templateWindow.py | Knight1632/pyunity | 4d96311bef1bb51aee6afbdcc156f9a290c29815 | [
"MIT"
] | null | null | null | pyunity/window/templateWindow.py | Knight1632/pyunity | 4d96311bef1bb51aee6afbdcc156f9a290c29815 | [
"MIT"
] | null | null | null | pyunity/window/templateWindow.py | Knight1632/pyunity | 4d96311bef1bb51aee6afbdcc156f9a290c29815 | [
"MIT"
] | null | null | null | """Template window provider, use this for creating new window providers"""
from ..errors import *
from ..core import Clock
from ..input import KeyCode
from .. import config
class Window:
    """
    A template window provider.
    """
    # NOTE(review): `name` is accepted (matching other providers' signatures)
    # but not stored or used here — confirm that is intentional.

    def __init__(self, name, resize):
        self.resize = resize

    def quit(self):
        # Nothing to release in the template implementation.
        pass

    def start(self, update_func):
        """
        Start the main loop of the window.

        Parameters
        ----------
        update_func : function
            The function that calls the OpenGL calls.

        """
        self.update_func = update_func

        # Clock is started with the configured frame rate; Maintain()
        # presumably paces each iteration to that rate — confirm in Clock.
        clock = Clock()
        clock.Start(config.fps)
        while True:
            try:
                self.update_func()
                clock.Maintain()
            except KeyboardInterrupt:
                # Ctrl+C is the only exit from this loop.
                break
        self.quit()
# Mapping from pyunity KeyCode values to string key names; presumably used by
# concrete window providers to translate backend key identifiers — confirm.
keyMap = {
    KeyCode.A: "A",
    KeyCode.B: "B",
    KeyCode.C: "C",
    KeyCode.D: "D",
    KeyCode.E: "E",
    KeyCode.F: "F",
    KeyCode.G: "G",
    KeyCode.H: "H",
    KeyCode.I: "I",
    KeyCode.J: "J",
    KeyCode.K: "K",
    KeyCode.L: "L",
    KeyCode.M: "M",
    KeyCode.N: "N",
    KeyCode.O: "O",
    KeyCode.P: "P",
    KeyCode.Q: "Q",
    KeyCode.R: "R",
    KeyCode.S: "S",
    KeyCode.T: "T",
    KeyCode.U: "U",
    KeyCode.V: "V",
    KeyCode.W: "W",
    KeyCode.X: "X",
    KeyCode.Y: "Y",
    KeyCode.Z: "Z",
    KeyCode.Space: "SPACE",
    KeyCode.Alpha0: "0",
    KeyCode.Alpha1: "1",
    KeyCode.Alpha2: "2",
    KeyCode.Alpha3: "3",
    KeyCode.Alpha4: "4",
    KeyCode.Alpha5: "5",
    KeyCode.Alpha6: "6",
    KeyCode.Alpha7: "7",
    KeyCode.Alpha8: "8",
    KeyCode.Alpha9: "9",
    KeyCode.F1: "F1",
    KeyCode.F2: "F2",
    KeyCode.F3: "F3",
    KeyCode.F4: "F4",
    KeyCode.F5: "F5",
    KeyCode.F6: "F6",
    KeyCode.F7: "F7",
    KeyCode.F8: "F8",
    KeyCode.F9: "F9",
    KeyCode.F10: "F10",
    KeyCode.F11: "F11",
    KeyCode.F12: "F12",
    KeyCode.Keypad0: "KP_0",
    KeyCode.Keypad1: "KP_1",
    KeyCode.Keypad2: "KP_2",
    KeyCode.Keypad3: "KP_3",
    KeyCode.Keypad4: "KP_4",
    KeyCode.Keypad5: "KP_5",
    KeyCode.Keypad6: "KP_6",
    KeyCode.Keypad7: "KP_7",
    KeyCode.Keypad8: "KP_8",
    KeyCode.Keypad9: "KP_9",
    KeyCode.Up: "UP",
    KeyCode.Down: "DOWN",
    KeyCode.Left: "LEFT",
    KeyCode.Right: "RIGHT"
}
| 21.5 | 74 | 0.533161 |
eb1537cec17d11f0a98babb952bb5c8a6c24b302 | 114,301 | py | Python | google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py | TheMichaelHu/python-aiplatform | e03f373a7e44c354eda88875a41c771f6d7e3ce1 | [
"Apache-2.0"
] | null | null | null | google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py | TheMichaelHu/python-aiplatform | e03f373a7e44c354eda88875a41c771f6d7e3ce1 | [
"Apache-2.0"
] | null | null | null | google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py | TheMichaelHu/python-aiplatform | e03f373a7e44c354eda88875a41c771f6d7e3ce1 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation as gac_operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.aiplatform_v1beta1.services.featurestore_service import pagers
from google.cloud.aiplatform_v1beta1.types import encryption_spec
from google.cloud.aiplatform_v1beta1.types import entity_type
from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type
from google.cloud.aiplatform_v1beta1.types import feature
from google.cloud.aiplatform_v1beta1.types import feature as gca_feature
from google.cloud.aiplatform_v1beta1.types import feature_monitoring_stats
from google.cloud.aiplatform_v1beta1.types import featurestore
from google.cloud.aiplatform_v1beta1.types import featurestore as gca_featurestore
from google.cloud.aiplatform_v1beta1.types import featurestore_monitoring
from google.cloud.aiplatform_v1beta1.types import featurestore_service
from google.cloud.aiplatform_v1beta1.types import operation as gca_operation
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import FeaturestoreServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import FeaturestoreServiceGrpcAsyncIOTransport
from .client import FeaturestoreServiceClient
class FeaturestoreServiceAsyncClient:
"""The service that handles CRUD and List for resources for
Featurestore.
"""
_client: FeaturestoreServiceClient
DEFAULT_ENDPOINT = FeaturestoreServiceClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = FeaturestoreServiceClient.DEFAULT_MTLS_ENDPOINT
entity_type_path = staticmethod(FeaturestoreServiceClient.entity_type_path)
parse_entity_type_path = staticmethod(
FeaturestoreServiceClient.parse_entity_type_path
)
feature_path = staticmethod(FeaturestoreServiceClient.feature_path)
parse_feature_path = staticmethod(FeaturestoreServiceClient.parse_feature_path)
featurestore_path = staticmethod(FeaturestoreServiceClient.featurestore_path)
parse_featurestore_path = staticmethod(
FeaturestoreServiceClient.parse_featurestore_path
)
common_billing_account_path = staticmethod(
FeaturestoreServiceClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
FeaturestoreServiceClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(FeaturestoreServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(
FeaturestoreServiceClient.parse_common_folder_path
)
common_organization_path = staticmethod(
FeaturestoreServiceClient.common_organization_path
)
parse_common_organization_path = staticmethod(
FeaturestoreServiceClient.parse_common_organization_path
)
common_project_path = staticmethod(FeaturestoreServiceClient.common_project_path)
parse_common_project_path = staticmethod(
FeaturestoreServiceClient.parse_common_project_path
)
common_location_path = staticmethod(FeaturestoreServiceClient.common_location_path)
parse_common_location_path = staticmethod(
FeaturestoreServiceClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FeaturestoreServiceAsyncClient: The constructed client.
"""
return FeaturestoreServiceClient.from_service_account_info.__func__(FeaturestoreServiceAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FeaturestoreServiceAsyncClient: The constructed client.
"""
return FeaturestoreServiceClient.from_service_account_file.__func__(FeaturestoreServiceAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
(1) if `client_options.api_endpoint` if provided, use the provided one.
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
default mTLS endpoint; if the environment variabel is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return FeaturestoreServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> FeaturestoreServiceTransport:
"""Returns the transport used by the client instance.
Returns:
FeaturestoreServiceTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(FeaturestoreServiceClient).get_transport_class,
type(FeaturestoreServiceClient),
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, FeaturestoreServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the featurestore service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.FeaturestoreServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = FeaturestoreServiceClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def create_featurestore(
self,
request: Union[featurestore_service.CreateFeaturestoreRequest, dict] = None,
*,
parent: str = None,
featurestore: gca_featurestore.Featurestore = None,
featurestore_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a new Featurestore in a given project and
location.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_create_featurestore():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.CreateFeaturestoreRequest(
parent="parent_value",
featurestore_id="featurestore_id_value",
)
# Make the request
operation = client.create_featurestore(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.CreateFeaturestoreRequest, dict]):
The request object. Request message for
[FeaturestoreService.CreateFeaturestore][google.cloud.aiplatform.v1beta1.FeaturestoreService.CreateFeaturestore].
parent (:class:`str`):
Required. The resource name of the Location to create
Featurestores. Format:
``projects/{project}/locations/{location}'``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
featurestore (:class:`google.cloud.aiplatform_v1beta1.types.Featurestore`):
Required. The Featurestore to create.
This corresponds to the ``featurestore`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
featurestore_id (:class:`str`):
Required. The ID to use for this Featurestore, which
will become the final component of the Featurestore's
resource name.
This value may be up to 60 characters, and valid
characters are ``[a-z0-9_]``. The first character cannot
be a number.
The value must be unique within the project and
location.
This corresponds to the ``featurestore_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.Featurestore` Vertex AI Feature Store provides a centralized repository for organizing,
storing, and serving ML features. The Featurestore is
a top-level container for your features and their
values.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, featurestore, featurestore_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.CreateFeaturestoreRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if featurestore is not None:
request.featurestore = featurestore
if featurestore_id is not None:
request.featurestore_id = featurestore_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_featurestore,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gca_featurestore.Featurestore,
metadata_type=featurestore_service.CreateFeaturestoreOperationMetadata,
)
# Done; return the response.
return response
async def get_featurestore(
self,
request: Union[featurestore_service.GetFeaturestoreRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> featurestore.Featurestore:
r"""Gets details of a single Featurestore.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_get_featurestore():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.GetFeaturestoreRequest(
name="name_value",
)
# Make the request
response = client.get_featurestore(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.GetFeaturestoreRequest, dict]):
The request object. Request message for
[FeaturestoreService.GetFeaturestore][google.cloud.aiplatform.v1beta1.FeaturestoreService.GetFeaturestore].
name (:class:`str`):
Required. The name of the
Featurestore resource.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.Featurestore:
Vertex AI Feature Store provides a
centralized repository for organizing,
storing, and serving ML features. The
Featurestore is a top-level container
for your features and their values.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.GetFeaturestoreRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_featurestore,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def list_featurestores(
self,
request: Union[featurestore_service.ListFeaturestoresRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListFeaturestoresAsyncPager:
r"""Lists Featurestores in a given project and location.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_list_featurestores():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.ListFeaturestoresRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_featurestores(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.ListFeaturestoresRequest, dict]):
The request object. Request message for
[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListFeaturestores].
parent (:class:`str`):
Required. The resource name of the Location to list
Featurestores. Format:
``projects/{project}/locations/{location}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.services.featurestore_service.pagers.ListFeaturestoresAsyncPager:
Response message for
[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListFeaturestores].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.ListFeaturestoresRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_featurestores,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListFeaturestoresAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def update_featurestore(
self,
request: Union[featurestore_service.UpdateFeaturestoreRequest, dict] = None,
*,
featurestore: gca_featurestore.Featurestore = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates the parameters of a single Featurestore.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_update_featurestore():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.UpdateFeaturestoreRequest(
)
# Make the request
operation = client.update_featurestore(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.UpdateFeaturestoreRequest, dict]):
The request object. Request message for
[FeaturestoreService.UpdateFeaturestore][google.cloud.aiplatform.v1beta1.FeaturestoreService.UpdateFeaturestore].
featurestore (:class:`google.cloud.aiplatform_v1beta1.types.Featurestore`):
Required. The Featurestore's ``name`` field is used to
identify the Featurestore to be updated. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
This corresponds to the ``featurestore`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Field mask is used to specify the fields to be
overwritten in the Featurestore resource by the update.
The fields specified in the update_mask are relative to
the resource, not the full request. A field will be
overwritten if it is in the mask. If the user does not
provide a mask then only the non-empty fields present in
the request will be overwritten. Set the update_mask to
``*`` to override all fields.
Updatable fields:
- ``labels``
- ``online_serving_config.fixed_node_count``
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.Featurestore` Vertex AI Feature Store provides a centralized repository for organizing,
storing, and serving ML features. The Featurestore is
a top-level container for your features and their
values.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([featurestore, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.UpdateFeaturestoreRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if featurestore is not None:
request.featurestore = featurestore
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_featurestore,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("featurestore.name", request.featurestore.name),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gca_featurestore.Featurestore,
metadata_type=featurestore_service.UpdateFeaturestoreOperationMetadata,
)
# Done; return the response.
return response
async def delete_featurestore(
self,
request: Union[featurestore_service.DeleteFeaturestoreRequest, dict] = None,
*,
name: str = None,
force: bool = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes a single Featurestore. The Featurestore must not contain
any EntityTypes or ``force`` must be set to true for the request
to succeed.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_delete_featurestore():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.DeleteFeaturestoreRequest(
name="name_value",
)
# Make the request
operation = client.delete_featurestore(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.DeleteFeaturestoreRequest, dict]):
The request object. Request message for
[FeaturestoreService.DeleteFeaturestore][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteFeaturestore].
name (:class:`str`):
Required. The name of the Featurestore to be deleted.
Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
force (:class:`bool`):
If set to true, any EntityTypes and
Features for this Featurestore will also
be deleted. (Otherwise, the request will
only work if the Featurestore has no
EntityTypes.)
This corresponds to the ``force`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name, force])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.DeleteFeaturestoreRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
if force is not None:
request.force = force
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_featurestore,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=gca_operation.DeleteOperationMetadata,
)
# Done; return the response.
return response
async def create_entity_type(
self,
request: Union[featurestore_service.CreateEntityTypeRequest, dict] = None,
*,
parent: str = None,
entity_type: gca_entity_type.EntityType = None,
entity_type_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a new EntityType in a given Featurestore.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_create_entity_type():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.CreateEntityTypeRequest(
parent="parent_value",
entity_type_id="entity_type_id_value",
)
# Make the request
operation = client.create_entity_type(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.CreateEntityTypeRequest, dict]):
The request object. Request message for
[FeaturestoreService.CreateEntityType][google.cloud.aiplatform.v1beta1.FeaturestoreService.CreateEntityType].
parent (:class:`str`):
Required. The resource name of the Featurestore to
create EntityTypes. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
entity_type (:class:`google.cloud.aiplatform_v1beta1.types.EntityType`):
The EntityType to create.
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
entity_type_id (:class:`str`):
Required. The ID to use for the EntityType, which will
become the final component of the EntityType's resource
name.
This value may be up to 60 characters, and valid
characters are ``[a-z0-9_]``. The first character cannot
be a number.
The value must be unique within a featurestore.
This corresponds to the ``entity_type_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.EntityType` An entity type is a type of object in a system that needs to be modeled and
have stored information about. For example, driver is
an entity type, and driver0 is an instance of an
entity type driver.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, entity_type, entity_type_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.CreateEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if entity_type is not None:
request.entity_type = entity_type
if entity_type_id is not None:
request.entity_type_id = entity_type_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_entity_type,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gca_entity_type.EntityType,
metadata_type=featurestore_service.CreateEntityTypeOperationMetadata,
)
# Done; return the response.
return response
async def get_entity_type(
self,
request: Union[featurestore_service.GetEntityTypeRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> entity_type.EntityType:
r"""Gets details of a single EntityType.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_get_entity_type():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.GetEntityTypeRequest(
name="name_value",
)
# Make the request
response = client.get_entity_type(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.GetEntityTypeRequest, dict]):
The request object. Request message for
[FeaturestoreService.GetEntityType][google.cloud.aiplatform.v1beta1.FeaturestoreService.GetEntityType].
name (:class:`str`):
Required. The name of the EntityType resource. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.EntityType:
An entity type is a type of object in
a system that needs to be modeled and
have stored information about. For
example, driver is an entity type, and
driver0 is an instance of an entity type
driver.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.GetEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_entity_type,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def list_entity_types(
self,
request: Union[featurestore_service.ListEntityTypesRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListEntityTypesAsyncPager:
r"""Lists EntityTypes in a given Featurestore.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_list_entity_types():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.ListEntityTypesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_entity_types(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.ListEntityTypesRequest, dict]):
The request object. Request message for
[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListEntityTypes].
parent (:class:`str`):
Required. The resource name of the Featurestore to list
EntityTypes. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.services.featurestore_service.pagers.ListEntityTypesAsyncPager:
Response message for
[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListEntityTypes].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.ListEntityTypesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_entity_types,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListEntityTypesAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def update_entity_type(
self,
request: Union[featurestore_service.UpdateEntityTypeRequest, dict] = None,
*,
entity_type: gca_entity_type.EntityType = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gca_entity_type.EntityType:
r"""Updates the parameters of a single EntityType.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_update_entity_type():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.UpdateEntityTypeRequest(
)
# Make the request
response = client.update_entity_type(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.UpdateEntityTypeRequest, dict]):
The request object. Request message for
[FeaturestoreService.UpdateEntityType][google.cloud.aiplatform.v1beta1.FeaturestoreService.UpdateEntityType].
entity_type (:class:`google.cloud.aiplatform_v1beta1.types.EntityType`):
Required. The EntityType's ``name`` field is used to
identify the EntityType to be updated. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Field mask is used to specify the fields to be
overwritten in the EntityType resource by the update.
The fields specified in the update_mask are relative to
the resource, not the full request. A field will be
overwritten if it is in the mask. If the user does not
provide a mask then only the non-empty fields present in
the request will be overwritten. Set the update_mask to
``*`` to override all fields.
Updatable fields:
- ``description``
- ``labels``
- ``monitoring_config.snapshot_analysis.disabled``
- ``monitoring_config.snapshot_analysis.monitoring_interval_days``
- ``monitoring_config.snapshot_analysis.staleness_days``
- ``monitoring_config.import_features_analysis.state``
- ``monitoring_config.import_features_analysis.anomaly_detection_baseline``
- ``monitoring_config.numerical_threshold_config.value``
- ``monitoring_config.categorical_threshold_config.value``
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.EntityType:
An entity type is a type of object in
a system that needs to be modeled and
have stored information about. For
example, driver is an entity type, and
driver0 is an instance of an entity type
driver.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([entity_type, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.UpdateEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if entity_type is not None:
request.entity_type = entity_type
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_entity_type,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("entity_type.name", request.entity_type.name),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def delete_entity_type(
self,
request: Union[featurestore_service.DeleteEntityTypeRequest, dict] = None,
*,
name: str = None,
force: bool = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes a single EntityType. The EntityType must not have any
Features or ``force`` must be set to true for the request to
succeed.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_delete_entity_type():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.DeleteEntityTypeRequest(
name="name_value",
)
# Make the request
operation = client.delete_entity_type(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.DeleteEntityTypeRequest, dict]):
The request object. Request message for
[FeaturestoreService.DeleteEntityTypes][].
name (:class:`str`):
Required. The name of the EntityType to be deleted.
Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
force (:class:`bool`):
If set to true, any Features for this
EntityType will also be deleted.
(Otherwise, the request will only work
if the EntityType has no Features.)
This corresponds to the ``force`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name, force])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.DeleteEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
if force is not None:
request.force = force
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_entity_type,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=gca_operation.DeleteOperationMetadata,
)
# Done; return the response.
return response
async def create_feature(
self,
request: Union[featurestore_service.CreateFeatureRequest, dict] = None,
*,
parent: str = None,
feature: gca_feature.Feature = None,
feature_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a new Feature in a given EntityType.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_create_feature():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
feature = aiplatform_v1beta1.Feature()
feature.value_type = "BYTES"
request = aiplatform_v1beta1.CreateFeatureRequest(
parent="parent_value",
feature=feature,
feature_id="feature_id_value",
)
# Make the request
operation = client.create_feature(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.CreateFeatureRequest, dict]):
The request object. Request message for
[FeaturestoreService.CreateFeature][google.cloud.aiplatform.v1beta1.FeaturestoreService.CreateFeature].
parent (:class:`str`):
Required. The resource name of the EntityType to create
a Feature. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
feature (:class:`google.cloud.aiplatform_v1beta1.types.Feature`):
Required. The Feature to create.
This corresponds to the ``feature`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
feature_id (:class:`str`):
Required. The ID to use for the Feature, which will
become the final component of the Feature's resource
name.
This value may be up to 60 characters, and valid
characters are ``[a-z0-9_]``. The first character cannot
be a number.
The value must be unique within an EntityType.
This corresponds to the ``feature_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.Feature` Feature Metadata information that describes an attribute of an entity type.
For example, apple is an entity type, and color is a
feature that describes apple.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, feature, feature_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.CreateFeatureRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if feature is not None:
request.feature = feature
if feature_id is not None:
request.feature_id = feature_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_feature,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gca_feature.Feature,
metadata_type=featurestore_service.CreateFeatureOperationMetadata,
)
# Done; return the response.
return response
async def batch_create_features(
self,
request: Union[featurestore_service.BatchCreateFeaturesRequest, dict] = None,
*,
parent: str = None,
requests: Sequence[featurestore_service.CreateFeatureRequest] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a batch of Features in a given EntityType.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_batch_create_features():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
requests = aiplatform_v1beta1.CreateFeatureRequest()
requests.parent = "parent_value"
requests.feature.value_type = "BYTES"
requests.feature_id = "feature_id_value"
request = aiplatform_v1beta1.BatchCreateFeaturesRequest(
parent="parent_value",
requests=requests,
)
# Make the request
operation = client.batch_create_features(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.BatchCreateFeaturesRequest, dict]):
The request object. Request message for
[FeaturestoreService.BatchCreateFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.BatchCreateFeatures].
parent (:class:`str`):
Required. The resource name of the EntityType to create
the batch of Features under. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
requests (:class:`Sequence[google.cloud.aiplatform_v1beta1.types.CreateFeatureRequest]`):
Required. The request message specifying the Features to
create. All Features must be created under the same
parent EntityType. The ``parent`` field in each child
request message can be omitted. If ``parent`` is set in
a child request, then the value must match the
``parent`` value in this request message.
This corresponds to the ``requests`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.aiplatform_v1beta1.types.BatchCreateFeaturesResponse`
Response message for
[FeaturestoreService.BatchCreateFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.BatchCreateFeatures].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, requests])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.BatchCreateFeaturesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if requests:
request.requests.extend(requests)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_create_features,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
featurestore_service.BatchCreateFeaturesResponse,
metadata_type=featurestore_service.BatchCreateFeaturesOperationMetadata,
)
# Done; return the response.
return response
async def get_feature(
self,
request: Union[featurestore_service.GetFeatureRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> feature.Feature:
r"""Gets details of a single Feature.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_get_feature():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.GetFeatureRequest(
name="name_value",
)
# Make the request
response = client.get_feature(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.GetFeatureRequest, dict]):
The request object. Request message for
[FeaturestoreService.GetFeature][google.cloud.aiplatform.v1beta1.FeaturestoreService.GetFeature].
name (:class:`str`):
Required. The name of the Feature resource. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.Feature:
Feature Metadata information that
describes an attribute of an entity
type. For example, apple is an entity
type, and color is a feature that
describes apple.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.GetFeatureRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_feature,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def list_features(
self,
request: Union[featurestore_service.ListFeaturesRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListFeaturesAsyncPager:
r"""Lists Features in a given EntityType.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_list_features():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.ListFeaturesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_features(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.ListFeaturesRequest, dict]):
The request object. Request message for
[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListFeatures].
parent (:class:`str`):
Required. The resource name of the Location to list
Features. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.services.featurestore_service.pagers.ListFeaturesAsyncPager:
Response message for
[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListFeatures].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.ListFeaturesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_features,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListFeaturesAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def update_feature(
self,
request: Union[featurestore_service.UpdateFeatureRequest, dict] = None,
*,
feature: gca_feature.Feature = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gca_feature.Feature:
r"""Updates the parameters of a single Feature.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_update_feature():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
feature = aiplatform_v1beta1.Feature()
feature.value_type = "BYTES"
request = aiplatform_v1beta1.UpdateFeatureRequest(
feature=feature,
)
# Make the request
response = client.update_feature(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.UpdateFeatureRequest, dict]):
The request object. Request message for
[FeaturestoreService.UpdateFeature][google.cloud.aiplatform.v1beta1.FeaturestoreService.UpdateFeature].
feature (:class:`google.cloud.aiplatform_v1beta1.types.Feature`):
Required. The Feature's ``name`` field is used to
identify the Feature to be updated. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}``
This corresponds to the ``feature`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Field mask is used to specify the fields to be
overwritten in the Features resource by the update. The
fields specified in the update_mask are relative to the
resource, not the full request. A field will be
overwritten if it is in the mask. If the user does not
provide a mask then only the non-empty fields present in
the request will be overwritten. Set the update_mask to
``*`` to override all fields.
Updatable fields:
- ``description``
- ``labels``
- ``disable_monitoring``
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.Feature:
Feature Metadata information that
describes an attribute of an entity
type. For example, apple is an entity
type, and color is a feature that
describes apple.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([feature, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.UpdateFeatureRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if feature is not None:
request.feature = feature
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_feature,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("feature.name", request.feature.name),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def delete_feature(
self,
request: Union[featurestore_service.DeleteFeatureRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes a single Feature.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_delete_feature():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.DeleteFeatureRequest(
name="name_value",
)
# Make the request
operation = client.delete_feature(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.DeleteFeatureRequest, dict]):
The request object. Request message for
[FeaturestoreService.DeleteFeature][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteFeature].
name (:class:`str`):
Required. The name of the Features to be deleted.
Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.DeleteFeatureRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_feature,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=gca_operation.DeleteOperationMetadata,
)
# Done; return the response.
return response
async def import_feature_values(
self,
request: Union[featurestore_service.ImportFeatureValuesRequest, dict] = None,
*,
entity_type: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Imports Feature values into the Featurestore from a
source storage.
The progress of the import is tracked by the returned
operation. The imported features are guaranteed to be
visible to subsequent read operations after the
operation is marked as successfully done.
If an import operation fails, the Feature values
returned from reads and exports may be inconsistent. If
consistency is required, the caller must retry the same
import request again and wait till the new operation
returned is marked as successfully done.
There are also scenarios where the caller can cause
inconsistency.
- Source data for import contains multiple distinct
Feature values for the same entity ID and timestamp.
- Source is modified during an import. This includes
adding, updating, or removing source data and/or
metadata. Examples of updating metadata include but are
not limited to changing storage location, storage class,
or retention policy.
- Online serving cluster is under-provisioned.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_import_feature_values():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
avro_source = aiplatform_v1beta1.AvroSource()
avro_source.gcs_source.uris = ['uris_value_1', 'uris_value_2']
feature_specs = aiplatform_v1beta1.FeatureSpec()
feature_specs.id = "id_value"
request = aiplatform_v1beta1.ImportFeatureValuesRequest(
avro_source=avro_source,
feature_time_field="feature_time_field_value",
entity_type="entity_type_value",
feature_specs=feature_specs,
)
# Make the request
operation = client.import_feature_values(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.ImportFeatureValuesRequest, dict]):
The request object. Request message for
[FeaturestoreService.ImportFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.ImportFeatureValues].
entity_type (:class:`str`):
Required. The resource name of the EntityType grouping
the Features for which values are being imported.
Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}``
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.aiplatform_v1beta1.types.ImportFeatureValuesResponse`
Response message for
[FeaturestoreService.ImportFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.ImportFeatureValues].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([entity_type])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.ImportFeatureValuesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if entity_type is not None:
request.entity_type = entity_type
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.import_feature_values,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("entity_type", request.entity_type),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
featurestore_service.ImportFeatureValuesResponse,
metadata_type=featurestore_service.ImportFeatureValuesOperationMetadata,
)
# Done; return the response.
return response
async def batch_read_feature_values(
self,
request: Union[featurestore_service.BatchReadFeatureValuesRequest, dict] = None,
*,
featurestore: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Batch reads Feature values from a Featurestore.
This API enables batch reading Feature values, where
each read instance in the batch may read Feature values
of entities from one or more EntityTypes. Point-in-time
correctness is guaranteed for Feature values of each
read instance as of each instance's read timestamp.
.. code-block:: python
from google.cloud import aiplatform_v1beta1
def sample_batch_read_feature_values():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceClient()
# Initialize request argument(s)
csv_read_instances = aiplatform_v1beta1.CsvSource()
csv_read_instances.gcs_source.uris = ['uris_value_1', 'uris_value_2']
destination = aiplatform_v1beta1.FeatureValueDestination()
destination.bigquery_destination.output_uri = "output_uri_value"
entity_type_specs = aiplatform_v1beta1.EntityTypeSpec()
entity_type_specs.entity_type_id = "entity_type_id_value"
entity_type_specs.feature_selector.id_matcher.ids = ['ids_value_1', 'ids_value_2']
request = aiplatform_v1beta1.BatchReadFeatureValuesRequest(
csv_read_instances=csv_read_instances,
featurestore="featurestore_value",
destination=destination,
entity_type_specs=entity_type_specs,
)
# Make the request
operation = client.batch_read_feature_values(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.aiplatform_v1beta1.types.BatchReadFeatureValuesRequest, dict]):
The request object. Request message for
[FeaturestoreService.BatchReadFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.BatchReadFeatureValues].
featurestore (:class:`str`):
Required. The resource name of the Featurestore from
which to query Feature values. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
This corresponds to the ``featurestore`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.aiplatform_v1beta1.types.BatchReadFeatureValuesResponse`
Response message for
[FeaturestoreService.BatchReadFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.BatchReadFeatureValues].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([featurestore])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.BatchReadFeatureValuesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if featurestore is not None:
request.featurestore = featurestore
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_read_feature_values,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("featurestore", request.featurestore),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
featurestore_service.BatchReadFeatureValuesResponse,
metadata_type=featurestore_service.BatchReadFeatureValuesOperationMetadata,
)
# Done; return the response.
return response
    async def export_feature_values(
        self,
        request: Union[featurestore_service.ExportFeatureValuesRequest, dict] = None,
        *,
        entity_type: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Exports Feature values from all the entities of a
        target EntityType.
        .. code-block:: python
            from google.cloud import aiplatform_v1beta1
            def sample_export_feature_values():
                # Create a client
                client = aiplatform_v1beta1.FeaturestoreServiceClient()
                # Initialize request argument(s)
                destination = aiplatform_v1beta1.FeatureValueDestination()
                destination.bigquery_destination.output_uri = "output_uri_value"
                feature_selector = aiplatform_v1beta1.FeatureSelector()
                feature_selector.id_matcher.ids = ['ids_value_1', 'ids_value_2']
                request = aiplatform_v1beta1.ExportFeatureValuesRequest(
                    entity_type="entity_type_value",
                    destination=destination,
                    feature_selector=feature_selector,
                )
                # Make the request
                operation = client.export_feature_values(request=request)
                print("Waiting for operation to complete...")
                response = operation.result()
                # Handle the response
                print(response)
        Args:
            request (Union[google.cloud.aiplatform_v1beta1.types.ExportFeatureValuesRequest, dict]):
                The request object. Request message for
                [FeaturestoreService.ExportFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.ExportFeatureValues].
            entity_type (:class:`str`):
                Required. The resource name of the EntityType from which
                to export Feature values. Format:
                ``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
                This corresponds to the ``entity_type`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.
                The result type for the operation will be
                :class:`google.cloud.aiplatform_v1beta1.types.ExportFeatureValuesResponse`
                Response message for
                [FeaturestoreService.ExportFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.ExportFeatureValues].
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([entity_type])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # The constructor accepts either a dict or an existing request proto and
        # normalizes the input to the proto type.
        request = featurestore_service.ExportFeatureValuesRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if entity_type is not None:
            request.entity_type = entity_type
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # (default_timeout=None: no client-side deadline is applied unless the
        # caller passes an explicit ``timeout``.)
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.export_feature_values,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.
        # (the request's ``entity_type`` is attached as a gRPC request-routing
        # header entry)
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("entity_type", request.entity_type),)
            ),
        )
        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
        # Wrap the response in an operation future.
        # The response/metadata proto types let the future deserialize the
        # long-running operation's eventual result and progress metadata.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            featurestore_service.ExportFeatureValuesResponse,
            metadata_type=featurestore_service.ExportFeatureValuesOperationMetadata,
        )
        # Done; return the response.
        return response
    async def search_features(
        self,
        request: Union[featurestore_service.SearchFeaturesRequest, dict] = None,
        *,
        location: str = None,
        query: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.SearchFeaturesAsyncPager:
        r"""Searches Features matching a query in a given
        project.
        .. code-block:: python
            from google.cloud import aiplatform_v1beta1
            def sample_search_features():
                # Create a client
                client = aiplatform_v1beta1.FeaturestoreServiceClient()
                # Initialize request argument(s)
                request = aiplatform_v1beta1.SearchFeaturesRequest(
                    location="location_value",
                )
                # Make the request
                page_result = client.search_features(request=request)
                # Handle the response
                for response in page_result:
                    print(response)
        Args:
            request (Union[google.cloud.aiplatform_v1beta1.types.SearchFeaturesRequest, dict]):
                The request object. Request message for
                [FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.SearchFeatures].
            location (:class:`str`):
                Required. The resource name of the Location to search
                Features. Format:
                ``projects/{project}/locations/{location}``
                This corresponds to the ``location`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            query (:class:`str`):
                Query string that is a conjunction of field-restricted
                queries and/or field-restricted filters.
                Field-restricted queries and filters can be combined
                using ``AND`` to form a conjunction.
                A field query is in the form FIELD:QUERY. This
                implicitly checks if QUERY exists as a substring within
                Feature's FIELD. The QUERY and the FIELD are converted
                to a sequence of words (i.e. tokens) for comparison.
                This is done by:
                - Removing leading/trailing whitespace and tokenizing
                  the search value. Characters that are not one of
                  alphanumeric ``[a-zA-Z0-9]``, underscore ``_``, or
                  asterisk ``*`` are treated as delimiters for tokens.
                  ``*`` is treated as a wildcard that matches
                  characters within a token.
                - Ignoring case.
                - Prepending an asterisk to the first and appending an
                  asterisk to the last token in QUERY.
                A QUERY must be either a singular token or a phrase. A
                phrase is one or multiple words enclosed in double
                quotation marks ("). With phrases, the order of the
                words is important. Words in the phrase must be matching
                in order and consecutively.
                Supported FIELDs for field-restricted queries:
                - ``feature_id``
                - ``description``
                - ``entity_type_id``
                Examples:
                - ``feature_id: foo`` --> Matches a Feature with ID
                  containing the substring ``foo`` (eg. ``foo``,
                  ``foofeature``, ``barfoo``).
                - ``feature_id: foo*feature`` --> Matches a Feature
                  with ID containing the substring ``foo*feature`` (eg.
                  ``foobarfeature``).
                - ``feature_id: foo AND description: bar`` --> Matches
                  a Feature with ID containing the substring ``foo``
                  and description containing the substring ``bar``.
                Besides field queries, the following exact-match filters
                are supported. The exact-match filters do not support
                wildcards. Unlike field-restricted queries, exact-match
                filters are case-sensitive.
                - ``feature_id``: Supports = comparisons.
                - ``description``: Supports = comparisons. Multi-token
                  filters should be enclosed in quotes.
                - ``entity_type_id``: Supports = comparisons.
                - ``value_type``: Supports = and != comparisons.
                - ``labels``: Supports key-value equality as well as
                  key presence.
                - ``featurestore_id``: Supports = comparisons.
                Examples:
                - ``description = "foo bar"`` --> Any Feature with
                  description exactly equal to ``foo bar``
                - ``value_type = DOUBLE`` --> Features whose type is
                  DOUBLE.
                - ``labels.active = yes AND labels.env = prod`` -->
                  Features having both (active: yes) and (env: prod)
                  labels.
                - ``labels.env: *`` --> Any Feature which has a label
                  with ``env`` as the key.
                This corresponds to the ``query`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            google.cloud.aiplatform_v1beta1.services.featurestore_service.pagers.SearchFeaturesAsyncPager:
                Response message for
                [FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.SearchFeatures].
                Iterating over this object will yield results and
                resolve additional pages automatically.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([location, query])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # The constructor accepts either a dict or an existing request proto and
        # normalizes the input to the proto type.
        request = featurestore_service.SearchFeaturesRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if location is not None:
            request.location = location
        if query is not None:
            request.query = query
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.search_features,
            default_timeout=5.0,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.
        # (only ``location`` participates in request routing for this RPC)
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)),
        )
        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method.
        # Subsequent pages are fetched lazily using the same rpc/request/metadata.
        response = pagers.SearchFeaturesAsyncPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )
        # Done; return the response.
        return response
    async def __aenter__(self):
        # Support ``async with client: ...`` usage; no setup is needed on entry.
        return self

    async def __aexit__(self, exc_type, exc, tb):
        # Release the underlying transport when the async context exits.
        await self.transport.close()
try:
    # Report the installed google-cloud-aiplatform version in client telemetry.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-aiplatform",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Package metadata unavailable (e.g. vendored/source install); fall back
    # to a ClientInfo with default (unknown) version fields.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


__all__ = ("FeaturestoreServiceAsyncClient",)
| 40.9681 | 191 | 0.60961 |
d7230fc99e283369806439f44c31fbc31ff8b9d6 | 1,544 | py | Python | tre-0.7.5-win32/python/setup.py | s4ros/scalpel | 6726968dc48e41ca4ce5b6b2bc92cf69bc142333 | [
"Apache-2.0"
] | 501 | 2015-01-06T09:09:22.000Z | 2022-03-10T18:02:20.000Z | tre-0.7.5-win32/python/setup.py | s4ros/scalpel | 6726968dc48e41ca4ce5b6b2bc92cf69bc142333 | [
"Apache-2.0"
] | 37 | 2015-01-08T16:41:22.000Z | 2022-02-16T18:10:16.000Z | tre-0.7.5-win32/python/setup.py | s4ros/scalpel | 6726968dc48e41ca4ce5b6b2bc92cf69bc142333 | [
"Apache-2.0"
] | 109 | 2015-01-07T19:08:00.000Z | 2022-03-04T21:43:18.000Z | # setup.py - Builds and installs the TRE Python language bindings module
#
# Copyright (c) 2004-2006 Nikolai SAOUKH <nms+python@otdel-1.org>
#
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from distutils.core import setup, Extension
import sys
from glob import glob
from os.path import normpath
import re
def ospath(fl):
    """Normalize every path in *fl* for the host OS and return them as a list."""
    return list(map(normpath, fl))
VERSION = "0.7.5"
SOURCES = ["tre-python.c"]
INCDIRS = ospath(["..", "../lib"])
setup(
name = "tre",
version = VERSION,
description = "Python module for TRE",
author = "Nikolai SAOUKH",
author_email = "nms+python@otdel-1.org",
license = "LGPL",
url = "http://laurikari.net/tre/",
ext_modules = [
Extension(
"tre",
SOURCES,
include_dirs = INCDIRS,
define_macros = [("HAVE_CONFIG_H", None)],
libraries=["tre"]
),
],
)
| 29.132075 | 76 | 0.690415 |
0763814154dbaff9ef61bb298b5047634d8dc1bc | 197 | py | Python | projects/api/pagination.py | mrunal2504/unicode-website | a8151c3e6cad594a6518e9529c501aff3165efbb | [
"MIT"
] | 6 | 2018-01-02T13:01:44.000Z | 2020-05-08T11:46:39.000Z | projects/api/pagination.py | mrunal2504/unicode-website | a8151c3e6cad594a6518e9529c501aff3165efbb | [
"MIT"
] | 22 | 2018-12-24T19:10:44.000Z | 2022-02-10T10:58:26.000Z | projects/api/pagination.py | mrunal2504/unicode-website | a8151c3e6cad594a6518e9529c501aff3165efbb | [
"MIT"
] | 22 | 2018-12-26T02:52:57.000Z | 2020-10-01T19:09:56.000Z | from rest_framework.pagination import (
LimitOffsetPagination,
PageNumberPagination,
)
class ProjectLimitOffsetPagination(LimitOffsetPagination):
    """Limit/offset pagination policy for project list endpoints."""

    # page size used when the client omits the ``limit`` query parameter
    default_limit = 2
    # largest page size a client may request via ``limit``
    max_limit = 10
| 19.7 | 58 | 0.781726 |
1143c979dc452528e2170a52fec0814ebb3d1669 | 11,297 | py | Python | dynamics.py | saxenam06/Approximate-Dynamic-Programming | de613c10e087ae6b4a87a1730104c59442b33797 | [
"Apache-2.0"
] | 1 | 2021-12-28T04:25:36.000Z | 2021-12-28T04:25:36.000Z | dynamics.py | saxenam06/Approximate-Dynamic-Programming | de613c10e087ae6b4a87a1730104c59442b33797 | [
"Apache-2.0"
] | null | null | null | dynamics.py | saxenam06/Approximate-Dynamic-Programming | de613c10e087ae6b4a87a1730104c59442b33797 | [
"Apache-2.0"
] | null | null | null | from __future__ import print_function
import torch
import numpy as np
from config import DynamicsConfig
import matplotlib.pyplot as plt
import math
# NOTE(review): hand-written pi approximation; math.pi / np.pi are already
# importable in this module -- confirm whether this constant is used anywhere.
PI = 3.1415926
class VehicleDynamics(DynamicsConfig):
    """Batched bicycle-model vehicle dynamics in PyTorch.

    State rows have layout [y, u_lateral, psi, omega_r, x] (see
    _state_function). Vehicle/tire parameters (m, a, b, I_zz, u, ...) and
    the batch size come from DynamicsConfig.
    """

    def __init__(self):
        # Pre-allocate state buffers sized by the configured batch.
        # NOTE(review): self.BATCH_SIZE / self.DYNAMICS_DIM are read before
        # super().__init__() runs -- this relies on DynamicsConfig exposing
        # them as class attributes; confirm.
        self._state = torch.zeros([self.BATCH_SIZE, self.DYNAMICS_DIM])
        self.init_state = torch.zeros([self.BATCH_SIZE, self.DYNAMICS_DIM])
        self._reset_index = np.zeros([self.BATCH_SIZE, 1])
        self.initialize_state()
        super(VehicleDynamics, self).__init__()

    def initialize_state(self):
        """
        random initialization of state.
        Returns
        -------
        """
        # Gaussian perturbations around the reference for [y, v, psi, omega];
        # longitudinal positions are spread evenly over [0, pi].
        self.init_state[:, 0] = torch.normal(0.0, 0.6, [self.BATCH_SIZE,])
        self.init_state[:, 1] = torch.normal(0.0, 0.4, [self.BATCH_SIZE,])
        self.init_state[:, 2] = torch.normal(0.0, 0.15, [self.BATCH_SIZE,])
        self.init_state[:, 3] = torch.normal(0.0, 0.1, [self.BATCH_SIZE,])
        self.init_state[:, 4] = torch.linspace(0.0, np.pi, self.BATCH_SIZE)
        # Shift the sampled relative state onto the reference trajectory
        # (reference has 4 components; pad with a zero column for x).
        init_ref = self.reference_trajectory(self.init_state[:, 4])
        init_ref_all = torch.cat((init_ref, torch.zeros([self.BATCH_SIZE,1])),1)
        self._state = self.init_state
        init_state = self.init_state + init_ref_all
        return init_state

    def relative_state(self, state):
        # Convert absolute states to reference-relative states (x column dropped).
        x_ref = self.reference_trajectory(state[:, -1])
        state_r = state.detach().clone()[:, 0:4] - x_ref  # relative state  # TODO: update all relative-coordinate updates
        return state_r

    def _state_function(self, state, control):
        """
        State function of vehicle with Pacejka tire model, i.e. \dot(x)=f(x,u)
        Parameters
        ----------
        state: tensor   shape: [BATCH_SIZE, STATE_DIMENSION]
            current state
        control: tensor   shape: [BATCH_SIZE, ACTION_DIMENSION]
            input
        Returns
        -------
        deri_state.T: tensor shape: [BATCH_SIZE, ]
            f(x,u)
        F_y1: tensor shape: [BATCH_SIZE, ]
            front axle lateral force
        F_y2: tensor shape: [BATCH_SIZE, ]
            rear axle lateral force
        alpha_1: tensor shape: [BATCH_SIZE, ]
            front wheel slip angle
        alpha_2: tensor shape: [BATCH_SIZE, ]
            rear wheel slip angle
        """
        # state variable
        y = state[:, 0]  # lateral position
        u_lateral = state[:, 1]  # lateral speed
        beta = u_lateral / self.u  # sideslip angle (u_lateral / u, small-angle form)
        psi = state[:, 2]  # heading angle
        omega_r = state[:, 3]  # yaw rate
        x = state[:, 4]  # longitudinal position
        # inputs
        delta = control[:, 0]  # front wheel steering angle
        # NOTE(review): mutates the caller's control tensor to track gradients;
        # confirm this in-place requires_grad_ is intended.
        delta.requires_grad_(True)
        # slip angle of front and rear wheels
        alpha_1 = -delta + beta + self.a * omega_r / self.u
        alpha_2 = beta - self.b * omega_r / self.u
        # cornering force of front and rear angle, Pacejka tire model
        F_y1 = -self.D * torch.sin(self.C * torch.atan(self.B * alpha_1)) * self.F_z1
        F_y2 = -self.D * torch.sin(self.C * torch.atan(self.B * alpha_2)) * self.F_z2
        # derivative of state
        deri_y = self.u * torch.sin(psi) + u_lateral * torch.cos(psi)
        deri_u_lat = (torch.mul(F_y1, torch.cos(delta)) + F_y2) / (self.m) - self.u * omega_r
        deri_psi = omega_r
        deri_omega_r = (torch.mul(self.a * F_y1, torch.cos(delta)) - self.b * F_y2) / self.I_zz
        deri_x = self.u * torch.cos(psi) - u_lateral * torch.sin(psi)
        # stack derivatives as rows, then transpose back to batch-major
        deri_state = torch.cat((deri_y[np.newaxis, :],
                                deri_u_lat[np.newaxis, :],
                                deri_psi[np.newaxis, :],
                                deri_omega_r[np.newaxis, :],
                                deri_x[np.newaxis, :]), 0)
        return deri_state.T, F_y1, F_y2, alpha_1, alpha_2

    def _state_function_linear(self, state, control):
        """
        State function of vehicle with linear tire model and linear approximation, i.e. \dot(x) = Ax + Bu
        Parameters
        ----------
        state: tensor   shape: [BATCH_SIZE, STATE_DIMENSION]
            current state
        control: tensor   shape: [BATCH_SIZE, ACTION_DIMENSION]
            input
        Returns
        -------
        deri_state.T: tensor shape: [BATCH_SIZE, ]
            f(x,u)
        F_y1: tensor shape: [BATCH_SIZE, ]
            front axle lateral force
        F_y2: tensor shape: [BATCH_SIZE, ]
            rear axle lateral force
        alpha_1: tensor shape: [BATCH_SIZE, ]
            front wheel slip angle
        alpha_2: tensor shape: [BATCH_SIZE, ]
            rear wheel slip angle
        """
        # state variable
        y = state[:, 0]  # lateral position
        u_lateral = state[:, 1]  # lateral speed
        beta = u_lateral / self.u  # sideslip angle (u_lateral / u, small-angle form)
        psi = state[:, 2]  # heading angle
        omega_r = state[:, 3]  # yaw rate
        x = state[:, 4]  # longitudinal position
        # inputs
        delta = control[:, 0]  # front wheel steering angle
        delta.requires_grad_(True)
        # slip angle of front and rear wheels, with small angle approximation
        alpha_1 = -delta + beta + self.a * omega_r / self.u
        alpha_2 = beta - self.b * omega_r / self.u
        # cornering force of front and rear angle, linear tire model
        # (k1/k2 are the front/rear cornering stiffnesses from DynamicsConfig)
        F_y1 = - self.k1 * alpha_1
        F_y2 = - self.k2 * alpha_2
        # derivative of state
        # deri_y = self.u * psi + u_lateral
        deri_y = self.u * torch.sin(psi) + u_lateral * torch.cos(psi)
        deri_u_lat = (torch.mul(F_y1, torch.cos(delta)) + F_y2) / (self.m) - self.u * omega_r
        deri_psi = omega_r
        deri_omega_r = (torch.mul(self.a * F_y1, torch.cos(delta)) - self.b * F_y2) / self.I_zz
        deri_x = self.u * torch.cos(psi) - u_lateral * torch.sin(psi)
        deri_state = torch.cat((deri_y[np.newaxis, :],
                                deri_u_lat[np.newaxis, :],
                                deri_psi[np.newaxis, :],
                                deri_omega_r[np.newaxis, :],
                                deri_x[np.newaxis, :]), 0)
        return deri_state.T, F_y1, F_y2, alpha_1, alpha_2

    def reference_trajectory(self, state):
        """
        Parameters
        ----------
        state shape: [BATCH_SIZE,] longitudinal location x
        Returns
        -------
        state_ref.T: shape: [BATCH_SIZE, 4] reference trajectory
        """
        if self.reference_traj == 'SIN':
            # sinusoidal lane: y = a*sin(k*x), heading = atan(dy/dx)
            k = self.k_curve
            a = self.a_curve
            y_ref = a * torch.sin(k * state)
            psi_ref = torch.atan(a * k * torch.cos(k * state))
        elif self.reference_traj == 'DLC':
            # double lane change profile, periodic with period `cycle` meters
            # NOTE(review): `width` is unused here (3.5 is hard-coded below) -- confirm.
            width = 3.5
            line1 = 50
            straight = 50
            cycle = 3 * straight + 2 * line1
            x = state % cycle
            lane_position = torch.zeros([len(state), ])
            lane_angle = torch.zeros([len(state), ])
            # piecewise-linear lane position/heading per sample
            for i in range(len(state)):
                if x[i] <= 50:
                    lane_position[i] = 0
                    lane_angle[i] = 0
                elif 50 < x[i] and x[i] <= 90:
                    lane_position[i] = 3.5 / 40 * x[i] - 4.375
                    lane_angle[i] = np.arctan(3.5 / 40)
                elif 90 < x[i] and x[i] <= 140:
                    lane_position[i] = 3.5
                    lane_angle[i] = 0
                elif x[i] > 180:
                    lane_position[i] = 0
                    lane_angle[i] = 0
                elif 140 < x[i] and x[i] <= 180:
                    lane_position[i] = -3.5 / 40 * x[i] + 15.75
                    lane_angle[i] = -np.arctan(3.5 / 40)
                else:
                    lane_position[i] = 0.
                    lane_angle[i] = 0.
            y_ref = lane_position
            psi_ref = lane_angle
        # reference lateral speed and yaw rate are zero
        zeros = torch.zeros([len(state), ])
        state_ref = torch.cat((y_ref[np.newaxis, :],
                               zeros[np.newaxis, :],
                               psi_ref[np.newaxis, :],
                               zeros[np.newaxis, :]), 0)
        return state_ref.T

    def step(self, state, control):
        """
        step ahead with discrete state function, i.e. x'=f(x,u)
        Parameters
        ----------
        state: tensor   shape: [BATCH_SIZE, STATE_DIMENSION]
            current state
        control: tensor   shape: [BATCH_SIZE, ACTION_DIMENSION]
            current control signal
        Returns
        -------
        state_next: tensor  shape: [BATCH_SIZE, ]
            x'
        f_xu: tensor shape: [BATCH_SIZE, ]
            f(x,u)
        utility: tensor shape: [BATCH_SIZE, ]
            utility, i.e. l(x,u)
        F_y1: tensor shape: [BATCH_SIZE, ]
            front axle lateral force
        F_y2: tensor shape: [BATCH_SIZE, ]
            rear axle lateral force
        alpha_1: tensor shape: [BATCH_SIZE, ]
            front wheel slip angle
        alpha_2: tensor shape: [BATCH_SIZE, ]
            rear wheel slip angle
        """
        # choose Pacejka or linear tire model per config
        if self.nonlinearity:
            deri_state, F_y1, F_y2, alpha_1, alpha_2 = self._state_function(state, control)
        else:
            deri_state, F_y1, F_y2, alpha_1, alpha_2 = self._state_function_linear(state, control)
        # forward-Euler integration with step size Ts
        state_next = state + self.Ts * deri_state
        utility = self.utility(state, control)
        # f(x,u) without the longitudinal-position derivative
        f_xu = deri_state[:, 0:4]
        return state_next, f_xu, utility, F_y1, F_y2, alpha_1, alpha_2

    def step_relative(self, state, u):
        """
        Parameters
        ----------
        state_r
        u_r
        Returns
        -------
        """
        x_ref = self.reference_trajectory(state[:, -1])
        state_r = state.detach().clone()  # relative state
        state_r[:, 0:4] = state_r[:, 0:4] - x_ref
        # step both the absolute and the relative state
        state_next, deri_state, utility, F_y1, F_y2, alpha_1, alpha_2 = self.step(state, u)
        state_r_next_bias, _, _, _, _, _, _ = self.step(state_r, u)  # update by relative value
        state_r_next = state_r_next_bias.detach().clone()
        state_r_next_bias[:, [0, 2]] = state_next[:, [0, 2]]  # y psi with reference update by absolute value
        x_ref_next = self.reference_trajectory(state_next[:, -1])
        state_r_next[:, 0:4] = state_r_next_bias[:, 0:4] - x_ref_next
        # NOTE(review): this utility is computed but never returned or stored --
        # confirm whether it is dead code.
        utility = self.utility(state_r_next, u)
        return state_next.clone().detach(), state_r_next.clone().detach()

    @staticmethod
    def utility(state, control):
        """
        Parameters
        ----------
        state: tensor   shape: [BATCH_SIZE, STATE_DIMENSION]
            current state
        control: tensor   shape: [BATCH_SIZE, ACTION_DIMENSION]
            current control signal
        Returns
        -------
        utility: tensor   shape: [BATCH_SIZE, ]
            utility, i.e. l(x,u)
        """
        # quadratic cost on lateral offset, heading error and steering effort
        utility = 0.5 * (10 * torch.pow(state[:, 0], 2) + 5 * torch.pow(state[:, 2], 2) + 5 * torch.pow(control[:, 0], 2))
        return utility
| 37.782609 | 122 | 0.522705 |
30b01d9eb4a4c9bef88d9ea8f2356ba276eacdc1 | 8,515 | py | Python | globus_cli/config.py | jaswilli/globus-cli | 834bb9b7594a5996be81dfbbe4461e66ec06cbe6 | [
"Apache-2.0"
] | null | null | null | globus_cli/config.py | jaswilli/globus-cli | 834bb9b7594a5996be81dfbbe4461e66ec06cbe6 | [
"Apache-2.0"
] | null | null | null | globus_cli/config.py | jaswilli/globus-cli | 834bb9b7594a5996be81dfbbe4461e66ec06cbe6 | [
"Apache-2.0"
] | null | null | null | import logging.config
import os
import globus_sdk
from configobj import ConfigObj
# Public API of this module plus the config option-name constants.
__all__ = [
    # option name constants
    "OUTPUT_FORMAT_OPTNAME",
    "MYPROXY_USERNAME_OPTNAME",
    "AUTH_RT_OPTNAME",
    "AUTH_AT_OPTNAME",
    "AUTH_AT_EXPIRES_OPTNAME",
    "TRANSFER_RT_OPTNAME",
    "TRANSFER_AT_OPTNAME",
    # fixed: this entry was a duplicate of "AUTH_AT_EXPIRES_OPTNAME", which
    # silently left the transfer expiry option out of the exported API
    "TRANSFER_AT_EXPIRES_OPTNAME",
    "CLIENT_ID_OPTNAME",
    "CLIENT_SECRET_OPTNAME",
    "GLOBUS_ENV",
    "internal_native_client",
    "internal_auth_client",
    "get_output_format",
    "get_auth_tokens",
    "get_transfer_tokens",
    "get_config_obj",
    "write_option",
    "remove_option",
    "lookup_option",
]


# constants for use whenever we need to do things using
# instance clients from the CLI Native App Template
# primarily accessed via `internal_auth_client()`
CLIENT_ID_OPTNAME = "client_id"
CLIENT_SECRET_OPTNAME = "client_secret"
TEMPLATE_ID_OPTNAME = "template_id"
DEFAULT_TEMPLATE_ID = "95fdeba8-fac2-42bd-a357-e068d82ff78e"

# constants for global use
OUTPUT_FORMAT_OPTNAME = "output_format"
MYPROXY_USERNAME_OPTNAME = "default_myproxy_username"
AUTH_RT_OPTNAME = "auth_refresh_token"
AUTH_AT_OPTNAME = "auth_access_token"
AUTH_AT_EXPIRES_OPTNAME = "auth_access_token_expires"
TRANSFER_RT_OPTNAME = "transfer_refresh_token"
TRANSFER_AT_OPTNAME = "transfer_access_token"
TRANSFER_AT_EXPIRES_OPTNAME = "transfer_access_token_expires"

# get the environment from env var (not exported)
GLOBUS_ENV = os.environ.get("GLOBUS_SDK_ENVIRONMENT")

# if the env is set, rewrite the option names to have it as a prefix
if GLOBUS_ENV:
    AUTH_RT_OPTNAME = "{0}_auth_refresh_token".format(GLOBUS_ENV)
    AUTH_AT_OPTNAME = "{0}_auth_access_token".format(GLOBUS_ENV)
    AUTH_AT_EXPIRES_OPTNAME = "{0}_auth_access_token_expires".format(GLOBUS_ENV)
    TRANSFER_RT_OPTNAME = "{0}_transfer_refresh_token".format(GLOBUS_ENV)
    TRANSFER_AT_OPTNAME = "{0}_transfer_access_token".format(GLOBUS_ENV)
    TRANSFER_AT_EXPIRES_OPTNAME = "{0}_transfer_access_token_expires".format(GLOBUS_ENV)
    CLIENT_ID_OPTNAME = "{0}_client_id".format(GLOBUS_ENV)
    CLIENT_SECRET_OPTNAME = "{0}_client_secret".format(GLOBUS_ENV)
    TEMPLATE_ID_OPTNAME = "{0}_template_id".format(GLOBUS_ENV)
    # per-environment template client IDs; unknown envs fall back to production
    DEFAULT_TEMPLATE_ID = {
        "sandbox": "33b6a241-bce4-4359-9c6d-09f88b3c9eef",
        "integration": "e0c31fd1-663b-44e1-840f-f4304bb9ee7a",
        "test": "0ebfd058-452f-40c3-babf-5a6b16a7b337",
        "staging": "3029c3cb-c8d9-4f2b-979c-c53330aa7327",
        "preview": "b2867dbb-0846-4579-8486-dc70763d700b",
    }.get(GLOBUS_ENV, DEFAULT_TEMPLATE_ID)
def get_config_obj(system=False, file_error=False):
    """Load the Globus config file as a ConfigObj.

    Reads /etc/globus.cfg when ``system`` is True, otherwise ~/.globus.cfg.
    Also scrubs stale ``whoami_identity_*`` values from the ``cli`` section
    and writes the cleaned file back to disk.
    """
    if system:
        path = "/etc/globus.cfg"
    else:
        path = os.path.expanduser("~/.globus.cfg")
    conf = ConfigObj(path, encoding="utf-8", file_error=file_error)

    # delete any old whoami values in the cli section
    # snapshot the keys first: deleting from the section while iterating
    # over it raises RuntimeError ("dictionary changed size during iteration")
    stale_keys = [key for key in conf.get("cli", {}) if "whoami_identity_" in key]
    for key in stale_keys:
        del conf["cli"][key]
    conf.write()

    return conf
def lookup_option(option, section="cli", environment=None):
    """Return a single config value, or None when the section/option is absent.

    When ``environment`` is given, the value is read from the
    ``environment <name>`` section instead of ``section``.
    """
    conf = get_config_obj()
    section_name = "environment " + environment if environment else section
    try:
        return conf[section_name][option]
    except KeyError:
        return None
def remove_option(option, section="cli", system=False):
    """Delete ``option`` from ``section`` and return its previous value.

    Returns None (and writes nothing) when the section or the option
    does not exist.
    """
    conf = get_config_obj(system=system)

    # no such section -> nothing to remove
    try:
        target = conf[section]
    except KeyError:
        return None

    # no such option -> nothing to remove, and no write needed
    if option not in target:
        return None

    old_value = target[option]
    del target[option]
    # flush the deletion to disk only when something actually changed
    conf.write()
    return old_value
def write_option(option, value, section="cli", system=False):
    """
    Write an option to disk -- doesn't handle config reloading
    """
    # deny rwx to Group and World so the written config stays private to the
    # user; done on every call for consistency, and the previous umask is
    # deliberately not restored (the CLI never needs it back)
    os.umask(0o077)

    # FIXME: DRY violation with config_commands.helpers
    conf = get_config_obj(system=system)

    # create the target section on first use
    if section not in conf:
        conf[section] = {}

    conf[section][option] = value
    conf.write()
def get_output_format():
    """Return the configured CLI output format, or None if unset."""
    return lookup_option(OUTPUT_FORMAT_OPTNAME)
def get_auth_tokens():
    """Return the stored Globus Auth tokens as a dict.

    The expiry timestamp is coerced to int when present; missing values
    come back as None.
    """
    expires_at = lookup_option(AUTH_AT_EXPIRES_OPTNAME)
    return {
        "refresh_token": lookup_option(AUTH_RT_OPTNAME),
        "access_token": lookup_option(AUTH_AT_OPTNAME),
        "access_token_expires": int(expires_at) if expires_at is not None else None,
    }
def set_auth_tokens(access_token, refresh_token, expires_at):
    """Persist Auth access/refresh tokens and the access-token expiry."""
    write_option(AUTH_AT_OPTNAME, access_token)
    write_option(AUTH_RT_OPTNAME, refresh_token)
    write_option(AUTH_AT_EXPIRES_OPTNAME, expires_at)
def get_transfer_tokens():
    """Return the stored Globus Transfer tokens as a dict.

    The expiry timestamp is coerced to int when present; missing values
    come back as None.
    """
    expires_at = lookup_option(TRANSFER_AT_EXPIRES_OPTNAME)
    return {
        "refresh_token": lookup_option(TRANSFER_RT_OPTNAME),
        "access_token": lookup_option(TRANSFER_AT_OPTNAME),
        "access_token_expires": int(expires_at) if expires_at is not None else None,
    }
def set_transfer_tokens(access_token, refresh_token, expires_at):
    """Persist Transfer access/refresh tokens and the access-token expiry."""
    write_option(TRANSFER_AT_OPTNAME, access_token)
    write_option(TRANSFER_RT_OPTNAME, refresh_token)
    write_option(TRANSFER_AT_EXPIRES_OPTNAME, expires_at)
def internal_native_client():
    """Return a NativeAppAuthClient for the CLI's template client ID.

    The ID comes from config when set, otherwise the (env-specific) default.
    """
    template_id = lookup_option(TEMPLATE_ID_OPTNAME) or DEFAULT_TEMPLATE_ID
    return globus_sdk.NativeAppAuthClient(template_id)
def internal_auth_client(requires_instance=False, force_new_client=False):
    """
    Looks up the values for this CLI's Instance Client in config
    If none exists and requires_instance is True or force_new_client is True,
    registers a new Instance Client with Globus Auth
    If none exists and requires_instance is false, defaults to a Native Client
    for backwards compatibility
    Returns either a NativeAppAuthClient or a ConfidentialAppAuthClient
    """
    client_id = lookup_option(CLIENT_ID_OPTNAME)
    client_secret = lookup_option(CLIENT_SECRET_OPTNAME)
    template_id = lookup_option(TEMPLATE_ID_OPTNAME) or DEFAULT_TEMPLATE_ID
    template_client = internal_native_client()
    # truthy only when BOTH id and secret are stored
    existing = client_id and client_secret
    # if we are forcing a new client, delete any existing client
    if force_new_client and existing:
        existing_client = globus_sdk.ConfidentialAppAuthClient(client_id, client_secret)
        try:
            existing_client.delete("/v2/api/clients/{}".format(client_id))
        # if the client secret has been invalidated or the client has
        # already been removed, we continue on
        except globus_sdk.exc.AuthAPIError:
            pass
    # if we require a new client to be made
    if force_new_client or (requires_instance and not existing):
        # register a new instance client with auth
        body = {"client": {"template_id": template_id, "name": "Globus CLI"}}
        res = template_client.post("/v2/api/clients", json_body=body)
        # get values and write to config so later calls reuse this client
        credential_data = res["included"]["client_credential"]
        client_id = credential_data["client"]
        client_secret = credential_data["secret"]
        write_option(CLIENT_ID_OPTNAME, client_id)
        write_option(CLIENT_SECRET_OPTNAME, client_secret)
        return globus_sdk.ConfidentialAppAuthClient(
            client_id, client_secret, app_name="Globus CLI"
        )
    # if we already have a client, just return it
    elif existing:
        return globus_sdk.ConfidentialAppAuthClient(
            client_id, client_secret, app_name="Globus CLI"
        )
    # fall-back to a native client to not break old logins
    # TODO: eventually remove this behavior
    else:
        return template_client
def setup_logging(level="DEBUG"):
conf = {
"version": 1,
"formatters": {
"basic": {"format": "[%(levelname)s] %(name)s::%(funcName)s() %(message)s"}
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": level,
"formatter": "basic",
}
},
"loggers": {"globus_sdk": {"level": level, "handlers": ["console"]}},
}
logging.config.dictConfig(conf)
| 32.253788 | 88 | 0.700881 |
4f8ac5e4cd6e4f44c934f1a37bc63814365fd678 | 2,467 | py | Python | var/spack/repos/builtin/packages/r-delayedmatrixstats/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2,360 | 2017-11-06T08:47:01.000Z | 2022-03-31T14:45:33.000Z | var/spack/repos/builtin/packages/r-delayedmatrixstats/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 13,838 | 2017-11-04T07:49:45.000Z | 2022-03-31T23:38:39.000Z | var/spack/repos/builtin/packages/r-delayedmatrixstats/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1,793 | 2017-11-04T07:45:50.000Z | 2022-03-30T14:31:53.000Z | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RDelayedmatrixstats(RPackage):
    """Functions that Apply to Rows and Columns of 'DelayedMatrix' Objects
    A port of the 'matrixStats' API for use with DelayedMatrix objects from
    the 'DelayedArray' package. High-performing functions operating on rows
    and columns of DelayedMatrix objects, e.g. col / rowMedians(), col /
    rowRanks(), and col / rowSds(). Functions optimized per data type and
    for subsetted calculations such that both memory usage and processing
    time is minimized."""

    homepage = "https://github.com/PeteHaitch/DelayedMatrixStats"
    # sources come from the Bioconductor git mirror rather than CRAN
    git = "https://git.bioconductor.org/packages/DelayedMatrixStats.git"

    # each release is pinned to an exact upstream commit
    version('1.12.3', commit='2b3091dfa9b3bab914e3a4157182063714ba86ae')
    version('1.6.1', commit='4378d1898a403305a94b122c4f36d1215fa7708d')
    version('1.4.0', commit='eb5b390ef99651fe87a346848f807de95afe8971')
    version('1.2.0', commit='de868e730be6280dfad41a280ab09f4d3083c9ac')
    version('1.0.3', commit='e29a3444980ff727c5b12286884b06dfaebf5b5b')

    # R package dependencies; `when=` gates version-specific minimums
    depends_on('r-matrixgenerics', when='@1.12.2:', type=('build', 'run'))
    depends_on('r-delayedarray', type=('build', 'run'))
    depends_on('r-delayedarray@0.5.27:', when='@1.2.0:', type=('build', 'run'))
    depends_on('r-delayedarray@0.7.37:', when='@1.4.0:', type=('build', 'run'))
    depends_on('r-delayedarray@0.9.8:', when='@1.6.1:', type=('build', 'run'))
    depends_on('r-delayedarray@0.15.3:', when='@1.12.2:', type=('build', 'run'))
    depends_on('r-matrixstats@0.53.1:', type=('build', 'run'))
    depends_on('r-matrixstats@0.55.0:', when='@1.6.1:', type=('build', 'run'))
    depends_on('r-matrixstats@0.56.0:', when='@1.12.2:', type=('build', 'run'))
    depends_on('r-sparsematrixstats', when='@1.12.2:', type=('build', 'run'))
    depends_on('r-matrix', type=('build', 'run'))
    depends_on('r-s4vectors', type=('build', 'run'))
    depends_on('r-s4vectors@0.17.5:', when='@1.2.0:', type=('build', 'run'))
    depends_on('r-iranges', type=('build', 'run'))
    depends_on('r-hdf5array@1.7.10:', when='@1.4.0:', type=('build', 'run'))
    depends_on('r-hdf5array@1.17.2:', when='@1.12.2:', type=('build', 'run'))
    depends_on('r-biocparallel', when='@1.4.0:', type=('build', 'run'))
4e44a6bcf68865d2d53a1504f65a36d02e0a012c | 1,368 | py | Python | build/fbcode_builder_config.py | autoantwort/proxygen | 1cedfc101966163f647241b8c2564d55e4f31454 | [
"BSD-3-Clause"
] | 5,852 | 2015-01-01T06:12:49.000Z | 2022-03-31T07:28:30.000Z | build/fbcode_builder_config.py | autoantwort/proxygen | 1cedfc101966163f647241b8c2564d55e4f31454 | [
"BSD-3-Clause"
] | 345 | 2015-01-02T22:15:43.000Z | 2022-03-28T23:33:28.000Z | build/fbcode_builder_config.py | autoantwort/proxygen | 1cedfc101966163f647241b8c2564d55e4f31454 | [
"BSD-3-Clause"
] | 1,485 | 2015-01-04T14:39:26.000Z | 2022-03-22T02:32:08.000Z | #!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import, division, print_function, unicode_literals
import specs.fizz as fizz
import specs.fmt as fmt
import specs.folly as folly
import specs.gmock as gmock
import specs.mvfst as mvfst
import specs.proxygen_quic as proxygen_quic
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
"fbcode_builder steps to build & test Proxygen"
def fbcode_builder_spec(builder):
    """Return the fbcode_builder spec: Proxygen's dependencies plus its test step."""
    # Everything Proxygen links against must be built before it.
    dependencies = [
        gmock,
        fmt,
        folly,
        wangle,
        fizz,
        sodium,
        zstd,
        mvfst,
        proxygen_quic,
    ]
    # Tests for the full build with no QUIC/HTTP3.
    # Proxygen is the last step, so we are still in its working dir.
    test_command = builder.run(ShellQuoted("env CTEST_OUTPUT_ON_FAILURE=1 make test"))
    test_step = builder.step("Run proxygen tests", [test_command])
    return {
        "depends_on": dependencies,
        "steps": [test_step],
    }
# Entry point consumed by the fbcode_builder framework: maps the GitHub
# project to the spec function that builds and tests it.
config = {
    "github_project": "facebook/proxygen",
    "fbcode_builder_spec": fbcode_builder_spec,
}
| 25.811321 | 86 | 0.640351 |
33e42e1fcb343699c2a27331b7786ec61a8eb7d9 | 1,914 | py | Python | python/houseOfCards.py | cl33per/java-baby | 05febea73f9ea78c6d3f88b066a14542fbcdb163 | [
"MIT"
] | null | null | null | python/houseOfCards.py | cl33per/java-baby | 05febea73f9ea78c6d3f88b066a14542fbcdb163 | [
"MIT"
] | null | null | null | python/houseOfCards.py | cl33per/java-baby | 05febea73f9ea78c6d3f88b066a14542fbcdb163 | [
"MIT"
] | null | null | null | import random
class Card:
    """A single playing card with a suit and a face value."""

    def __init__(self, suit, val):
        self.suit = suit
        self.value = val

    def show(self, name):
        """Print which player (`name`) holds this card."""
        print("{} has {} of {}".format(name, self.value, self.suit))


class Deck:
    """A standard 52-card deck, built on construction."""

    def __init__(self):
        self.cards = []
        self.build()

    def build(self):
        """Populate the deck with all suit/value combinations.

        BUG FIX: the original value list omitted "10", producing a
        48-card deck instead of the standard 52.
        """
        for s in ["Spades", "Clubs", "Hearts", "Diamonds"]:
            for v in ["2", "3", "4", "5", "6", "7", "8", "9", "10",
                      "J", "Q", "K", "A"]:
                self.cards.append(Card(s, v))

    def show(self):
        """Print every card remaining in the deck.

        BUG FIX: the original called c.show() with no argument, but
        Card.show() requires a holder name, so this always raised
        TypeError.  Print the card directly instead.
        """
        for c in self.cards:
            print("{} of {}".format(c.value, c.suit))

    def shuffle(self):
        """Shuffle in place (explicit Fisher-Yates; random.shuffle is equivalent)."""
        for i in range(len(self.cards) - 1, 0, -1):
            r = random.randint(0, i)
            self.cards[i], self.cards[r] = self.cards[r], self.cards[i]

    def drawCard(self):
        """Remove and return the top card of the deck."""
        return self.cards.pop()


class Player:
    """A named player holding a hand of cards."""

    def __init__(self, name):
        self.name = name
        self.hand = []

    def draw(self, deck):
        """Draw one card from `deck` into this hand; returns self for chaining."""
        self.hand.append(deck.drawCard())
        return self

    def showHand(self):
        """Print every card in this player's hand."""
        for card in self.hand:
            card.show(self.name)


class Game:
    """Interactive demo game: each player draws and shows one card."""

    # Class-level deck shared by all Game instances, shuffled once at
    # class-definition time.
    deck = Deck()
    deck.shuffle()

    def playGame(self):
        """Prompt for players, deal one card each, and show the hands."""
        numberOfPlayers = int(input("Number of Players?: "))
        playersCounter = 0
        playerList = []
        while playersCounter < numberOfPlayers:
            # BUG FIX: raw_input() is Python 2 only and raises NameError on
            # Python 3; the rest of this file already uses input()/print().
            name = input("Enter Player Name: ")
            playerList.append(Player(name))
            playersCounter = playersCounter + 1
        for players in playerList:
            players.draw(self.deck)
            players.showHand()

    def compareCards(self):
        """Placeholder demo: prints the numbers 1 through 5."""
        a = [1, 2, 3, 4, 5]
        for x in range(len(a)):
            print(a[x])


if __name__ == "__main__":
    # Only start an interactive game when run as a script; importing this
    # module previously blocked on input() immediately.
    game = Game()
    game.playGame()
941440509694143cedcf54c296cc4058914e6413 | 5,036 | py | Python | tests/test_preprocess.py | dsapandora/DeepForest | 3f9313c218f8a733b3a151c84e01afc8e732f9da | [
"MIT"
] | null | null | null | tests/test_preprocess.py | dsapandora/DeepForest | 3f9313c218f8a733b3a151c84e01afc8e732f9da | [
"MIT"
] | null | null | null | tests/test_preprocess.py | dsapandora/DeepForest | 3f9313c218f8a733b3a151c84e01afc8e732f9da | [
"MIT"
] | 2 | 2020-09-29T22:04:50.000Z | 2022-03-24T16:26:48.000Z | # test preprocessing
import glob
import os
import numpy as np
import pandas as pd
import pytest
from PIL import Image
from deepforest import get_data
from deepforest import preprocess
from deepforest import utilities
@pytest.fixture(scope="module")
def config():
    """Module-scoped test config: patch settings, data paths, and a fresh
    annotations CSV regenerated from the reference XML.

    BUG FIX: pytest.fixture("module") passed the scope positionally, which
    modern pytest rejects (positional arguments to fixture() were deprecated
    and then removed); the scope must be given as a keyword.
    """
    config = utilities.read_config(get_data("deepforest_config.yml"))
    config["patch_size"] = 200
    config["patch_overlap"] = 0.25
    config["annotations_xml"] = get_data("OSBS_029.xml")
    config["rgb_dir"] = "data"
    config["annotations_file"] = "tests/data/OSBS_029.csv"
    config["path_to_raster"] = get_data("OSBS_029.tif")

    # Create a clean config test data
    annotations = utilities.xml_to_annotations(xml_path=config["annotations_xml"])
    annotations.to_csv("tests/data/OSBS_029.csv", index=False)

    return config
@pytest.fixture()
def numpy_image(config):
    """Load the test raster referenced by the config as a numpy array."""
    image = Image.open(config["path_to_raster"])
    return np.array(image)
def test_compute_windows(config, numpy_image):
    """A 200px patch with 25% overlap tiles the test raster into 9 windows."""
    tile_windows = preprocess.compute_windows(
        numpy_image, config["patch_size"], config["patch_overlap"])
    assert len(tile_windows) == 9
def test_select_annotations(config, numpy_image):
    """Annotations clipped to window 7 keep 17 boxes and point at the crop image."""
    tile_windows = preprocess.compute_windows(
        numpy_image, config["patch_size"], config["patch_overlap"])
    all_annotations = pd.read_csv("tests/data/OSBS_029.csv")
    selected = preprocess.select_annotations(all_annotations, tile_windows, index=7)

    # 17 boxes fall within this window
    assert selected.shape[0] == 17
    # crops are renamed <raster stem>_<window index>.png
    assert selected.image_path.unique()[0] == "OSBS_029_7.png"
def test_select_annotations_tile(config, numpy_image):
    """With a small 50px patch, clipped boxes must stay inside the window bounds."""
    config["patch_size"] = 50
    tile_windows = preprocess.compute_windows(
        numpy_image, config["patch_size"], config["patch_overlap"])
    all_annotations = pd.read_csv("tests/data/OSBS_029.csv")
    selected = preprocess.select_annotations(all_annotations, tile_windows, index=10)

    # No box may extend past any edge of the window
    assert selected.xmin.min() >= 0
    assert selected.ymin.min() >= 0
    assert selected.xmax.max() <= config["patch_size"]
    assert selected.ymax.max() <= config["patch_size"]
def test_split_raster(config):
    """split_raster returns a 6-column annotations dataframe for the crops."""
    result = preprocess.split_raster(config["path_to_raster"],
                                     config["annotations_file"],
                                     "tests/data/",
                                     config["patch_size"],
                                     config["patch_overlap"])
    # image_path, xmin, ymin, xmax, ymax, label
    assert result.shape[1] == 6
def test_split_raster_empty(config):
    """Blank annotation rows are rejected with allow_empty=False and written
    through with allow_empty=True.

    BUG FIX: the original placed its post-raise assertions *inside* the
    pytest.raises block, after the call that raises; that code was
    unreachable, and the shape check referenced a variable that was never
    assigned.  The reachable check is that no crop file was produced.
    """
    # Clean output folder
    for f in glob.glob("tests/output/empty/*"):
        os.remove(f)

    # Annotations file containing a single blank row
    blank_annotations = pd.DataFrame({
        "image_path": "OSBS_029.tif",
        "xmin": [""],
        "ymin": [""],
        "xmax": [""],
        "ymax": [""],
        "label": [""]
    })
    blank_annotations.to_csv("tests/data/blank_annotations.csv", index=False)

    # allow_empty=False must raise before any crop is written
    with pytest.raises(ValueError):
        preprocess.split_raster(config["path_to_raster"],
                                "tests/data/blank_annotations.csv",
                                "tests/output/empty/",
                                config["patch_size"],
                                config["patch_overlap"],
                                allow_empty=False)
    assert not os.path.exists("tests/output/empty/OSBS_029_1.png")

    # allow_empty=True keeps the blank rows and writes the crops
    annotations_file = preprocess.split_raster(config["path_to_raster"],
                                               "tests/data/blank_annotations.csv",
                                               "tests/output/empty/",
                                               config["patch_size"],
                                               config["patch_overlap"],
                                               allow_empty=True)
    assert annotations_file.shape[0] > 0
    assert os.path.exists("tests/output/empty/OSBS_029_1.png")
def test_split_size_error(config):
    """A patch size larger than the raster itself must raise ValueError."""
    with pytest.raises(ValueError):
        preprocess.split_raster(config["path_to_raster"],
                                config["annotations_file"],
                                "tests/data/", 2000,
                                config["patch_overlap"])
| 39.03876 | 89 | 0.560365 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.