text string | size int64 | token_count int64 |
|---|---|---|
import pdb
def get_empty_graph(message):
    """Build a Plotly figure dict that hides both axes and shows *message*
    as a single large, centered annotation (used as an "empty chart"
    placeholder).

    Parameters
    ----------
    message : str
        Text to display in place of the missing plot.

    Returns
    -------
    dict
        Figure specification containing only a ``layout`` key (no traces).
    """
    hidden_axis = {"visible": False}
    placeholder_note = {
        "text": message,
        "xref": "paper",
        "yref": "paper",
        "showarrow": False,
        "font": {"size": 28},
    }
    return {
        "layout": {
            # Separate dicts per axis so callers can mutate one independently.
            "xaxis": dict(hidden_axis),
            "yaxis": dict(hidden_axis),
            "annotations": [placeholder_note],
        }
    }
| 497 | 140 |
import sys
from Bio import SeqIO
import re
import numpy as np
def split_genome(genome="ATCGATATACCA", k=3):
    """Chop *genome* into consecutive, non-overlapping k-mers.

    A trailing fragment shorter than *k* characters is discarded, which is
    the natural behavior of ``re.findall`` with a fixed-length pattern.
    """
    kmer_pattern = '.' * k
    return re.findall(kmer_pattern, genome)
def genearte_one_genome(genome='ATCGATATACCA', k=3):
    """Turn one genome string into a "sentence": a list of k-mer words.

    NOTE(review): the name is misspelled ("genearte"), but it is kept
    because callers in this file use this exact spelling.
    """
    return split_genome(genome=genome, k=k)
def fasta2kmers(fasta_file, kmer, out_file):
    '''
    Convert a fasta file into a word/sentence file.

    For each record in *fasta_file*, the upper-cased sequence is split into
    non-overlapping k-mers and written as one space-separated line to
    ``<out_file>.sentences``; the record description and its k-mer count are
    written tab-separated to ``<out_file>.headers``.

    Parameters
    ----------
    fasta_file : str
        Path to the input FASTA file.
    kmer : int
        k-mer (word) length.
    out_file : str
        Prefix for the two output files.
    '''
    # Context managers guarantee both handles are flushed and closed even if
    # parsing raises part-way through (the original leaked both file objects).
    with open(out_file + '.sentences', 'w') as fo, \
            open(out_file + '.headers', 'w') as fo2:
        # traverse the fasta file
        for record in SeqIO.parse(fasta_file, 'fasta'):
            _genome = str(record.seq).upper()
            sentences = genearte_one_genome(genome=_genome, k=kmer)
            fo.write(" ".join(sentences) + '\n')
            fo2.write(record.description + "\t" + str(len(sentences)) + '\n')
| 808 | 309 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------------------
# HOW TO USE THIS SOLVER:
# -------------------------------------------------------------------------------
#
# Compile the C++ code setCoversa.cpp into an executable setCoversa
# Linux: g++ setCoversa.cpp -O3 -o setCoversa
# Run this accompanying solver.py script
# python solver.py ./data/sc_25_0
#
# If you want the solver to think longer, you can increase MAXREPS in SimAnn()
import os
from subprocess import Popen, PIPE
def solve_it(input_data):
    """Solve one set-cover instance by shelling out to the C++ solver.

    Writes *input_data* to a temporary file (``tmp.data`` in the working
    directory, which the solver reads), runs the ``setCoversa`` executable,
    removes the temporary file and returns the solver's stdout stripped of
    surrounding whitespace.
    """
    # Write the inputData to a temporary file; 'with' guarantees the handle
    # is closed (and flushed) before the solver reads it, even on error.
    tmp_file_name = 'tmp.data'
    with open(tmp_file_name, 'w') as tmp_file:
        tmp_file.write(input_data)
    # Run the external C++ solver (see the build instructions in the header).
    # NOTE(review): 'stderr' below is always None because stderr=PIPE was
    # never passed to Popen; only stdout is captured.  Also, 'setCoversa'
    # must be on PATH -- the header suggests './setCoversa' may be intended.
    process = Popen(['setCoversa'], stdout=PIPE)
    (stdout, stderr) = process.communicate()
    # Remove the temporary file.
    os.remove(tmp_file_name)
    return stdout.strip()
import sys
if __name__ == '__main__':
    # Command-line entry point: expects the path of one instance file,
    # e.g.  python solver.py ./data/sc_25_0
    # NOTE: Python 2 print statements -- this script targets Python 2.
    if len(sys.argv) > 1:
        file_location = sys.argv[1].strip()
        input_data_file = open(file_location, 'r')
        input_data = ''.join(input_data_file.readlines())
        input_data_file.close()
        print 'Solving:', file_location
        print solve_it(input_data)
    else:
        print 'This test requires an input file. Please select one from the data directory. (i.e. python solver.py ./data/sc_6_1)'
| 1,466 | 483 |
# import ctypes
#
# from PIL import ImageTk, Image
# import cv2
# import tkinter as tk
# import music_control
# import speed_detection
# import threading
#
# class MainWindow(tk.Frame):
# def __init__(self, master=None):
# super().__init__(master)
# self.newMaster = master
# self.master = master
# self.pack()
#
# user32 = ctypes.windll.user32
# self.screensize = user32.GetSystemMetrics(0), user32.GetSystemMetrics(1)
# # back = tk.Frame(width=self.screensize[0], height=self.screensize[1], bg='white')
#
#
# self.lmain1 = tk.Label(self, text="hi")
# self.lmain1.grid(row=0, column=0)
#
# myimage = tk.PhotoImage(file='songo.png')
# label = tk.Label(self, image=myimage)
# label.image = myimage # the reference
# label.grid(row=1, column=0)
#
#
#
#
# self.video_stream()
#
#
# def video_stream(self):
# _, frame1 = self.cap.read()
# speed_detection.speed_detection(frame1)
# cv2image1 = cv2.cvtColor(frame1, cv2.COLOR_BGR2RGBA)
# img1 = Image.fromarray(cv2image1)
# imgtk1 = ImageTk.PhotoImage(image=img1)
# self.lmain1.imgtk = imgtk1
# self.lmain1.configure(image=imgtk1)
#
# self.flag = self.music_on()
# if self.flag:
# self.lmain1.after(1, self.video_stream)
# else:
# print("finish")
# self.cap1 = cv2.VideoCapture('videoplayback.mp4')
# self.video_stream1()
#
# def video_stream1(self):
# print("here")
# _, frame1 = self.cap1.read()
# cv2image1 = cv2.cvtColor(frame1, cv2.COLOR_BGR2RGBA)
# img1 = Image.fromarray(cv2image1)
# imgtk1 = ImageTk.PhotoImage(image=img1)
# self.lmain1.imgtk = imgtk1
# self.lmain1.configure(image=imgtk1)
# self.flag = self.continue_movie()
# if self.flag:
# self.lmain1.after(1, self.video_stream)
# else:
# print("finish")
# self.cap1.release()
# # cv2.destroyAllWindows()
# # self.master = None
# #conclusion = Conclusion(self.newMaster, speed=[23, 54, 6, 88, 34, 32, 5, 7], volume=[4, 33, 8,2,67,98,46,3])
#
# def continue_movie(self):
# if self.counter == 0:
# return True
# self.counter -= 1
#
# def music_on(self):
# return music_control.get_busy()
#
# def is_one_not_dance(mp_list):
# pass
#
# def conclusion(list_of_mph):
# pass
#
#
# def calaulate_volum(avg):
# print("========vol=================")
#
# if avg > 500:
# music_control.set_volume(1)
# print('1')
# elif avg <= 500 and avg > 400:
# print('0.1')
# music_control.set_volume(0.9)
# elif avg <= 400 and avg > 320:
# music_control.set_volume(0.8)
# print('0.8')
# elif avg <= 320 and avg > 250:
# music_control.set_volume(0.7)
# print('0.7')
# elif avg <= 250 and avg > 210:
# music_control.set_volume(0.7)
# print('0.6')
# elif avg <= 210 and avg > 170:
# music_control.set_volume(0.7)
# print('0.5')
# elif avg <= 170 and avg > 120:
# music_control.set_volume(0.4)
# print('0.4')
# elif avg <= 120 and avg > 70:
# music_control.set_volume(0.3)
# print('0.3')
# elif avg <= 70 and avg > 30:
# music_control.set_volume(0.2)
# print('0.2')
# elif avg <= 30 and avg > 1:
# music_control.set_volume(0.1)
# print('0.1')
# elif avg <= 1:
# music_control.set_volume(0)
# print('0')
import ctypes
from PIL import ImageTk, Image
import cv2
import tkinter as tk
import music_control
import speed_detection
import threading
import last_screen
class ImageWindow(tk.Frame):
    """Tk frame that composes a grid of sample images via ``last_screen``.

    NOTE(review): a second class with the same name is defined later in
    this file and shadows this definition at import time.
    """
    def __init__(self, master=None):
        super().__init__(master)
        self.master = master
        self.grid()
        # Query the physical screen resolution via the Win32 API
        # (Windows-only; ctypes.windll does not exist on other platforms).
        user32 = ctypes.windll.user32
        self.screensize = user32.GetSystemMetrics(0), user32.GetSystemMetrics(1)
        # Hard-coded absolute path to one sample image, repeated 9 times.
        a = "C:\\Users\\שרה ויסברגר\\Desktop\\EXELLENTIM\\excellenteam-hackathon-ella-songo\\fa.png"
        lf = [a, a, a, a, a, a, a, a, a]
        imgee = last_screen.last_screen(lf)
        # NOTE(review): tk.PhotoImage's first positional argument is the
        # widget *name*, not image data -- presumably ImageTk.PhotoImage(imgee)
        # or PhotoImage(file=...) was intended; confirm against what
        # last_screen.last_screen returns.
        img = tk.PhotoImage(imgee)
        panel = tk.Label(self, text="image pro")
        # Keep a reference so the image is not garbage-collected; note the
        # label is created with text only (image= is never passed).
        panel.image = img
        panel.grid()
import ctypes
from PIL import ImageTk, Image
import cv2
import tkinter as tk
import music_control
import speed_detection
import threading
import last_screen
import os
class ImageWindow(tk.Frame):
    """Tk frame that composes a grid of images from ``images_collection/``.

    NOTE(review): this redefinition shadows the earlier ImageWindow class.
    """
    def __init__(self, master=None):
        super().__init__(master)
        self.master = master
        self.grid()
        # Query the physical screen resolution via the Win32 API
        # (Windows-only; ctypes.windll does not exist on other platforms).
        user32 = ctypes.windll.user32
        self.screensize = user32.GetSystemMetrics(0), user32.GetSystemMetrics(1)
        File = os.listdir('images_collection/')
        lf = []
        # NOTE(review): File[1] (one fixed directory entry) is appended nine
        # times; File[i] may have been intended -- the loop index i is
        # otherwise unused.  Confirm before changing: the commented-out
        # variant below also repeats a single path nine times.
        for i in range(9):
            lf.append('images_collection/' + File[1])
        # img = ImageTk.PhotoImage(Image.open(FileDir))
        # a = "C:\\Users\\שרה ויסברגר\\Desktop\\EXELLENTIM\\excellenteam-hackathon-ella-songo\\fa.png"
        # lf = [a, a, a, a, a, a, a, a, a]
        # print(lf)
        imgee = last_screen.last_screen(lf)
        # NOTE(review): tk.PhotoImage's first positional argument is the
        # widget *name*, not image data -- presumably ImageTk.PhotoImage
        # was intended; confirm against last_screen's return type.
        img = tk.PhotoImage(imgee)
        panel = tk.Label(self, text="image pro")
        # Keep a reference so the image is not garbage-collected.
        panel.image = img
        panel.grid()
| 5,441 | 2,042 |
from transformers import AutoTokenizer
from pathlib import Path
import torch
from src.reader.pan_hatespeech import AUTHOR_SEP, AUTHOR_ID
import numpy as np
from sklearn.model_selection import StratifiedKFold
from src.utils import RANDOM_SEED
from pathlib import Path
import xml.etree.ElementTree as ET
from transformers import AutoTokenizer
from torch.utils.data import TensorDataset
from tqdm import tqdm
class ExistTaskDataset(torch.utils.data.Dataset):
    """Placeholder dataset for the EXIST task.

    Registered in DATA_LOADERS but not implemented yet.
    """
    pass
class PanHateSpeechTaskDataset(torch.utils.data.Dataset):
    """Dataset over PAN hate-speech author-profile XML files.

    One item corresponds to one author profile.  Depending on ``mode`` the
    author's posts are encoded as a single joined document ('joined'), a
    joined document with [POSTSTART]/[POSTEND] boundary markers
    ('joined_post_aware'), or as one encoding per post, stacked
    ('hierarchical').
    """
    def __init__(self, files, tokenizer, max_seq_len, ground_truth=None, mode='joined'):
        # files: sequence of XML profile paths whose ``.stem`` is the
        #   author id (pathlib.Path objects are assumed).
        # ground_truth: optional {author_id: int label}; when None/empty the
        #   returned items carry no 'labels' key (inference mode).
        self.files = files
        self.ground_truth = ground_truth
        self.mode = mode
        self.tokenizer = tokenizer
        self.max_seq_len = max_seq_len
    @staticmethod
    def process_text(text):
        """Map PAN placeholder tags onto the tokenizer's special tokens."""
        text = text.replace('#URL#', "[URL]")
        text = text.replace('#HASHTAG#', "[HASHTAG]")
        # '#USER#:' handled first so the trailing colon is consumed too.
        text = text.replace('#USER#:', "[USER]")
        text = text.replace('#USER#', "[USER]")
        # NOTE(review): this replaces the substring 'RT' wherever it occurs,
        # including inside ordinary words -- confirm this is intended.
        text = text.replace('RT', "[RT]")
        return text
    def __getitem__(self, item):
        """Parse one profile XML and return a dict of encoded tensors.

        Returns ``input_ids``/``attention_mask`` (stacked across posts in
        'hierarchical' mode), ``labels`` when ground truth is available,
        plus the raw ``text`` and ``author_id`` for bookkeeping.
        """
        selected_files = [self.files[item]]
        tokenized_texts = []
        labels = []
        author_ids = []
        for profile_file in selected_files:
            tree = ET.parse(profile_file)
            root = tree.getroot()
            if self.ground_truth:
                labels.append(self.ground_truth[profile_file.stem])
            author_ids.append(profile_file.stem)
            if self.mode == 'joined':
                # All of an author's posts concatenated into one document.
                for child in root:
                    posts = []
                    for ch in child:
                        posts.append(ch.text)
                    content = ' '.join(posts)
                    content = PanHateSpeechTaskDataset.process_text(content)
                    tokenized_texts.append(content)
            elif self.mode == 'joined_post_aware':
                # Same as 'joined' but each post is wrapped in boundary tokens.
                for child in root:
                    posts = []
                    for ch in child:
                        posts.append(f'[POSTSTART] {ch.text} [POSTEND]')
                    content = ' '.join(posts)
                    content = PanHateSpeechTaskDataset.process_text(content)
                    tokenized_texts.append(content)
            elif self.mode == 'hierarchical':
                # Keep posts separate; each is encoded individually below.
                posts = []
                for child in root:
                    for ch in child:
                        posts.append(PanHateSpeechTaskDataset.process_text(ch.text))
                tokenized_texts.append(posts)
        if 'joined' in self.mode:
            # Single encoding for the whole joined document.
            encoding = self.tokenizer.encode_plus(tokenized_texts[0], add_special_tokens=True,
                                                  # Add '[CLS]' and '[SEP]'
                                                  max_length=self.max_seq_len,
                                                  padding='max_length',  # Pad & truncate all sentences.
                                                  truncation=True,
                                                  return_token_type_ids=False,
                                                  return_attention_mask=True,  # Construct attn. masks.
                                                  return_tensors='pt'  # Return pytorch tensors.
                                                  )
            if self.ground_truth:
                return dict(
                    input_ids=encoding['input_ids'],
                    attention_mask=encoding['attention_mask'],
                    labels=torch.LongTensor(labels),
                    text=tokenized_texts,
                    author_id=author_ids,
                )
            else:
                return dict(
                    input_ids=encoding['input_ids'],
                    attention_mask=encoding['attention_mask'],
                    text=tokenized_texts,
                    author_id=author_ids,
                )
        else:
            # 'hierarchical': encode every post separately, then stack.
            input_ids = []
            attention_masks = []
            for idx, tokenized_text in enumerate(tokenized_texts[0]):
                encoding = self.tokenizer.encode_plus(tokenized_text, add_special_tokens=True,
                                                      # Add '[CLS]' and '[SEP]'
                                                      max_length=self.max_seq_len,
                                                      padding='max_length',  # Pad & truncate all sentences.
                                                      truncation=True,
                                                      return_token_type_ids=False,
                                                      return_attention_mask=True,  # Construct attn. masks.
                                                      return_tensors='pt'  # Return pytorch tensors.
                                                      )
                input_ids.append(encoding['input_ids'])
                attention_masks.append(encoding['attention_mask'])
            if self.ground_truth:
                return dict(
                    input_ids=torch.stack(input_ids),
                    attention_mask=torch.stack(attention_masks),
                    labels=torch.LongTensor(labels),
                    text=tokenized_texts,
                    author_id=author_ids,
                )
            else:
                return dict(
                    input_ids=torch.stack(input_ids),
                    attention_mask=torch.stack(attention_masks),
                    text=tokenized_texts,
                    author_id=author_ids,
                )
    def __len__(self):
        """Number of author profiles."""
        return len(self.files)
class PANHateSpeechTaskDatasetWrapper:
    """Builds PanHateSpeechTaskDataset instances from parsed CLI args.

    With ``args.cv`` set, ``self.dataset`` is a list of
    (train_dataset, test_dataset) tuples, one per stratified fold;
    otherwise it is a single dataset over all profile files.
    """
    def create_cv_folds(self):
        """Return (train_folds, test_folds) index arrays for stratified CV."""
        kf = StratifiedKFold(n_splits=self.cv, random_state=RANDOM_SEED, shuffle=True)
        train_folds = []
        test_folds = []
        # NOTE(review): assumes the insertion order of self.ground_truth
        # matches self.profile_files order -- confirm; a mismatch would
        # misalign the stratification labels.
        for train_index, test_index in kf.split(self.profile_files, list(self.ground_truth.values())):
            train_folds.append(train_index)
            test_folds.append(test_index)
        return train_folds, test_folds
    # Extra tokenizer special tokens, keyed by input mode.
    SPECIAL_TOKENS = {
        'joined': {'additional_special_tokens': ["[RT]", "[USER]", "[URL]", "[HASHTAG]"]},
        'hierarchical': {'additional_special_tokens': ["[RT]", "[USER]", "[URL]", "[HASHTAG]"]},
        'joined_post_aware': {
            'additional_special_tokens': ["[RT]", "[USER]", "[URL]", "[HASHTAG]", "[POSTSTART]", "[POSTEND]"]}
    }
    def __init__(self, args):
        # args is expected to provide: cv, tokenizer, input_mode, lang,
        # data (dataset root path) and max_seq_len.
        self.cv = args.cv
        self.tokenizer = AutoTokenizer.from_pretrained(args.tokenizer)
        # self.tokenizer.save_pretrained(f'trained_models/{args.tokenizer}')
        self.special_tokens_dict = PANHateSpeechTaskDatasetWrapper.SPECIAL_TOKENS[args.input_mode]
        self.tokenizer.add_special_tokens(self.special_tokens_dict)
        if args.lang == 'en_es' or args.lang == 'es_en':
            # Bilingual setup: pool English and Spanish profiles and labels.
            data_path = Path(args.data)
            lang_en = data_path / 'en'
            files_en = np.asarray([path for path in lang_en.glob('*.xml')])
            lang_es = data_path / 'es'
            files_es = np.asarray([path for path in lang_es.glob('*.xml')])
            self.profile_files = np.concatenate((files_en, files_es), axis=None)
            labels_path_en = data_path / 'en' / 'truth.txt'
            self.ground_truth = {}
            # truth.txt lines look like "<author_id><AUTHOR_SEP><label>".
            with open(labels_path_en, 'r') as r:
                labels = r.readlines()
                for label in labels:
                    label = label.split(AUTHOR_SEP)
                    self.ground_truth[label[0]] = int(label[1])
            labels_path_es = data_path / 'es' / 'truth.txt'
            with open(labels_path_es, 'r') as r:
                labels = r.readlines()
                for label in labels:
                    label = label.split(AUTHOR_SEP)
                    self.ground_truth[label[0]] = int(label[1])
        else:
            # Monolingual setup; truth.txt may be absent for blind test data.
            data_path = Path(args.data)
            self.profile_files = np.asarray([path for path in data_path.glob('*.xml')])
            labels_path = data_path / 'truth.txt'
            if labels_path.exists():
                self.ground_truth = {}
                with open(labels_path, 'r') as r:
                    labels = r.readlines()
                    for label in labels:
                        label = label.split(AUTHOR_SEP)
                        self.ground_truth[label[0]] = int(label[1])
            else:
                self.ground_truth = None
        if self.cv:
            # One (train, test) dataset pair per stratified fold.
            train_folds, test_folds = self.create_cv_folds()
            self.dataset = []
            for idx, train_fold in enumerate(train_folds):
                train_files = self.profile_files[train_fold]
                test_files = self.profile_files[test_folds[idx]]
                self.dataset.append(
                    (PanHateSpeechTaskDataset(train_files, max_seq_len=args.max_seq_len, tokenizer=self.tokenizer,
                                              ground_truth=self.ground_truth, mode=args.input_mode),
                     PanHateSpeechTaskDataset(test_files, max_seq_len=args.max_seq_len, tokenizer=self.tokenizer,
                                              ground_truth=self.ground_truth,
                                              mode=args.input_mode)))
        else:
            # TODO for test files, the files without labels
            test_files = self.profile_files
            self.dataset = PanHateSpeechTaskDataset(test_files, max_seq_len=args.max_seq_len, tokenizer=self.tokenizer,
                                                    ground_truth=self.ground_truth,
                                                    mode=args.input_mode)
# Registry mapping a task name to the class that loads its data.
DATA_LOADERS = {
    'pan_hatespeech': PANHateSpeechTaskDatasetWrapper,
    'exist': ExistTaskDataset
}
| 9,762 | 2,758 |
"""
MIT License
Copyright (c) 2021 molokhovdmitry
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
"""
This file downloads frames from streams in top categories of twitch.
Pseudocode:
while `Enter` is not pressed:
Loop:
1) Get top games from twitch api, save them in a database.
2) Get a game with a minimum number of saved frames.
3) Get logins of streams that are live in that category.
4) Download frames from 5 random streams,
save frame info in the database.
"""
import random
import time
import requests
from threading import Thread
from pathlib import Path
from termcolor import colored
from streamlink import Streamlink
from data.download_functions import download_frames
from data.api import get_top_games, get_streams
from data.db_functions import (session_scope, get_game_count, update_games,
min_data_category, max_data_category, add_frame)
from config import DOWNLOAD_PATH, MAX_GAMES
# Directory where downloaded frames are stored: <DOWNLOAD_PATH>/frames
data_path = Path(DOWNLOAD_PATH) / "frames"
def update_data():
    """Updates data while no input (Enter not pressed).

    Main download loop: keeps the game table topped up, picks the category
    with the fewest saved frames, and downloads frames from up to 5 random
    live streams in it.  Helper threads watch stdin for Enter (stop flag)
    and periodically print dataset statistics.
    """
    # Start helper threads.
    input_list = []
    Thread(target=input_thread, args=(input_list, )).start()
    print("Press Enter any time to stop downloading.")
    Thread(target=info_thread, args=(input_list, )).start()
    # Start a streamlink session.
    streamlink_session = Streamlink()
    # Start an api session.
    api_session = requests.session()
    downloaded_streams = 0
    fail_count = 0
    frame_count = 0
    # input_list becomes non-empty once the user presses Enter.
    while not input_list:
        # Add games if game limit is not exceeded.
        with session_scope() as db_session:
            game_count = get_game_count(db_session)
        if game_count < MAX_GAMES:
            games = get_top_games(api_session)
            if not games:
                print("Error. Could not get top games.")
                continue
            # Update the database with new games.
            with session_scope() as db_session:
                update_games(db_session, games)
        # Get a category with the minimum number of frames.
        with session_scope() as db_session:
            game_id = min_data_category(db_session)[0]
        # Get streams from the category.
        streams = get_streams(api_session, game_id)
        if not streams:
            print("Error. Could not get streams.")
            continue
        # Update the category (download frames from 5 streams).
        download_count = 0
        download_attempts = 0
        while streams and download_count < 5 and download_attempts < 10:
            if input_list:
                break
            # Get a random stream.
            stream = random.choice(list(streams))
            streams.discard(stream)
            # Download frames from a stream, update the database.
            print(f"Downloading frames from '{stream}', gameID: {game_id}.")
            download = False
            for frame_path in download_frames(streamlink_session,
                                              stream, game_id):
                # Save a frame in the database.
                with session_scope() as db_session:
                    add_frame(db_session, frame_path, game_id, stream)
                download = True
                frame_count += 1
            # bool arithmetic: a successful download counts as 1, failure as 0.
            download_count += download
            download_attempts += 1
            downloaded_streams += download
            fail_count += not download
    print_dataset_info()
    print("Done.")
    print(f"Downloaded {frame_count} frame(s) from {downloaded_streams} "
          f"stream(s). Failed {fail_count} time(s).")
def input_thread(input_list):
    """Thread that waits for an input.

    Blocks on stdin; when the user presses Enter it appends a sentinel to
    *input_list*, which the other loops poll as a shared stop flag.
    """
    input()
    input_list.append(True)
    print(colored("Interrupting. Please wait.", 'green'))
def info_thread(input_list):
    """
    Thread that shows how much data is downloaded and min/max data categories
    every `n` seconds.

    Sleeps one second at a time so it notices the stop flag (a non-empty
    *input_list*) quickly instead of blocking for the whole interval.
    """
    n = 300
    print_dataset_info()
    elapsed = 0
    while not input_list:
        if elapsed == n:
            # Interval complete: reset the counter and print a report.
            elapsed = 0
            print_dataset_info()
        else:
            time.sleep(1)
            elapsed += 1
def print_dataset_info():
    """Prints dataset info: size on disk, game count, min/max categories."""
    # Print dataset size.
    print(colored(dir_size(data_path), 'green'))
    # Print the number of games.
    with session_scope() as db_session:
        game_count = get_game_count(db_session)
    print(colored(f"{game_count} game(s)", 'green'))
    # Print categories with minimum and maximum number of frames.
    print_min_max()
def dir_size(path):
    """Return the total size of the `path` folder as a formatted GB string.

    Walks the tree recursively and sums the sizes of regular files only.
    """
    total_bytes = sum(
        entry.stat().st_size
        for entry in path.glob('**/*')
        if entry.is_file()
    )
    gigabytes = total_bytes / 1073741824  # bytes per GiB
    return "Data size: " + '{:.2f}'.format(gigabytes) + " GB"
def print_min_max():
    """Prints categories with minimum and maximum number of frames."""
    with session_scope() as db_session:
        min_category = min_data_category(db_session)
        max_category = max_data_category(db_session)
    # An empty database yields no categories; print nothing in that case.
    if min_category:
        print(colored("Minimum: {} frame(s) in category {}."
                      .format(min_category[1], min_category[0]),
                      'green'))
        print(colored("Maximum: {} frame(s) in category {}."
                      .format(max_category[1], max_category[0]),
                      'green'))
if __name__ == "__main__":
update_data()
| 6,585 | 1,938 |
import time
from selenium import webdriver
def main():
    """Open w3schools.com in Chrome and click the login button.

    NOTE(review): ``executable_path`` and ``find_element_by_css_selector``
    are Selenium 3 APIs that were removed in Selenium 4 (which uses
    ``Service(...)`` and ``find_element(By.CSS_SELECTOR, ...)``).  Pin
    selenium<4 or migrate.
    """
    driver = webdriver.Chrome(executable_path='chromedriver.exe')
    driver.get('https://www.w3schools.com/')
    #input()
    # Give the page time to render before locating the button.
    time.sleep(5)
    driver.find_element_by_css_selector('#w3loginbtn').click() # Selector obtained by copying it from the element in the browser inspector.
    # Keep the browser open until the user presses Enter.
    input()
# Script entry point.
if __name__ == '__main__':
    main()
#time.sleep(3) | 385 | 132 |
import time
from model_zoo import load_model, resnet, googLeNet
import ensembel_model
import utils
import cv2
import numpy as np
import torch
from torch import nn
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "{}".format(1)
# test resnet
version = 'resnet-tiny-n7'
new_model = resnet.my_resnet
mean = np.array([[[[113.91022]],
[[123.0098]],
[[125.40064]]]], dtype=np.float32)
# model, create_new_model = load_model.load_model(
# version=version,
# new_model=new_model,
# just_weights=False,
# retrain=False,
# to_cuda=False
# )
model = ensembel_model.my_ensembel_model(False)
model.eval()
test_size = 20
time_cost = []
for i in range(test_size):
img = cv2.imread("get_data/data_sample/{}.png".format(i))
img = img.transpose([2, 0, 1]).reshape([1, 3, 32, 32]).astype(np.float32)
img = (img - mean)/64.15484306
time_start = time.time()
x = torch.Tensor(img)
y = model(x).detach()
y = nn.Softmax(dim=1)(y).numpy()
time_end = time.time()
time_cost.append(time_end - time_start)
time_cost = np.array(time_cost)
print(time_cost.mean()*1000, time_cost.std()*1000)
| 1,158 | 492 |
import os
from setuptools import setup
# Packaging metadata for the simpleseo Django app.
setup(
    name='simpleseo',
    version='0.0.1',
    author='Glamping Hub',
    author_email='it@glampinghub.com',
    packages=['simpleseo'],
    url='https://github.com/Glamping-Hub/django-simple-seo',
    license='LICENSE',
    description='Simple SEO app for django framework',
    # NOTE(review): README.md is read with the platform default encoding;
    # confirm it is UTF-8-safe on all build hosts.
    long_description=open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
    # NOTE(review): 'requires' is metadata only and largely ignored by pip;
    # 'install_requires' is the field that actually installs Django --
    # confirm which behavior is intended.
    requires=[
        'Django (>=1.11.0)',
    ],
    include_package_data=True,
    zip_safe=False,
)
| 516 | 189 |
import os
# Module-level placeholder for a shared session object; presumably assigned
# by other modules at runtime -- confirm against callers.
session = None
| 26 | 9 |
import os
from pathlib import Path
from tempfile import tempdir
import numpy as np
import pytest
from numpy.testing import assert_allclose, assert_almost_equal
import ross as rs
from ross.defects.misalignment import MisalignmentFlex
from ross.units import Q_
steel2 = rs.Material(name="Steel", rho=7850, E=2.17e11, Poisson=0.2992610837438423)
# Rotor with 6 DoFs, with internal damping, with 10 shaft elements, 2 disks and 2 bearings.
i_d = 0
o_d = 0.019
n = 33
# fmt: off
L = np.array(
[0 , 25, 64, 104, 124, 143, 175, 207, 239, 271,
303, 335, 345, 355, 380, 408, 436, 466, 496, 526,
556, 586, 614, 647, 657, 667, 702, 737, 772, 807,
842, 862, 881, 914]
)/ 1000
# fmt: on
L = [L[i] - L[i - 1] for i in range(1, len(L))]
shaft_elem = [
rs.ShaftElement6DoF(
material=steel2,
L=l,
idl=i_d,
odl=o_d,
idr=i_d,
odr=o_d,
alpha=8.0501,
beta=1.0e-5,
rotary_inertia=True,
shear_effects=True,
)
for l in L
]
Id = 0.003844540885417
Ip = 0.007513248437500
disk0 = rs.DiskElement6DoF(n=12, m=2.6375, Id=Id, Ip=Ip)
disk1 = rs.DiskElement6DoF(n=24, m=2.6375, Id=Id, Ip=Ip)
kxx1 = 4.40e5
kyy1 = 4.6114e5
kzz = 0
cxx1 = 27.4
cyy1 = 2.505
czz = 0
kxx2 = 9.50e5
kyy2 = 1.09e8
cxx2 = 50.4
cyy2 = 100.4553
bearing0 = rs.BearingElement6DoF(
n=4, kxx=kxx1, kyy=kyy1, cxx=cxx1, cyy=cyy1, kzz=kzz, czz=czz
)
bearing1 = rs.BearingElement6DoF(
n=31, kxx=kxx2, kyy=kyy2, cxx=cxx2, cyy=cyy2, kzz=kzz, czz=czz
)
rotor = rs.Rotor(shaft_elem, [disk0, disk1], [bearing0, bearing1])
@pytest.fixture
def rub():
    """Rubbing simulation of the module-level rotor, with SI inputs."""
    unbalance_magnitudet = np.array([5e-4, 0])
    unbalance_phaset = np.array([-np.pi / 2, 0])
    rubbing = rotor.run_rubbing(
        dt=0.001,
        tI=0,
        tF=0.5,
        deltaRUB=7.95e-5,
        kRUB=1.1e6,
        cRUB=40,
        miRUB=0.3,
        posRUB=12,
        speed=125.66370614359172,
        unbalance_magnitude=unbalance_magnitudet,
        unbalance_phase=unbalance_phaset,
        print_progress=True,
    )
    return rubbing
@pytest.fixture
def rub_units():
    """Same rubbing case as the SI fixture, but driven through pint units.

    0.043398... lb*in and 1200 RPM correspond to the SI values 5e-4 kg*m
    and 125.6637... rad/s used above, so results should match.
    """
    unbalance_magnitudet = Q_(np.array([0.043398083107259365, 0]), "lb*in")
    unbalance_phaset = Q_(np.array([-90.0, 0.0]), "degrees")
    rubbing = rotor.run_rubbing(
        dt=0.001,
        tI=0,
        tF=0.5,
        deltaRUB=7.95e-5,
        kRUB=1.1e6,
        cRUB=40,
        miRUB=0.3,
        posRUB=12,
        speed=Q_(1200, "RPM"),
        unbalance_magnitude=unbalance_magnitudet,
        unbalance_phase=unbalance_phaset,
        print_progress=True,
    )
    return rubbing
def test_rub_parameters(rub):
    """Input parameters are stored unchanged on the rubbing results object."""
    assert rub.dt == 0.001
    assert rub.tI == 0
    assert rub.tF == 0.5
    assert rub.deltaRUB == 7.95e-5
    assert rub.kRUB == 1.1e6
    assert rub.cRUB == 40
    assert rub.miRUB == 0.3
    assert rub.posRUB == 12
    assert rub.speed == 125.66370614359172
def test_rub_parameters_units(rub_units):
    """Unit-bearing inputs are converted to SI (1200 RPM -> 125.66... rad/s)."""
    assert rub_units.dt == 0.001
    assert rub_units.tI == 0
    assert rub_units.tF == 0.5
    assert rub_units.deltaRUB == 7.95e-5
    assert rub_units.kRUB == 1.1e6
    assert rub_units.cRUB == 40
    assert rub_units.miRUB == 0.3
    assert rub_units.posRUB == 12
    assert rub_units.speed == 125.66370614359172
def test_rub_forces(rub):
assert rub.forces_rub[rub.posRUB * 6, :] == pytest.approx(
# fmt: off
np.array([ 0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
1.33959978, 2.38449456, 2.49659676, 1.81196092,
0.59693967, -1.62826881, -4.24183226, -6.00328692,
-6.20469077, -4.73542742, -2.72934246, -2.58572889,
-6.09380802, -11.89929423, -16.28700512, -16.40580808,
-12.28949661, -6.4715516 , -1.9219398 , 0.03597126,
0.1483467 , 0.06399221, 0.18405941, 0.44370982,
0.59170921, 0.55028863, 0.61661081, 1.15972902,
1.99145991, 2.2827491 , 1.21484512, 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , -0.16254596,
-1.15328767, -1.31402774, -0.57077859, 0.17589894,
0. , 0. , 0. , 0. ,
1.35177422, 4.63052083, 8.26595768, 10.77229714,
11.33408396, 9.97185414, 7.43725544, 4.8091569 ,
2.90279915, 1.9280518 , 1.60783411, 1.51278525,
1.30371098, 0.82428403, 0.11364267, -1.19150903,
-2.28684891, -2.8371608 , -2.70293006, -1.7191864 ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
-0.36389397, -0.02296953, -0.58251754, -1.26899237,
-1.37833025, -0.83533185, 0. , -1.54209802,
-3.90213044, -5.84909057, -5.54764546, -2.82245046,
0. , 0. , -0.9813435 , -5.02620683,
-7.74948602, -7.28498629, -4.21292376, -0.68113248,
0. , 0. , 0. , 0. ,
0.49651344, 0.94005508, 1.20771384, 1.01602262,
0.58504824, 0.42249176, 0.74712263, 1.21263464,
1.24036134, 0.53226162, 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0.81085807, 2.24845489,
2.79783564, 2.36749471, 1.48390821, 0.91606906,
1.10872277, 1.79762142, 2.27408108, 2.02409761,
1.09722613, 0.03554649, 0. , 0. ,
0. , 0. , 0. , -3.20746741,
-5.67109902, -5.84109266, -3.27138013, 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , -0.94605595,
-2.89024152, -2.82355527, -0.91081202, 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0.29208245, 0.31276202, 0.55407755,
0.65238154, 0.38599931, 0. , 0. ,
0. , 0. , 0. , 0.30206142,
1.24645948, 2.17118657, 2.79095166, 2.92291644,
2.4997066 , 1.57624218, 0.3448333 , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
-0.25292217, 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , -1.16895344, -3.13003066,
-4.31801297, -3.79661585, -1.72270664, 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0.22784948, 0.25813886,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. , 0. , -1.0783812 , -1.3305057 ,
0.04528174, 0. , 0. , 0. ,
0. , 0. , 0. , 0. ,
0. ])
# fmt: on
)
assert rub.forces_rub[rub.posRUB * 6 + 1, :] == pytest.approx(
# fmt: off
np.array([ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
-2.04052219e+00, -4.37686754e+00, -6.57561714e+00, -8.28614638e+00,
-9.37724617e+00, -9.76009440e+00, -9.25692987e+00, -7.68859694e+00,
-5.19404219e+00, -2.47936767e+00, -6.07948333e-01, -1.39115180e-01,
-2.90615182e-01, 5.97112915e-01, 3.77091904e+00, 7.17958236e+00,
8.50566481e+00, 6.92594573e+00, 3.76719699e+00, 1.38672233e+00,
1.14491558e+00, 2.47008298e+00, 3.68895320e+00, 3.55463037e+00,
2.15169896e+00, 7.16172655e-01, 4.58130638e-01, 1.31722645e+00,
2.08017814e+00, 1.73698314e+00, 3.66421390e-01, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.41608952e+00,
3.91872725e+00, 6.08751103e+00, 6.04879589e+00, 3.34005262e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
1.32257678e+00, 4.72982449e+00, 5.65624377e+00, 3.74610313e+00,
2.79267497e-01, -1.61571947e+00, -2.61760973e+00, -2.76295228e+00,
-2.46754501e+00, -2.27912450e+00, -2.49834149e+00, -3.08123496e+00,
-3.77081696e+00, -4.26182369e+00, -4.32814157e+00, -3.92465771e+00,
-3.18784204e+00, -2.30089569e+00, -1.36585055e+00, -4.14179880e-01,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
-2.27746914e-01, -2.70148968e+00, -3.76415772e+00, -3.18400597e+00,
-1.61071020e+00, -1.55937047e-01, 0.00000000e+00, -3.48135704e-01,
-1.20173785e+00, -1.41164098e+00, -8.22789836e-01, 2.22972608e-02,
0.00000000e+00, 0.00000000e+00, 1.11826215e+00, 3.42190067e+00,
6.43880255e+00, 7.73939551e+00, 6.02642177e+00, 2.15551882e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
1.30262708e+00, 2.86790215e+00, 2.69095371e+00, 1.30596104e+00,
6.27355622e-02, -1.46409693e-01, -1.61596825e-03, 2.02368230e-01,
7.76880971e-02, -1.13943983e-01, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, -1.20894403e-01, -9.67454370e-02,
-3.75553448e-01, -6.69880393e-01, -7.81524379e-01, -8.20393160e-01,
-1.19541101e+00, -2.17352648e+00, -3.38963807e+00, -3.97901667e+00,
-3.24152409e+00, -1.21249778e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, -1.71167570e+00,
-2.63258535e+00, -2.11381589e+00, -7.29071480e-01, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.55809454e-01,
-1.33550998e-01, 1.30622371e-01, 5.14772296e-01, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 5.87400070e-01, 2.52352092e+00, 3.18829790e+00,
2.12154931e+00, 1.96463435e-01, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, -1.71590151e-01,
8.49398637e-02, -3.92130275e-03, -3.10907319e-01, -7.32520442e-01,
-1.04949045e+00, -1.03758926e+00, -5.57304261e-01, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
-2.22986515e-01, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 1.36141783e-01, -1.62089253e-01,
-6.56719131e-02, 3.67462355e-01, 6.11527272e-01, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, -7.23117550e-02, -4.63871628e-02,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 8.90041966e-01, 1.14524591e+00,
4.21896347e-01, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00])
# fmt: on
)
def test_rub_forces_units(rub_units):
    """Regression check of the rubbing-contact force history in the
    ``rub_units`` fixture against golden values from a previous run.

    Two rows of ``forces_rub`` are compared: index ``posRUB * 6`` and
    ``posRUB * 6 + 1``.  NOTE(review): assumes rows are the x and y force
    DOFs of the rubbing node and columns are time steps — confirm against
    the fixture/model definition.
    """
    # First checked DOF (posRUB * 6) vs. recorded baseline.
    assert rub_units.forces_rub[rub_units.posRUB * 6, :] == pytest.approx(
        # fmt: off
        np.array([ 0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        1.33959978, 2.38449456, 2.49659676, 1.81196092,
        0.59693967, -1.62826881, -4.24183226, -6.00328692,
        -6.20469077, -4.73542742, -2.72934246, -2.58572889,
        -6.09380802, -11.89929423, -16.28700512, -16.40580808,
        -12.28949661, -6.4715516 , -1.9219398 , 0.03597126,
        0.1483467 , 0.06399221, 0.18405941, 0.44370982,
        0.59170921, 0.55028863, 0.61661081, 1.15972902,
        1.99145991, 2.2827491 , 1.21484512, 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , -0.16254596,
        -1.15328767, -1.31402774, -0.57077859, 0.17589894,
        0. , 0. , 0. , 0. ,
        1.35177422, 4.63052083, 8.26595768, 10.77229714,
        11.33408396, 9.97185414, 7.43725544, 4.8091569 ,
        2.90279915, 1.9280518 , 1.60783411, 1.51278525,
        1.30371098, 0.82428403, 0.11364267, -1.19150903,
        -2.28684891, -2.8371608 , -2.70293006, -1.7191864 ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        -0.36389397, -0.02296953, -0.58251754, -1.26899237,
        -1.37833025, -0.83533185, 0. , -1.54209802,
        -3.90213044, -5.84909057, -5.54764546, -2.82245046,
        0. , 0. , -0.9813435 , -5.02620683,
        -7.74948602, -7.28498629, -4.21292376, -0.68113248,
        0. , 0. , 0. , 0. ,
        0.49651344, 0.94005508, 1.20771384, 1.01602262,
        0.58504824, 0.42249176, 0.74712263, 1.21263464,
        1.24036134, 0.53226162, 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0.81085807, 2.24845489,
        2.79783564, 2.36749471, 1.48390821, 0.91606906,
        1.10872277, 1.79762142, 2.27408108, 2.02409761,
        1.09722613, 0.03554649, 0. , 0. ,
        0. , 0. , 0. , -3.20746741,
        -5.67109902, -5.84109266, -3.27138013, 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , -0.94605595,
        -2.89024152, -2.82355527, -0.91081202, 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0.29208245, 0.31276202, 0.55407755,
        0.65238154, 0.38599931, 0. , 0. ,
        0. , 0. , 0. , 0.30206142,
        1.24645948, 2.17118657, 2.79095166, 2.92291644,
        2.4997066 , 1.57624218, 0.3448333 , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        -0.25292217, 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , -1.16895344, -3.13003066,
        -4.31801297, -3.79661585, -1.72270664, 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0.22784948, 0.25813886,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , -1.0783812 , -1.3305057 ,
        0.04528174, 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. ])
        # fmt: on
    )
    # Second checked DOF (posRUB * 6 + 1) vs. recorded baseline.
    assert rub_units.forces_rub[rub_units.posRUB * 6 + 1, :] == pytest.approx(
        # fmt: off
        np.array([ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        -2.04052219e+00, -4.37686754e+00, -6.57561714e+00, -8.28614638e+00,
        -9.37724617e+00, -9.76009440e+00, -9.25692987e+00, -7.68859694e+00,
        -5.19404219e+00, -2.47936767e+00, -6.07948333e-01, -1.39115180e-01,
        -2.90615182e-01, 5.97112915e-01, 3.77091904e+00, 7.17958236e+00,
        8.50566481e+00, 6.92594573e+00, 3.76719699e+00, 1.38672233e+00,
        1.14491558e+00, 2.47008298e+00, 3.68895320e+00, 3.55463037e+00,
        2.15169896e+00, 7.16172655e-01, 4.58130638e-01, 1.31722645e+00,
        2.08017814e+00, 1.73698314e+00, 3.66421390e-01, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.41608952e+00,
        3.91872725e+00, 6.08751103e+00, 6.04879589e+00, 3.34005262e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        1.32257678e+00, 4.72982449e+00, 5.65624377e+00, 3.74610313e+00,
        2.79267497e-01, -1.61571947e+00, -2.61760973e+00, -2.76295228e+00,
        -2.46754501e+00, -2.27912450e+00, -2.49834149e+00, -3.08123496e+00,
        -3.77081696e+00, -4.26182369e+00, -4.32814157e+00, -3.92465771e+00,
        -3.18784204e+00, -2.30089569e+00, -1.36585055e+00, -4.14179880e-01,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        -2.27746914e-01, -2.70148968e+00, -3.76415772e+00, -3.18400597e+00,
        -1.61071020e+00, -1.55937047e-01, 0.00000000e+00, -3.48135704e-01,
        -1.20173785e+00, -1.41164098e+00, -8.22789836e-01, 2.22972608e-02,
        0.00000000e+00, 0.00000000e+00, 1.11826215e+00, 3.42190067e+00,
        6.43880255e+00, 7.73939551e+00, 6.02642177e+00, 2.15551882e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        1.30262708e+00, 2.86790215e+00, 2.69095371e+00, 1.30596104e+00,
        6.27355622e-02, -1.46409693e-01, -1.61596825e-03, 2.02368230e-01,
        7.76880971e-02, -1.13943983e-01, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, -1.20894403e-01, -9.67454370e-02,
        -3.75553448e-01, -6.69880393e-01, -7.81524379e-01, -8.20393160e-01,
        -1.19541101e+00, -2.17352648e+00, -3.38963807e+00, -3.97901667e+00,
        -3.24152409e+00, -1.21249778e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, -1.71167570e+00,
        -2.63258535e+00, -2.11381589e+00, -7.29071480e-01, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.55809454e-01,
        -1.33550998e-01, 1.30622371e-01, 5.14772296e-01, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 5.87400070e-01, 2.52352092e+00, 3.18829790e+00,
        2.12154931e+00, 1.96463435e-01, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, -1.71590151e-01,
        8.49398637e-02, -3.92130275e-03, -3.10907319e-01, -7.32520442e-01,
        -1.04949045e+00, -1.03758926e+00, -5.57304261e-01, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        -2.22986515e-01, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 1.36141783e-01, -1.62089253e-01,
        -6.56719131e-02, 3.67462355e-01, 6.11527272e-01, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, -7.23117550e-02, -4.63871628e-02,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00, 0.00000000e+00, 8.90041966e-01, 1.14524591e+00,
        4.21896347e-01, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
        0.00000000e+00])
        # fmt: on
    )
| 38,435 | 25,441 |
#!/usr/bin/python
from pynma import PyNMA
| 45 | 20 |
import iterlib
from distutils.core import setup

# All distribution metadata is sourced from iterlib's own dunder attributes,
# so the package description lives in exactly one place (the module itself).
_metadata = dict(
    name=iterlib.__name__,
    version=iterlib.__version__,
    description=iterlib.__doc__,
    author=iterlib.__author__,
    author_email=iterlib.__email__,
    url=iterlib.__url__,
    packages=[iterlib.__name__],
)

setup(**_metadata)
| 285 | 92 |
import cpauto
# Log in to a Check Point management server and fetch all policy packages.
# NOTE(review): credentials and the server address are hard-coded; move them
# to configuration/environment before real use.
session = cpauto.CoreClient('admin', 'cisco123', '192.168.0.250')
output = session.login()
policies = cpauto.PolicyPackage(session)
output = policies.show_all()
outputdictfromserver = output.json()
# NOTE(review): this bare expression has no effect (the value is discarded);
# presumably it was meant to be printed or assigned.
outputdictfromserver['packages']
# NOTE(review): `counter` is never used afterwards.
counter=0
# Python 2 print statements: this script does not run under Python 3.
print outputdictfromserver
output = session.logout()
print output.status_code
| 364 | 121 |
#!/usr/bin/env python
#
# Copyright 2016 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Create the asset and upload it."""
import os
import subprocess
import sys
import tempfile
FILE_DIR = os.path.dirname(os.path.abspath(__file__))
ASSET = os.path.basename(FILE_DIR)


def main():
  """Download the current asset, let the user edit it, then re-upload it."""
  sk = os.path.realpath(os.path.join(
      FILE_DIR, os.pardir, os.pardir, os.pardir, os.pardir, 'bin', 'sk'))
  if os.name == 'nt':
    sk += '.exe'
  if not os.path.isfile(sk):
    # Bug fix: the original message contained a bare %s placeholder that was
    # never interpolated; fill in the path we actually looked at.
    raise Exception(
        '`sk` not found at %s; maybe you need to run bin/fetch-sk?' % sk)
  # CIPD is picky about where files are downloaded. Use a subdirectory of the
  # asset dir rather than /tmp.
  tmp_prefix = os.path.join(FILE_DIR, '.')
  with tempfile.TemporaryDirectory(prefix=tmp_prefix) as tmp:
    subprocess.check_call([sk, 'asset', 'download', ASSET, tmp], cwd=FILE_DIR)
    # Allow the user to modify the contents of the target dir.
    input('Previous SKImage contents have been downloaded. Please make '
          'your desired changes in the following directory and press enter '
          'to continue:\n%s\n' % tmp)
    subprocess.check_call([sk, 'asset', 'upload', '--in', tmp, ASSET],
                          cwd=FILE_DIR)


if __name__ == '__main__':
  main()
| 1,315 | 451 |
from mxnet import nd
# 2-D cross-correlation
def corr2d(X, K):
    """Slide kernel K over X and return the 2-D cross-correlation map."""
    kh, kw = K.shape
    out = nd.zeros((X.shape[0] - kh + 1, X.shape[1] - kw + 1))
    for r in range(out.shape[0]):
        for c in range(out.shape[1]):
            window = X[r:r + kh, c:c + kw]
            out[r, c] = (window * K).sum()
    return out
# 2-D cross-correlation over a multi-channel input
def corr2d_multi_in(X, K):
    """Cross-correlate each input channel with its kernel channel, then sum."""
    # Walk axis 0 (the channel axis) of both X and K, correlate each pair,
    # and add the per-channel results together with nd.add_n.
    per_channel = [corr2d(x, k) for x, k in zip(X, K)]
    return nd.add_n(*per_channel)
# 2-D cross-correlation with multi-channel input and multi-channel output
def corr2d_multi_in_out(X, K):
    """Apply every output-channel kernel of K to X and stack the results."""
    # Axis 0 of K enumerates output channels; each slice is a full
    # multi-input kernel.
    outputs = [corr2d_multi_in(X, k) for k in K]
    return nd.stack(*outputs)
# Multi-in/multi-out 2-D cross-correlation with a 1x1 kernel
def corr2d_multi_in_out_1x1(X, K):
    """1x1 convolution expressed as a matrix product over the channel axis."""
    c_i, h, w = X.shape
    c_o = K.shape[0]
    flat_input = X.reshape((c_i, h * w))
    weight = K.reshape((c_o, c_i))
    # Matrix multiplication of a fully connected layer.
    product = nd.dot(weight, flat_input)
    return product.reshape((c_o, h, w))
if __name__ == '__main__':
    X = nd.array([[[0, 1, 2], [3, 4, 5], [6, 7, 8]],
                  [[1, 2, 3], [4, 5, 6], [7, 8, 9]]])
    K = nd.array([[[0, 1], [2, 3]], [[1, 2], [3, 4]]])
    print(corr2d_multi_in(X, K))
    # Build a 3-output-channel kernel by stacking K with K+1 and K+2
    # (K+1 adds 1 to every element of K; K+2 likewise).
    K = nd.stack(K, K + 1, K + 2)
    print(K.shape)
    print(corr2d_multi_in_out(X, K))
    # For 1x1 kernels, corr2d_multi_in_out_1x1 and corr2d_multi_in_out
    # are equivalent.
    X = nd.random.uniform(shape=(3, 3, 3))
    K = nd.random.uniform(shape=(2, 3, 1, 1))
    Y1 = corr2d_multi_in_out_1x1(X, K)
    Y2 = corr2d_multi_in_out(X, K)
    print((Y1 - Y2).norm().asscalar() < 1e-6)
| 1,487 | 967 |
import importlib.util # https://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path
import json
from pathlib import Path
from fastapi import APIRouter, Depends
from fastapi.responses import HTMLResponse
from fastapi import Request, Form
from fastapi.templating import Jinja2Templates
templates = Jinja2Templates(directory="app/templates")
from app.util.login import get_current_username
router = APIRouter(dependencies=[Depends(get_current_username)])
@router.get("/sentences")
async def create(request: Request):
    """Render the example-sentence editor for the language being built."""
    new_lang = Path.cwd() / "new_lang"
    entries = list(new_lang.iterdir())
    if not entries:
        return templates.TemplateResponse(
            "error_please_create.html", {"request": request}
        )
    # Load examples.py straight from disk so on-disk edits are picked up.
    examples_path = entries[0] / "examples.py"
    spec = importlib.util.spec_from_file_location("sentences", str(examples_path))
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    sentences = module.sentences
    #ltr or rtl
    nlp = get_nlp()
    writing_system = nlp.vocab.writing_system['direction']
    return templates.TemplateResponse(
        "sentences.html", {"request": request, "sentences": sentences, "writing_system":writing_system}
    )
@router.post("/update_sentences")
async def update_sentences(request: Request, sentences: str = Form(...)):
    """Persist the submitted sentence list into the language's examples.py."""
    sentences = json.loads(sentences)
    new_lang = Path.cwd() / "new_lang"
    if new_lang.exists() and len(list(new_lang.iterdir())) > 0:
        name = list(new_lang.iterdir())[0].name
        examples_file = Path.cwd() / "new_lang" / name / "examples.py"
        examples = examples_file.read_text()
        # Splice the new literals between "sentences = [" and the first "]".
        start = examples.find("sentences = [") + 13
        end = examples.find("]")
        pieces = []
        for sentence in sentences:
            sentence = sentence.replace('&nbsp','').replace(' ','').replace('\n','').strip() #bug from the template
            pieces.append('"""' + sentence + '""",')
        examples_file.write_text(examples[:start] + "".join(pieces) + examples[end:])
    return sentences
def get_nlp():
    """Instantiate the in-progress language's Language subclass.

    Imports ``new_lang.<name>`` and returns an instance of its class named
    ``<Name>``.  If the package cannot be imported because of a syntax
    error, a redirect to the file editor is returned instead.  NOTE(review):
    callers expecting an nlp object must cope with that redirect value.
    """
    new_lang = Path.cwd() / "new_lang"
    lang_name = list(new_lang.iterdir())[0].name
    try:
        mod = __import__(f"new_lang.{lang_name}", fromlist=[lang_name.capitalize()])
    except SyntaxError:  # Unable to load __init__ due to syntax error
        # Bug fix: RedirectResponse was referenced without ever being
        # imported, which turned the intended redirect into a NameError.
        from fastapi.responses import RedirectResponse
        # redirect /edit?file_name=examples.py
        return RedirectResponse(url="/edit?file_name=tokenizer_exceptions.py")
    cls = getattr(mod, lang_name.capitalize())
    nlp = cls()
    return nlp
| 2,777 | 842 |
"""Tests for pythontexfigures LaTeX package options."""
from pathlib import Path
import pytest
from util import build # noqa: I900
DOCUMENT_TEMPLATE = r"""
\documentclass{article}
%(pre)s
\usepackage{pgf}
\usepackage{pythontex}
\usepackage[%(options)s]{pythontexfigures}
%(post)s
\begin{document}
%(body)s
\end{document}
"""
BODY = r"\pyfig{test.py}"
SCRIPT = """
import matplotlib
def main():
open("result.txt", "w").write(str(matplotlib.rcParams["font.size"]))
"""
def document(options="", pre="", post="", body=BODY):
    """Fill in LaTeX document template."""
    substitutions = {"options": options, "pre": pre, "post": post, "body": body}
    return DOCUMENT_TEMPLATE % substitutions
def test_build_default(in_temp_dir):
    """Building a minimal document with one figure and default options works."""
    # Figure script first, then the document that references it.
    Path("test.py").write_text(SCRIPT)
    Path("main.tex").write_text(document())
    build("main.tex")
def test_missing_figure(in_temp_dir):
    """Test build fails if a figure script is missing."""
    # The document references test.py, which is deliberately never written.
    tex = document()
    Path("main.tex").write_text(tex)
    with pytest.raises(AssertionError):
        build("main.tex")
def test_build_in_subfolder(in_temp_dir):
    """Test keeping scripts in a subfolder."""
    scripts = Path("scripts")
    scripts.mkdir()
    (scripts / "test.py").write_text(SCRIPT)
    Path("main.tex").write_text(document(post=r"\pythontexfigurespath{scripts}"))
    build("main.tex")
@pytest.mark.parametrize(
    "name,expected",
    (
        ("normalsize", 10),
        ("small", 9),
        ("footnotesize", 8),
        ("scriptsize", 7),
        ("6", 6),
    ),
)
def test_font_size(in_temp_dir, name: str, expected: float):
    """Test building with different font sizes."""
    Path("test.py").write_text(SCRIPT)
    Path("main.tex").write_text(document(options="fontsize=" + name))
    build("main.tex")
    # The figure script records matplotlib's effective font size.
    observed = float(Path("result.txt").read_text())
    assert observed == expected
def test_relative(in_temp_dir):
    """Test building with the relative option."""
    tex_dir = Path("tex")
    tex_dir.mkdir()
    (tex_dir / "body.tex").write_text(BODY)
    (tex_dir / "test.py").write_text(SCRIPT)
    main_tex = document(
        options="relative",
        pre=r"\usepackage[abspath]{currfile}",
        body=r"\include{tex/body.tex}",
    )
    Path("main.tex").write_text(main_tex)
    build("main.tex")
def test_relative_subfolder(in_temp_dir):
    """Test building with the relative option and scripts in a subfolder."""
    scripts_dir = Path("tex/scripts")
    scripts_dir.mkdir(parents=True)
    Path("tex/body.tex").write_text(BODY)
    (scripts_dir / "test.py").write_text(SCRIPT)
    main_tex = document(
        options="relative",
        pre=r"\usepackage[abspath]{currfile}",
        post=r"\pythontexfigurespath{scripts}",
        body=r"\include{tex/body.tex}",
    )
    Path("main.tex").write_text(main_tex)
    build("main.tex")
| 2,797 | 938 |
# -*- coding:ascii -*-
from mako import runtime, filters, cache
UNDEFINED = runtime.UNDEFINED
__M_dict_builtin = dict
__M_locals_builtin = locals
_magic_number = 9
_modified_time = 1397084402.354378
_enable_loop = True
_template_filename = 'C:\\Users\\Jordan Carlson\\Desktop\\MyStuff\\manager\\templates/dash.html'
_template_uri = 'dash.html'
_source_encoding = 'ascii'
import os, os.path, re
_exports = ['content']
def _mako_get_namespace(context, name):
    """Return the cached namespace for ``name``, generating on a cache miss.

    Mako-generated helper -- regenerate from the template rather than
    editing by hand.
    """
    try:
        return context.namespaces[(__name__, name)]
    except KeyError:
        _mako_generate_namespaces(context)
        return context.namespaces[(__name__, name)]
def _mako_generate_namespaces(context):
    # No <%namespace> tags are declared in this template, so this is a no-op.
    pass
def _mako_inherit(template, context):
    """Set up template inheritance from base.htm (Mako-generated)."""
    _mako_generate_namespaces(context)
    return runtime._inherit_from(context, 'base.htm', _template_uri)
def render_body(context,**pageargs):
    """Render the template body (Mako-generated code; do not edit by hand)."""
    __M_caller = context.caller_stack._push_frame()
    try:
        __M_locals = __M_dict_builtin(pageargs=pageargs)
        request = context.get('request', UNDEFINED)
        def content():
            return render_content(context._locals(__M_locals))
        __M_writer = context.writer()
        # SOURCE LINE 1
        __M_writer('<!--## This is the base page for both the dashboards. Sprouting off of this one will be a manager and an admin page with minute\r\n')
        # SOURCE LINE 3
        __M_writer('\r\n')
        # SOURCE LINE 6
        __M_writer(' \r\n')
        # SOURCE LINE 8
        __M_writer(' ')
        __M_writer('\r\n\r\n')
        # Render our own content block only if no child template overrides it.
        if 'parent' not in context._data or not hasattr(context._data['parent'], 'content'):
            context['self'].content(**pageargs)
        # SOURCE LINE 23
        __M_writer('<!--ends content-->\r\n')
        return ''
    finally:
        context.caller_stack._pop_frame()
def render_content(context,**pageargs):
    """Render the ``content`` block (Mako-generated code; do not edit by hand)."""
    __M_caller = context.caller_stack._push_frame()
    try:
        request = context.get('request', UNDEFINED)
        def content():
            return render_content(context)
        __M_writer = context.writer()
        # SOURCE LINE 10
        __M_writer(' ')
        __M_writer('\r\n <h2>Welcome back, ')
        # SOURCE LINE 11
        __M_writer(str(request.user.first_name))
        __M_writer(' ')
        __M_writer(str(request.user.last_name))
        __M_writer("!</h2></br>\r\n <p>Use the left-side navigation bar to view connecting pages and options by clicking on each section heading. To view your own account information, log out, or return to your dashboard, use the dropdown menu in the upper right hand corner.</p>\r\n\r\n <div class='vertical_spacer6'></div>\r\n <div class='vertical_spacer6'></div>\r\n <div class='vertical_spacer6'></div>\r\n <div class='vertical_spacer6'></div>\r\n <div class='vertical_spacer6'></div>\r\n <div class='vertical_spacer6'></div>\r\n <div class='vertical_spacer6'></div>\r\n <div class='vertical_spacer6'></div>\r\n\r\n")
        return ''
    finally:
        context.caller_stack._pop_frame()
| 3,030 | 1,043 |
from typing import Any
class KEY:
    """Hashable wrapper around an arbitrary ``(*args, **kwargs)`` signature.

    Intended as a cache key: two KEYs compare equal when their canonicalized
    argument structures hash identically.
    """

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def __eq__(self, obj):
        # Equality is deliberately defined as hash equality, so a KEY can be
        # matched against anything hashable.
        return hash(self) == hash(obj)

    def __hash__(self):
        def _canon(value: Any):
            # Recursively reduce tuples/dicts; stringify everything else.
            if isinstance(value, tuple):
                return tuple(_canon(item) for item in value)
            if isinstance(value, dict):
                return tuple(_canon(pair) for pair in value.items())
            if hasattr(value, '__dict__'):
                return str(vars(value))
            return str(value)

        return hash(_canon(self.args) + _canon(self.kwargs))
| 636 | 180 |
import torch.nn as nn
class ContextModule(nn.Module):
    """An ``nn.Module`` that carries a shared context object.

    Assigning to ``ctx`` propagates the new value to every ContextModule in
    the submodule tree (including this instance itself).
    """

    def __init__(self, ctx=None):
        super().__init__()
        self._ctx = ctx

    @property
    def ctx(self):
        return self._ctx

    @ctx.setter
    def ctx(self, ctx):
        # self.modules() yields self first and then all descendants, so every
        # ContextModule below this one receives the same context.
        targets = (m for m in self.modules() if isinstance(m, ContextModule))
        for target in targets:
            target._ctx = ctx
import logging
from auctions.domain.entities.auction import Auction
from auctions.domain.value_objects.money import Money
from auctions.domain.value_objects.currency import USD
logger = logging.getLogger()
def test_auction_class():
    """Smoke-test Auction construction with a USD starting price."""
    starting_price = Money(USD, '100.00')
    auction = Auction(1, '도자기', starting_price, [])
    logger.info(f"{auction}")
from . import test_wave_parsing
| 34 | 13 |
from fastapi import FastAPI
from app.routes.auth import router as auth_router
from app.routes.user import router as user_router
from app.routes.posts import router as posts_router
app = FastAPI()

# Mount every feature router on the application.
for _router in (auth_router, user_router, posts_router):
    app.include_router(_router)
| 296 | 89 |
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from collections import OrderedDict
from collections import deque
from sklearn.neighbors import KernelDensity
from scipy.stats import entropy as scientropy
import random
class NoveltyMemory:
    """Pays a fixed bonus the first time each state is seen, 0 afterwards."""

    def __init__(self, bonus=0):
        self.bonus = bonus
        self.memory = []

    def __call__(self, state):
        return self.forward(state)

    def forward(self, state):
        """Return ``bonus`` for a novel state (and remember it), else 0."""
        if state not in self.memory:
            self.memory.append(state)
            return self.bonus
        return 0

    def state_dict(self):
        return self.memory

    def load_state_dict(self, state_dict):
        self.memory = state_dict
class CountMemory:
    """A simple state counter."""

    def __init__(self):
        self.memory = dict()

    def __call__(self, state):
        return self.forward(state)

    def forward(self, state):
        """Increment and return the visit count for ``state``."""
        new_count = self.memory.get(state, 0) + 1
        self.memory[state] = new_count
        return new_count

    def state_dict(self):
        return self.memory

    def load_state_dict(self, state_dict):
        self.memory = state_dict
class EntropyMemory:
    """Running estimate of policy entropy from observed action counts."""

    def __init__(self, initial_bins=None, initial_count=1, base=None):
        # Seed the count model; N is the normalizing total.
        self.N = 1 if initial_bins is None else len(initial_bins)
        self.base = base
        self.initial_count = initial_count
        self.memory = dict()
        # Optionally pre-populate the bins.
        if initial_bins is not None:
            for bin_value in initial_bins:
                self.memory[bin_value] = self.initial_count

    def __call__(self, action):
        return self.forward(action)

    def forward(self, action):
        """Record ``action`` and return the current entropy estimate."""
        if action not in self.memory:
            self.memory[action] = self.initial_count
        self.N += 1
        self.memory[action] += 1
        # Entropy of the count frequencies (scipy renormalizes them).
        self.probs = [(count / self.N) for count in self.memory.values()]
        return scientropy(np.asarray(self.probs), base=self.base)

    def state_dict(self):
        return self.memory

    def load_state_dict(self, state_dict):
        self.memory = state_dict
class ModulusMemory:
    """A generic ring-buffer memory with a finite capacity."""

    def __init__(self, capacity):
        self.capacity = capacity
        self.memory = []
        self.position = 0

    def push(self, *args):
        """Save a transition, overwriting the oldest once full."""
        if len(self.memory) < self.capacity:
            self.memory.append(None)  # grow until capacity is reached
        self.memory[self.position] = args
        # Wrap the write cursor -- hence "modulus" memory.
        self.position = (self.position + 1) % self.capacity

    def sample(self, batch_size):
        """Return ``batch_size`` stored transitions, drawn without replacement."""
        return random.sample(self.memory, batch_size)

    def reset(self):
        self.memory = []
        self.position = 0

    def state_dict(self):
        return {'position': self.position, 'memory': self.memory}

    def load_state_dict(self, state_dict):
        self.memory = state_dict['memory']
        self.position = state_dict['position']

    def __len__(self):
        return len(self.memory)
class DiscreteDistribution:
    """An empirical discrete distribution backed by per-bin counts."""

    def __init__(self, initial_bins=None, initial_count=1):
        # Count model: N normalizes the per-bin counts into probabilities.
        self.N = 0
        self.initial_count = initial_count
        self.count = OrderedDict()
        # Optionally pre-populate the bins.
        if initial_bins is not None:
            for bin_value in initial_bins:
                self.count[bin_value] = self.initial_count

    def __len__(self):
        return len(self.count)

    def __call__(self, x):
        return self.forward(x)

    def update(self, x):
        """Record one observation of ``x``."""
        if x not in self.count:
            self.count[x] = self.initial_count
        self.count[x] += 1
        self.N += 1

    def forward(self, x):
        """Return the empirical probability of ``x`` (0 for unseen bins)."""
        if x not in self.count or self.N == 0:
            return 0
        return self.count[x] / self.N

    def keys(self):
        return list(self.count.keys())

    def values(self):
        return list(self.count.values())

    def state_dict(self):
        return self.count

    def load_state_dict(self, state_dict):
        self.count = state_dict
class Kernel:
    """A continous distribution, estimated using a kernel

    NOTE: This is a thin wrapper around KernelDensity from the sklearn
    library.

    For information on its hyperparamers see,
    https://scikit-learn.org/stable/modules/density.html#kernel-density
    """

    def __init__(self, **kernel_kwargs):
        self.dist = KernelDensity(**kernel_kwargs)
        self.X = []

    def update(self, x):
        """Add a sample and refit the density over everything seen so far."""
        self.X.append(x)
        # X -> Xvec : (n_samples, n_features), per the sklearn convention.
        stacked = np.vstack(self.X)
        if stacked.ndim == 1:
            stacked = np.expand_dims(stacked, 1)
        elif stacked.ndim > 2:
            raise ValueError("x must be a scalar or 1d list/array.")
        self.Xvec = stacked
        # Refit over all data with each new sample.  Not efficient, but a
        # limitation of the sklearn API (it seems).
        self.dist.fit(self.Xvec)

    def forward(self, x):
        """Return the estimated density p(x) for a scalar or 1d sample."""
        # Reshape to the expected (1, n_features) -- one query sample.
        query = np.asarray(x)
        if query.ndim == 0:
            query = query.reshape(1, 1)
        elif query.ndim == 1:
            query = np.expand_dims(query, 0)
        # score_samples returns log(p); exponentiate to get p.
        return float(np.exp(self.dist.score_samples(query)))
# # class ConditionalCount(Count):
# # """A conditional discrete distribution."""
# # def __init__(self):
# # self.Ns = []
# # self.conds = []
# # self.counts = []
# # def __call__(self, x, cond):
# # return self.forward(x, cond)
# # def keys(self, cond):
# # if cond in self.conds:
# # i = self.conds.index(cond)
# # return list(self.counts[i].keys())
# # else:
# # return []
# # def update(self, x, cond):
# # # Add cond?
# # if cond not in self.conds:
# # self.conds.append(cond)
# # self.counts.append(OrderedDict())
# # self.Ns.append(0)
# # # Locate cond.
# # i = self.conds.index(cond)
# # # Update counts for cond
# # if x in self.counts[i]:
# # self.counts[i][x] += 1
# # else:
# # self.counts[i][x] = 1
# # # Update cond count normalizer
# # self.Ns[i] += 1
# # def forward(self, x, cond):
# # # Locate cond.
# # if cond not in self.conds:
# # return 0
# # else:
# # i = self.conds.index(cond)
# # # Get p(x|cond)
# # if x not in self.counts[i]:
# # return 0
# # elif self.Ns[i] == 0:
# # return 0
# # else:
# # return self.counts[i][x] / self.Ns[i]
# # def probs(self, xs, conds):
# # p = []
# # for x, cond in zip(xs, conds):
# # p.append(self.forward(x, cond))
# # return p
# # def values(self, xs, conds):
# # return self.probs(xs, conds)
# class ConditionalMean(Memory):
# """An averaging memory."""
# def __init__(self):
# self.conds = []
# self.means = []
# self.N = 1
# def __call__(self, x, cond):
# return self.forward(x, cond)
# def update(self, x, cond):
# # Add cond?
# if cond not in self.conds:
# self.conds.append(cond)
# self.deltas.append(x)
# # Locate cond.
# i = self.conds.index(cond)
# # Update the mean
# delta = x - self.means[i]
# self.means[i] += delta / self.N
# # Update count
# self.N += 1
# def forward(self, x, cond):
# # Locate cond.
# if cond not in self.conds:
# return 0
# else:
# i = self.conds.index(cond)
# # Get the mean
# return self.means[i]
# def values(self, xs, conds):
# p = []
# for x, cond in zip(xs, conds):
# p.append(self.forward(x, cond))
# return p
# class ConditionalDeviance(Memory):
# """A memory for deviance."""
# def __init__(self):
# self.mean = ConditionalMean()
# def __call__(self, x, cond):
# return self.forward(x, cond)
# def update(self, x, cond):
# self.mean.update(x, cond)
# def forward(self, x, cond):
# return x - self.mean(x, cond)
# def values(self, xs, conds):
# p = []
# for x, cond in zip(xs, conds):
# p.append(self.forward(x, cond))
# return p
# class ConditionalDerivative(Memory):
# """A memory for change."""
# def __init__(self, delta_t=1):
# self.conds = []
# self.deltas = []
# self.delta_t = delta_t
# if self.delta_t < 0:
# raise ValueError("delta_t must be positive")
# def __call__(self, x, cond):
# return self.forward(x, cond)
# def update(self, x, cond):
# # Add cond?
# if cond not in self.conds:
# self.conds.append(cond)
# self.deltas.append(x)
# # Locate cond.
# i = self.conds.index(cond)
# # Update counts for cond
# self.deltas[i] = x - self.deltas[i]
# def forward(self, x, cond):
# # Locate cond.
# if cond not in self.conds:
# return 0
# else:
# i = self.conds.index(cond)
# # Est. the dirative
# return self.deltas[i] / self.delta_t
# def values(self, xs, conds):
# p = []
# for x, cond in zip(xs, conds):
# p.append(self.forward(x, cond))
# return p
# class EfficientConditionalCount(Memory):
# """Forget x when over-capacity"""
# def __init__(self, capacity=1):
# if capacity < 1:
# raise ValueError("capacity must be >= 1")
# self.capacity = capacity
# self.conds = []
# self.datas = []
# def __call__(self, x, cond):
# return self.forward(x, cond)
# def update(self, x, cond):
# # Add cond?
# if cond not in self.conds:
# self.conds.append(cond)
# self.datas.append(deque(maxlen=self.capacity))
# # Locate cond.
# i = self.conds.index(cond)
# # Update
# self.datas[i].append(x)
# def forward(self, x, cond):
# # Locate cond.
# if cond not in self.conds:
# return 0
# else:
# i = self.conds.index(cond)
# count = self.datas[i].count(x)
# return count / self.capacity
# def probs(self, xs, conds):
# p = []
# for x, cond in zip(xs, conds):
# p.append(self.forward(x, cond))
# return p
# def values(self, xs, conds):
# return self.probs(xs, conds)
# class ForgetfulConditionalCount(Memory):
# """Forget conditions when over-capacity"""
# def __init__(self, capacity=1):
# if capacity < 1:
# raise ValueError("capacity must be >= 1")
# self.capacity = capacity
# self.Ns = deque(maxlen=self.capacity)
# self.conds = deque(maxlen=self.capacity)
# self.counts = deque(maxlen=self.capacity)
# def __call__(self, x, cond):
# return self.forward(x, cond)
# def update(self, x, cond):
# # Add cond?
# if cond not in self.conds:
# self.conds.append(cond)
# self.counts.append(OrderedDict())
# self.Ns.append(0)
# # Locate cond.
# i = self.conds.index(cond)
# # Update counts for cond
# if x in self.counts[i]:
# self.counts[i][x] += 1
# else:
# self.counts[i][x] = 1
# # Update cond count normalizer
# self.Ns[i] += 1
# def forward(self, x, cond):
# # Locate cond.
# if cond not in self.conds:
# return 0
# else:
# i = self.conds.index(cond)
# # Get p(x|cond)
# if x not in self.counts[i]:
# return 0
# elif self.Ns[i] == 0:
# return 0
# else:
# return self.counts[i][x] / self.Ns[i]
# def probs(self, xs, conds):
# p = []
# for x, cond in zip(xs, conds):
# p.append(self.forward(x, cond))
# return p
# def values(self, xs, conds):
# return self.probs(xs, conds)
# ----------------------------------------------------------------------------
# """
# TODO: move to Memory API; how to train/update this VAE?
# Implements Masked AutoEncoder for Density Estimation, by Germain et al. 2015
# Re-implementation by Andrej Karpathy based on https://arxiv.org/abs/1502.03509
# """
# class MaskedLinear(nn.Linear):
# """ same as Linear except has a configurable mask on the weights """
# def __init__(self, in_features, out_features, bias=True):
# super().__init__(in_features, out_features, bias)
# self.register_buffer('mask', torch.ones(out_features, in_features))
# def set_mask(self, mask):
# self.mask.data.copy_(torch.from_numpy(mask.astype(np.uint8).T))
# def forward(self, input):
# return F.linear(input, self.mask * self.weight, self.bias)
# class MADE(nn.Module):
# def __init__(self,
# nin,
# hidden_sizes,
# nout,
# num_masks=1,
# natural_ordering=False):
# """
# nin: integer; number of inputs
# hidden sizes: a list of integers; number of units in hidden layers
# nout: integer; number of outputs, which usually collectively parameterize some kind of 1D distribution
# note: if nout is e.g. 2x larger than nin (perhaps the mean and std), then the first nin
# will be all the means and the second nin will be stds. i.e. output dimensions depend on the
# same input dimensions in "chunks" and should be carefully decoded downstream appropriately.
# the output of running the tests for this file makes this a bit more clear with examples.
# num_masks: can be used to train ensemble over orderings/connections
# natural_ordering: force natural ordering of dimensions, don't use random permutations
# """
# super().__init__()
# self.nin = nin
# self.nout = nout
# self.hidden_sizes = hidden_sizes
# assert self.nout % self.nin == 0, "nout must be integer multiple of nin"
# # define a simple MLP neural net
# self.net = []
# hs = [nin] + hidden_sizes + [nout]
# for h0, h1 in zip(hs, hs[1:]):
# self.net.extend([
# MaskedLinear(h0, h1),
# nn.ReLU(),
# ])
# self.net.pop() # pop the last ReLU for the output layer
# self.net = nn.Sequential(*self.net)
# # seeds for orders/connectivities of the model ensemble
# self.natural_ordering = natural_ordering
# self.num_masks = num_masks
# self.seed = 0 # for cycling through num_masks orderings
# self.m = {}
# self.updatemasks() # builds the initial self.m connectivity
# # note, we could also precompute the masks and cache them, but this
# # could get memory expensive for large number of masks.
# def updatemasks(self):
# if self.m and self.num_masks == 1:
# return # only a single seed, skip for efficiency
# L = len(self.hidden_sizes)
# # fetch the next seed and construct a random stream
# rng = np.random.RandomState(self.seed)
# self.seed = (self.seed + 1) % self.num_masks
# # sample the order of the inputs and the connectivity of all neurons
# self.m[-1] = np.arange(
# self.nin) if self.natural_ordering else rng.permutation(self.nin)
# for l in range(L):
# self.m[l] = rng.randint(self.m[l - 1].min(),
# self.nin - 1,
# size=self.hidden_sizes[l])
# # construct the mask matrices
# masks = [
# self.m[l - 1][:, None] <= self.m[l][None, :] for l in range(L)
# ]
# masks.append(self.m[L - 1][:, None] < self.m[-1][None, :])
# # handle the case where nout = nin * k, for integer k > 1
# if self.nout > self.nin:
# k = int(self.nout / self.nin)
# # replicate the mask across the other outputs
# masks[-1] = np.concatenate([masks[-1]] * k, axis=1)
# # set the masks in all MaskedLinear layers
# layers = [l for l in self.net.modules() if isinstance(l, MaskedLinear)]
# for l, m in zip(layers, masks):
# l.set_mask(m)
# def forward(self, x):
# return self.net(x)
| 17,444 | 5,633 |
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 3 15:07:16 2017
@author: Suhas Somnath
"""
from __future__ import division, print_function, unicode_literals, absolute_import
import unittest
import os
import h5py
import numpy as np
import sys
sys.path.append("../../../pycroscopy/")
from pycroscopy.io.virtual_data import VirtualGroup, VirtualDataset
from pycroscopy.io.hdf_writer import HDFwriter
from pyUSID.io.hdf_utils import get_attr, get_h5_obj_refs # Until an elegant solution presents itself
class TestHDFWriter(unittest.TestCase):
@staticmethod
def __delete_existing_file(file_path):
if os.path.exists(file_path):
os.remove(file_path)
def test_init_invalid_input(self):
with self.assertRaises(TypeError):
_ = HDFwriter(4)
def test_init_path_non_existant_file_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
writer = HDFwriter(file_path)
self.assertIsInstance(writer, HDFwriter, "writer should be an HDFwriter")
writer.close()
os.remove(file_path)
def test_init_path_existing_file_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
h5_f = h5py.File(file_path)
h5_f.close()
# Existing h5 file
writer = HDFwriter(file_path)
self.assertIsInstance(writer, HDFwriter, "writer should be an HDFwriter")
writer.close()
os.remove(file_path)
def test_init_h5_handle_r_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
h5_f = h5py.File(file_path)
h5_f.close()
h5_f = h5py.File(file_path, mode='r')
# hdf handle but of mode r
with self.assertRaises(TypeError):
_ = HDFwriter(h5_f)
os.remove(file_path)
def test_init_h5_handle_r_plus_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
h5_f = h5py.File(file_path)
h5_f.close()
h5_f = h5py.File(file_path, mode='r+')
# open h5 file handle or mode r+
writer = HDFwriter(h5_f)
self.assertIsInstance(writer, HDFwriter, "writer should be an HDFwriter")
writer.close()
os.remove(file_path)
def test_init_h5_handle_w_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
h5_f = h5py.File(file_path)
h5_f.close()
h5_f = h5py.File(file_path, mode='w')
# open h5 file handle or mode w
writer = HDFwriter(h5_f)
self.assertIsInstance(writer, HDFwriter, "writer should be an HDFwriter")
writer.close()
os.remove(file_path)
def test_init_h5_handle_closed(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
h5_f = h5py.File(file_path)
h5_f.close()
# Existing h5 file but closed
with self.assertRaises(ValueError):
_ = HDFwriter(h5_f)
os.remove(file_path)
def test_simple_dset_write_success_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
dtype = np.uint16
dset_name = 'test'
data = np.random.randint(0, high=15, size=5, dtype=dtype)
microdset = VirtualDataset(dset_name, data)
writer = HDFwriter(h5_f)
h5_d = writer._create_simple_dset(h5_f, microdset)
self.assertIsInstance(h5_d, h5py.Dataset)
self.assertEqual(h5_d.parent, h5_f)
self.assertEqual(h5_d.name, '/' + dset_name)
self.assertEqual(h5_d.shape, data.shape)
self.assertTrue(np.allclose(h5_d[()], data))
self.assertEqual(h5_d.dtype, dtype)
os.remove(file_path)
def test_simple_dset_write_success_more_options_02(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
dset_name = 'test'
data = np.random.rand(16, 1024)
dtype = data.dtype
compression = 'gzip'
chunking=(1, 1024)
microdset = VirtualDataset(dset_name, data, dtype=dtype, compression=compression, chunking=chunking)
writer = HDFwriter(h5_f)
h5_d = writer._create_simple_dset(h5_f, microdset)
self.assertIsInstance(h5_d, h5py.Dataset)
self.assertEqual(h5_d.parent, h5_f)
self.assertEqual(h5_d.name, '/' + dset_name)
self.assertEqual(h5_d.shape, data.shape)
self.assertTrue(np.allclose(h5_d[()], data))
self.assertEqual(h5_d.dtype, dtype)
self.assertEqual(h5_d.compression, compression)
self.assertEqual(h5_d.chunks, chunking)
os.remove(file_path)
def test_simple_dset_write_success_more_options_03(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
dset_name = 'test'
data = np.random.rand(16, 1024)
dtype = np.float16
compression = 'gzip'
chunking=(1, 1024)
microdset = VirtualDataset(dset_name, data, dtype=dtype, compression=compression, chunking=chunking)
writer = HDFwriter(h5_f)
h5_d = writer._create_simple_dset(h5_f, microdset)
self.assertIsInstance(h5_d, h5py.Dataset)
self.assertEqual(h5_d.parent, h5_f)
self.assertEqual(h5_d.name, '/' + dset_name)
self.assertEqual(h5_d.shape, data.shape)
self.assertEqual(h5_d.dtype, dtype)
self.assertEqual(h5_d.compression, compression)
self.assertEqual(h5_d.chunks, chunking)
self.assertTrue(np.all(h5_d[()] - data < 1E-3))
os.remove(file_path)
def test_empty_dset_write_success_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
dset_name = 'test'
maxshape = (16, 1024)
microdset = VirtualDataset(dset_name, None, maxshape=maxshape)
writer = HDFwriter(h5_f)
h5_d = writer._create_empty_dset(h5_f, microdset)
self.assertIsInstance(h5_d, h5py.Dataset)
self.assertEqual(h5_d.parent, h5_f)
self.assertEqual(h5_d.name, '/' + dset_name)
self.assertEqual(h5_d.shape, maxshape)
self.assertEqual(h5_d.maxshape, maxshape)
# dtype is assigned automatically by h5py. Not to be tested here
os.remove(file_path)
def test_empty_dset_write_success_w_options_02(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
dset_name = 'test'
maxshape = (16, 1024)
chunking = (1, 1024)
compression = 'gzip'
dtype = np.float16
microdset = VirtualDataset(dset_name, None, maxshape=maxshape,
dtype=dtype, compression=compression, chunking=chunking)
writer = HDFwriter(h5_f)
h5_d = writer._create_empty_dset(h5_f, microdset)
self.assertIsInstance(h5_d, h5py.Dataset)
self.assertEqual(h5_d.parent, h5_f)
self.assertEqual(h5_d.name, '/' + dset_name)
self.assertEqual(h5_d.dtype, dtype)
self.assertEqual(h5_d.compression, compression)
self.assertEqual(h5_d.chunks, chunking)
self.assertEqual(h5_d.shape, maxshape)
self.assertEqual(h5_d.maxshape, maxshape)
os.remove(file_path)
def test_expandable_dset_write_success_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
dset_name = 'test'
maxshape = (None, 1024)
data = np.random.rand(1, 1024)
microdset = VirtualDataset(dset_name, data, maxshape=maxshape)
writer = HDFwriter(h5_f)
h5_d = writer._create_resizeable_dset(h5_f, microdset)
self.assertIsInstance(h5_d, h5py.Dataset)
self.assertEqual(h5_d.parent, h5_f)
self.assertEqual(h5_d.name, '/' + dset_name)
self.assertEqual(h5_d.shape, data.shape)
self.assertEqual(h5_d.maxshape, maxshape)
self.assertTrue(np.allclose(h5_d[()], data))
# Now test to make sure that the dataset can be expanded:
# TODO: add this to the example!
expansion_axis = 0
h5_d.resize(h5_d.shape[expansion_axis] + 1, axis=expansion_axis)
self.assertEqual(h5_d.shape, (data.shape[0]+1, data.shape[1]))
self.assertEqual(h5_d.maxshape, maxshape)
# Finally try checking to see if this new data is also present in the file
new_data = np.random.rand(1024)
h5_d[1] = new_data
data = np.vstack((np.squeeze(data), new_data))
self.assertTrue(np.allclose(h5_d[()], data))
os.remove(file_path)
# TODO: will have to check to see if the parent is correctly declared for the group
def test_group_create_non_indexed_simple_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
grp_name = 'test'
micro_group = VirtualGroup(grp_name)
writer = HDFwriter(h5_f)
h5_grp = writer._create_group(h5_f, micro_group)
self.assertIsInstance(h5_grp, h5py.Group)
self.assertEqual(h5_grp.parent, h5_f)
self.assertEqual(h5_grp.name, '/' + grp_name)
# self.assertEqual(len(h5_grp.items), 0)
os.remove(file_path)
def test_group_create_indexed_simple_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
grp_name = 'test_'
micro_group = VirtualGroup(grp_name)
writer = HDFwriter(h5_f)
h5_grp = writer._create_group(h5_f, micro_group)
self.assertIsInstance(h5_grp, h5py.Group)
self.assertEqual(h5_grp.parent, h5_f)
self.assertEqual(h5_grp.name, '/' + grp_name + '000')
# self.assertEqual(len(h5_grp.items), 0)
os.remove(file_path)
def test_group_create_root_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
grp_name = ''
micro_group = VirtualGroup(grp_name)
writer = HDFwriter(h5_f)
with self.assertRaises(ValueError):
_ = writer._create_group(h5_f, micro_group)
os.remove(file_path)
def test_group_create_indexed_nested_01(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
outer_grp_name = 'outer_'
micro_group = VirtualGroup(outer_grp_name)
writer = HDFwriter(h5_f)
h5_outer_grp = writer._create_group(h5_f, micro_group)
self.assertIsInstance(h5_outer_grp, h5py.Group)
self.assertEqual(h5_outer_grp.parent, h5_f)
self.assertEqual(h5_outer_grp.name, '/' + outer_grp_name + '000')
inner_grp_name = 'inner_'
micro_group = VirtualGroup(inner_grp_name)
h5_inner_grp = writer._create_group(h5_outer_grp, micro_group)
self.assertIsInstance(h5_inner_grp, h5py.Group)
self.assertEqual(h5_inner_grp.parent, h5_outer_grp)
self.assertEqual(h5_inner_grp.name, h5_outer_grp.name + '/' + inner_grp_name + '000')
os.remove(file_path)
def test_write_legal_reg_ref_multi_dim_data(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(5, 7)
h5_dset = writer._create_simple_dset(h5_f, VirtualDataset('test', data))
self.assertIsInstance(h5_dset, h5py.Dataset)
attrs = {'labels': {'even_rows': (slice(0, None, 2), slice(None)),
'odd_rows': (slice(1, None, 2), slice(None))}}
writer._write_dset_attributes(h5_dset, attrs.copy())
h5_f.flush()
# two atts point to region references. one for labels
self.assertEqual(len(h5_dset.attrs), 1 + len(attrs['labels']))
# check if the labels attribute was written:
self.assertTrue(np.all([x in list(attrs['labels'].keys()) for x in get_attr(h5_dset, 'labels')]))
expected_data = [data[:None:2], data[1:None:2]]
written_data = [h5_dset[h5_dset.attrs['even_rows']], h5_dset[h5_dset.attrs['odd_rows']]]
for exp, act in zip(expected_data, written_data):
self.assertTrue(np.allclose(exp, act))
os.remove(file_path)
def test_write_legal_reg_ref_multi_dim_data_2nd_dim(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(5, 3)
h5_dset = writer._create_simple_dset(h5_f, VirtualDataset('test', data))
self.assertIsInstance(h5_dset, h5py.Dataset)
attrs = {'labels': {'even_rows': (slice(None), slice(0, None, 2)),
'odd_rows': (slice(None), slice(1, None, 2))}}
writer._write_dset_attributes(h5_dset, attrs.copy())
h5_f.flush()
# two atts point to region references. one for labels
self.assertEqual(len(h5_dset.attrs), 1 + len(attrs['labels']))
# check if the labels attribute was written:
self.assertTrue(np.all([x in list(attrs['labels'].keys()) for x in get_attr(h5_dset, 'labels')]))
expected_data = [data[:, 0:None:2], data[:, 1:None:2]]
written_data = [h5_dset[h5_dset.attrs['even_rows']], h5_dset[h5_dset.attrs['odd_rows']]]
for exp, act in zip(expected_data, written_data):
self.assertTrue(np.allclose(exp, act))
os.remove(file_path)
def test_write_legal_reg_ref_one_dim_data(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(7)
h5_dset = writer._create_simple_dset(h5_f, VirtualDataset('test', data))
self.assertIsInstance(h5_dset, h5py.Dataset)
attrs = {'labels': {'even_rows': (slice(0, None, 2)),
'odd_rows': (slice(1, None, 2))}}
writer._write_dset_attributes(h5_dset, attrs.copy())
h5_f.flush()
# two atts point to region references. one for labels
self.assertEqual(len(h5_dset.attrs), 1 + len(attrs['labels']))
# check if the labels attribute was written:
self.assertTrue(np.all([x in list(attrs['labels'].keys()) for x in get_attr(h5_dset, 'labels')]))
expected_data = [data[:None:2], data[1:None:2]]
written_data = [h5_dset[h5_dset.attrs['even_rows']], h5_dset[h5_dset.attrs['odd_rows']]]
for exp, act in zip(expected_data, written_data):
self.assertTrue(np.allclose(exp, act))
os.remove(file_path)
def test_generate_and_write_reg_ref_legal(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(2, 7)
h5_dset = writer._create_simple_dset(h5_f, VirtualDataset('test', data))
self.assertIsInstance(h5_dset, h5py.Dataset)
attrs = {'labels': ['row_1', 'row_2']}
if sys.version_info.major == 3:
with self.assertWarns(UserWarning):
writer._write_dset_attributes(h5_dset, attrs.copy())
else:
writer._write_dset_attributes(h5_dset, attrs.copy())
h5_f.flush()
# two atts point to region references. one for labels
self.assertEqual(len(h5_dset.attrs), 1 + len(attrs['labels']))
# check if the labels attribute was written:
self.assertTrue(np.all([x in list(attrs['labels']) for x in get_attr(h5_dset, 'labels')]))
expected_data = [data[0], data[1]]
written_data = [h5_dset[h5_dset.attrs['row_1']], h5_dset[h5_dset.attrs['row_2']]]
for exp, act in zip(expected_data, written_data):
self.assertTrue(np.allclose(np.squeeze(exp), np.squeeze(act)))
os.remove(file_path)
def test_generate_and_write_reg_ref_illegal(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(3, 7)
h5_dset = writer._create_simple_dset(h5_f, VirtualDataset('test', data))
self.assertIsInstance(h5_dset, h5py.Dataset)
# with self.assertWarns(UserWarning):
writer._write_dset_attributes(h5_dset, {'labels': ['row_1', 'row_2']})
self.assertEqual(len(h5_dset.attrs), 0)
h5_f.flush()
os.remove(file_path)
def test_generate_and_write_reg_ref_illegal(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(2, 7)
h5_dset = writer._create_simple_dset(h5_f, VirtualDataset('test', data))
self.assertIsInstance(h5_dset, h5py.Dataset)
# with self.assertWarns(UserWarning):
with self.assertRaises(TypeError):
writer._write_dset_attributes(h5_dset, {'labels': [1, np.arange(3)]})
os.remove(file_path)
def test_write_illegal_reg_ref_too_many_slices(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(5, 7)
h5_dset = writer._create_simple_dset(h5_f, VirtualDataset('test', data))
self.assertIsInstance(h5_dset, h5py.Dataset)
attrs = {'labels': {'even_rows': (slice(0, None, 2), slice(None), slice(None)),
'odd_rows': (slice(1, None, 2), slice(None), slice(None))}}
with self.assertRaises(ValueError):
writer._write_dset_attributes(h5_dset, attrs.copy())
os.remove(file_path)
def test_write_illegal_reg_ref_too_few_slices(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(5, 7)
h5_dset = writer._create_simple_dset(h5_f, VirtualDataset('test', data))
self.assertIsInstance(h5_dset, h5py.Dataset)
attrs = {'labels': {'even_rows': (slice(0, None, 2)),
'odd_rows': (slice(1, None, 2))}}
with self.assertRaises(ValueError):
writer._write_dset_attributes(h5_dset, attrs.copy())
os.remove(file_path)
def test_write_reg_ref_slice_dim_larger_than_data(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(5, 7)
h5_dset = writer._create_simple_dset(h5_f, VirtualDataset('test', data))
self.assertIsInstance(h5_dset, h5py.Dataset)
attrs = {'labels': {'even_rows': (slice(0, 15, 2), slice(None)),
'odd_rows': (slice(1, 15, 2), slice(None))}}
writer._write_dset_attributes(h5_dset, attrs.copy())
h5_f.flush()
# two atts point to region references. one for labels
self.assertEqual(len(h5_dset.attrs), 1 + len(attrs['labels']))
# check if the labels attribute was written:
self.assertTrue(np.all([x in list(attrs['labels'].keys()) for x in get_attr(h5_dset, 'labels')]))
expected_data = [data[:None:2], data[1:None:2]]
written_data = [h5_dset[h5_dset.attrs['even_rows']], h5_dset[h5_dset.attrs['odd_rows']]]
for exp, act in zip(expected_data, written_data):
self.assertTrue(np.allclose(exp, act))
os.remove(file_path)
def test_write_illegal_reg_ref_not_slice_objs(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(5, 7)
h5_dset = writer._create_simple_dset(h5_f, VirtualDataset('test', data))
self.assertIsInstance(h5_dset, h5py.Dataset)
attrs = {'labels': {'even_rows': (slice(0, None, 2), 15),
'odd_rows': (slice(1, None, 2), 'hello')}}
with self.assertRaises(TypeError):
writer._write_dset_attributes(h5_dset, attrs.copy())
os.remove(file_path)
def test_write_simple_atts_reg_ref_to_dset(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(5, 7)
h5_dset = writer._create_simple_dset(h5_f, VirtualDataset('test', data))
self.assertIsInstance(h5_dset, h5py.Dataset)
attrs = {'att_1': 'string_val',
'att_2': 1.2345,
'att_3': [1, 2, 3, 4],
'att_4': ['str_1', 'str_2', 'str_3'],
'labels': {'even_rows': (slice(0, None, 2), slice(None)),
'odd_rows': (slice(1, None, 2), slice(None))}
}
writer._write_dset_attributes(h5_dset, attrs.copy())
reg_ref = attrs.pop('labels')
self.assertEqual(len(h5_dset.attrs), len(attrs) + 1 + len(reg_ref))
for key, expected_val in attrs.items():
self.assertTrue(np.all(get_attr(h5_dset, key) == expected_val))
self.assertTrue(np.all([x in list(reg_ref.keys()) for x in get_attr(h5_dset, 'labels')]))
expected_data = [data[:None:2], data[1:None:2]]
written_data = [h5_dset[h5_dset.attrs['even_rows']], h5_dset[h5_dset.attrs['odd_rows']]]
for exp, act in zip(expected_data, written_data):
self.assertTrue(np.allclose(exp, act))
os.remove(file_path)
def test_write_invalid_input(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
with self.assertRaises(TypeError):
_ = writer.write(np.arange(5))
def test_write_dset_under_root(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
data = np.random.rand(5, 7)
attrs = {'att_1': 'string_val',
'att_2': 1.2345,
'att_3': [1, 2, 3, 4],
'att_4': ['str_1', 'str_2', 'str_3'],
'labels': {'even_rows': (slice(0, None, 2), slice(None)),
'odd_rows': (slice(1, None, 2), slice(None))}
}
micro_dset = VirtualDataset('test', data)
micro_dset.attrs = attrs.copy()
[h5_dset] = writer.write(micro_dset)
self.assertIsInstance(h5_dset, h5py.Dataset)
reg_ref = attrs.pop('labels')
self.assertEqual(len(h5_dset.attrs), len(attrs) + 1 + len(reg_ref))
for key, expected_val in attrs.items():
self.assertTrue(np.all(get_attr(h5_dset, key) == expected_val))
self.assertTrue(np.all([x in list(reg_ref.keys()) for x in get_attr(h5_dset, 'labels')]))
expected_data = [data[:None:2], data[1:None:2]]
written_data = [h5_dset[h5_dset.attrs['even_rows']], h5_dset[h5_dset.attrs['odd_rows']]]
for exp, act in zip(expected_data, written_data):
self.assertTrue(np.allclose(exp, act))
os.remove(file_path)
def test_write_dset_under_existing_group(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
h5_g = writer._create_group(h5_f, VirtualGroup('test_group'))
self.assertIsInstance(h5_g, h5py.Group)
data = np.random.rand(5, 7)
attrs = {'att_1': 'string_val',
'att_2': 1.2345,
'att_3': [1, 2, 3, 4],
'att_4': ['str_1', 'str_2', 'str_3'],
'labels': {'even_rows': (slice(0, None, 2), slice(None)),
'odd_rows': (slice(1, None, 2), slice(None))}
}
micro_dset = VirtualDataset('test', data, parent='/test_group')
micro_dset.attrs = attrs.copy()
[h5_dset] = writer.write(micro_dset)
self.assertIsInstance(h5_dset, h5py.Dataset)
self.assertEqual(h5_dset.parent, h5_g)
reg_ref = attrs.pop('labels')
self.assertEqual(len(h5_dset.attrs), len(attrs) + 1 + len(reg_ref))
for key, expected_val in attrs.items():
self.assertTrue(np.all(get_attr(h5_dset, key) == expected_val))
self.assertTrue(np.all([x in list(reg_ref.keys()) for x in get_attr(h5_dset, 'labels')]))
expected_data = [data[:None:2], data[1:None:2]]
written_data = [h5_dset[h5_dset.attrs['even_rows']], h5_dset[h5_dset.attrs['odd_rows']]]
for exp, act in zip(expected_data, written_data):
self.assertTrue(np.allclose(exp, act))
os.remove(file_path)
def test_write_dset_under_invalid_group(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
with self.assertRaises(KeyError):
_ = writer.write(VirtualDataset('test', np.random.rand(5, 7), parent='/does_not_exist'))
os.remove(file_path)
def test_write_root(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
attrs = {'att_1': 'string_val',
'att_2': 1.2345,
'att_3': [1, 2, 3, 4],
'att_4': ['str_1', 'str_2', 'str_3']}
micro_group = VirtualGroup('')
micro_group.attrs = attrs
writer = HDFwriter(h5_f)
[ret_val] = writer.write(micro_group)
self.assertIsInstance(ret_val, h5py.File)
self.assertEqual(h5_f, ret_val)
for key, expected_val in attrs.items():
self.assertTrue(np.all(get_attr(h5_f, key) == expected_val))
os.remove(file_path)
def test_write_single_group(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
attrs = {'att_1': 'string_val',
'att_2': 1.2345,
'att_3': [1, 2, 3, 4],
'att_4': ['str_1', 'str_2', 'str_3']}
micro_group = VirtualGroup('Test_')
micro_group.attrs = attrs
writer = HDFwriter(h5_f)
[h5_group] = writer.write(micro_group)
for key, expected_val in attrs.items():
self.assertTrue(np.all(get_attr(h5_group, key) == expected_val))
os.remove(file_path)
def test_group_indexing_sequential(self):
file_path = 'test.h5'
self.__delete_existing_file(file_path)
with h5py.File(file_path) as h5_f:
writer = HDFwriter(h5_f)
micro_group_0 = VirtualGroup('Test_', attrs={'att_1': 'string_val', 'att_2': 1.2345})
[h5_group_0] = writer.write(micro_group_0)
_ = writer.write(VirtualGroup('blah'))
self.assertIsInstance(h5_group_0, h5py.Group)
self.assertEqual(h5_group_0.name, '/Test_000')
for key, expected_val in micro_group_0.attrs.items():
self.assertTrue(np.all(get_attr(h5_group_0, key) == expected_val))
micro_group_1 = VirtualGroup('Test_', attrs={'att_3': [1, 2, 3, 4], 'att_4': ['str_1', 'str_2', 'str_3']})
[h5_group_1] = writer.write(micro_group_1)
self.assertIsInstance(h5_group_1, h5py.Group)
self.assertEqual(h5_group_1.name, '/Test_001')
for key, expected_val in micro_group_1.attrs.items():
self.assertTrue(np.all(get_attr(h5_group_1, key) == expected_val))
os.remove(file_path)
def test_group_indexing_simultaneous(self):
    """Two 'Test_' groups written in a single tree write are indexed
    Test_000 / Test_001, even with other sibling groups interleaved."""
    file_path = 'test.h5'
    self.__delete_existing_file(file_path)
    with h5py.File(file_path) as h5_f:
        micro_group_0 = VirtualGroup('Test_', attrs = {'att_1': 'string_val', 'att_2': 1.2345})
        micro_group_1 = VirtualGroup('Test_', attrs={'att_3': [1, 2, 3, 4], 'att_4': ['str_1', 'str_2', 'str_3']})
        # The nameless root group writes its children directly at '/'.
        root_group = VirtualGroup('', children=[VirtualGroup('blah'), micro_group_0,
                                                VirtualGroup('meh'), micro_group_1])
        writer = HDFwriter(h5_f)
        h5_refs_list = writer.write(root_group)
        # Look up the written objects by their (auto-indexed) names.
        [h5_group_1] = get_h5_obj_refs(['Test_001'], h5_refs_list)
        [h5_group_0] = get_h5_obj_refs(['Test_000'], h5_refs_list)
        self.assertIsInstance(h5_group_0, h5py.Group)
        self.assertEqual(h5_group_0.name, '/Test_000')
        for key, expected_val in micro_group_0.attrs.items():
            self.assertTrue(np.all(get_attr(h5_group_0, key) == expected_val))
        self.assertIsInstance(h5_group_1, h5py.Group)
        self.assertEqual(h5_group_1.name, '/Test_001')
        for key, expected_val in micro_group_1.attrs.items():
            self.assertTrue(np.all(get_attr(h5_group_1, key) == expected_val))
    os.remove(file_path)
def test_write_simple_tree(self):
    """Write a two-level tree (outer group -> indexed inner group -> dataset,
    plus a second dataset) and verify parentage, plain attributes and
    region references ('labels') on every written object."""
    file_path = 'test.h5'
    self.__delete_existing_file(file_path)
    with h5py.File(file_path) as h5_f:
        inner_dset_data = np.random.rand(5, 7)
        # 'labels' maps region-reference names to (row, col) slice pairs.
        inner_dset_attrs = {'att_1': 'string_val',
                            'att_2': 1.2345,
                            'att_3': [1, 2, 3, 4],
                            'att_4': ['str_1', 'str_2', 'str_3'],
                            'labels': {'even_rows': (slice(0, None, 2), slice(None)),
                                       'odd_rows': (slice(1, None, 2), slice(None))}
                            }
        inner_dset = VirtualDataset('inner_dset', inner_dset_data)
        # .copy() so the later .pop('labels') does not mutate the dataset's attrs.
        inner_dset.attrs = inner_dset_attrs.copy()
        attrs_inner_grp = {'att_1': 'string_val',
                           'att_2': 1.2345,
                           'att_3': [1, 2, 3, 4],
                           'att_4': ['str_1', 'str_2', 'str_3']}
        inner_group = VirtualGroup('indexed_inner_group_')
        inner_group.attrs = attrs_inner_grp
        inner_group.add_children(inner_dset)
        outer_dset_data = np.random.rand(5, 7)
        outer_dset_attrs = {'att_1': 'string_val',
                            'att_2': 1.2345,
                            'att_3': [1, 2, 3, 4],
                            'att_4': ['str_1', 'str_2', 'str_3'],
                            'labels': {'even_rows': (slice(0, None, 2), slice(None)),
                                       'odd_rows': (slice(1, None, 2), slice(None))}
                            }
        outer_dset = VirtualDataset('test', outer_dset_data, parent='/test_group')
        outer_dset.attrs = outer_dset_attrs.copy()
        attrs_outer_grp = {'att_1': 'string_val',
                           'att_2': 1.2345,
                           'att_3': [1, 2, 3, 4],
                           'att_4': ['str_1', 'str_2', 'str_3']}
        outer_group = VirtualGroup('unindexed_outer_group')
        outer_group.attrs = attrs_outer_grp
        outer_group.add_children([inner_group, outer_dset])
        writer = HDFwriter(h5_f)
        h5_refs_list = writer.write(outer_group)
        # I don't know of a more elegant way to do this:
        [h5_outer_dset] = get_h5_obj_refs([outer_dset.name], h5_refs_list)
        [h5_inner_dset] = get_h5_obj_refs([inner_dset.name], h5_refs_list)
        [h5_outer_group] = get_h5_obj_refs([outer_group.name], h5_refs_list)
        [h5_inner_group] = get_h5_obj_refs(['indexed_inner_group_000'], h5_refs_list)
        self.assertIsInstance(h5_outer_dset, h5py.Dataset)
        self.assertIsInstance(h5_inner_dset, h5py.Dataset)
        self.assertIsInstance(h5_outer_group, h5py.Group)
        self.assertIsInstance(h5_inner_group, h5py.Group)
        # check assertions for the inner dataset first
        self.assertEqual(h5_inner_dset.parent, h5_inner_group)
        reg_ref = inner_dset_attrs.pop('labels')
        # Expected attr count: plain attrs + the 'labels' attr itself
        # + one reference attr per region reference.
        self.assertEqual(len(h5_inner_dset.attrs), len(inner_dset_attrs) + 1 + len(reg_ref))
        for key, expected_val in inner_dset_attrs.items():
            self.assertTrue(np.all(get_attr(h5_inner_dset, key) == expected_val))
        self.assertTrue(np.all([x in list(reg_ref.keys()) for x in get_attr(h5_inner_dset, 'labels')]))
        # Region references must select exactly the even / odd rows.
        expected_data = [inner_dset_data[:None:2], inner_dset_data[1:None:2]]
        written_data = [h5_inner_dset[h5_inner_dset.attrs['even_rows']], h5_inner_dset[h5_inner_dset.attrs['odd_rows']]]
        for exp, act in zip(expected_data, written_data):
            self.assertTrue(np.allclose(exp, act))
        # check assertions for the inner data group next:
        self.assertEqual(h5_inner_group.parent, h5_outer_group)
        for key, expected_val in attrs_inner_grp.items():
            self.assertTrue(np.all(get_attr(h5_inner_group, key) == expected_val))
        # check the outer dataset next:
        self.assertEqual(h5_outer_dset.parent, h5_outer_group)
        reg_ref = outer_dset_attrs.pop('labels')
        self.assertEqual(len(h5_outer_dset.attrs), len(outer_dset_attrs) + 1 + len(reg_ref))
        for key, expected_val in outer_dset_attrs.items():
            self.assertTrue(np.all(get_attr(h5_outer_dset, key) == expected_val))
        self.assertTrue(np.all([x in list(reg_ref.keys()) for x in get_attr(h5_outer_dset, 'labels')]))
        expected_data = [outer_dset_data[:None:2], outer_dset_data[1:None:2]]
        written_data = [h5_outer_dset[h5_outer_dset.attrs['even_rows']],
                        h5_outer_dset[h5_outer_dset.attrs['odd_rows']]]
        for exp, act in zip(expected_data, written_data):
            self.assertTrue(np.allclose(exp, act))
        # Finally check the outer group:
        self.assertEqual(h5_outer_group.parent, h5_f)
        for key, expected_val in attrs_outer_grp.items():
            self.assertTrue(np.all(get_attr(h5_outer_group, key) == expected_val))
    os.remove(file_path)
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| 36,224 | 12,865 |
from paprika.repositories.FileRepository import FileRepository
from paprika.repositories.ProcessPropertyRepository import ProcessPropertyRepository
from paprika.repositories.ProcessRepository import ProcessRepository
from paprika.system.logger.Logger import Logger
from paprika.actions.Actionable import Actionable
class Pipe(Actionable):
    """Actionable that resolves the file referenced by a process action and
    logs whether that file is currently locked."""

    def __init__(self):
        Actionable.__init__(self)

    def execute(self, connector, process_action):
        # process_action carries at least 'job_name' and 'pcs_id'.
        job_name = process_action['job_name']
        logger = Logger(connector, self)
        file_repository = FileRepository(connector)
        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        # retrieve the file properties
        process = process_repository.find_by_id(process_action['pcs_id'])
        file_id = process_property_repository.get_property(process, 'file_id')
        file = file_repository.find_by_id(file_id)
        filename = file['filename']
        locked = file_repository.locked(file)
        if locked:
            logger.info(job_name, 'file: ' + filename + " locked ")
            return process_action
        else:
            logger.info(job_name, 'file: ' + filename + " not locked ")
            logger.info(job_name, filename + " state: " + file['state'])
            # NOTE(review): this branch falls through and implicitly returns
            # None, unlike the locked branch which returns process_action —
            # confirm callers tolerate a None result here.
| 1,332 | 357 |
from optconstruct import OptionAbstract
from optconstruct.types import Toggle, Prefixed, KWOption, ListOption
from messaging_components.clients.external.command.options.client_options import ControlOptionsCommon, \
ControlOptionsSenderReceiver, ControlOptionsReceiver, ConnectionOptionsCommon
"""
Specialized options for external Java client commands (cli-qpid.jar).
"""
class JavaControlOptionsCommon(ControlOptionsCommon):
    """Control options shared by every Java (cli-qpid.jar) client command.

    Adds the ``--broker`` option on top of the common control options and
    maps a missing timeout onto the Java client's "no timeout" sentinel.
    """

    def __init__(self, broker: str='127.0.0.1:5672', count: int=1,
                 timeout: int=None, sync_mode: str=None, close_sleep: int=None):
        super().__init__(count, timeout, sync_mode, close_sleep)
        self.broker = broker
        if timeout is None:
            # The java client expresses "no timeout" as -1.
            self.timeout = -1

    def valid_options(self) -> list:
        base_options = ControlOptionsCommon.valid_options(self)
        return base_options + [Prefixed('broker', '--broker')]
class JavaControlOptionsSenderReceiver(ControlOptionsSenderReceiver, JavaControlOptionsCommon):
    """
    Specialized implementation of control options for Sender and Receiver Java client commands.
    """
    def __init__(self, broker: str='127.0.0.1:5672', address: str='examples', count: int=1,
                 timeout: int=None, sync_mode: str=None, close_sleep: int=None,
                 duration: int=None, duration_mode: str=None, capacity: int=None):
        # Each base is initialized explicitly (not via super()) because the
        # two bases take disjoint argument sets in this diamond hierarchy.
        ControlOptionsSenderReceiver.__init__(self, duration=duration, duration_mode=duration_mode, capacity=capacity)
        JavaControlOptionsCommon.__init__(self, broker=broker, count=count, timeout=timeout,
                                          sync_mode=sync_mode, close_sleep=close_sleep)
        self.address = address  # type: str

    def valid_options(self) -> list:
        # Common Java options plus the sender/receiver --address option.
        return JavaControlOptionsCommon.valid_options(self) + [
            Prefixed('address', '--address')
        ]
class JavaControlOptionsReceiver(ControlOptionsReceiver, JavaControlOptionsSenderReceiver):
    """
    Specialized implementation of control options for Receiver Java client command.
    """
    def __init__(self, broker: str='127.0.0.1:5672', address: str='examples', count: int=1,
                 timeout: int=None, sync_mode: str=None, duration: int=None,
                 duration_mode: str=None, capacity: int=None, dynamic: bool=False):
        # Explicit two-base initialization, mirroring the parent classes.
        ControlOptionsReceiver.__init__(self, dynamic=dynamic)
        JavaControlOptionsSenderReceiver.__init__(self, broker=broker, address=address, count=count,
                                                  timeout=timeout, sync_mode=sync_mode, duration=duration,
                                                  duration_mode=duration_mode, capacity=capacity)

    def valid_options(self) -> list:
        # No receiver-specific extra CLI options beyond the sender/receiver set.
        return JavaControlOptionsSenderReceiver.valid_options(self)
class JavaConnectionOptionsCommon(ConnectionOptionsCommon):
    """Connection options specific to the Java client: authentication,
    credentials and SSL keystore/trust settings, each exposed as a
    ``--conn-*`` CLI option."""

    def __init__(self, conn_auth_mechanisms: str=None, conn_username: str=None,
                 conn_password: str = None, conn_ssl_keystore_location: str=None,
                 conn_ssl_keystore_password: str=None, conn_ssl_key_alias: str=None,
                 conn_ssl_trust_all: str=None, conn_ssl_verify_host: str=None,
                 urls: str=None, reconnect: bool=None,
                 reconnect_interval: int=None, reconnect_limit: int=None, reconnect_timeout: int=None,
                 heartbeat: int=None, max_frame_size: int=None):
        # Generic connection behaviour is handled by the base class.
        ConnectionOptionsCommon.__init__(self, urls=urls, reconnect=reconnect, reconnect_interval=reconnect_interval,
                                         reconnect_limit=reconnect_limit, reconnect_timeout=reconnect_timeout,
                                         heartbeat=heartbeat, max_frame_size=max_frame_size)
        # Java-client-only settings; None means "do not emit the option".
        self.conn_auth_mechanisms = conn_auth_mechanisms
        self.conn_username = conn_username
        self.conn_password = conn_password
        self.conn_ssl_keystore_location = conn_ssl_keystore_location
        self.conn_ssl_keystore_password = conn_ssl_keystore_password
        self.conn_ssl_key_alias = conn_ssl_key_alias
        self.conn_ssl_trust_all = conn_ssl_trust_all
        self.conn_ssl_verify_host = conn_ssl_verify_host

    def valid_options(self) -> list:
        return ConnectionOptionsCommon.valid_options(self) + [
            Prefixed('conn-auth-mechanisms', '--conn-auth-mechanisms'),
            Prefixed('conn-username', '--conn-username'),
            Prefixed('conn-password', '--conn-password'),
            Prefixed('conn-ssl-keystore-location', '--conn-ssl-keystore-location'),
            Prefixed('conn-ssl-keystore-password', '--conn-ssl-keystore-password'),
            Prefixed('conn-ssl-key-alias', '--conn-ssl-key-alias'),
            Prefixed('conn-ssl-trust-all', '--conn-ssl-trust-all'),
            Prefixed('conn-ssl-verify-host', '--conn-ssl-verify-host')
        ]
| 4,987 | 1,403 |
from typing import Dict, Optional
import logging
import numpy as np
from .orthogonal import OrthogonalProcrustesModel
class GeneralizedProcrustesAnalysis:
    """https://en.wikipedia.org/wiki/Generalized_Procrustes_analysis"""

    def __init__(self, *, max_iter: int = 10):
        # All fitted state stays None/empty until fit() completes.
        self.dim: Optional[int] = None
        self.procrustes_distance: Optional[float] = None
        self.n_samples: Optional[int] = None
        self.orthogonal_models: Dict[int, OrthogonalProcrustesModel] = dict()  # map embedding_id --> model
        self.max_iter: int = max_iter
        assert max_iter >= 2

    def fit(self, aligned_embedding_samples: Dict[int, np.ndarray]) -> None:
        """Iteratively superimpose all embeddings onto a shared mean shape.

        aligned_embedding_samples maps embedding_id -> (n_samples, dim_e)
        arrays whose rows are aligned across embeddings (same sample order).
        """
        assert self.max_iter >= 2  # otherwise one of the models will be left undefined
        assert len(set(samples.shape[0] for samples in aligned_embedding_samples.values())) == 1, "alignment error"
        dims: Dict[int, int] = {e: samples.shape[1] for e, samples in aligned_embedding_samples.items()}
        # take max dim manifold as reference
        reference_embedding_id: Optional[int] = max(dims.keys(), key=dims.get)  # this will become None after iter 0
        reference: np.ndarray = aligned_embedding_samples[reference_embedding_id]
        dim: int = reference.shape[1]
        # superimpose all other instances to current reference shape
        models: Dict[int, OrthogonalProcrustesModel] = dict()
        procrustes_distance: Optional[float] = None
        i: int = -1
        for i in range(self.max_iter):
            mean = np.zeros_like(reference)
            for embedding_id, src_dim in dims.items():
                x = aligned_embedding_samples[embedding_id]
                if embedding_id == reference_embedding_id:
                    # Only possible in iteration 0 (reference id is cleared below).
                    logging.debug(f'Using embedding #{embedding_id} as reference with {dim=} ...')
                    # R would be the identity matrix
                    mean += x
                else:
                    logging.debug(f'Fitting orthogonal procrustes for embedding #{embedding_id} {src_dim=} ...')
                    # Re-fit (and overwrite) the per-embedding model against
                    # the current reference on every iteration.
                    model = OrthogonalProcrustesModel(src_dim, dim)
                    models[embedding_id] = model
                    model.fit(x, reference)
                    logging.debug(f'Fitted with {model.scale/reference.shape[0]=:.2%} ...')
                    y = model.predict(x)
                    mean += y
            # compute the mean shape of the current set of superimposed shapes
            mean /= len(aligned_embedding_samples)
            old_procrustes_distance = procrustes_distance
            # RMS distance between successive reference shapes.
            procrustes_distance = np.linalg.norm(mean - reference) / np.sqrt(mean.shape[0])
            # take as new reference the average shape along the axis of the manifolds and repeat until convergence
            reference_embedding_id = None
            reference = mean
            logging.debug(f'Done GPA iteration #{i} ({procrustes_distance=:.2%}) ...')
            # Stop once the distance shrinks by less than 1% per iteration.
            if old_procrustes_distance is not None and procrustes_distance / old_procrustes_distance >= .99:
                break
        assert procrustes_distance is not None
        logging.debug(f'GPA fitted ({procrustes_distance=:.2%} '
                      f'{"reached max_iter" if i >= self.max_iter-1 else "converged"}).')
        # store results only at end, to avoid inconsistent state
        self.dim = dim
        self.procrustes_distance = float(procrustes_distance)
        self.n_samples = reference.shape[0]
        self.orthogonal_models = models

    def predict(self, src_embedding_id: int, dest_embedding_id: int, x: np.ndarray):
        """Map samples from one embedding space to another via the shared
        reference space: src -> reference -> dest."""
        a = self.orthogonal_models[src_embedding_id]
        assert a.src_dim <= self.dim
        b = self.orthogonal_models[dest_embedding_id]
        assert b.src_dim <= self.dim
        assert a.dest_dim == b.dest_dim == self.dim
        assert x.shape[1] == a.src_dim
        # Forward-transform into the reference space ...
        y = a.transform(x)
        assert y.shape[0] == x.shape[0]
        assert y.shape[1] == a.dest_dim == b.dest_dim == self.dim
        # ... then invert the destination model to land in its source space.
        z = b.inverse_transform(y)
        assert z.shape[0] == x.shape[0]
        assert z.shape[1] == b.src_dim
        return z
| 4,113 | 1,231 |
__version__ = "2.1.0"
from . import lagrangian_walker
from . import parallel_routing
from . import particle_track
from . import routines
| 138 | 46 |
#!/usr/bin/env python
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, "core"))
import re
import csv
from utils import dump_all_stats, filter_repository
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cmudbac.settings")
import django
django.setup()
from library.models import *
TABLES_DIRECTORY = 'tables'
def table_stats(directory = '.'):
    """Collect per-project-type statistic counts (one list of counts per
    statistic description) and dump them into `directory`."""
    stats = {}
    for repo in Repository.objects.exclude(latest_successful_attempt = None):
        if filter_repository(repo):
            continue
        statistics = Statistic.objects.filter(attempt = repo.latest_successful_attempt)
        if len(statistics) == 0:
            continue
        for s in statistics:
            # num_transactions is deliberately excluded from the dump.
            if s.description == 'num_transactions':
                continue
            if s.description not in stats:
                stats[s.description] = {}
            project_type_name = repo.project_type.name
            if project_type_name not in stats[s.description]:
                stats[s.description][project_type_name] = []
            stats[s.description][project_type_name].append(s.count)
    dump_all_stats(directory, stats)
def column_stats(directory = '.'):
    """Aggregate per-table column statistics (nullability, type, extras,
    column counts) per project type and dump them into `directory`."""
    stats = {'column_nullable': {}, 'column_type': {}, 'column_extra': {}, 'column_num': {}}
    for repo in Repository.objects.exclude(latest_successful_attempt = None):
        if filter_repository(repo):
            continue
        column_informations = Information.objects.filter(attempt = repo.latest_successful_attempt).filter(name = 'columns')
        constraint_informations = Information.objects.filter(attempt = repo.latest_successful_attempt).filter(name = 'constraints')
        num_table_statistics = Statistic.objects.filter(attempt = repo.latest_successful_attempt).filter(description = 'num_tables')
        if len(column_informations) > 0 and len(constraint_informations) > 0 and len(num_table_statistics) > 0:
            column_information = column_informations[0]
            constraint_information = constraint_informations[0]
            # NOTE(review): num_tables is computed but never used below.
            num_tables = num_table_statistics[0].count
            project_type_name = repo.project_type.name
            if project_type_name not in stats['column_nullable']:
                stats['column_nullable'][project_type_name] = {}
            if project_type_name not in stats['column_type']:
                stats['column_type'][project_type_name] = {}
            if project_type_name not in stats['column_extra']:
                stats['column_extra'][project_type_name] = {}
            if project_type_name not in stats['column_num']:
                stats['column_num'][project_type_name] = []
            # NOTE(review): these patterns should be raw strings (r'...') —
            # '\(' is an invalid escape in a normal literal — and `regex` is
            # left unbound (or stale from a previous iteration) for any
            # database other than PostgreSQL/MySQL. Confirm and fix.
            if repo.latest_successful_attempt.database.name == 'PostgreSQL':
                regex = '(\(.*?\))[,\]]'
            elif repo.latest_successful_attempt.database.name == 'MySQL':
                regex = '(\(.*?\))[,\)]'
            # NOTE(review): this local dict shadows the module-level
            # table_stats() function within this scope.
            table_stats = {'column_nullable': {}, 'column_type': {}, 'column_extra': {}, 'column_num': {}}
            # Each match is one column tuple serialised as "(...,...)".
            for column in re.findall(regex, column_information.description):
                cells = column.split(',')
                table = str(cells[2]).replace("'", "").strip()
                nullable = str(cells[6]).replace("'", "").strip()
                if table not in table_stats['column_nullable']:
                    table_stats['column_nullable'][table] = {}
                table_stats['column_nullable'][table][nullable] = table_stats['column_nullable'][table].get(nullable, 0) + 1
                _type = str(cells[7]).replace("'", "").strip()
                if table not in table_stats['column_type']:
                    table_stats['column_type'][table] = {}
                table_stats['column_type'][table][_type] = table_stats['column_type'][table].get(_type, 0) + 1
                extra = str(cells[16]).replace("'", "").strip()
                if extra:
                    if table not in table_stats['column_extra']:
                        table_stats['column_extra'][table] = {}
                    table_stats['column_extra'][table][extra] = table_stats['column_extra'][table].get(extra, 0) + 1
                if table not in table_stats['column_num']:
                    table_stats['column_num'][table] = 0
                table_stats['column_num'][table] += 1
            # Constraints contribute to the per-table 'extra' counters; the
            # cell layout differs between PostgreSQL and MySQL dumps.
            for column in re.findall(regex, constraint_information.description):
                cells = column.split(',')
                if repo.latest_successful_attempt.database.name == 'PostgreSQL':
                    constraint_type = str(cells[6]).replace("'", "").strip()
                elif repo.latest_successful_attempt.database.name == 'MySQL':
                    constraint_type = str(cells[5])[:-1].replace("'", "").strip()
                if repo.latest_successful_attempt.database.name == 'PostgreSQL':
                    table = str(cells[5]).replace("'", "").strip()
                elif repo.latest_successful_attempt.database.name == 'MySQL':
                    table = str(cells[4]).replace("'", "").strip()
                if table not in table_stats['column_extra']:
                    table_stats['column_extra'][table] = {}
                table_stats['column_extra'][table][constraint_type] = table_stats['column_extra'][table].get(constraint_type, 0) + 1
            # Fold the per-table counters into the global per-project stats.
            for stats_type in table_stats:
                for table in table_stats[stats_type]:
                    if isinstance(table_stats[stats_type][table], dict):
                        for second_type in table_stats[stats_type][table]:
                            if second_type not in stats[stats_type][project_type_name]:
                                stats[stats_type][project_type_name][second_type] = []
                            stats[stats_type][project_type_name][second_type].append(table_stats[stats_type][table][second_type])
                    else:
                        stats[stats_type][project_type_name].append(table_stats[stats_type][table])
    dump_all_stats(directory, stats)
def index_stats(directory = TABLES_DIRECTORY):
    """Aggregate index-type counts per project type and dump them.

    For every repository with a successful attempt, parse the serialised
    'indexes' information blob and count occurrences of each index type,
    grouped by the repository's project type.

    Parameters
    ----------
    directory : str
        Output directory handed to dump_all_stats.
    """
    stats = {'index_type': {}}
    for repo in Repository.objects.exclude(latest_successful_attempt = None):
        if filter_repository(repo):
            continue
        index_informations = Information.objects.filter(attempt = repo.latest_successful_attempt).filter(name = 'indexes')
        if len(index_informations) == 0:
            continue
        index_information = index_informations[0]
        database_name = repo.latest_successful_attempt.database.name
        # Raw strings: '\(' in a normal literal is an invalid escape
        # (DeprecationWarning since Python 3.6).
        if database_name == 'PostgreSQL':
            regex = r'(\(.*?\))[,\]]'
        elif database_name == 'MySQL':
            regex = r'(\(.*?\))[,\)]'
        else:
            # Fix: previously an unsupported database left `regex` unbound
            # (NameError) or silently reused the previous repo's pattern.
            continue
        project_type_name = repo.project_type.name
        if project_type_name not in stats['index_type']:
            stats['index_type'][project_type_name] = {}
        # Each match is one index tuple serialised as "(...,...)"; cell 13
        # holds the index type in these dumps.
        for column in re.findall(regex, index_information.description):
            cells = column.split(',')
            _type = cells[13].replace("'", "").strip()
            stats['index_type'][project_type_name][_type] = stats['index_type'][project_type_name].get(_type, 0) + 1
    dump_all_stats(directory, stats)
def main():
    """Run all currently-active statistic dumps into TABLES_DIRECTORY."""
    # active
    table_stats(TABLES_DIRECTORY)
    column_stats(TABLES_DIRECTORY)
    index_stats(TABLES_DIRECTORY)
    # working
    # deprecated


if __name__ == '__main__':
    main()
| 7,538 | 2,200 |
# coding=utf-8
"""Drawing 4 shapes with different transformations"""
import glfw
from OpenGL.GL import *
import OpenGL.GL.shaders
import numpy as np
import sys
import os.path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import grafica.basic_shapes as bs
import grafica.easy_shaders as es
import grafica.transformations as tr
import grafica.performance_monitor as pm
__author__ = "Daniel Calderon"
__license__ = "MIT"
# We will use 32 bits data, so an integer has 4 bytes
# 1 byte = 8 bits
SIZE_IN_BYTES = 4
# A class to store the application control
class Controller:
    """Mutable application state shared with the GLFW key callback."""

    def __init__(self):
        # Start with filled polygons; SPACE toggles to wireframe and back.
        self.fillPolygon = True
# we will use the global controller as communication with the callback function
controller = Controller()
# This function will be executed whenever a key is pressed or released
def on_key(window, key, scancode, action, mods):
    """GLFW key callback: toggle polygon fill on SPACE, close on ESC."""
    # Only react to key presses (ignore release/repeat events).
    if action != glfw.PRESS:
        return
    global controller
    if key == glfw.KEY_SPACE:
        controller.fillPolygon = not controller.fillPolygon
    elif key == glfw.KEY_ESCAPE:
        glfw.set_window_should_close(window, True)
    else:
        print('Unknown key')
if __name__ == "__main__":

    # Initialize glfw
    if not glfw.init():
        # Fix: the original called glfw.set_window_should_close(window, ...)
        # here, but `window` does not exist yet — that would raise NameError.
        # Exit with an error status instead.
        sys.exit(1)

    # Creating a glfw window
    width = 600
    height = 600
    title = "Displaying multiple shapes - Modern OpenGL"
    window = glfw.create_window(width, height, title, None, None)
    if not window:
        glfw.terminate()
        # Fix: the original called set_window_should_close(None, ...) after
        # terminating; exit cleanly instead.
        sys.exit(1)

    glfw.make_context_current(window)

    # Connecting the callback function 'on_key' to handle keyboard events
    glfw.set_key_callback(window, on_key)

    # Binding artificial vertex array object for validation
    VAO = glGenVertexArrays(1)
    glBindVertexArray(VAO)

    # Creating our shader program and telling OpenGL to use it
    pipeline = es.SimpleTransformShaderProgram()
    glUseProgram(pipeline.shaderProgram)

    # Setting up the clear screen color
    glClearColor(0.15, 0.15, 0.15, 1.0)

    # Creating shapes on GPU memory
    shapeTriangle = bs.createRainbowTriangle()
    gpuTriangle = es.GPUShape().initBuffers()
    pipeline.setupVAO(gpuTriangle)
    gpuTriangle.fillBuffers(shapeTriangle.vertices, shapeTriangle.indices, GL_STATIC_DRAW)

    shapeQuad = bs.createRainbowQuad()
    gpuQuad = es.GPUShape().initBuffers()
    pipeline.setupVAO(gpuQuad)
    gpuQuad.fillBuffers(shapeQuad.vertices, shapeQuad.indices, GL_STATIC_DRAW)

    perfMonitor = pm.PerformanceMonitor(glfw.get_time(), 0.5)

    # glfw will swap buffers as soon as possible
    glfw.swap_interval(0)

    # Application loop
    while not glfw.window_should_close(window):
        # Measuring performance
        perfMonitor.update(glfw.get_time())
        glfw.set_window_title(window, title + str(perfMonitor))

        # Using GLFW to check for input events
        glfw.poll_events()

        # Filling or not the shapes depending on the controller state
        if (controller.fillPolygon):
            glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
        else:
            glPolygonMode(GL_FRONT_AND_BACK, GL_LINE)

        # Clearing the screen
        glClear(GL_COLOR_BUFFER_BIT)

        # Using the time as the theta parameter
        theta = glfw.get_time()

        # Triangle: spinning at double speed in the upper-right quadrant
        triangleTransform = tr.matmul([
            tr.translate(0.5, 0.5, 0),
            tr.rotationZ(2 * theta),
            tr.uniformScale(0.5)
        ])

        # Updating the transform attribute
        glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "transform"), 1, GL_TRUE, triangleTransform)

        # Drawing function
        pipeline.drawCall(gpuTriangle)

        # Another instance of the triangle: pulsating scale, upper-left
        triangleTransform2 = tr.matmul([
            tr.translate(-0.5, 0.5, 0),
            tr.scale(
                0.5 + 0.2 * np.cos(1.5 * theta),
                0.5 + 0.2 * np.sin(2 * theta),
                0)
        ])
        glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "transform"), 1, GL_TRUE, triangleTransform2)
        pipeline.drawCall(gpuTriangle)

        # Quad: counter-rotating in the lower-left quadrant
        quadTransform = tr.matmul([
            tr.translate(-0.5, -0.5, 0),
            tr.rotationZ(-theta),
            tr.uniformScale(0.7)
        ])
        glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "transform"), 1, GL_TRUE, quadTransform)
        pipeline.drawCall(gpuQuad)

        # Another instance of the Quad: oscillating shear, lower-right
        quadTransform2 = tr.matmul([
            tr.translate(0.5, -0.5, 0),
            tr.shearing(0.3 * np.cos(theta), 0, 0, 0, 0, 0),
            tr.uniformScale(0.7)
        ])
        glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "transform"), 1, GL_TRUE, quadTransform2)
        pipeline.drawCall(gpuQuad)

        # Once the drawing is rendered, buffers are swap so an uncomplete drawing is never seen.
        glfw.swap_buffers(window)

    # freeing GPU memory
    gpuTriangle.clear()
    gpuQuad.clear()

    glfw.terminate()
# -*- coding:utf-8 -*-
from bika.lims.interfaces import IAnalysisRequestsFolder, IBatch, IClient
from bika.lims.interfaces import IATWidgetVisibility
from bika.lims.utils import getHiddenAttributesForClass
from Products.CMFCore.utils import getToolByName
from Products.CMFCore.WorkflowCore import WorkflowException
from types import DictType
from zope.interface import implements
_marker = []
class WorkflowAwareWidgetVisibility(object):
    """This adapter allows the schema definition to have different widget visibility
    settings for different workflow states in the primary review_state workflow.
    With this it is possible to write:
    StringField(
        'fieldName',
        widget=StringWidget(
            label=_('field Name'),
            visible = {
                'edit': 'visible', # regular AT uses these and they override
                'view': 'visible', # everything, without 'edit' you cannot edit
                'wf_state': {'edit': 'invisible', 'view': 'visible' },
                'other_state': {'edit': 'visible', 'view': 'invisible'},
            }
    The rules about defaults, "hidden", "visible" and "invisible" are the same
    as those from the default Products.Archetypes.Widget.TypesWidget#isVisible
    """
    implements(IATWidgetVisibility)

    def __init__(self, context):
        self.context = context
        self.sort = 100  # low priority: runs after most other adapters

    def __call__(self, context, mode, field, default):
        """
        """
        state = default if default else 'visible'
        workflow = getToolByName(self.context, 'portal_workflow')
        try:
            review_state = workflow.getInfoFor(self.context, 'review_state')
        except WorkflowException:
            # No workflow state available: keep the incoming default.
            return state
        vis_dic = field.widget.visible
        if type(vis_dic) is not DictType or review_state not in vis_dic:
            return state
        inner_vis_dic = vis_dic.get(review_state, state)
        # NOTE(review): `state = state` below is a no-op (inner_vis_dic can
        # never be _marker here since review_state is in vis_dic) — looks
        # vestigial; confirm before removing.
        if inner_vis_dic is _marker:
            state = state
        if type(inner_vis_dic) is DictType:
            # Per-state dict: look up the visibility for this render mode.
            state = inner_vis_dic.get(mode, state)
            state = state
        elif not inner_vis_dic:
            state = 'invisible'
        elif inner_vis_dic < 0:
            state = 'hidden'
        return state
class SamplingWorkflowWidgetVisibility(object):
    """This will force the 'Sampler' and 'DateSampled' widget default to 'visible'.
    We must check the attribute saved on the sample, not the bika_setup value.
    """
    implements(IATWidgetVisibility)

    def __init__(self, context):
        self.context = context
        self.sort = 100

    def __call__(self, context, mode, field, default):
        # Only these two fields are affected, and only when the context's
        # own sampling-workflow flag is enabled.
        sw_fields = ['Sampler', 'DateSampled']
        state = default if default else 'invisible'
        fieldName = field.getName()
        if fieldName in sw_fields \
                and hasattr(self.context, 'getSamplingWorkflowEnabled') \
                and self.context.getSamplingWorkflowEnabled():
            if mode == 'header_table':
                state = 'prominent'
            elif mode == 'view':
                state = 'visible'
        return state
class ClientFieldWidgetVisibility(object):
    """The Client field is editable by default in ar_add. This adapter
    will force the Client field to be hidden when it should not be set
    by the user.
    """
    implements(IATWidgetVisibility)

    def __init__(self, context):
        self.context = context
        self.sort = 10

    def __call__(self, context, mode, field, default):
        state = default if default else 'hidden'
        fieldName = field.getName()
        if fieldName != 'Client':
            return state
        parent = self.context.aq_parent
        # Inside a Batch that already has a client, or directly inside a
        # Client, the value is implied and must not be user-editable.
        if IBatch.providedBy(parent):
            if parent.getClient():
                return 'hidden'
        if IClient.providedBy(parent):
            return 'hidden'
        return state
class BatchARAdd_BatchFieldWidgetVisibility(object):
    """This will force the 'Batch' field to 'hidden' in ar_add when the parent
    context is a Batch.
    """
    implements(IATWidgetVisibility)

    def __init__(self, context):
        self.context = context
        self.sort = 10

    def __call__(self, context, mode, field, default):
        state = default if default else 'visible'
        fieldName = field.getName()
        # The batch is implied by the container, so hide the selector.
        if fieldName == 'Batch' and context.aq_parent.portal_type == 'Batch':
            return 'hidden'
        return state
class OptionalFieldsWidgetVisibility(object):
    """Remove 'hidden attributes' (fields in registry bika.lims.hiddenattributes).
    fieldName = field.getName()
    """
    implements(IATWidgetVisibility)

    def __init__(self, context):
        self.context = context
        self.sort = 5  # high priority: runs before most other adapters

    def __call__(self, context, mode, field, default):
        state = default if default else 'visible'
        # Registry-driven per-portal-type hidden attribute list.
        hiddenattributes = getHiddenAttributesForClass(context.portal_type)
        if field.getName() in hiddenattributes:
            state = "hidden"
        return state
class HideARPriceFields(object):
    """Hide related fields in ARs when ShowPrices is disabled
    """
    implements(IATWidgetVisibility)

    def __init__(self, context):
        self.context = context
        self.sort = 3

    def __call__(self, context, mode, field, default):
        # Price-related AR fields that only make sense when prices are shown.
        fields = ['InvoiceExclude']
        ShowPrices = context.bika_setup.getShowPrices()
        state = default if default else 'invisible'
        fieldName = field.getName()
        if fieldName in fields and not ShowPrices:
            state = 'invisible'
        return state
class HideClientDiscountFields(object):
    """Hide related fields in ARs when ShowPrices is disabled
    """
    implements(IATWidgetVisibility)

    def __init__(self, context):
        self.context = context
        self.sort = 3

    def __call__(self, context, mode, field, default):
        # Client discount fields are price-dependent as well.
        fields = ['BulkDiscount', 'MemberDiscountApplies']
        ShowPrices = context.bika_setup.getShowPrices()
        state = default if default else 'invisible'
        fieldName = field.getName()
        if fieldName in fields and not ShowPrices:
            state = 'invisible'
        return state
| 6,240 | 1,674 |
class ListNode:
    """Singly linked node holding one (key, value) pair for chaining."""

    def __init__(self, key: int, val: int):
        self.pair = (key, val)
        self.next = None


class MyHashMap:
    """Integer-keyed hash map using separate chaining over 1000 buckets."""

    def __init__(self):
        self.size = 1000
        self.store = [None] * self.size

    def _get_hash(self, key):
        # Simple modulo bucketing.
        return key % self.size

    def put(self, key: int, value: int) -> None:
        """Insert key -> value, overwriting any existing entry for key."""
        bucket = self._get_hash(key)
        node = self.store[bucket]
        if node is None:
            self.store[bucket] = ListNode(key, value)
            return
        while True:
            if node.pair[0] == key:
                node.pair = (key, value)  # overwrite in place
                return
            if node.next is None:
                node.next = ListNode(key, value)  # append at chain tail
                return
            node = node.next

    def get(self, key: int) -> int:
        """Return the value mapped to key, or -1 if absent."""
        node = self.store[self._get_hash(key)]
        while node is not None:
            if node.pair[0] == key:
                return node.pair[1]
            node = node.next
        return -1

    def remove(self, key: int) -> None:
        """Delete the entry for key if present; no-op otherwise."""
        bucket = self._get_hash(key)
        node = self.store[bucket]
        if node is None:
            return
        if node.pair[0] == key:
            self.store[bucket] = node.next  # unlink the chain head
            return
        prev = node
        node = node.next
        while node is not None:
            if node.pair[0] == key:
                prev.next = node.next  # unlink an interior/tail node
                return
            prev, node = node, node.next
| 1,534 | 452 |
"""
pydia: Color.py - 2021.09
================================================
Template code generated from UML diagrams of PyDia
with 'dia2code -t python ../dia2code/PyDiaObjects.dia'
Note: Experimental, will be obsolete if we found a way to use libdia /pydia libraries
Author: SoSie@sos-productions.com
License: LGPL
"""
import _once
_once.imported['Color']=None
class Color:
    """Class Color

    A color either defined by a color string or by a tuple with three
    elements (r, g, b) of type float 0.0 ... 1.0 or range int 0 ... 65535."""

    # Class-level defaults for the colour components, each [0 .. 1.0].
    alpha = None  # alpha color component
    blue = None   # blue color component
    green = None  # green color component
    red = None    # red color component

    def __init__(self):
        # Every instance starts with all components unset.
        for component in ('alpha', 'blue', 'green', 'red'):
            setattr(self, component, None)
# Register the finished class and re-export every recorded symbol into this
# module's globals (dia2code's single-import machinery).
_once.imported['Color']= Color
for name, object in _once.imported.items():
    globals()[name]=object
| 1,128 | 387 |
'''
Shakespeares World Text Extractor
---------------------------------
This module provides a function to extract the `text` data from
annotations made on Shakespeares World and AnnoTate.
'''
import bs4
from collections import OrderedDict
import copy
import numpy as np
import html
import warnings
from .extractor_wrapper import extractor_wrapper
warnings.filterwarnings("ignore", category=UserWarning, module='bs4')
# Transcription markup tags that must survive HTML stripping in clean_text;
# any other tag found in the annotation text gets unwrapped.
tag_whitelist = [
    'sw-ex',
    'sl',
    'brev-y',
    'sw-ins',
    'sw-del',
    'sw-unclear',
    'sw-sup',
    'label',
    'graphic'
]
def clean_text(s):
    '''
    Clean text from Shakespeares World and AnnoTate classification to prepare
    it for aggregation.  Non-ascii characters are dropped, markup is
    stripped (whitelisted transcription tags are kept), and html entities
    are unescaped.

    Parameters
    ----------
    s : string
        A string to be cleaned

    Returns
    -------
    clean_s : string
        The string with all unicode, `xml`, and `html` removed
    '''
    # Drop every non-ascii character up front.
    cleaned = s.encode('ascii', 'ignore').decode('ascii')
    if '<xml>' in cleaned:
        # The user copy-and-pasted from Microsoft Office; these
        # classifications are a mess, so strip every tag outright.
        cleaned = bs4.BeautifulSoup(cleaned, 'lxml').get_text().replace('\n', '')
    elif '<' in cleaned:
        # Remove html tags (these should never have been in the text to
        # begin with), keeping only non-empty whitelisted tags.
        soup = bs4.BeautifulSoup(cleaned, 'html.parser')
        for tag in soup.findAll():
            if (not tag.text.strip()) or (tag.name not in tag_whitelist):
                tag.unwrap()
        cleaned = str(soup)
    # Unescape html entities and replace non-breaking spaces (\xa0).
    return html.unescape(cleaned).replace('\xa0', ' ')
@extractor_wrapper(gold_standard=True)
def sw_extractor(classification, gold_standard=False, **kwargs):
    '''Extract text annotations from Shakespeares World and AnnoTate.

    Parameters
    ----------
    classification : dict
        A dictionary containing an `annotations` key that is a list of
        panoptes annotations

    Returns
    -------
    extraction : dict
        A dictionary with one key for each `frame`. The value for each frame
        is a dict with `text`, a list-of-lists of transcribed words, `points`, a
        dict with the list-of-lists of `x` and `y` positions of each space between words,
        and `slope`, a list of the slopes (in deg) of each line drawn.
        For `points` and `text` there is one inner list for each annotation made
        on the frame.
    '''
    frame_key = 'frame0'
    extract = OrderedDict([
        (frame_key, OrderedDict([
            ('points', OrderedDict([('x', []), ('y', [])])),
            ('text', []),
            ('slope', []),
            ('gold_standard', gold_standard)
        ]))
    ])
    annotations = classification['annotations']
    if len(annotations) > 0:
        value_list = annotations[0]['value']
        if isinstance(value_list, list):
            for value in value_list:
                # Skip entries missing any of the required keys.
                if not (('startPoint' in value)
                        and ('endPoint' in value)
                        and ('text' in value)):
                    continue
                x = [value['startPoint']['x'], value['endPoint']['x']]
                y = [value['startPoint']['y'], value['endPoint']['y']]
                # Skip lines with undefined coordinates.
                if (None in x) or (None in y):
                    continue
                frame = extract[frame_key]
                frame['text'].append([clean_text(value['text'])])
                frame['points']['x'].append(x)
                frame['points']['y'].append(y)
                # Line angle in degrees from the start/end point deltas.
                frame['slope'].append(
                    np.rad2deg(np.arctan2(y[-1] - y[0], x[-1] - x[0])))
    return extract
| 3,777 | 1,115 |
"""Models for iemre API."""
# pylint: disable=no-name-in-module,too-few-public-methods
from typing import List
from pydantic import BaseModel, Field
class HourlyItem(BaseModel):
    """Data Schema: one hour of IEMRE output for a single station."""
    valid_utc: str = Field(..., title="UTC Timestamp")
    valid_local: str = Field(..., title="Local Station Timestamp")
    skyc_percent: float = Field(..., title="Sky Cloud Coverage [%]")
    air_temp_f: float = Field(..., title="Air Temperature @2m [F]")
    dew_point_f: float = Field(..., title="Air Dew Point @2m [F]")
    uwnd_mps: float = Field(..., title="Wind Speed u-component @10m [ms-1]")
    vwnd_mps: float = Field(..., title="Wind Speed v-component @10m [ms-1]")
    hourly_precip_in: float = Field(..., title="Hourly Precip [inch]")
class HourlySchema(BaseModel):
    """The schema used by this service: a list of hourly records."""
    data: List[HourlyItem]
| 860 | 313 |
import numpy as np
from openmdao.api import Group, IndepVarComp, ParallelGroup, ScipyGMRES, NLGaussSeidel
from openmdao.core.mpi_wrap import MPI
if MPI:
from openmdao.api import PetscKSP
from wakeexchange.floris import floris_wrapper, add_floris_params_IndepVarComps
from wakeexchange.gauss import add_gauss_params_IndepVarComps
from GeneralWindFarmComponents import WindFrame, AdjustCtCpYaw, MUX, WindFarmAEP, DeMUX, \
CPCT_Interpolate_Gradients_Smooth, WindDirectionPower, add_gen_params_IdepVarComps, \
CPCT_Interpolate_Gradients
class RotorSolveGroup(Group):
    """Converges the coupled rotor/wake sub-problem for one wind direction.

    Couples the smooth Cp/Ct interpolation component with the wake model
    through an iterative nonlinear Gauss-Seidel solve so rotor-effective
    wind speeds and rotor coefficients become mutually consistent.
    """

    def __init__(self, nTurbines, direction_id=0, datasize=0, differentiable=True,
                 use_rotor_components=False, nSamples=0, wake_model=floris_wrapper,
                 wake_model_options=None):
        """
        :param nTurbines: number of turbines in the plant
        :param direction_id: index of this wind direction (suffixes promoted names)
        :param datasize: length of the Cp/Ct interpolation data arrays
        :param nSamples: number of flow-field sample points (0 disables sampling)
        """
        super(RotorSolveGroup, self).__init__()
        if wake_model_options is None:
            wake_model_options = {'differentiable': differentiable, 'use_rotor_components': use_rotor_components,
                                  'nSamples': nSamples}
        # set up iterative solvers (PETSc under MPI, SciPy otherwise).
        # NOTE: the redundant function-local re-import of MPI was removed;
        # the module-level import already provides the name.
        epsilon = 1E-6
        if MPI:
            self.ln_solver = PetscKSP()
        else:
            self.ln_solver = ScipyGMRES()
        self.nl_solver = NLGaussSeidel()
        self.ln_solver.options['atol'] = epsilon
        # Smooth Cp/Ct interpolation from the yawed rotor-effective velocity.
        self.add('CtCp', CPCT_Interpolate_Gradients_Smooth(nTurbines, direction_id=direction_id, datasize=datasize),
                 promotes=['gen_params:*', 'yaw%i' % direction_id,
                           'wtVelocity%i' % direction_id, 'Cp_out'])
        # TODO refactor the model component instance
        self.add('floris', wake_model(nTurbines, direction_id=direction_id, wake_model_options=wake_model_options),
                 promotes=(['model_params:*', 'wind_speed', 'axialInduction',
                            'turbineXw', 'turbineYw', 'rotorDiameter', 'yaw%i' % direction_id, 'hubHeight',
                            'wtVelocity%i' % direction_id]
                           if (nSamples == 0) else
                           ['model_params:*', 'wind_speed', 'axialInduction',
                            'turbineXw', 'turbineYw', 'rotorDiameter', 'yaw%i' % direction_id, 'hubHeight',
                            'wtVelocity%i' % direction_id, 'wsPositionX', 'wsPositionY', 'wsPositionZ',
                            'wsArray%i' % direction_id]))
        # Feed the interpolated thrust coefficient into the wake model.
        self.connect('CtCp.Ct_out', 'floris.Ct')
class DirectionGroup(Group):
    """
    Group containing all necessary components for wind plant calculations
    in a single direction
    """
    def __init__(self, nTurbines, direction_id=0, use_rotor_components=False, datasize=0,
                 differentiable=True, add_IdepVarComps=True, params_IdepVar_func=add_floris_params_IndepVarComps,
                 params_IndepVar_args=None, nSamples=0, wake_model=floris_wrapper, wake_model_options=None, cp_points=1,
                 cp_curve_spline=None):
        """Wire wind-frame rotation, Ct/Cp handling, wake model and power
        computation for a single wind direction.

        :param nTurbines: number of turbines in the plant
        :param direction_id: index of this wind direction (suffixes promoted names)
        :param use_rotor_components: use iteratively coupled rotor/wake solve when True
        :param add_IdepVarComps: create the parameter IndepVarComps inside this
            group (False when a parent group such as AEPGroup already adds them)
        :param nSamples: number of flow-field sample points (0 disables sampling)
        """
        super(DirectionGroup, self).__init__()
        if add_IdepVarComps:
            if params_IdepVar_func is not None:
                if (params_IndepVar_args is None) and (wake_model is floris_wrapper):
                    params_IndepVar_args = {'use_rotor_components': False}
                elif params_IndepVar_args is None:
                    params_IndepVar_args = {}
                params_IdepVar_func(self, **params_IndepVar_args)
            add_gen_params_IdepVarComps(self, datasize=datasize)
        # Rotate turbine coordinates into the wind-aligned frame.
        self.add('directionConversion', WindFrame(nTurbines, differentiable=differentiable, nSamples=nSamples),
                 promotes=['*'])
        if use_rotor_components:
            # Iteratively coupled rotor/wake solve (Cp comes from the rotor group).
            self.add('rotorGroup', RotorSolveGroup(nTurbines, direction_id=direction_id,
                                                   datasize=datasize, differentiable=differentiable,
                                                   nSamples=nSamples, use_rotor_components=use_rotor_components,
                                                   wake_model=wake_model, wake_model_options=wake_model_options),
                     promotes=(['gen_params:*', 'yaw%i' % direction_id, 'wtVelocity%i' % direction_id,
                                'model_params:*', 'wind_speed', 'axialInduction',
                                'turbineXw', 'turbineYw', 'rotorDiameter', 'hubHeight']
                               if (nSamples == 0) else
                               ['gen_params:*', 'yaw%i' % direction_id, 'wtVelocity%i' % direction_id,
                                'model_params:*', 'wind_speed', 'axialInduction',
                                'turbineXw', 'turbineYw', 'rotorDiameter', 'hubHeight', 'wsPositionX', 'wsPositionY', 'wsPositionZ',
                                'wsArray%i' % direction_id]))
        else:
            # One-shot Ct/Cp yaw adjustment followed by the wake model.
            self.add('CtCp', AdjustCtCpYaw(nTurbines, direction_id, differentiable),
                     promotes=['Ct_in', 'Cp_in', 'gen_params:*', 'yaw%i' % direction_id])
            self.add('myModel', wake_model(nTurbines, direction_id=direction_id, wake_model_options=wake_model_options),
                     promotes=(['model_params:*', 'wind_speed', 'axialInduction',
                                'turbineXw', 'turbineYw', 'rotorDiameter', 'yaw%i' % direction_id, 'hubHeight',
                                'wtVelocity%i' % direction_id]
                               if (nSamples == 0) else
                               ['model_params:*', 'wind_speed', 'axialInduction',
                                'turbineXw', 'turbineYw', 'rotorDiameter', 'yaw%i' % direction_id, 'hubHeight',
                                'wtVelocity%i' % direction_id, 'wsPositionXw', 'wsPositionYw', 'wsPositionZ',
                                'wsArray%i' % direction_id]))
        # Convert rotor-effective velocities into turbine and direction power.
        self.add('powerComp', WindDirectionPower(nTurbines=nTurbines, direction_id=direction_id, differentiable=True,
                                                 use_rotor_components=use_rotor_components, cp_points=cp_points,
                                                 cp_curve_spline=cp_curve_spline),
                 promotes=['air_density', 'generatorEfficiency', 'rotorDiameter',
                           'wtVelocity%i' % direction_id, 'rated_power',
                           'wtPower%i' % direction_id, 'dir_power%i' % direction_id, 'cut_in_speed', 'cp_curve_cp',
                           'cp_curve_vel'])
        if use_rotor_components:
            self.connect('rotorGroup.Cp_out', 'powerComp.Cp')
        else:
            self.connect('CtCp.Ct_out', 'myModel.Ct')
            self.connect('CtCp.Cp_out', 'powerComp.Cp')
class AEPGroup(Group):
    """
    Group containing all necessary components for wind plant AEP calculations using the FLORIS model
    """
    def __init__(self, nTurbines, nDirections=1, use_rotor_components=False, datasize=0,
                 differentiable=True, optimizingLayout=False, nSamples=0, wake_model=floris_wrapper,
                 wake_model_options=None, params_IdepVar_func=add_floris_params_IndepVarComps,
                 params_IndepVar_args=None, cp_points=1, cp_curve_spline=None, rec_func_calls=False):
        """Assemble one DirectionGroup per wind direction (run in parallel)
        and combine their powers into an AEP estimate.

        :param nTurbines: number of turbines in the plant
        :param nDirections: number of wind directions to evaluate
        :param use_rotor_components: use iteratively coupled rotor/wake solve when True
        :param nSamples: number of flow-field sample points (0 disables sampling)
        """
        super(AEPGroup, self).__init__()
        if wake_model_options is None:
            wake_model_options = {'differentiable': differentiable, 'use_rotor_components': use_rotor_components,
                                  'nSamples': nSamples, 'verbose': False}
        # providing default unit types for general MUX/DeMUX components
        power_units = 'kW'
        direction_units = 'deg'
        wind_speed_units = 'm/s'
        # add necessary inputs for group
        self.add('dv0', IndepVarComp('windDirections', np.zeros(nDirections), units=direction_units), promotes=['*'])
        self.add('dv1', IndepVarComp('windSpeeds', np.zeros(nDirections), units=wind_speed_units), promotes=['*'])
        self.add('dv2', IndepVarComp('windFrequencies', np.ones(nDirections)), promotes=['*'])
        self.add('dv3', IndepVarComp('turbineX', np.zeros(nTurbines), units='m'), promotes=['*'])
        self.add('dv4', IndepVarComp('turbineY', np.zeros(nTurbines), units='m'), promotes=['*'])
        self.add('dv4p5', IndepVarComp('hubHeight', np.zeros(nTurbines), units='m'), promotes=['*'])
        # add vars to be seen by MPI and gradient calculations
        self.add('dv5', IndepVarComp('rotorDiameter', np.zeros(nTurbines), units='m'), promotes=['*'])
        self.add('dv6', IndepVarComp('axialInduction', np.zeros(nTurbines)), promotes=['*'])
        self.add('dv7', IndepVarComp('generatorEfficiency', np.zeros(nTurbines)), promotes=['*'])
        self.add('dv8', IndepVarComp('air_density', val=1.1716, units='kg/(m*m*m)'), promotes=['*'])
        self.add('dv9', IndepVarComp('rated_power', np.ones(nTurbines)*5000., units='kW',
                                     desc='rated power for each turbine', pass_by_obj=True), promotes=['*'])
        # Ct/Cp inputs are only needed when the rotor is not iteratively coupled.
        if not use_rotor_components:
            self.add('dv10', IndepVarComp('Ct_in', np.zeros(nTurbines)), promotes=['*'])
            self.add('dv11', IndepVarComp('Cp_in', np.zeros(nTurbines)), promotes=['*'])
        self.add('dv12', IndepVarComp('cp_curve_cp', np.zeros(datasize),
                                      desc='cp curve cp data', pass_by_obj=True), promotes=['*'])
        self.add('dv13', IndepVarComp('cp_curve_vel', np.zeros(datasize), units='m/s',
                                      desc='cp curve velocity data', pass_by_obj=True), promotes=['*'])
        self.add('dv14', IndepVarComp('cut_in_speed', np.zeros(nTurbines), units='m/s',
                                      desc='cut-in speed of wind turbines', pass_by_obj=True), promotes=['*'])
        # add variable tree IndepVarComps
        add_gen_params_IdepVarComps(self, datasize=datasize)
        # indep variable components for wake model
        if params_IdepVar_func is not None:
            if (params_IndepVar_args is None) and (wake_model is floris_wrapper):
                params_IndepVar_args = {'use_rotor_components': False}
            elif params_IndepVar_args is None:
                params_IndepVar_args = {}
            params_IdepVar_func(self, **params_IndepVar_args)
        # add components and groups
        self.add('windDirectionsDeMUX', DeMUX(nDirections, units=direction_units))
        self.add('windSpeedsDeMUX', DeMUX(nDirections, units=wind_speed_units))
        # print("initializing parallel groups")
        # if use_parallel_group:
        #     direction_group = ParallelGroup()
        # else:
        #     direction_group = Group()
        # One DirectionGroup per wind direction, evaluated in parallel.
        pg = self.add('all_directions', ParallelGroup(), promotes=['*'])
        if use_rotor_components:
            for direction_id in np.arange(0, nDirections):
                # print('assigning direction group %i'.format(direction_id))
                pg.add('direction_group%i' % direction_id,
                       DirectionGroup(nTurbines=nTurbines, direction_id=direction_id,
                                      use_rotor_components=use_rotor_components, datasize=datasize,
                                      differentiable=differentiable, add_IdepVarComps=False, nSamples=nSamples,
                                      wake_model=wake_model, wake_model_options=wake_model_options, cp_points=cp_points),
                       promotes=(['gen_params:*', 'model_params:*', 'air_density',
                                  'axialInduction', 'generatorEfficiency', 'turbineX', 'turbineY', 'hubHeight',
                                  'yaw%i' % direction_id, 'rotorDiameter', 'rated_power', 'wtVelocity%i' % direction_id,
                                  'wtPower%i' % direction_id, 'dir_power%i' % direction_id]
                                 if (nSamples == 0) else
                                 ['gen_params:*', 'model_params:*', 'air_density',
                                  'axialInduction', 'generatorEfficiency', 'turbineX', 'turbineY', 'hubHeight',
                                  'yaw%i' % direction_id, 'rotorDiameter', 'rated_power', 'wsPositionX', 'wsPositionY',
                                  'wsPositionZ', 'wtVelocity%i' % direction_id,
                                  'wtPower%i' % direction_id, 'dir_power%i' % direction_id, 'wsArray%i' % direction_id]))
        else:
            for direction_id in np.arange(0, nDirections):
                # print('assigning direction group %i'.format(direction_id))
                pg.add('direction_group%i' % direction_id,
                       DirectionGroup(nTurbines=nTurbines, direction_id=direction_id,
                                      use_rotor_components=use_rotor_components, datasize=datasize,
                                      differentiable=differentiable, add_IdepVarComps=False, nSamples=nSamples,
                                      wake_model=wake_model, wake_model_options=wake_model_options, cp_points=cp_points,
                                      cp_curve_spline=cp_curve_spline),
                       promotes=(['Ct_in', 'Cp_in', 'gen_params:*', 'model_params:*', 'air_density', 'axialInduction',
                                  'generatorEfficiency', 'turbineX', 'turbineY', 'yaw%i' % direction_id, 'rotorDiameter',
                                  'hubHeight', 'rated_power', 'wtVelocity%i' % direction_id, 'wtPower%i' % direction_id,
                                  'dir_power%i' % direction_id, 'cut_in_speed', 'cp_curve_cp', 'cp_curve_vel']
                                 if (nSamples == 0) else
                                 ['Ct_in', 'Cp_in', 'gen_params:*', 'model_params:*', 'air_density', 'axialInduction',
                                  'generatorEfficiency', 'turbineX', 'turbineY', 'yaw%i' % direction_id, 'rotorDiameter',
                                  'hubHeight', 'rated_power', 'cut_in_speed', 'wsPositionX', 'wsPositionY', 'wsPositionZ',
                                  'wtVelocity%i' % direction_id, 'wtPower%i' % direction_id,
                                  # NOTE(review): 'cut_in_speed' appears twice in this branch's
                                  # promotes list -- likely harmless duplication, confirm.
                                  'dir_power%i' % direction_id, 'wsArray%i' % direction_id, 'cut_in_speed', 'cp_curve_cp',
                                  'cp_curve_vel']))
        # print("parallel groups initialized")
        # Combine direction powers into the AEP figure of merit.
        self.add('powerMUX', MUX(nDirections, units=power_units))
        self.add('AEPcomp', WindFarmAEP(nDirections, rec_func_calls=rec_func_calls), promotes=['*'])
        # connect components
        self.connect('windDirections', 'windDirectionsDeMUX.Array')
        self.connect('windSpeeds', 'windSpeedsDeMUX.Array')
        for direction_id in np.arange(0, nDirections):
            self.add('y%i' % direction_id, IndepVarComp('yaw%i' % direction_id, np.zeros(nTurbines), units='deg'), promotes=['*'])
            self.connect('windDirectionsDeMUX.output%i' % direction_id, 'direction_group%i.wind_direction' % direction_id)
            self.connect('windSpeedsDeMUX.output%i' % direction_id, 'direction_group%i.wind_speed' % direction_id)
            self.connect('dir_power%i' % direction_id, 'powerMUX.input%i' % direction_id)
        self.connect('powerMUX.Array', 'dirPowers')
from .checks import *
from ..config import *
from ..pug import Pug
from ..team import Team
from ..utils import *
@check(input_too_long, not_in_pug, team_already_exists, already_in_team)
async def team(message, pugs, user_input, client):
    """Create a team, named by the user, inside the pug the author plays in."""
    # Locate the pug the message author is currently part of.
    current_pug = find_in_list(lambda p: message.author in p.players, pugs)
    current_pug.add_team(message.author, user_input["arguments"])
    await update_status(message.channel, current_pug)
class Led():
    """
    Represents a led wired to one of the board's digital pins.
    An MQTT client is also required so state changes can be reported
    to the user.
    """

    def __init__(self, pin, client):
        self._pin = pin
        pinMode(self._pin, OUTPUT)
        self._client = client
        # The led starts turned off.
        self._is_on = False
        digitalWrite(self._pin, LOW)

    def on(self):
        """
        Turns the led on and notifies the user via MQTT.
        """
        if not self._is_on:
            digitalWrite(self._pin, HIGH)
            self._is_on = True
            self._client.publish("iot-marco/data/led", "on")

    def off(self):
        """
        Turns the led off and notifies the user via MQTT.
        """
        if self._is_on:
            digitalWrite(self._pin, LOW)
            self._is_on = False
            self._client.publish("iot-marco/data/led", "off")

    def state(self):
        """
        Returns a string representing the current state of the device.
        """
        if self._is_on:
            return "on"
        return "off"

    def control(self, command):
        """
        Usually called when a message is received in the "iot-marco/commands/led"
        subtopic; the message payload (*command*) names the action to perform.
        """
        if command == "get-state":
            self._client.publish("iot-marco/data/led", self.state())
        elif command == "turn-on":
            self.on()
        elif command == "turn-off":
            self.off()
| 1,571 | 454 |
# -*- coding: utf-8 -*-
# Public names exported by this connectivity-probing module.
__all__ = (
    'check_transparent_proxy',
    'external_ip',
    'external_headers',
    'online',
    'ntp_time_diff',
    'check',
    'bits_to_dict',
    'PortQuiz',
)
import tinyhttp
import socket
import time
import threading
import random
import urllib2
import scan
import netaddr
import struct
import igd
import sys
import json
from log import get_logger
logger = get_logger('online')
import stun
import ntplib
# Cached result of check() and the time it was produced (one-hour TTL).
ONLINE_STATUS = None
ONLINE_STATUS_CHECKED = None
# Bit flags packed into the result bitmask returned by check().
ONLINE_CAPTIVE = 1 << 0
ONLINE_MS = 1 << 1
ONLINE = ONLINE_MS | ONLINE_CAPTIVE
HOTSPOT = 1 << 2
DNS = 1 << 3
DIRECT_DNS = 1 << 4
HTTP = 1 << 5
HTTPS = 1 << 6
HTTPS_NOCERT = 1 << 7
HTTPS_MITM = 1 << 8
PROXY = 1 << 9
TRANSPARENT = 1 << 10
IGD = 1 << 11
# One bit per reachable pastebin-like service (see PASTEBINS below).
PASTEBIN = 1 << 12
HASTEBIN = 1 << 13
IXIO = 1 << 14
DPASTE = 1 << 15
VPASTE = 1 << 16
PASTEOPENSTACK = 1 << 17
GHOSTBIN = 1 << 18
PHPASTE = 1 << 19
FRIENDPASTE = 1 << 20
LPASTE = 1 << 21
# Three-bit field (bits 22-24) encoding the STUN-detected NAT type;
# STUN_NAT_VALUE is the mask for extracting it.
STUN_NAT_VALUE = 7 << 22
STUN_NAT_BLOCKED = 0 << 22
STUN_NAT_OPEN = 1 << 22
STUN_NAT_CLONE = 2 << 22
STUN_NAT_UDP_FW = 3 << 22
STUN_NAT_RESTRICT = 4 << 22
STUN_NAT_PORT = 5 << 22
STUN_NAT_SYMMETRIC = 6 << 22
STUN_NAT_ERROR = 7 << 22
NTP = 1 << 25
# Map the NAT-type bit values to the stun module's descriptive constants.
STUN_NAT_DESCRIPTION = {
    STUN_NAT_BLOCKED: stun.Blocked,
    STUN_NAT_OPEN: stun.OpenInternet,
    STUN_NAT_CLONE: stun.FullCone,
    STUN_NAT_UDP_FW: stun.SymmetricUDPFirewall,
    STUN_NAT_RESTRICT: stun.RestricNAT,
    STUN_NAT_PORT: stun.RestricPortNAT,
    STUN_NAT_SYMMETRIC: stun.SymmetricNAT,
    STUN_NAT_ERROR: stun.ChangedAddressError,
}
NTP_SERVER = 'pool.ntp.org'
STUN_HOST = 'stun.l.google.com'
STUN_PORT = 19302
# Don't want to import large (200k - 1Mb) dnslib/python dns just for that..
# Pre-built raw DNS query for myip.opendns.com (A record) and the expected
# prefix of the reply; the answer's last four bytes carry our IPv4 address.
OPENDNS_REQUEST = '\xe4\x9a\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00\x04' \
    'myip\x07opendns\x03com\x00\x00\x01\x00\x01'
OPENDNS_RESPONSE = '\xe4\x9a\x81\x80\x00\x01\x00\x01\x00\x00\x00\x00\x04' \
    'myip\x07opendns\x03com\x00\x00\x01\x00\x01\xc0\x0c\x00' \
    '\x01\x00\x01\x00\x00\x00\x00\x00\x04'
# Pastebin-like services to probe, mapped to their result-bitmask bit.
PASTEBINS = {
    'https://pastebin.com': PASTEBIN,
    'https://hastebin.com': HASTEBIN,
    'http://ix.io': IXIO,
    'http://dpaste.com': DPASTE,
    'http://vpaste.net': VPASTE,
    # 'https://lpaste.net': LPASTE, # Down now?
    'http://paste.openstack.org': PASTEOPENSTACK,
    # 'https://ghostbin.com': GHOSTBIN, # Cloudflare page causes false negative
    'https://phpaste.sourceforge.io': PHPASTE,
    'https://friendpaste.com': FRIENDPASTE
}
# Reachability probes: each entry pairs a URL with text expected in the
# response; the 'https' entry also pins the CA (base64 DER) so check()
# can detect TLS interception.
CHECKS = {
    'msonline': {
        'url': 'http://www.msftncsi.com/ncsi.txt',
        'text': 'Microsoft NCSI',
    },
    'http': {
        'url': 'http://lame.sourceforge.net/license.txt',
        'text': 'Can I use LAME in my commercial program?',
    },
    'https': {
        'url': 'https://www.apache.org/licenses/LICENSE-2.0',
        'text': 'APPENDIX: How to apply the Apache License to your work.',
        'ca':'''MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
jjxDah2nGN59PRbxYvnKkKj9'''
    },
}
# Android-style captive portal probes; both must return an empty 204.
CAPTIVE_URLS = [
    'http://connectivitycheck.gstatic.com/generate_204',
    'http://clients3.google.com/generate_204',
]
# Hostnames with known, stable A records used to sanity-check DNS answers.
KNOWN_DNS = {
    'opendns.org': '67.215.92.210',
    'quad9.net': '216.21.3.77',
}
# Connecting here should fail; any HTTP answer implies a transparent proxy.
IP_KNOWN_TO_BE_DOWN='1.2.3.4'
# Plain-text "what is my IP" services, tried in order.
OWN_IP = [
    'ifconfig.co',
    'ifconfig.me/ip',
    'eth0.me',
    'ipecho.net/plain',
    'icanhazip.com',
    'curlmyip.com',
    'l2.io/ip'
]
# Cache for external_ip() (one-hour TTL).
LAST_EXTERNAL_IP = None
LAST_EXTERNAL_IP_TIME = None
def check_transparent_proxy():
logger.debug('Check for transparent proxy')
try:
s = socket.create_connection((IP_KNOWN_TO_BE_DOWN, 80), timeout=5)
s.settimeout(5)
s.send('GET / HTTP/3.0\r\n\r\n')
data = s.recv(12)
if data.startswith('HTTP'):
return True
except Exception, e:
logger.debug('Check transparent proxy: %s', e)
return False
def external_ip(force_ipv4=False):
    '''Return our external IP address as a netaddr.IPAddress, or None.
    Resolution order: STUN, then plain-text web services, then a raw DNS
    query.  A successful answer is cached for one hour.
    force_ipv4 -- reject IPv6 answers from the web services.
    '''
    global LAST_EXTERNAL_IP, LAST_EXTERNAL_IP_TIME
    # Serve the cached address while it is younger than one hour.
    if LAST_EXTERNAL_IP_TIME is not None:
        if time.time() - LAST_EXTERNAL_IP_TIME < 3600:
            logger.debug('Return cached IP (last ts=%d): %s',
                LAST_EXTERNAL_IP_TIME, LAST_EXTERNAL_IP)
            return LAST_EXTERNAL_IP
    logger.debug('Retrieve IP using external services')
    # 1. STUN (fast, UDP).
    # NOTE(review): force_ipv4 is not applied to the STUN answer -- confirm intended.
    try:
        stun_ip = stun.get_ip(stun_host=STUN_HOST, stun_port=STUN_PORT)
        if stun_ip is not None:
            stun_ip = netaddr.IPAddress(stun_ip)
            LAST_EXTERNAL_IP = stun_ip
            LAST_EXTERNAL_IP_TIME = time.time()
            return LAST_EXTERNAL_IP
    except Exception, e:
        logger.debug('external_ip: STUN failed: %s', e)
    # 2. Plain-text "what is my IP" web services, https tried before http.
    ctx = tinyhttp.HTTP(timeout=5, headers={'User-Agent': 'curl/7.12.3'})
    for service in OWN_IP:
        for scheme in ['https', 'http']:
            try:
                data, code = ctx.get(scheme + '://' + service, code=True)
                if code == 200:
                    addr = netaddr.IPAddress(data.strip())
                    if force_ipv4 and addr.version == 6:
                        continue
                    LAST_EXTERNAL_IP = addr
                    LAST_EXTERNAL_IP_TIME = time.time()
                    return LAST_EXTERNAL_IP
            except Exception, e:
                logger.debug('Get IP service failed: %s: %s (%s)', service, e, type(e))
    # 3. Raw DNS query to OpenDNS as the last resort.
    LAST_EXTERNAL_IP = dns_external_ip()
    if LAST_EXTERNAL_IP:
        LAST_EXTERNAL_IP_TIME = time.time()
    return LAST_EXTERNAL_IP
def dns_external_ip():
logger.debug('Retrieve IP using DNS')
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
try:
s.settimeout(5)
s.sendto(OPENDNS_REQUEST, ('resolver1.opendns.com', 53))
data = s.recv(256)
if data.startswith(OPENDNS_RESPONSE):
return netaddr.IPAddress(struct.unpack('>I', data[-4:])[0])
except Exception, e:
logger.debug('DNS External IP failed: %s', e)
return None
def external_headers():
logger.debug('Retrieve external headers')
try:
ctx = tinyhttp.HTTP(timeout=15, headers={'User-Agent': 'curl/7.12.3'})
data = ctx.get('http://httpbin.org/headers')
data = json.loads(data)
return data['headers']
except Exception, e:
logger.debug('External headers failed: %s', e)
return {}
def online():
headers = {
'User-Agent': 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Trident/6.0)'
}
ctx = tinyhttp.HTTP(timeout=5, headers=headers)
try:
data = ctx.get(CHECKS['msonline']['url'])
if data == CHECKS['msonline']['text']:
return True
except Exception, e:
logger.debug('MS Online check failed: %s', e)
return False
def ntp_time_diff():
    '''Return the local clock offset from NTP_SERVER in microseconds.'''
    response = ntplib.NTPClient().request(NTP_SERVER, version=3)
    return int(response.offset * 1000000)
def check():
    '''Run every connectivity probe and return (offset, mintime, result).
    offset  -- NTP clock offset in microseconds, clamped to [-32768, 32767]
    mintime -- fastest captive-probe round trip in ms, clamped to 65535
    result  -- bitmask built from the flag constants defined at module level
    The tuple is cached in ONLINE_STATUS for one hour.
    '''
    global ONLINE_STATUS_CHECKED
    global ONLINE_STATUS
    # Serve the cached result while it is younger than one hour.
    if ONLINE_STATUS_CHECKED is not None:
        if time.time() - ONLINE_STATUS_CHECKED < 3600:
            return ONLINE_STATUS
    logger.debug('Online check started')
    headers = {
        'User-Agent': 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Trident/6.0)'
    }
    # Client variants: system proxy + cert check, proxy without cert check,
    # no proxy, and pinned-CA (used to detect HTTPS interception).
    ctx = tinyhttp.HTTP(proxy=True, noverify=False, timeout=15, headers=headers)
    ctx_nocert = tinyhttp.HTTP(proxy=True, timeout=15, noverify=True, headers=headers)
    ctx_noproxy = tinyhttp.HTTP(proxy=False, timeout=15, headers=headers)
    ctx_mitm = tinyhttp.HTTP(
        proxy=True, noverify=False, timeout=15,
        cadata=CHECKS['https']['ca'].decode('base64'), headers=headers)
    result = 0
    mintime = None
    offset = 0
    ok = 0
    now = time.time()
    # Android-style captive portal probes: expect an empty 204 reply;
    # a 302 redirect instead suggests a hotspot login page.
    for url in CAPTIVE_URLS:
        try:
            data, code = ctx.get(url, code=True)
            t = time.time()
            if mintime is None or mintime > t - now:
                mintime = t - now
            now = t
            if data == '' and code == 204:
                ok += 1
            if code == 302:
                result |= HOTSPOT
        except Exception, e:
            logger.debug('Captive check failed %s: %s', url, e)
    if ok == 2:
        result |= ONLINE_CAPTIVE
    # Microsoft NCSI probe.
    try:
        data = ctx.get(CHECKS['msonline']['url'])
        t = time.time()
        if mintime is None or mintime > t - now:
            mintime = t - now
        now = t
        if data == CHECKS['msonline']['text']:
            result |= ONLINE_MS
    except Exception, e:
        logger.debug('MS Online check failed: %s', e)
    # If the proxied captive check worked, retry without the proxy to see
    # whether direct connections are blocked (a proxy is then mandatory).
    if result & ONLINE_CAPTIVE:
        for url in CAPTIVE_URLS:
            try:
                data, code = ctx_noproxy.get(url, code=True)
                if not (data == '' and code == 204) and ok:
                    result |= PROXY
                    break
            except Exception, e:
                result |= PROXY
                logger.debug('Captive check failed %s: %s', url, e)
    # Plain HTTP reachability.
    try:
        data = ctx.get(CHECKS['http']['url'])
        if CHECKS['http']['text'] in data:
            result |= HTTP
    except Exception, e:
        logger.debug('HTTP Check failed: %s', e)
    # HTTPS with normal certificate verification.
    try:
        data = ctx.get(CHECKS['https']['url'])
        if CHECKS['https']['text'] in data:
            result |= HTTPS
    except Exception, e:
        logger.debug('HTTPS Check failed: %s', e)
    # HTTPS pinned to the expected CA: a failure or altered content
    # implies a man-in-the-middle on TLS.
    try:
        data = ctx_mitm.get(CHECKS['https']['url'])
        if not CHECKS['https']['text'] in data:
            result |= HTTPS_MITM
    except Exception, e:
        logger.debug('HTTPS Mitm Check failed: %s', e)
        result |= HTTPS_MITM
    # HTTPS with certificate verification disabled.
    try:
        data = ctx_nocert.get(CHECKS['https']['url'])
        if CHECKS['https']['text'] in data:
            result |= HTTPS_NOCERT
            result |= HTTPS
    except Exception, e:
        logger.debug('HTTPS NoCert Check failed: %s', e)
    # DNS sanity: resolve hostnames with known stable answers.
    for hostname, ip in KNOWN_DNS.iteritems():
        try:
            if ip == socket.gethostbyname(hostname):
                result |= DNS
        except Exception, e:
            logger.debug('DNS Check failed: %s', e)
    # Reachability of each pastebin-like service (no cert checks).
    for pastebin, bit in PASTEBINS.iteritems():
        try:
            data, code = ctx_nocert.get(
                pastebin,
                code=True, headers={'User-Agent': 'curl'}
            )
            if code == 200:
                result |= bit
        except Exception, e:
            logger.debug('Pastebin Check failed %s: %s', pastebin, e)
    if check_transparent_proxy():
        result |= TRANSPARENT | PROXY
    else:
        # Look for a Via header added by a non-transparent proxy.
        headers = external_headers()
        for header in headers:
            if 'via' in header.lower():
                result |= PROXY
                break
    deip = dns_external_ip()
    if deip:
        result |= DIRECT_DNS
    # NAT-type detection via STUN (packed into the 3-bit field).
    try:
        nat, _, _ = stun.get_ip_info()
        for bit, descr in STUN_NAT_DESCRIPTION.iteritems():
            if descr == nat:
                result |= bit
                break
    except Exception, e:
        logger.debug('STUN Checks failed: %s', e)
        result |= STUN_NAT_BLOCKED
    # Clock offset via NTP, clamped to a signed 16-bit range.
    try:
        offset = ntp_time_diff()
        result |= NTP
        if offset > 32767:
            offset = 32767
        elif offset < -32768:
            offset = -32768
    except Exception, e:
        logger.debug('NTP Checks failed: %s', e)
        offset = 0
    if sys.platform != 'win32':
        # This may cause firewall window
        # TODO: Work around this with pressing enter using keyboard module
        try:
            igdc = igd.IGDClient()
            if igdc.available:
                result |= IGD
        except Exception, e:
            logger.debug('IGD Check failed: %s', e)
    # Convert the best captive round trip to milliseconds, clamp to 16 bits.
    if mintime is None:
        mintime = 0
    else:
        mintime = int(mintime * 1000)
        if mintime > 65535:
            mintime = 65535
    ONLINE_STATUS = (offset, mintime, result)
    ONLINE_STATUS_CHECKED = time.time()
    logger.debug('Online check completed')
    return ONLINE_STATUS
def bits_to_dict(data):
    '''Expand the packed bitmask produced by check() into a readable dict.'''
    # Decode the 3-bit NAT-type field into its descriptive string.
    nat_bits = data & STUN_NAT_VALUE
    nat_descr = [
        descr for value, descr in STUN_NAT_DESCRIPTION.iteritems()
        if value == nat_bits
    ][0]
    pastebins = {}
    for pastebin, bit in PASTEBINS.iteritems():
        pastebins[pastebin] = bool(data & bit)
    return {
        'online': bool(data & ONLINE),
        'online-by': {
            'android': bool(data & ONLINE_CAPTIVE),
            'microsoft': bool(data & ONLINE_MS),
        },
        'igd': bool(data & IGD),
        'hotspot': bool(data & HOTSPOT),
        'dns': bool(data & DNS),
        'direct-dns': bool(data & DIRECT_DNS),
        'http': bool(data & HTTP),
        'https': bool(data & HTTPS),
        'https-no-cert': bool(data & HTTPS_NOCERT),
        'https-mitm': bool(data & HTTPS_MITM),
        'proxy': bool(data & PROXY),
        'transparent-proxy': bool(data & TRANSPARENT),
        'stun': nat_descr,
        'ntp': bool(data & NTP),
        'pastebins': pastebins
    }
class PortQuiz(threading.Thread):
    '''Background scanner that discovers which outbound TCP ports the local
    network allows, by connecting to portquiz.net (which answers HTTP on
    every port).  Confirmed-open ports accumulate in ``self.available``.
    '''
    PORTQUIZ_ADDR='5.196.70.86'
    PORTQUIZ_HOSTNAME='portquiz.net'
    PORTQUIZ_443_MESSAGE='Your browser sent a request that this server could not understand'
    def __init__(self, amount=8, http_timeout=15, connect_timeout=10):
        # amount: stop scanning once this many open ports are confirmed.
        threading.Thread.__init__(self)
        self.daemon = True
        self.table = {}
        self.lock = threading.Lock()
        self.abort = threading.Event()
        self.amount = amount
        # Custom opener that reuses the already-connected sockets stored in
        # self.table (handed over via tinyhttp.NullHandler).
        self.opener = urllib2.OpenerDirector()
        self.opener.handlers = []
        self.opener.add_handler(tinyhttp.NullHandler(self.table, self.lock))
        self.opener.add_handler(urllib2.HTTPHandler())
        self.http_timeout = http_timeout
        self.connect_timeout = connect_timeout
        self.available = list()
    def _on_open_port(self, info):
        # Callback from scan.scan for each connectable port: verify the
        # socket really reaches portquiz (not e.g. a captive portal)
        # before recording the port as available.
        host, port, sock = info
        logger.debug('Check: %s:%d', host, port)
        try:
            with self.lock:
                self.table['{}:{}'.format(host,port)] = sock
            sock.setblocking(1)
            sock.settimeout(self.http_timeout)
            url = urllib2.Request(
                'http://{}:{}'.format(host, port),
                headers={
                    'Host': self.PORTQUIZ_HOSTNAME,
                    'User-Agent': 'curl',
                })
            response = self.opener.open(url, timeout=self.http_timeout)
            data = response.read()
            # Port 443 answers with an HTTP error page instead of the
            # normal success text, so accept that message too.
            if 'test successful!' in data \
              or (port == 443 and self.PORTQUIZ_443_MESSAGE in data):
                self.available.append(port)
                if len(self.available) >= self.amount:
                    self.abort.set()
            else:
                logger.debug('Invalid response, port %d: %s', port, repr(data))
        except Exception, e:
            logger.exception('port check: %s:%s: %s', host, port, e)
        finally:
            try:
                sock.close()
            except:
                pass
    def _run(self):
        # Scan the common service ports first, then a shuffled subset of
        # the top-1000 ports until ``amount`` open ports are found.
        most_important = [
            80, 443, 8080, 53, 5222, 25, 110, 465
        ]
        try:
            portquiz_addr = socket.gethostbyname(self.PORTQUIZ_HOSTNAME)
        except socket.gaierror:
            # DNS failed: fall back to the hard-coded address.
            portquiz_addr = self.PORTQUIZ_ADDR
        logger.debug('Scan most important. IP: %s', portquiz_addr)
        scan.scan([portquiz_addr], most_important, timeout=self.connect_timeout, abort=self.abort,
             on_open_port=self._on_open_port, pass_socket=True)
        logger.debug('Scan other ports')
        if len(self.available) < self.amount:
            other = list([
                x for x in scan.TOP1000 if x not in most_important
            ])
            random.shuffle(other)
            scan.scan(
                [portquiz_addr], other, timeout=self.connect_timeout, abort=self.abort,
                on_open_port=self._on_open_port, pass_socket=True)
        logger.debug('Done. Found %d ports', len(self.available))
    def run(self):
        # Thread entry point: wrap _run with logging.
        try:
            logger.debug('PortQuiz: started')
            self._run()
            logger.debug('PortQuiz: completed (available %d ports)', len(self.available))
        except Exception, e:
            logger.exception('PortQuiz: %s', e)
| 18,988 | 7,636 |
from Transaction import Transaction
class Dataset:
    """Loads a utility-list transaction database from a file.

    Each input line has the form ``items:TU:utilities`` where *items* and
    *utilities* are space-separated integers and *TU* is the transaction
    utility.  For every item a PMU (its own utility plus the utilities of
    co-occurring neighbor items) is also computed.
    """

    def __init__(self, datasetpath, neighbors):
        """
        :param datasetpath: path to the transaction database file
        :param neighbors: dict mapping an item to the collection of its
            spatial neighbor items
        """
        # FIX: transactions/maxItem were class-level attributes, so every
        # Dataset instance shared one transactions list (appends from one
        # instance leaked into all others).  They are per-instance now.
        self.transactions = []
        self.maxItem = 0
        # The with-statement closes the file; the old explicit f.close()
        # inside the with-block was redundant and has been dropped.
        with open(datasetpath, 'r') as f:
            for line in f:
                self.transactions.append(self.createTransaction(line, neighbors))
        print('Transaction Count :' + str(len(self.transactions)))

    def createTransaction(self, line, neighbors):
        """Parse one database line into a Transaction, updating maxItem."""
        trans_list = line.strip().split(':')
        transactionUtility = int(trans_list[1])
        # Convert the token lists once instead of re-parsing in the loops.
        items = [int(tok) for tok in trans_list[0].strip().split(' ')]
        utilities = [int(tok) for tok in trans_list[2].strip().split(' ')]
        pmus = []
        for idx, item_int in enumerate(items):
            if item_int > self.maxItem:
                self.maxItem = item_int
            # PMU = own utility + utilities of co-occurring neighbor items.
            pm = utilities[idx]
            if item_int in neighbors:
                item_neighbors = neighbors[item_int]
                for j, other in enumerate(items):
                    if j != idx and other in item_neighbors:
                        pm += utilities[j]
            pmus.append(pm)
        return Transaction(items, utilities, transactionUtility, pmus)

    def getMaxItem(self):
        """Return the largest item id seen across all transactions."""
        return self.maxItem

    def getTransactions(self):
        """Return the list of parsed Transaction objects."""
        return self.transactions
| 1,548 | 431 |
import json
import prefect
from typing import Any, Dict
from prefect import Task
from prefect.tasks.secrets import PrefectSecret
from .azure_key_vault import AzureKeyVaultSecret
from viadot.config import local_config
from viadot.sources import AzureSQL
class ASELiteToDF(Task):
    """Prefect task: run a SQL query against an ASElite (Azure SQL) database
    and return the result as a pandas DataFrame."""

    def __init__(
        self, credentials: Dict[str, Any] = None, query: str = None, *args, **kwargs
    ):
        """
        Task for obtaining data from ASElite source.
        Args:
            credentials (Dict[str, Any], optional): ASElite SQL Database credentials. Defaults to None.
            query(str, optional): Query to perform on a database. Defaults to None.
        Returns: Pandas DataFrame
        """
        self.credentials = credentials
        self.query = query
        super().__init__(
            name="ASElite_to_df",
            *args,
            **kwargs,
        )

    def __call__(self, *args, **kwargs):
        """Download from aselite database to df"""
        return super().__call__(*args, **kwargs)

    def run(
        self,
        query: str,
        credentials: Dict[str, Any] = None,
        credentials_secret: str = None,
        vault_name: str = None,
    ):
        """Execute *query* against ASElite and return a pandas DataFrame.

        Credential resolution order:
          1. explicit ``credentials_secret`` (Azure Key Vault secret name),
          2. the ``aselite`` Prefect secret, when defined,
          3. the local viadot config key ``ASELite_SQL``.

        Args:
            query (str): SQL query to run.
            credentials (Dict[str, Any], optional): Explicit credentials
                (overridden by the secret lookup below when one resolves).
            credentials_secret (str, optional): Azure Key Vault secret holding
                the credentials as JSON.
            vault_name (str, optional): Key Vault name for the secret lookup.
        """
        logger = prefect.context.get("logger")
        if not credentials_secret:
            # Best effort: fall back to the Prefect secret when one is defined.
            try:
                credentials_secret = PrefectSecret("aselite").run()
            except ValueError:
                pass
        if credentials_secret:
            credentials_str = AzureKeyVaultSecret(
                credentials_secret, vault_name=vault_name
            ).run()
            credentials = json.loads(credentials_str)
            logger.info("Loaded credentials from Key Vault")
        else:
            credentials = local_config.get("ASELite_SQL")
            logger.info("Loaded credentials from local source")
        aselite = AzureSQL(credentials=credentials)
        logger.info("Connected to ASELITE SOURCE")
        df = aselite.to_df(query=query)
        # Fixed log-message typo: "Succefully" -> "Successfully".
        logger.info("Successfully collected data from query")
        return df
| 2,047 | 560 |
"""CFSR & CVSv2 conversion
This requires a running UPCluster:
$ ipcluster start -n 12
"""
import os
from ipyparallel import Client
import cfsr
path_input = '/some/path'   # directory containing the downloaded grib2 files
path_output = '/some/path'  # directory where the converted NetCDF files go
path_pycfs = '/some/path'  # The path to cfsr.py and gribou.py
var_names = ['pressfc']  # The RDA archive cfsr dataset prefix
grib_var_names = ['Surface pressure']  # grib-internal variable names
"""The grib_var_names can be obtained from gribou.all_str_dump(file_name)"""
grib_levels = [None]  # one entry per variable, parallel to var_names
"""The grib levels are set to None if there are no vertical level units
in the groubou.all_str_dump(file_name), otherwise the number is used
(e.g. grib_levels = [2] for one 2 meter variable)"""
nc_var_names = ['ps']  # output NetCDF variable names, parallel to var_names
nc_units = ['Pa']      # output NetCDF units, parallel to var_names
"""nc_var_names can also be obtained in the gribou.all_str_dump(file_name)"""
nc_format = 'NETCDF4_CLASSIC'  # NetCDF file format passed to the converter
initial_year = 1979  # first year to convert (inclusive)
final_year = 2010    # last year to convert (inclusive)
months = ['01', '02', '03', '04', '05', '06',
          '07', '08', '09', '10', '11', '12']
grib_source = 'rda'  # archive the grib files came from
resolution = 'highres'  # selects the input file-name pattern (see below)
"""This is related to the grid choice on the rda portal. Generally, if
the higher resolution is selected, set to 'highres'. For lower resolutions,
the file names should have a *.l.gdas.* structure, in this case set to
'lowres'"""
cache_size = 100  # forwarded to cfsr.hourly_grib2_to_netcdf
rc = Client()  # connect to the running ipcluster (see module docstring)
with rc[:].sync_imports():
    import sys
# Make the cfsr/gribou helper modules importable on every engine.
rc[:].execute("sys.path.append('{0}')".format(path_pycfs))
with rc[:].sync_imports():
    import cfsr
with rc[:].sync_imports():
    import gribou
lview = rc.load_balanced_view()  # conversions are load-balanced over engines
mylviews = []  # async result handles of the submitted conversion tasks
# Submit one conversion task per (variable, year, month) whose grib file
# exists on disk.
for i, var_name in enumerate(var_names):
    for yyyy in range(initial_year, final_year + 1):
        for mm in months:
            vym = (var_name, str(yyyy), mm)
            ncvym = (nc_var_names[i], str(yyyy), mm)
            if resolution in ['highres', 'prmslmidres', 'ocnmidres']:
                # From April 2011 onward the archive uses the cdas1 prefix
                # (CFSv2); earlier files use gdas (CFSR).
                if (yyyy > 2011) or ((yyyy == 2011) and (int(mm) > 3)):
                    grib_file = "{0}.cdas1.{1}{2}.grb2".format(*vym)
                else:
                    grib_file = "{0}.gdas.{1}{2}.grb2".format(*vym)
                file_name = "{0}_1hr_cfsr_reanalysis_{1}{2}.nc".format(*ncvym)
                nc_file = os.path.join(path_output, file_name)
            elif resolution in ['lowres', 'ocnlowres']:
                grib_file = "{0}.l.gdas.{1}{2}.grb2".format(*vym)
                file_name = "{0}_1hr_cfsr_reanalysis_lowres_{1}{2}.nc".format(
                    *ncvym)
                nc_file = os.path.join(path_output, file_name)
            else:
                # Previously an unknown resolution fell through and crashed
                # later with a NameError on grib_file; fail fast instead.
                raise ValueError("Unknown resolution: {0}".format(resolution))
            grib_file = os.path.join(path_input, grib_file)
            if not os.path.isfile(grib_file):
                continue  # month not downloaded; skip it
            print(grib_file)
            mylviews.append(lview.apply(
                cfsr.hourly_grib2_to_netcdf, grib_file, grib_source, nc_file,
                nc_var_names[i], grib_var_names[i], grib_levels[i],
                cache_size=cache_size, overwrite_nc_units=nc_units[i],
                nc_format=nc_format))
    if nc_var_names[i] in ['tasmin', 'tasmax']:
        # BUG FIX: the two implicitly-concatenated fragments lacked a
        # separating space and printed "...need to runcfsr_sampling.py".
        print("WARNING: this is a cumulative min/max variable, need to run "
              "cfsr_sampling.py afterwards.")
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
def normalize_text(text):
    """Return *text* normalised to lowercase."""
    lowered = text.lower()
    return lowered
def attach_attributes(obj, data_dict):
    """Copy every key/value pair of *data_dict* onto *obj* as attributes."""
    for key, value in data_dict.items():
        setattr(obj, key, value)
def load_api_key(rtkey):
    """Return the API key stored in the file at *rtkey*.

    The path may start with ``~`` (user expansion); surrounding whitespace
    in the file is stripped.

    >>> load_api_key("~/path/to/my/rt/key")
    returns api key
    """
    key_path = os.path.expanduser(rtkey)
    with open(key_path) as key_file:
        return key_file.read().strip()
| 455 | 171 |
# Read five weights from the user and report the lightest and heaviest.
pesos = []
for c in range(1, 6):
    pesos.append(float(input('Peso da {}ª pessoa: '.format(c))))
print('O menor peso é {} e o maior {}'.format(min(pesos), max(pesos)))
| 369 | 145 |
"""Quantum network."""
import collections
class Network:
    """A quantum network.

    Describes the topology and available resources of a quantum network:
    a collection of quantum routers connected by quantum links."""
    # TODO: Remove this when we have more methods
    # pylint:disable=too-few-public-methods

    def __init__(self):
        # Routers indexed by their unique name, kept in insertion order.
        self.routers = collections.OrderedDict()

    def add_router(self, router):
        """Register *router* with this network.

        Args:
            router(Router): The quantum router to be added; its name must not
                already be present.

        Returns:
            None
        """
        assert router.name not in self.routers, \
            f"Network already contains a router with name {router.name}"
        self.routers[router.name] = router
| 826 | 211 |
# ------------------------------------------------------------------
# Copyright (c) 2020 PyInstaller Development Team.
#
# This file is distributed under the terms of the GNU General Public
# License (version 2.0 or later).
#
# The full license is available in LICENSE.GPL.txt, distributed with
# this software.
#
# SPDX-License-Identifier: GPL-2.0-or-later
# ------------------------------------------------------------------
# Hook for the diStorm3 module: https://pypi.python.org/pypi/distorm3
# Tested with distorm3 3.3.0, Python 2.7, Windows
from PyInstaller.utils.hooks import collect_dynamic_libs
# distorm3 dynamic library should be in the path with other dynamic libraries.
# collect_dynamic_libs scans the installed distorm3 package for its shared
# libraries; destdir='.' places them at the bundle root -- confirm against
# the PyInstaller hooks documentation for the installed version.
binaries = collect_dynamic_libs('distorm3', destdir='.')
| 745 | 221 |
"""Statistical tests for multidimensional data in :class:`NDVar` objects"""
__test__ = False
from ._stats.testnd import (
NDTest, MultiEffectNDTest,
t_contrast_rel, corr, ttest_1samp, ttest_ind, ttest_rel, anova,
Vector, VectorDifferenceRelated,
)
from ._stats.spm import LM, LMGroup
| 297 | 109 |
import logging
from functools import lru_cache
logger = logging.getLogger(__name__)
def parse_adapter_input(adapters):
    """Parse newline-separated joltage ratings into a full sorted chain.

    The charging outlet (0) is prepended and the device's built-in adapter
    (highest joltage + 3) is appended.
    """
    ratings = sorted(int(line) for line in adapters.split("\n") if line)
    chain = [0] + ratings
    chain.append(max(chain) + 3)
    return chain
def get_adapter_differences(adapters):
    """Return the joltage gap between each pair of consecutive adapters.

    All adapters must be used, so after sorting (done by
    parse_adapter_input) the answer is just pairwise differences.
    """
    chain = parse_adapter_input(adapters)
    return [nxt - cur for cur, nxt in zip(chain, chain[1:])]
def get_adapter_path_count(adapters):
    """Count the distinct valid adapter arrangements for the raw input.

    The parsed chain is converted to a tuple because the memoised helper
    (lru_cache) requires hashable arguments.
    """
    chain = tuple(parse_adapter_input(adapters))
    return get_adapter_path_count_priv(chain)
@lru_cache()
def get_adapter_path_count_priv(adapters, current=0):
    """Count valid arrangements from position *current* to the chain's end.

    *adapters* must be a hashable, sorted sequence (a tuple) so lru_cache
    can memoise the exponential recursion down to O(n) distinct calls.
    """
    candidates = [j for j in (current + 1, current + 2, current + 3)
                  if j < len(adapters)]
    if not candidates:
        # current is the device's built-in adapter: exactly one way here.
        return 1
    total = 0
    for j in candidates:
        # An adapter can only follow another within a 3-jolt difference.
        if adapters[j] - adapters[current] <= 3:
            total += get_adapter_path_count_priv(adapters, j)
    return total
| 1,461 | 491 |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: toit/model/device.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from toit.model import data_pb2 as toit_dot_model_dot_data__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='toit/model/device.proto',
package='toit.model',
syntax='proto3',
serialized_options=b'\n\030io.toit.proto.toit.modelB\013DeviceProtoZ&github.com/toitware/api.git/toit/model',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x17toit/model/device.proto\x12\ntoit.model\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x15toit/model/data.proto\"\xf0\x02\n\x0c\x44\x65viceConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rstarted_after\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0b\n\x03sdk\x18\x03 \x01(\t\x12\x30\n\x06\x62roker\x18\x04 \x01(\x0b\x32 .toit.model.DeviceBrokerSettings\x12\x38\n\nconnection\x18\x05 \x01(\x0b\x32$.toit.model.DeviceConnectionSettings\x12\r\n\x05model\x18\x06 \x01(\t\x12\x31\n\rfactory_after\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x07runtime\x18\x08 \x01(\x0b\x32!.toit.model.DeviceRuntimeSettings\x12\x30\n\x07modules\x18\t \x01(\x0b\x32\x1f.toit.model.DeviceModulesConfig\"u\n\x13\x44\x65viceModulesConfig\x12\x32\n\x0esync_requested\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12*\n\x07modules\x18\x02 \x01(\x0b\x32\x19.toit.model.DeviceModules\"s\n\x15\x44\x65viceRuntimeSettings\x12,\n\x07logging\x18\x01 \x01(\x0b\x32\x1b.toit.model.LoggingSettings\x12,\n\x07metrics\x18\x02 \x01(\x0b\x32\x1b.toit.model.MetricsSettings\";\n\x0fLoggingSettings\x12(\n\x05level\x18\x01 \x01(\x0e\x32\x19.toit.model.LogData.Level\"?\n\x0fMetricsSettings\x12,\n\x05level\x18\x01 \x01(\x0e\x32\x1d.toit.model.MetricsData.Level\"\x9d\x01\n\x18\x44\x65viceConnectionSettings\x12.\n\x0bmax_offline\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0b\x63onnections\x18\x02 \x03(\x0b\x32\x1d.toit.model.ConnectionSetting\x12\x1d\n\x15\x65vent_queue_threshold\x18\x03 \x01(\r\"c\n\x11\x43onnectionSetting\x12%\n\x04wifi\x18\x01 \x01(\x0b\x32\x17.toit.model.WifiSetting\x12\'\n\x05nbiot\x18\x02 \x01(\x0b\x32\x18.toit.model.NBIoTSetting\"-\n\x0bWifiSetting\x12\x0c\n\x04ssid\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\"I\n\x0cNBIoTSetting\x12\x0b\n\x03\x61pn\x18\x01 \x01(\t\x12\r\n\x05\x62\x61nds\x18\x02 \x03(\x03\x12\x10\n\x08operator\x18\x03 
\x01(\t\x12\x0b\n\x03pin\x18\x04 \x01(\t\"T\n\x14\x44\x65viceBrokerSettings\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x03\x12\n\n\x02\x63n\x18\x03 \x01(\t\x12\x14\n\x08protocol\x18\x04 \x01(\tB\x02\x18\x01\"\xaa\x03\n\x0c\x44\x65viceStatus\x12\x14\n\x08revision\x18\x01 \x01(\x03\x42\x02\x18\x01\x12\r\n\x05\x65poch\x18\x06 \x01(\x0c\x12\x16\n\x0estate_revision\x18\x05 \x01(\x03\x12+\n\x07updated\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x17\n\x0f\x63onfig_revision\x18\x03 \x01(\x03\x12\x0b\n\x03sdk\x18\x04 \x01(\t\x12\r\n\x05model\x18\x07 \x01(\t\x12(\n\x06\x63onfig\x18\x08 \x01(\x0b\x32\x18.toit.model.DeviceConfig\x12\x11\n\tconnected\x18\t \x01(\x08\x12*\n\x04\x62oot\x18\n \x01(\x0b\x32\x1c.toit.model.DeviceBootStatus\x12(\n\x06health\x18\x0b \x01(\x0b\x32\x18.toit.model.DeviceHealth\x12\x36\n\nconnection\x18\x0c \x01(\x0b\x32\".toit.model.DeviceConnectionStatus\x12\x30\n\x07modules\x18\r \x01(\x0b\x32\x1f.toit.model.DeviceModulesStatus\"s\n\x13\x44\x65viceModulesStatus\x12\x30\n\x0clast_updated\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12*\n\x07modules\x18\x02 \x01(\x0b\x32\x19.toit.model.DeviceModules\";\n\rDeviceModules\x12*\n\x08\x63\x65llular\x18\x01 \x01(\x0b\x32\x18.toit.model.DeviceModule\".\n\x0c\x44\x65viceModule\x12\r\n\x05model\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\"W\n\x16\x44\x65viceConnectionStatus\x12\r\n\x05index\x18\x01 \x01(\x03\x12.\n\x04type\x18\x02 \x01(\x0e\x32 .toit.model.DeviceConnectionType\"\x8c\x01\n\x10\x44\x65viceBootStatus\x12-\n\tlast_boot\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x12\n\nin_factory\x18\x02 \x01(\x08\x12\x35\n\x11last_factory_boot\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xfb\x04\n\x0c\x44\x65viceHealth\x12-\n\x05power\x18\x01 \x01(\x0b\x32\x1e.toit.model.DeviceHealth.Power\x12<\n\rbattery_curve\x18\x02 \x01(\x0b\x32%.toit.model.DeviceHealth.BatteryCurve\x12;\n\x0c\x63onnectivity\x18\x03 
\x01(\x0b\x32%.toit.model.DeviceHealth.Connectivity\x1a\xc0\x01\n\x05Power\x12\x33\n\x05value\x18\x01 \x01(\x0b\x32$.toit.model.DeviceHealth.Power.Value\x12\x30\n\x0clast_updated\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1aP\n\x05Value\x12\x11\n\x07\x62\x61ttery\x18\x01 \x01(\x01H\x00\x12+\n\tpluggedin\x18\x02 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x00\x42\x07\n\x05power\x1aO\n\x0c\x42\x61tteryCurve\x12\r\n\x05slope\x18\x01 \x01(\x01\x12\x30\n\x0clast_updated\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\xac\x01\n\x0c\x43onnectivity\x12-\n\tlast_seen\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\tconnected\x18\x02 \x01(\x08\x12?\n\x08\x63heckins\x18\x03 \x03(\x0b\x32-.toit.model.DeviceHealth.Connectivity.Checkin\x1a\x19\n\x07\x43heckin\x12\x0e\n\x06missed\x18\x01 \x01(\x08\"y\n\x10HardwareIdentity\x12\x13\n\x0bhardware_id\x18\x01 \x01(\x0c\x12\x17\n\x0fsequence_number\x18\x02 \x01(\x03\x12\r\n\x05\x62\x61tch\x18\x03 \x01(\t\x12\x13\n\x0bprivate_key\x18\x04 \x01(\x0c\x12\x13\n\x0b\x63\x65rtificate\x18\x05 \x01(\x0c\"\x85\x01\n\x14HardwareIdentityInfo\x12\x1b\n\x13\x66\x61\x63tory_sdk_version\x18\x01 \x01(\t\x12\x12\n\nflashed_by\x18\x02 \x01(\x0c\x12\r\n\x05iccid\x18\x03 \x01(\t\x12\x0f\n\x07\x63hip_id\x18\x04 \x01(\t\x12\x1c\n\x14\x66\x61\x63tory_device_model\x18\x05 \x01(\t\"S\n\x0cHardwareInfo\x12\x13\n\x0bhardware_id\x18\x01 \x01(\x0c\x12\x15\n\rhardware_fqdn\x18\x02 \x01(\t\x12\x17\n\x0forganization_id\x18\x04 \x01(\x0c\"\x96\x01\n\x14HardwareToDeviceInfo\x12\x13\n\x0bhardware_id\x18\x01 \x01(\x0c\x12\x11\n\tdevice_id\x18\x02 \x01(\x0c\x12)\n\x05\x62ound\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x07unbound\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xd5\t\n\x0b\x44\x65viceEvent\x12\x11\n\tdevice_id\x18\x01 \x01(\x0c\x12\x10\n\x08\x65vent_id\x18\x02 \x01(\x0c\x12+\n\x07\x63reated\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12*\n\x04type\x18\x04 
\x01(\x0e\x32\x1c.toit.model.DeviceEvent.Type\x12\x0b\n\x03msg\x18\x05 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x06 \x01(\x0c\x12\x34\n\tinitiater\x18\x07 \x01(\x0b\x32!.toit.model.DeviceEvent.Initiater\x12\x31\n\x0binformation\x18\x08 \x03(\x0b\x32\x1c.toit.model.DeviceEvent.Info\x1a\x8b\x02\n\tInitiater\x12:\n\x06\x64\x65vice\x18\x01 \x01(\x0b\x32(.toit.model.DeviceEvent.Initiater.DeviceH\x00\x12<\n\x07\x63onsole\x18\x02 \x01(\x0b\x32).toit.model.DeviceEvent.Initiater.ConsoleH\x00\x12\x36\n\x04user\x18\x03 \x01(\x0b\x32&.toit.model.DeviceEvent.Initiater.UserH\x00\x1a\x1b\n\x06\x44\x65vice\x12\x11\n\tdevice_id\x18\x01 \x01(\x0c\x1a\t\n\x07\x43onsole\x1a\x17\n\x04User\x12\x0f\n\x07user_id\x18\x01 \x01(\x0c\x42\x0b\n\tinitiater\x1a\xaf\x04\n\x04Info\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x31\n\x05value\x18\x02 \x01(\x0b\x32\".toit.model.DeviceEvent.Info.Value\x1a\xe6\x03\n\x05Value\x12\x41\n\tprimitive\x18\x01 \x01(\x0b\x32,.toit.model.DeviceEvent.Info.Value.PrimitiveH\x00\x12\x37\n\x04\x64iff\x18\x02 \x01(\x0b\x32\'.toit.model.DeviceEvent.Info.Value.DiffH\x00\x12\x39\n\x05\x65rror\x18\x03 \x01(\x0b\x32(.toit.model.DeviceEvent.Info.Value.ErrorH\x00\x1a\x86\x01\n\tPrimitive\x12?\n\x04type\x18\x01 \x01(\x0e\x32\x31.toit.model.DeviceEvent.Info.Value.Primitive.Type\x12\r\n\x05value\x18\x02 \x01(\x0c\")\n\x04Type\x12\x0b\n\x07INVALID\x10\x00\x12\x08\n\x04JSON\x10\x01\x12\n\n\x06STRING\x10\x02\x1a|\n\x04\x44iff\x12:\n\x04\x66rom\x18\x01 \x01(\x0b\x32,.toit.model.DeviceEvent.Info.Value.Primitive\x12\x38\n\x02to\x18\x02 \x01(\x0b\x32,.toit.model.DeviceEvent.Info.Value.Primitive\x1a\x16\n\x05\x45rror\x12\r\n\x05\x65rror\x18\x01 
\x01(\tB\x07\n\x05value\"\x83\x01\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x11\n\rCONFIG_CHANGE\x10\x01\x12\x11\n\rSTATUS_CHANGE\x10\x02\x12\x0f\n\x0bNEXT_ACTION\x10\x03\x12\x11\n\rDEVICE_ACTION\x10\x04\x12\x12\n\x0eQUEUE_OVERFLOW\x10\x05\x12\x10\n\x0cREQUEST_FILE\x10\x06*\x9c\x01\n\x14\x44\x65viceConnectionType\x12\"\n\x1e\x44\x45VICE_CONNECTION_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1b\x44\x45VICE_CONNECTION_TYPE_WIFI\x10\x01\x12 \n\x1c\x44\x45VICE_CONNECTION_TYPE_NBIOT\x10\x02\x12\x1d\n\x19\x44\x45VICE_CONNECTION_TYPE_OS\x10\x03*\xda\x01\n\x10\x44\x65viceChangeType\x12\x19\n\x15\x44\x45VICE_CHANGE_UNKNOWN\x10\x00\x12\x17\n\x13\x44\x45VICE_CHANGE_ADDED\x10\x01\x12\x19\n\x15\x44\x45VICE_CHANGE_DELETED\x10\x02\x12\x19\n\x15\x44\x45VICE_CHANGE_CHANGED\x10\x03\x12\x1f\n\x17\x44\x45VICE_CHANGE_HEARTBEAT\x10\x04\x1a\x02\x08\x01\x12\x1b\n\x17\x44\x45VICE_CHANGE_CONNECTED\x10\x05\x12\x1e\n\x1a\x44\x45VICE_CHANGE_DISCONNECTED\x10\x06*L\n\x17\x44\x65viceSessionChangeType\x12\x10\n\x0cUNKNOWN_TYPE\x10\x00\x12\r\n\tCONNECTED\x10\x01\x12\x10\n\x0c\x44ISCONNECTED\x10\x02*\xb4\x03\n\x0c\x44\x65viceAction\x12\x19\n\x15\x44\x45VICE_ACTION_UNKNOWN\x10\x00\x12\x15\n\x11\x44\x45VICE_ACTION_NOP\x10\x01\x12\x18\n\x14\x44\x45VICE_ACTION_REBOOT\x10\x02\x12\x1b\n\x17\x44\x45VICE_ACTION_NEW_EPOCH\x10\x03\x12\x1f\n\x1b\x44\x45VICE_ACTION_REPORT_CONFIG\x10\x04\x12!\n\x1d\x44\x45VICE_ACTION_FACTORY_PROMOTE\x10\x05\x12\x1f\n\x1b\x44\x45VICE_ACTION_UPDATE_CONFIG\x10\x06\x12!\n\x1d\x44\x45VICE_ACTION_UPDATE_FIRMWARE\x10\x07\x12\x1f\n\x1b\x44\x45VICE_ACTION_FACTORY_RESET\x10\x08\x12\x1b\n\x17\x44\x45VICE_ACTION_RECONCILE\x10\t\x12)\n%DEVICE_ACTION_UPDATE_PARTIAL_FIRMWARE\x10\n\x12 \n\x1c\x44\x45VICE_ACTION_REPORT_MODULES\x10\x0b\x12(\n$DEVICE_ACTION_UPDATE_MODULE_CELLULAR\x10\x0c*-\n\x10\x44\x65viceModuleType\x12\x0b\n\x07Unknown\x10\x00\x12\x0c\n\x08\x43\x65llular\x10\x01\x42O\n\x18io.toit.proto.toit.modelB\x0b\x44\x65viceProtoZ&github.com/toitware/api.git/toit/modelb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,toit_dot_model_dot_data__pb2.DESCRIPTOR,])
_DEVICECONNECTIONTYPE = _descriptor.EnumDescriptor(
name='DeviceConnectionType',
full_name='toit.model.DeviceConnectionType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='DEVICE_CONNECTION_TYPE_UNKNOWN', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_CONNECTION_TYPE_WIFI', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_CONNECTION_TYPE_NBIOT', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_CONNECTION_TYPE_OS', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4621,
serialized_end=4777,
)
_sym_db.RegisterEnumDescriptor(_DEVICECONNECTIONTYPE)
DeviceConnectionType = enum_type_wrapper.EnumTypeWrapper(_DEVICECONNECTIONTYPE)
_DEVICECHANGETYPE = _descriptor.EnumDescriptor(
name='DeviceChangeType',
full_name='toit.model.DeviceChangeType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='DEVICE_CHANGE_UNKNOWN', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_CHANGE_ADDED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_CHANGE_DELETED', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_CHANGE_CHANGED', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_CHANGE_HEARTBEAT', index=4, number=4,
serialized_options=b'\010\001',
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_CHANGE_CONNECTED', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_CHANGE_DISCONNECTED', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4780,
serialized_end=4998,
)
_sym_db.RegisterEnumDescriptor(_DEVICECHANGETYPE)
DeviceChangeType = enum_type_wrapper.EnumTypeWrapper(_DEVICECHANGETYPE)
_DEVICESESSIONCHANGETYPE = _descriptor.EnumDescriptor(
name='DeviceSessionChangeType',
full_name='toit.model.DeviceSessionChangeType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_TYPE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONNECTED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DISCONNECTED', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5000,
serialized_end=5076,
)
_sym_db.RegisterEnumDescriptor(_DEVICESESSIONCHANGETYPE)
DeviceSessionChangeType = enum_type_wrapper.EnumTypeWrapper(_DEVICESESSIONCHANGETYPE)
_DEVICEACTION = _descriptor.EnumDescriptor(
name='DeviceAction',
full_name='toit.model.DeviceAction',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_UNKNOWN', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_NOP', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_REBOOT', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_NEW_EPOCH', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_REPORT_CONFIG', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_FACTORY_PROMOTE', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_UPDATE_CONFIG', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_UPDATE_FIRMWARE', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_FACTORY_RESET', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_RECONCILE', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_UPDATE_PARTIAL_FIRMWARE', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_REPORT_MODULES', index=11, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION_UPDATE_MODULE_CELLULAR', index=12, number=12,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5079,
serialized_end=5515,
)
_sym_db.RegisterEnumDescriptor(_DEVICEACTION)
DeviceAction = enum_type_wrapper.EnumTypeWrapper(_DEVICEACTION)
_DEVICEMODULETYPE = _descriptor.EnumDescriptor(
name='DeviceModuleType',
full_name='toit.model.DeviceModuleType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Unknown', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Cellular', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5517,
serialized_end=5562,
)
_sym_db.RegisterEnumDescriptor(_DEVICEMODULETYPE)
DeviceModuleType = enum_type_wrapper.EnumTypeWrapper(_DEVICEMODULETYPE)
DEVICE_CONNECTION_TYPE_UNKNOWN = 0
DEVICE_CONNECTION_TYPE_WIFI = 1
DEVICE_CONNECTION_TYPE_NBIOT = 2
DEVICE_CONNECTION_TYPE_OS = 3
DEVICE_CHANGE_UNKNOWN = 0
DEVICE_CHANGE_ADDED = 1
DEVICE_CHANGE_DELETED = 2
DEVICE_CHANGE_CHANGED = 3
DEVICE_CHANGE_HEARTBEAT = 4
DEVICE_CHANGE_CONNECTED = 5
DEVICE_CHANGE_DISCONNECTED = 6
UNKNOWN_TYPE = 0
CONNECTED = 1
DISCONNECTED = 2
DEVICE_ACTION_UNKNOWN = 0
DEVICE_ACTION_NOP = 1
DEVICE_ACTION_REBOOT = 2
DEVICE_ACTION_NEW_EPOCH = 3
DEVICE_ACTION_REPORT_CONFIG = 4
DEVICE_ACTION_FACTORY_PROMOTE = 5
DEVICE_ACTION_UPDATE_CONFIG = 6
DEVICE_ACTION_UPDATE_FIRMWARE = 7
DEVICE_ACTION_FACTORY_RESET = 8
DEVICE_ACTION_RECONCILE = 9
DEVICE_ACTION_UPDATE_PARTIAL_FIRMWARE = 10
DEVICE_ACTION_REPORT_MODULES = 11
DEVICE_ACTION_UPDATE_MODULE_CELLULAR = 12
Unknown = 0
Cellular = 1
_DEVICEEVENT_INFO_VALUE_PRIMITIVE_TYPE = _descriptor.EnumDescriptor(
name='Type',
full_name='toit.model.DeviceEvent.Info.Value.Primitive.Type',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='INVALID', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='JSON', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='STRING', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4284,
serialized_end=4325,
)
_sym_db.RegisterEnumDescriptor(_DEVICEEVENT_INFO_VALUE_PRIMITIVE_TYPE)
_DEVICEEVENT_TYPE = _descriptor.EnumDescriptor(
name='Type',
full_name='toit.model.DeviceEvent.Type',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONFIG_CHANGE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='STATUS_CHANGE', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NEXT_ACTION', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEVICE_ACTION', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='QUEUE_OVERFLOW', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='REQUEST_FILE', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4487,
serialized_end=4618,
)
_sym_db.RegisterEnumDescriptor(_DEVICEEVENT_TYPE)
_DEVICECONFIG = _descriptor.Descriptor(
name='DeviceConfig',
full_name='toit.model.DeviceConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='toit.model.DeviceConfig.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='started_after', full_name='toit.model.DeviceConfig.started_after', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sdk', full_name='toit.model.DeviceConfig.sdk', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='broker', full_name='toit.model.DeviceConfig.broker', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='connection', full_name='toit.model.DeviceConfig.connection', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='model', full_name='toit.model.DeviceConfig.model', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='factory_after', full_name='toit.model.DeviceConfig.factory_after', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='runtime', full_name='toit.model.DeviceConfig.runtime', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='modules', full_name='toit.model.DeviceConfig.modules', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=157,
serialized_end=525,
)
# Generated protobuf Descriptor for message `toit.model.DeviceModulesConfig`.
# Fields: sync_requested (1, message), modules (2, message).
# NOTE: auto-generated descriptor table — do not hand-edit field numbers,
# types, or serialized byte offsets.
_DEVICEMODULESCONFIG = _descriptor.Descriptor(
  name='DeviceModulesConfig',
  full_name='toit.model.DeviceModulesConfig',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='sync_requested', full_name='toit.model.DeviceModulesConfig.sync_requested', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='modules', full_name='toit.model.DeviceModulesConfig.modules', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte range of this message definition within the serialized FileDescriptorProto.
  serialized_start=527,
  serialized_end=644,
)
# Generated protobuf Descriptor for message `toit.model.DeviceRuntimeSettings`.
# Fields: logging (1, message), metrics (2, message).
_DEVICERUNTIMESETTINGS = _descriptor.Descriptor(
  name='DeviceRuntimeSettings',
  full_name='toit.model.DeviceRuntimeSettings',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='logging', full_name='toit.model.DeviceRuntimeSettings.logging', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='metrics', full_name='toit.model.DeviceRuntimeSettings.metrics', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=646,
  serialized_end=761,
)
# Generated protobuf Descriptor for message `toit.model.LoggingSettings`.
# Single enum-typed field: level (1).
_LOGGINGSETTINGS = _descriptor.Descriptor(
  name='LoggingSettings',
  full_name='toit.model.LoggingSettings',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='level', full_name='toit.model.LoggingSettings.level', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=763,
  serialized_end=822,
)
# Generated protobuf Descriptor for message `toit.model.MetricsSettings`.
# Single enum-typed field: level (1). Mirrors LoggingSettings' shape.
_METRICSSETTINGS = _descriptor.Descriptor(
  name='MetricsSettings',
  full_name='toit.model.MetricsSettings',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='level', full_name='toit.model.MetricsSettings.level', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=824,
  serialized_end=887,
)
# Generated protobuf Descriptor for message `toit.model.DeviceConnectionSettings`.
# Fields: max_offline (1, message), connections (2, repeated message — label=3),
# event_queue_threshold (3, uint32).
_DEVICECONNECTIONSETTINGS = _descriptor.Descriptor(
  name='DeviceConnectionSettings',
  full_name='toit.model.DeviceConnectionSettings',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='max_offline', full_name='toit.model.DeviceConnectionSettings.max_offline', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='connections', full_name='toit.model.DeviceConnectionSettings.connections', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='event_queue_threshold', full_name='toit.model.DeviceConnectionSettings.event_queue_threshold', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=890,
  serialized_end=1047,
)
# Generated protobuf Descriptor for message `toit.model.ConnectionSetting`.
# Fields: wifi (1, message), nbiot (2, message).
_CONNECTIONSETTING = _descriptor.Descriptor(
  name='ConnectionSetting',
  full_name='toit.model.ConnectionSetting',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='wifi', full_name='toit.model.ConnectionSetting.wifi', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='nbiot', full_name='toit.model.ConnectionSetting.nbiot', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1049,
  serialized_end=1148,
)
# Generated protobuf Descriptor for message `toit.model.WifiSetting`.
# Fields: ssid (1, string), password (2, string).
_WIFISETTING = _descriptor.Descriptor(
  name='WifiSetting',
  full_name='toit.model.WifiSetting',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='ssid', full_name='toit.model.WifiSetting.ssid', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='password', full_name='toit.model.WifiSetting.password', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1150,
  serialized_end=1195,
)
# Generated protobuf Descriptor for message `toit.model.NBIoTSetting`.
# Fields: apn (1, string), bands (2, repeated int64 — label=3),
# operator (3, string), pin (4, string).
_NBIOTSETTING = _descriptor.Descriptor(
  name='NBIoTSetting',
  full_name='toit.model.NBIoTSetting',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='apn', full_name='toit.model.NBIoTSetting.apn', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='bands', full_name='toit.model.NBIoTSetting.bands', index=1,
      number=2, type=3, cpp_type=2, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='operator', full_name='toit.model.NBIoTSetting.operator', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pin', full_name='toit.model.NBIoTSetting.pin', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1197,
  serialized_end=1270,
)
# Generated protobuf Descriptor for message `toit.model.DeviceBrokerSettings`.
# Fields: host (1, string), port (2, int64), cn (3, string),
# protocol (4, string; its options bytes b'\030\001' appear to encode
# FieldOptions.deprecated=true — i.e. the field is marked deprecated).
_DEVICEBROKERSETTINGS = _descriptor.Descriptor(
  name='DeviceBrokerSettings',
  full_name='toit.model.DeviceBrokerSettings',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='host', full_name='toit.model.DeviceBrokerSettings.host', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='port', full_name='toit.model.DeviceBrokerSettings.port', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='cn', full_name='toit.model.DeviceBrokerSettings.cn', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='protocol', full_name='toit.model.DeviceBrokerSettings.protocol', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\030\001', file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1272,
  serialized_end=1356,
)
# Generated protobuf Descriptor for message `toit.model.DeviceStatus`.
# Thirteen fields; note that Python `index` order does not follow wire
# field `number` order here (e.g. epoch is number=6 at index=1).
# revision (1) carries options b'\030\001', which appear to encode
# FieldOptions.deprecated=true.
_DEVICESTATUS = _descriptor.Descriptor(
  name='DeviceStatus',
  full_name='toit.model.DeviceStatus',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='revision', full_name='toit.model.DeviceStatus.revision', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\030\001', file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='epoch', full_name='toit.model.DeviceStatus.epoch', index=1,
      number=6, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='state_revision', full_name='toit.model.DeviceStatus.state_revision', index=2,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='updated', full_name='toit.model.DeviceStatus.updated', index=3,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='config_revision', full_name='toit.model.DeviceStatus.config_revision', index=4,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='sdk', full_name='toit.model.DeviceStatus.sdk', index=5,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='model', full_name='toit.model.DeviceStatus.model', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='config', full_name='toit.model.DeviceStatus.config', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='connected', full_name='toit.model.DeviceStatus.connected', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='boot', full_name='toit.model.DeviceStatus.boot', index=9,
      number=10, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='health', full_name='toit.model.DeviceStatus.health', index=10,
      number=11, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='connection', full_name='toit.model.DeviceStatus.connection', index=11,
      number=12, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='modules', full_name='toit.model.DeviceStatus.modules', index=12,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1359,
  serialized_end=1785,
)
# Generated protobuf Descriptor for message `toit.model.DeviceModulesStatus`.
# Fields: last_updated (1, message), modules (2, message).
_DEVICEMODULESSTATUS = _descriptor.Descriptor(
  name='DeviceModulesStatus',
  full_name='toit.model.DeviceModulesStatus',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='last_updated', full_name='toit.model.DeviceModulesStatus.last_updated', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='modules', full_name='toit.model.DeviceModulesStatus.modules', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1787,
  serialized_end=1902,
)
# Generated protobuf Descriptor for message `toit.model.DeviceModules`.
# Single message-typed field: cellular (1).
_DEVICEMODULES = _descriptor.Descriptor(
  name='DeviceModules',
  full_name='toit.model.DeviceModules',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='cellular', full_name='toit.model.DeviceModules.cellular', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1904,
  serialized_end=1963,
)
# Generated protobuf Descriptor for message `toit.model.DeviceModule`.
# Fields: model (1, string), version (2, string).
_DEVICEMODULE = _descriptor.Descriptor(
  name='DeviceModule',
  full_name='toit.model.DeviceModule',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='model', full_name='toit.model.DeviceModule.model', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='version', full_name='toit.model.DeviceModule.version', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1965,
  serialized_end=2011,
)
# Generated protobuf Descriptor for message `toit.model.DeviceConnectionStatus`.
# Fields: index (1, int64), type (2, enum).
_DEVICECONNECTIONSTATUS = _descriptor.Descriptor(
  name='DeviceConnectionStatus',
  full_name='toit.model.DeviceConnectionStatus',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='toit.model.DeviceConnectionStatus.index', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='type', full_name='toit.model.DeviceConnectionStatus.type', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2013,
  serialized_end=2100,
)
# Generated protobuf Descriptor for message `toit.model.DeviceBootStatus`.
# Fields: last_boot (1, message), in_factory (2, bool),
# last_factory_boot (3, message).
_DEVICEBOOTSTATUS = _descriptor.Descriptor(
  name='DeviceBootStatus',
  full_name='toit.model.DeviceBootStatus',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='last_boot', full_name='toit.model.DeviceBootStatus.last_boot', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='in_factory', full_name='toit.model.DeviceBootStatus.in_factory', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='last_factory_boot', full_name='toit.model.DeviceBootStatus.last_factory_boot', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2103,
  serialized_end=2243,
)
# Generated protobuf Descriptor for nested message
# `toit.model.DeviceHealth.Power.Value`.
# Fields battery (1, double) and pluggedin (2, message) belong to the
# `power` oneof declared below.
_DEVICEHEALTH_POWER_VALUE = _descriptor.Descriptor(
  name='Value',
  full_name='toit.model.DeviceHealth.Power.Value',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='battery', full_name='toit.model.DeviceHealth.Power.Value.battery', index=0,
      number=1, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pluggedin', full_name='toit.model.DeviceHealth.Power.Value.pluggedin', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    # The `power` oneof; its member fields are linked in later by the
    # generated registration code (fields=[] here is filled at build time).
    _descriptor.OneofDescriptor(
      name='power', full_name='toit.model.DeviceHealth.Power.Value.power',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
  ],
  serialized_start=2545,
  serialized_end=2625,
)
# Generated protobuf Descriptor for nested message
# `toit.model.DeviceHealth.Power` (contains the Value descriptor above).
# Fields: value (1, message), last_updated (2, message).
_DEVICEHEALTH_POWER = _descriptor.Descriptor(
  name='Power',
  full_name='toit.model.DeviceHealth.Power',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='value', full_name='toit.model.DeviceHealth.Power.value', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='last_updated', full_name='toit.model.DeviceHealth.Power.last_updated', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_DEVICEHEALTH_POWER_VALUE, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2433,
  serialized_end=2625,
)
# Generated protobuf Descriptor for nested message
# `toit.model.DeviceHealth.BatteryCurve`.
# Fields: slope (1, double), last_updated (2, message).
_DEVICEHEALTH_BATTERYCURVE = _descriptor.Descriptor(
  name='BatteryCurve',
  full_name='toit.model.DeviceHealth.BatteryCurve',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='slope', full_name='toit.model.DeviceHealth.BatteryCurve.slope', index=0,
      number=1, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='last_updated', full_name='toit.model.DeviceHealth.BatteryCurve.last_updated', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2627,
  serialized_end=2706,
)
# Generated protobuf Descriptor for nested message
# `toit.model.DeviceHealth.Connectivity.Checkin`.
# Single bool field: missed (1).
_DEVICEHEALTH_CONNECTIVITY_CHECKIN = _descriptor.Descriptor(
  name='Checkin',
  full_name='toit.model.DeviceHealth.Connectivity.Checkin',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='missed', full_name='toit.model.DeviceHealth.Connectivity.Checkin.missed', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2856,
  serialized_end=2881,
)
# Generated protobuf Descriptor for nested message
# `toit.model.DeviceHealth.Connectivity` (contains Checkin above).
# Fields: last_seen (1, message), connected (2, bool),
# checkins (3, repeated message — label=3).
_DEVICEHEALTH_CONNECTIVITY = _descriptor.Descriptor(
  name='Connectivity',
  full_name='toit.model.DeviceHealth.Connectivity',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='last_seen', full_name='toit.model.DeviceHealth.Connectivity.last_seen', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='connected', full_name='toit.model.DeviceHealth.Connectivity.connected', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='checkins', full_name='toit.model.DeviceHealth.Connectivity.checkins', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_DEVICEHEALTH_CONNECTIVITY_CHECKIN, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2709,
  serialized_end=2881,
)
# Generated protobuf Descriptor for message `toit.model.DeviceHealth`.
# Fields: power (1, message), battery_curve (2, message),
# connectivity (3, message). Registers the Power, BatteryCurve and
# Connectivity descriptors defined earlier as nested types.
_DEVICEHEALTH = _descriptor.Descriptor(
  name='DeviceHealth',
  full_name='toit.model.DeviceHealth',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='power', full_name='toit.model.DeviceHealth.power', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='battery_curve', full_name='toit.model.DeviceHealth.battery_curve', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='connectivity', full_name='toit.model.DeviceHealth.connectivity', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_DEVICEHEALTH_POWER, _DEVICEHEALTH_BATTERYCURVE, _DEVICEHEALTH_CONNECTIVITY, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2246,
  serialized_end=2881,
)
_HARDWAREIDENTITY = _descriptor.Descriptor(
name='HardwareIdentity',
full_name='toit.model.HardwareIdentity',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='hardware_id', full_name='toit.model.HardwareIdentity.hardware_id', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sequence_number', full_name='toit.model.HardwareIdentity.sequence_number', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='batch', full_name='toit.model.HardwareIdentity.batch', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='private_key', full_name='toit.model.HardwareIdentity.private_key', index=3,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='certificate', full_name='toit.model.HardwareIdentity.certificate', index=4,
number=5, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2883,
serialized_end=3004,
)
_HARDWAREIDENTITYINFO = _descriptor.Descriptor(
name='HardwareIdentityInfo',
full_name='toit.model.HardwareIdentityInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='factory_sdk_version', full_name='toit.model.HardwareIdentityInfo.factory_sdk_version', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='flashed_by', full_name='toit.model.HardwareIdentityInfo.flashed_by', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='iccid', full_name='toit.model.HardwareIdentityInfo.iccid', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='chip_id', full_name='toit.model.HardwareIdentityInfo.chip_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='factory_device_model', full_name='toit.model.HardwareIdentityInfo.factory_device_model', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3007,
serialized_end=3140,
)
_HARDWAREINFO = _descriptor.Descriptor(
name='HardwareInfo',
full_name='toit.model.HardwareInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='hardware_id', full_name='toit.model.HardwareInfo.hardware_id', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='hardware_fqdn', full_name='toit.model.HardwareInfo.hardware_fqdn', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='organization_id', full_name='toit.model.HardwareInfo.organization_id', index=2,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3142,
serialized_end=3225,
)
_HARDWARETODEVICEINFO = _descriptor.Descriptor(
name='HardwareToDeviceInfo',
full_name='toit.model.HardwareToDeviceInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='hardware_id', full_name='toit.model.HardwareToDeviceInfo.hardware_id', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='device_id', full_name='toit.model.HardwareToDeviceInfo.device_id', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bound', full_name='toit.model.HardwareToDeviceInfo.bound', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unbound', full_name='toit.model.HardwareToDeviceInfo.unbound', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3228,
serialized_end=3378,
)
# Generated descriptors for the nested toit.model.DeviceEvent.Initiater message
# and its three oneof alternatives (Device / Console / User).  Machine-generated
# protoc output — do not edit by hand.

# Descriptor for DeviceEvent.Initiater.Device (single device_id bytes field).
_DEVICEEVENT_INITIATER_DEVICE = _descriptor.Descriptor(
  name='Device',
  full_name='toit.model.DeviceEvent.Initiater.Device',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='device_id', full_name='toit.model.DeviceEvent.Initiater.Device.device_id', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3846,
  serialized_end=3873,
)

# Descriptor for DeviceEvent.Initiater.Console (empty marker message).
_DEVICEEVENT_INITIATER_CONSOLE = _descriptor.Descriptor(
  name='Console',
  full_name='toit.model.DeviceEvent.Initiater.Console',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3875,
  serialized_end=3884,
)

# Descriptor for DeviceEvent.Initiater.User (single user_id bytes field).
_DEVICEEVENT_INITIATER_USER = _descriptor.Descriptor(
  name='User',
  full_name='toit.model.DeviceEvent.Initiater.User',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_id', full_name='toit.model.DeviceEvent.Initiater.User.user_id', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3886,
  serialized_end=3909,
)

# Descriptor for DeviceEvent.Initiater: a `oneof initiater { device, console, user }`.
# The oneof's field list is empty here; membership is appended after construction
# in the wiring section below.
_DEVICEEVENT_INITIATER = _descriptor.Descriptor(
  name='Initiater',
  full_name='toit.model.DeviceEvent.Initiater',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='device', full_name='toit.model.DeviceEvent.Initiater.device', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='console', full_name='toit.model.DeviceEvent.Initiater.console', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='user', full_name='toit.model.DeviceEvent.Initiater.user', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_DEVICEEVENT_INITIATER_DEVICE, _DEVICEEVENT_INITIATER_CONSOLE, _DEVICEEVENT_INITIATER_USER, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='initiater', full_name='toit.model.DeviceEvent.Initiater.initiater',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
  ],
  serialized_start=3655,
  serialized_end=3922,
)
# Generated descriptors for the nested toit.model.DeviceEvent.Info message:
# Info is a key/value pair whose Value is a `oneof { primitive, diff, error }`.
# Machine-generated protoc output — do not edit by hand.

# Descriptor for DeviceEvent.Info.Value.Primitive (typed scalar: enum `type` + bytes `value`).
_DEVICEEVENT_INFO_VALUE_PRIMITIVE = _descriptor.Descriptor(
  name='Primitive',
  full_name='toit.model.DeviceEvent.Info.Value.Primitive',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='type', full_name='toit.model.DeviceEvent.Info.Value.Primitive.type', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='toit.model.DeviceEvent.Info.Value.Primitive.value', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _DEVICEEVENT_INFO_VALUE_PRIMITIVE_TYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4191,
  serialized_end=4325,
)

# Descriptor for DeviceEvent.Info.Value.Diff (a from/to pair of Primitives).
# Note the field named 'from' — a Python keyword; the generated class exposes
# it via getattr-style access.
_DEVICEEVENT_INFO_VALUE_DIFF = _descriptor.Descriptor(
  name='Diff',
  full_name='toit.model.DeviceEvent.Info.Value.Diff',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='from', full_name='toit.model.DeviceEvent.Info.Value.Diff.from', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='to', full_name='toit.model.DeviceEvent.Info.Value.Diff.to', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4327,
  serialized_end=4451,
)

# Descriptor for DeviceEvent.Info.Value.Error (single string `error` field).
_DEVICEEVENT_INFO_VALUE_ERROR = _descriptor.Descriptor(
  name='Error',
  full_name='toit.model.DeviceEvent.Info.Value.Error',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='error', full_name='toit.model.DeviceEvent.Info.Value.Error.error', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4453,
  serialized_end=4475,
)

# Descriptor for DeviceEvent.Info.Value: `oneof value { primitive, diff, error }`.
# Oneof membership is appended after construction in the wiring section below.
_DEVICEEVENT_INFO_VALUE = _descriptor.Descriptor(
  name='Value',
  full_name='toit.model.DeviceEvent.Info.Value',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='primitive', full_name='toit.model.DeviceEvent.Info.Value.primitive', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='diff', full_name='toit.model.DeviceEvent.Info.Value.diff', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='error', full_name='toit.model.DeviceEvent.Info.Value.error', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_DEVICEEVENT_INFO_VALUE_PRIMITIVE, _DEVICEEVENT_INFO_VALUE_DIFF, _DEVICEEVENT_INFO_VALUE_ERROR, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='value', full_name='toit.model.DeviceEvent.Info.Value.value',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
  ],
  serialized_start=3998,
  serialized_end=4484,
)

# Descriptor for DeviceEvent.Info (string `key` + submessage `value`).
_DEVICEEVENT_INFO = _descriptor.Descriptor(
  name='Info',
  full_name='toit.model.DeviceEvent.Info',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='toit.model.DeviceEvent.Info.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='toit.model.DeviceEvent.Info.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_DEVICEEVENT_INFO_VALUE, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3925,
  serialized_end=4484,
)
# Descriptor for the top-level toit.model.DeviceEvent message
# (device_id, event_id, created, type, msg, data, initiater, repeated information).
# Machine-generated protoc output — do not edit by hand.
_DEVICEEVENT = _descriptor.Descriptor(
  name='DeviceEvent',
  full_name='toit.model.DeviceEvent',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='device_id', full_name='toit.model.DeviceEvent.device_id', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='event_id', full_name='toit.model.DeviceEvent.event_id', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='created', full_name='toit.model.DeviceEvent.created', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='type', full_name='toit.model.DeviceEvent.type', index=3,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='msg', full_name='toit.model.DeviceEvent.msg', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='data', full_name='toit.model.DeviceEvent.data', index=5,
      number=6, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='initiater', full_name='toit.model.DeviceEvent.initiater', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='information', full_name='toit.model.DeviceEvent.information', index=7,
      number=8, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_DEVICEEVENT_INITIATER, _DEVICEEVENT_INFO, ],
  enum_types=[
    _DEVICEEVENT_TYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3381,
  serialized_end=4618,
)
# ---------------------------------------------------------------------------
# Post-construction wiring (machine-generated).  Descriptors above are built
# with message_type/enum_type/containing_type left as None to avoid forward
# references; this section resolves them: links each message/enum-typed field
# to its descriptor, sets the containing type of every nested message, and
# appends each oneof's member fields while setting their containing_oneof.
# ---------------------------------------------------------------------------
_DEVICECONFIG.fields_by_name['started_after'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_DEVICECONFIG.fields_by_name['broker'].message_type = _DEVICEBROKERSETTINGS
_DEVICECONFIG.fields_by_name['connection'].message_type = _DEVICECONNECTIONSETTINGS
_DEVICECONFIG.fields_by_name['factory_after'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_DEVICECONFIG.fields_by_name['runtime'].message_type = _DEVICERUNTIMESETTINGS
_DEVICECONFIG.fields_by_name['modules'].message_type = _DEVICEMODULESCONFIG
_DEVICEMODULESCONFIG.fields_by_name['sync_requested'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_DEVICEMODULESCONFIG.fields_by_name['modules'].message_type = _DEVICEMODULES
_DEVICERUNTIMESETTINGS.fields_by_name['logging'].message_type = _LOGGINGSETTINGS
_DEVICERUNTIMESETTINGS.fields_by_name['metrics'].message_type = _METRICSSETTINGS
_LOGGINGSETTINGS.fields_by_name['level'].enum_type = toit_dot_model_dot_data__pb2._LOGDATA_LEVEL
_METRICSSETTINGS.fields_by_name['level'].enum_type = toit_dot_model_dot_data__pb2._METRICSDATA_LEVEL
_DEVICECONNECTIONSETTINGS.fields_by_name['max_offline'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_DEVICECONNECTIONSETTINGS.fields_by_name['connections'].message_type = _CONNECTIONSETTING
_CONNECTIONSETTING.fields_by_name['wifi'].message_type = _WIFISETTING
_CONNECTIONSETTING.fields_by_name['nbiot'].message_type = _NBIOTSETTING
_DEVICESTATUS.fields_by_name['updated'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_DEVICESTATUS.fields_by_name['config'].message_type = _DEVICECONFIG
_DEVICESTATUS.fields_by_name['boot'].message_type = _DEVICEBOOTSTATUS
_DEVICESTATUS.fields_by_name['health'].message_type = _DEVICEHEALTH
_DEVICESTATUS.fields_by_name['connection'].message_type = _DEVICECONNECTIONSTATUS
_DEVICESTATUS.fields_by_name['modules'].message_type = _DEVICEMODULESSTATUS
_DEVICEMODULESSTATUS.fields_by_name['last_updated'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_DEVICEMODULESSTATUS.fields_by_name['modules'].message_type = _DEVICEMODULES
_DEVICEMODULES.fields_by_name['cellular'].message_type = _DEVICEMODULE
_DEVICECONNECTIONSTATUS.fields_by_name['type'].enum_type = _DEVICECONNECTIONTYPE
_DEVICEBOOTSTATUS.fields_by_name['last_boot'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_DEVICEBOOTSTATUS.fields_by_name['last_factory_boot'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
# DeviceHealth.Power.Value oneof 'power': battery | pluggedin (google.protobuf.Empty).
_DEVICEHEALTH_POWER_VALUE.fields_by_name['pluggedin'].message_type = google_dot_protobuf_dot_empty__pb2._EMPTY
_DEVICEHEALTH_POWER_VALUE.containing_type = _DEVICEHEALTH_POWER
_DEVICEHEALTH_POWER_VALUE.oneofs_by_name['power'].fields.append(
  _DEVICEHEALTH_POWER_VALUE.fields_by_name['battery'])
_DEVICEHEALTH_POWER_VALUE.fields_by_name['battery'].containing_oneof = _DEVICEHEALTH_POWER_VALUE.oneofs_by_name['power']
_DEVICEHEALTH_POWER_VALUE.oneofs_by_name['power'].fields.append(
  _DEVICEHEALTH_POWER_VALUE.fields_by_name['pluggedin'])
_DEVICEHEALTH_POWER_VALUE.fields_by_name['pluggedin'].containing_oneof = _DEVICEHEALTH_POWER_VALUE.oneofs_by_name['power']
_DEVICEHEALTH_POWER.fields_by_name['value'].message_type = _DEVICEHEALTH_POWER_VALUE
_DEVICEHEALTH_POWER.fields_by_name['last_updated'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_DEVICEHEALTH_POWER.containing_type = _DEVICEHEALTH
_DEVICEHEALTH_BATTERYCURVE.fields_by_name['last_updated'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_DEVICEHEALTH_BATTERYCURVE.containing_type = _DEVICEHEALTH
_DEVICEHEALTH_CONNECTIVITY_CHECKIN.containing_type = _DEVICEHEALTH_CONNECTIVITY
_DEVICEHEALTH_CONNECTIVITY.fields_by_name['last_seen'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_DEVICEHEALTH_CONNECTIVITY.fields_by_name['checkins'].message_type = _DEVICEHEALTH_CONNECTIVITY_CHECKIN
_DEVICEHEALTH_CONNECTIVITY.containing_type = _DEVICEHEALTH
_DEVICEHEALTH.fields_by_name['power'].message_type = _DEVICEHEALTH_POWER
_DEVICEHEALTH.fields_by_name['battery_curve'].message_type = _DEVICEHEALTH_BATTERYCURVE
_DEVICEHEALTH.fields_by_name['connectivity'].message_type = _DEVICEHEALTH_CONNECTIVITY
_HARDWARETODEVICEINFO.fields_by_name['bound'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_HARDWARETODEVICEINFO.fields_by_name['unbound'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
# DeviceEvent.Initiater oneof 'initiater': device | console | user.
_DEVICEEVENT_INITIATER_DEVICE.containing_type = _DEVICEEVENT_INITIATER
_DEVICEEVENT_INITIATER_CONSOLE.containing_type = _DEVICEEVENT_INITIATER
_DEVICEEVENT_INITIATER_USER.containing_type = _DEVICEEVENT_INITIATER
_DEVICEEVENT_INITIATER.fields_by_name['device'].message_type = _DEVICEEVENT_INITIATER_DEVICE
_DEVICEEVENT_INITIATER.fields_by_name['console'].message_type = _DEVICEEVENT_INITIATER_CONSOLE
_DEVICEEVENT_INITIATER.fields_by_name['user'].message_type = _DEVICEEVENT_INITIATER_USER
_DEVICEEVENT_INITIATER.containing_type = _DEVICEEVENT
_DEVICEEVENT_INITIATER.oneofs_by_name['initiater'].fields.append(
  _DEVICEEVENT_INITIATER.fields_by_name['device'])
_DEVICEEVENT_INITIATER.fields_by_name['device'].containing_oneof = _DEVICEEVENT_INITIATER.oneofs_by_name['initiater']
_DEVICEEVENT_INITIATER.oneofs_by_name['initiater'].fields.append(
  _DEVICEEVENT_INITIATER.fields_by_name['console'])
_DEVICEEVENT_INITIATER.fields_by_name['console'].containing_oneof = _DEVICEEVENT_INITIATER.oneofs_by_name['initiater']
_DEVICEEVENT_INITIATER.oneofs_by_name['initiater'].fields.append(
  _DEVICEEVENT_INITIATER.fields_by_name['user'])
_DEVICEEVENT_INITIATER.fields_by_name['user'].containing_oneof = _DEVICEEVENT_INITIATER.oneofs_by_name['initiater']
_DEVICEEVENT_INFO_VALUE_PRIMITIVE.fields_by_name['type'].enum_type = _DEVICEEVENT_INFO_VALUE_PRIMITIVE_TYPE
_DEVICEEVENT_INFO_VALUE_PRIMITIVE.containing_type = _DEVICEEVENT_INFO_VALUE
_DEVICEEVENT_INFO_VALUE_PRIMITIVE_TYPE.containing_type = _DEVICEEVENT_INFO_VALUE_PRIMITIVE
_DEVICEEVENT_INFO_VALUE_DIFF.fields_by_name['from'].message_type = _DEVICEEVENT_INFO_VALUE_PRIMITIVE
_DEVICEEVENT_INFO_VALUE_DIFF.fields_by_name['to'].message_type = _DEVICEEVENT_INFO_VALUE_PRIMITIVE
_DEVICEEVENT_INFO_VALUE_DIFF.containing_type = _DEVICEEVENT_INFO_VALUE
_DEVICEEVENT_INFO_VALUE_ERROR.containing_type = _DEVICEEVENT_INFO_VALUE
_DEVICEEVENT_INFO_VALUE.fields_by_name['primitive'].message_type = _DEVICEEVENT_INFO_VALUE_PRIMITIVE
_DEVICEEVENT_INFO_VALUE.fields_by_name['diff'].message_type = _DEVICEEVENT_INFO_VALUE_DIFF
_DEVICEEVENT_INFO_VALUE.fields_by_name['error'].message_type = _DEVICEEVENT_INFO_VALUE_ERROR
_DEVICEEVENT_INFO_VALUE.containing_type = _DEVICEEVENT_INFO
# DeviceEvent.Info.Value oneof 'value': primitive | diff | error.
_DEVICEEVENT_INFO_VALUE.oneofs_by_name['value'].fields.append(
  _DEVICEEVENT_INFO_VALUE.fields_by_name['primitive'])
_DEVICEEVENT_INFO_VALUE.fields_by_name['primitive'].containing_oneof = _DEVICEEVENT_INFO_VALUE.oneofs_by_name['value']
_DEVICEEVENT_INFO_VALUE.oneofs_by_name['value'].fields.append(
  _DEVICEEVENT_INFO_VALUE.fields_by_name['diff'])
_DEVICEEVENT_INFO_VALUE.fields_by_name['diff'].containing_oneof = _DEVICEEVENT_INFO_VALUE.oneofs_by_name['value']
_DEVICEEVENT_INFO_VALUE.oneofs_by_name['value'].fields.append(
  _DEVICEEVENT_INFO_VALUE.fields_by_name['error'])
_DEVICEEVENT_INFO_VALUE.fields_by_name['error'].containing_oneof = _DEVICEEVENT_INFO_VALUE.oneofs_by_name['value']
_DEVICEEVENT_INFO.fields_by_name['value'].message_type = _DEVICEEVENT_INFO_VALUE
_DEVICEEVENT_INFO.containing_type = _DEVICEEVENT
_DEVICEEVENT.fields_by_name['created'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_DEVICEEVENT.fields_by_name['type'].enum_type = _DEVICEEVENT_TYPE
_DEVICEEVENT.fields_by_name['initiater'].message_type = _DEVICEEVENT_INITIATER
_DEVICEEVENT.fields_by_name['information'].message_type = _DEVICEEVENT_INFO
_DEVICEEVENT_TYPE.containing_type = _DEVICEEVENT
# Register every top-level message and enum on the file descriptor, then
# register the file with the default symbol database so it is discoverable
# by name at runtime.
DESCRIPTOR.message_types_by_name['DeviceConfig'] = _DEVICECONFIG
DESCRIPTOR.message_types_by_name['DeviceModulesConfig'] = _DEVICEMODULESCONFIG
DESCRIPTOR.message_types_by_name['DeviceRuntimeSettings'] = _DEVICERUNTIMESETTINGS
DESCRIPTOR.message_types_by_name['LoggingSettings'] = _LOGGINGSETTINGS
DESCRIPTOR.message_types_by_name['MetricsSettings'] = _METRICSSETTINGS
DESCRIPTOR.message_types_by_name['DeviceConnectionSettings'] = _DEVICECONNECTIONSETTINGS
DESCRIPTOR.message_types_by_name['ConnectionSetting'] = _CONNECTIONSETTING
DESCRIPTOR.message_types_by_name['WifiSetting'] = _WIFISETTING
DESCRIPTOR.message_types_by_name['NBIoTSetting'] = _NBIOTSETTING
DESCRIPTOR.message_types_by_name['DeviceBrokerSettings'] = _DEVICEBROKERSETTINGS
DESCRIPTOR.message_types_by_name['DeviceStatus'] = _DEVICESTATUS
DESCRIPTOR.message_types_by_name['DeviceModulesStatus'] = _DEVICEMODULESSTATUS
DESCRIPTOR.message_types_by_name['DeviceModules'] = _DEVICEMODULES
DESCRIPTOR.message_types_by_name['DeviceModule'] = _DEVICEMODULE
DESCRIPTOR.message_types_by_name['DeviceConnectionStatus'] = _DEVICECONNECTIONSTATUS
DESCRIPTOR.message_types_by_name['DeviceBootStatus'] = _DEVICEBOOTSTATUS
DESCRIPTOR.message_types_by_name['DeviceHealth'] = _DEVICEHEALTH
DESCRIPTOR.message_types_by_name['HardwareIdentity'] = _HARDWAREIDENTITY
DESCRIPTOR.message_types_by_name['HardwareIdentityInfo'] = _HARDWAREIDENTITYINFO
DESCRIPTOR.message_types_by_name['HardwareInfo'] = _HARDWAREINFO
DESCRIPTOR.message_types_by_name['HardwareToDeviceInfo'] = _HARDWARETODEVICEINFO
DESCRIPTOR.message_types_by_name['DeviceEvent'] = _DEVICEEVENT
DESCRIPTOR.enum_types_by_name['DeviceConnectionType'] = _DEVICECONNECTIONTYPE
DESCRIPTOR.enum_types_by_name['DeviceChangeType'] = _DEVICECHANGETYPE
DESCRIPTOR.enum_types_by_name['DeviceSessionChangeType'] = _DEVICESESSIONCHANGETYPE
DESCRIPTOR.enum_types_by_name['DeviceAction'] = _DEVICEACTION
DESCRIPTOR.enum_types_by_name['DeviceModuleType'] = _DEVICEMODULETYPE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ---------------------------------------------------------------------------
# Concrete message classes (machine-generated).  Each call builds a Python
# message class from its descriptor via the GeneratedProtocolMessageType
# metaclass and registers it with the default symbol database.
# ---------------------------------------------------------------------------
DeviceConfig = _reflection.GeneratedProtocolMessageType('DeviceConfig', (_message.Message,), {
  'DESCRIPTOR' : _DEVICECONFIG,
  '__module__' : 'toit.model.device_pb2'
  # @@protoc_insertion_point(class_scope:toit.model.DeviceConfig)
  })
_sym_db.RegisterMessage(DeviceConfig)
DeviceModulesConfig = _reflection.GeneratedProtocolMessageType('DeviceModulesConfig', (_message.Message,), {
  'DESCRIPTOR' : _DEVICEMODULESCONFIG,
  '__module__' : 'toit.model.device_pb2'
  # @@protoc_insertion_point(class_scope:toit.model.DeviceModulesConfig)
  })
_sym_db.RegisterMessage(DeviceModulesConfig)
DeviceRuntimeSettings = _reflection.GeneratedProtocolMessageType('DeviceRuntimeSettings', (_message.Message,), {
  'DESCRIPTOR' : _DEVICERUNTIMESETTINGS,
  '__module__' : 'toit.model.device_pb2'
  # @@protoc_insertion_point(class_scope:toit.model.DeviceRuntimeSettings)
  })
_sym_db.RegisterMessage(DeviceRuntimeSettings)
LoggingSettings = _reflection.GeneratedProtocolMessageType('LoggingSettings', (_message.Message,), {
  'DESCRIPTOR' : _LOGGINGSETTINGS,
  '__module__' : 'toit.model.device_pb2'
  # @@protoc_insertion_point(class_scope:toit.model.LoggingSettings)
  })
_sym_db.RegisterMessage(LoggingSettings)
MetricsSettings = _reflection.GeneratedProtocolMessageType('MetricsSettings', (_message.Message,), {
  'DESCRIPTOR' : _METRICSSETTINGS,
  '__module__' : 'toit.model.device_pb2'
  # @@protoc_insertion_point(class_scope:toit.model.MetricsSettings)
  })
_sym_db.RegisterMessage(MetricsSettings)
DeviceConnectionSettings = _reflection.GeneratedProtocolMessageType('DeviceConnectionSettings', (_message.Message,), {
  'DESCRIPTOR' : _DEVICECONNECTIONSETTINGS,
  '__module__' : 'toit.model.device_pb2'
  # @@protoc_insertion_point(class_scope:toit.model.DeviceConnectionSettings)
  })
_sym_db.RegisterMessage(DeviceConnectionSettings)
ConnectionSetting = _reflection.GeneratedProtocolMessageType('ConnectionSetting', (_message.Message,), {
  'DESCRIPTOR' : _CONNECTIONSETTING,
  '__module__' : 'toit.model.device_pb2'
  # @@protoc_insertion_point(class_scope:toit.model.ConnectionSetting)
  })
_sym_db.RegisterMessage(ConnectionSetting)
WifiSetting = _reflection.GeneratedProtocolMessageType('WifiSetting', (_message.Message,), {
'DESCRIPTOR' : _WIFISETTING,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.WifiSetting)
})
_sym_db.RegisterMessage(WifiSetting)
NBIoTSetting = _reflection.GeneratedProtocolMessageType('NBIoTSetting', (_message.Message,), {
'DESCRIPTOR' : _NBIOTSETTING,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.NBIoTSetting)
})
_sym_db.RegisterMessage(NBIoTSetting)
DeviceBrokerSettings = _reflection.GeneratedProtocolMessageType('DeviceBrokerSettings', (_message.Message,), {
'DESCRIPTOR' : _DEVICEBROKERSETTINGS,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceBrokerSettings)
})
_sym_db.RegisterMessage(DeviceBrokerSettings)
DeviceStatus = _reflection.GeneratedProtocolMessageType('DeviceStatus', (_message.Message,), {
'DESCRIPTOR' : _DEVICESTATUS,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceStatus)
})
_sym_db.RegisterMessage(DeviceStatus)
DeviceModulesStatus = _reflection.GeneratedProtocolMessageType('DeviceModulesStatus', (_message.Message,), {
'DESCRIPTOR' : _DEVICEMODULESSTATUS,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceModulesStatus)
})
_sym_db.RegisterMessage(DeviceModulesStatus)
DeviceModules = _reflection.GeneratedProtocolMessageType('DeviceModules', (_message.Message,), {
'DESCRIPTOR' : _DEVICEMODULES,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceModules)
})
_sym_db.RegisterMessage(DeviceModules)
DeviceModule = _reflection.GeneratedProtocolMessageType('DeviceModule', (_message.Message,), {
'DESCRIPTOR' : _DEVICEMODULE,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceModule)
})
_sym_db.RegisterMessage(DeviceModule)
DeviceConnectionStatus = _reflection.GeneratedProtocolMessageType('DeviceConnectionStatus', (_message.Message,), {
'DESCRIPTOR' : _DEVICECONNECTIONSTATUS,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceConnectionStatus)
})
_sym_db.RegisterMessage(DeviceConnectionStatus)
DeviceBootStatus = _reflection.GeneratedProtocolMessageType('DeviceBootStatus', (_message.Message,), {
'DESCRIPTOR' : _DEVICEBOOTSTATUS,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceBootStatus)
})
_sym_db.RegisterMessage(DeviceBootStatus)
# DeviceHealth and its nested message types (generated code — do not
# hand-edit).  Nested proto messages become class attributes of the parent
# class: DeviceHealth.Power, DeviceHealth.Power.Value,
# DeviceHealth.BatteryCurve, DeviceHealth.Connectivity, and
# DeviceHealth.Connectivity.Checkin.  Each nested class is built inline in
# the parent's class dict, and all of them are registered in _sym_db below.
DeviceHealth = _reflection.GeneratedProtocolMessageType('DeviceHealth', (_message.Message,), {
'Power' : _reflection.GeneratedProtocolMessageType('Power', (_message.Message,), {
'Value' : _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), {
'DESCRIPTOR' : _DEVICEHEALTH_POWER_VALUE,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceHealth.Power.Value)
})
,
'DESCRIPTOR' : _DEVICEHEALTH_POWER,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceHealth.Power)
})
,
'BatteryCurve' : _reflection.GeneratedProtocolMessageType('BatteryCurve', (_message.Message,), {
'DESCRIPTOR' : _DEVICEHEALTH_BATTERYCURVE,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceHealth.BatteryCurve)
})
,
'Connectivity' : _reflection.GeneratedProtocolMessageType('Connectivity', (_message.Message,), {
'Checkin' : _reflection.GeneratedProtocolMessageType('Checkin', (_message.Message,), {
'DESCRIPTOR' : _DEVICEHEALTH_CONNECTIVITY_CHECKIN,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceHealth.Connectivity.Checkin)
})
,
'DESCRIPTOR' : _DEVICEHEALTH_CONNECTIVITY,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceHealth.Connectivity)
})
,
'DESCRIPTOR' : _DEVICEHEALTH,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceHealth)
})
# Register the parent and every nested class with the symbol database.
_sym_db.RegisterMessage(DeviceHealth)
_sym_db.RegisterMessage(DeviceHealth.Power)
_sym_db.RegisterMessage(DeviceHealth.Power.Value)
_sym_db.RegisterMessage(DeviceHealth.BatteryCurve)
_sym_db.RegisterMessage(DeviceHealth.Connectivity)
_sym_db.RegisterMessage(DeviceHealth.Connectivity.Checkin)
# Hardware-related message classes (generated code — do not hand-edit).
# Same pattern as above: synthesize the class from its descriptor, then
# register it with the symbol database.
HardwareIdentity = _reflection.GeneratedProtocolMessageType('HardwareIdentity', (_message.Message,), {
'DESCRIPTOR' : _HARDWAREIDENTITY,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.HardwareIdentity)
})
_sym_db.RegisterMessage(HardwareIdentity)
HardwareIdentityInfo = _reflection.GeneratedProtocolMessageType('HardwareIdentityInfo', (_message.Message,), {
'DESCRIPTOR' : _HARDWAREIDENTITYINFO,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.HardwareIdentityInfo)
})
_sym_db.RegisterMessage(HardwareIdentityInfo)
HardwareInfo = _reflection.GeneratedProtocolMessageType('HardwareInfo', (_message.Message,), {
'DESCRIPTOR' : _HARDWAREINFO,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.HardwareInfo)
})
_sym_db.RegisterMessage(HardwareInfo)
HardwareToDeviceInfo = _reflection.GeneratedProtocolMessageType('HardwareToDeviceInfo', (_message.Message,), {
'DESCRIPTOR' : _HARDWARETODEVICEINFO,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.HardwareToDeviceInfo)
})
_sym_db.RegisterMessage(HardwareToDeviceInfo)
# DeviceEvent and its nested message types (generated code — do not
# hand-edit).  Nested classes: DeviceEvent.Initiater (with Device, Console,
# User) and DeviceEvent.Info (with Value, which nests Primitive, Diff,
# Error).  NOTE: the spelling "Initiater" comes from the source .proto and
# must not be "corrected" here — it is part of the wire-visible type name.
DeviceEvent = _reflection.GeneratedProtocolMessageType('DeviceEvent', (_message.Message,), {
'Initiater' : _reflection.GeneratedProtocolMessageType('Initiater', (_message.Message,), {
'Device' : _reflection.GeneratedProtocolMessageType('Device', (_message.Message,), {
'DESCRIPTOR' : _DEVICEEVENT_INITIATER_DEVICE,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceEvent.Initiater.Device)
})
,
'Console' : _reflection.GeneratedProtocolMessageType('Console', (_message.Message,), {
'DESCRIPTOR' : _DEVICEEVENT_INITIATER_CONSOLE,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceEvent.Initiater.Console)
})
,
'User' : _reflection.GeneratedProtocolMessageType('User', (_message.Message,), {
'DESCRIPTOR' : _DEVICEEVENT_INITIATER_USER,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceEvent.Initiater.User)
})
,
'DESCRIPTOR' : _DEVICEEVENT_INITIATER,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceEvent.Initiater)
})
,
'Info' : _reflection.GeneratedProtocolMessageType('Info', (_message.Message,), {
'Value' : _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), {
'Primitive' : _reflection.GeneratedProtocolMessageType('Primitive', (_message.Message,), {
'DESCRIPTOR' : _DEVICEEVENT_INFO_VALUE_PRIMITIVE,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceEvent.Info.Value.Primitive)
})
,
'Diff' : _reflection.GeneratedProtocolMessageType('Diff', (_message.Message,), {
'DESCRIPTOR' : _DEVICEEVENT_INFO_VALUE_DIFF,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceEvent.Info.Value.Diff)
})
,
'Error' : _reflection.GeneratedProtocolMessageType('Error', (_message.Message,), {
'DESCRIPTOR' : _DEVICEEVENT_INFO_VALUE_ERROR,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceEvent.Info.Value.Error)
})
,
'DESCRIPTOR' : _DEVICEEVENT_INFO_VALUE,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceEvent.Info.Value)
})
,
'DESCRIPTOR' : _DEVICEEVENT_INFO,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceEvent.Info)
})
,
'DESCRIPTOR' : _DEVICEEVENT,
'__module__' : 'toit.model.device_pb2'
# @@protoc_insertion_point(class_scope:toit.model.DeviceEvent)
})
# Register the parent and every nested class with the symbol database.
_sym_db.RegisterMessage(DeviceEvent)
_sym_db.RegisterMessage(DeviceEvent.Initiater)
_sym_db.RegisterMessage(DeviceEvent.Initiater.Device)
_sym_db.RegisterMessage(DeviceEvent.Initiater.Console)
_sym_db.RegisterMessage(DeviceEvent.Initiater.User)
_sym_db.RegisterMessage(DeviceEvent.Info)
_sym_db.RegisterMessage(DeviceEvent.Info.Value)
_sym_db.RegisterMessage(DeviceEvent.Info.Value.Primitive)
_sym_db.RegisterMessage(DeviceEvent.Info.Value.Diff)
_sym_db.RegisterMessage(DeviceEvent.Info.Value.Error)
# Standard protoc epilogue: clear the eagerly-built options on descriptors
# that declare custom options, so option accessors fall back to the
# serialized descriptor data (presumably the `_serialized_options` path —
# this is protoc boilerplate; verify against the generator version if it
# matters).  Do not hand-edit.
DESCRIPTOR._options = None
_DEVICECHANGETYPE.values_by_name["DEVICE_CHANGE_HEARTBEAT"]._options = None
_DEVICEBROKERSETTINGS.fields_by_name['protocol']._options = None
_DEVICESTATUS.fields_by_name['revision']._options = None
# @@protoc_insertion_point(module_scope)
| 105,769 | 40,769 |
from datetime import date
from bl_predictor import gui
def test_gui():
try:
tested_gui = gui.MainWindow("test")
# detects headless server
except:
return None
assert (tested_gui.picked_home_team is None)
assert (tested_gui.picked_guest_team is None)
assert (tested_gui.date_label.cget("text") == str(
date.today().strftime("%a %d.%m.%y")))
# timeframe slider
assert (tested_gui.slider.W == 300)
assert (tested_gui.act_crawler_button.cget("text")
== "Download Data")
# activate crawler
tested_gui._activate_crawler_helper()
assert (tested_gui.act_crawler_button.cget("text")
== 'Download complete')
assert tested_gui.crawler_data.empty is False
tested_gui._choose_model()
assert (tested_gui.model_label.cget(
"text") == "Choose a prediction model:")
# testing button to train model
assert (tested_gui.train_ml_button.cget(
"text") == "Train prediction model")
tested_gui._train_model_helper()
assert (tested_gui.train_ml_button.cget("text") == 'Model trained')
# testing prediction button
assert (tested_gui.prediction_button.cget("text")
== "Show predicted winner!")
tested_gui._make_prediction_helper()
assert (tested_gui.prediction_button.cget("text")
== 'Winner predicted')
# testing reset teams button
assert (tested_gui.reset_teams_button.cget("text")
== "put in new teams")
tested_gui._reset_teams()
assert (not tested_gui.reset_teams_button.winfo_viewable())
# testing reset model button
assert (tested_gui.reset_model_button.cget("text")
== "choose new model")
tested_gui._reset_model()
assert (not tested_gui.reset_model_button.winfo_viewable())
# testing reset button
assert (tested_gui.reset_button.cget("text")
== "Reset")
tested_gui._reset_model()
assert (not tested_gui.reset_button.winfo_viewable())
| 1,982 | 642 |
# test cigar strings
PROGNAM = "../src/smalt"
FNAM_REF = "cigar_ref.fa.gz"
FNAM_READ1 = "cigar_read1.fq"
FNAM_READ2 = "cigar_read2.fq"
TMPFIL_PREFIX = "TMPcig"
KMER = 13
NSKIP = 2
def smalt_index(df,index_name, fasta_name, kmer, nskip):
from sys import exit
from subprocess import call
tup = (PROGNAM, 'index',
'-k', '%i' % (int(kmer)),
'-s', '%i' % (int(nskip)),
index_name,
fasta_name)
df.call(tup, "when indexing")
def smalt_map(df, oufilnam, indexnam, readfil, matefil, typ="fastq", flags=[]):
from sys import exit
from subprocess import call
tup = [PROGNAM, 'map']
if len(flags) > 0:
tup.extend(flags)
tup.extend([
'-f', typ,
'-o', oufilnam,
indexnam,
readfil, matefil])
df.call(tup, "when mapping")
if __name__ == '__main__':
from testdata import DataFiles
df = DataFiles()
refnam = df.joinData(FNAM_REF)
readnamA = df.joinData(FNAM_READ1)
readnamB = df.joinData(FNAM_READ2)
indexnam = df.addIndex(TMPFIL_PREFIX)
oufilnam = df.addTMP(TMPFIL_PREFIX + ".sam")
smalt_index(df,indexnam, refnam, KMER, NSKIP)
smalt_map(df,oufilnam, indexnam, readnamA, readnamB, "sam", ["-x"])
#print "Test ok."
df.cleanup()
exit()
| 1,334 | 551 |
from perform.rom.nonintrusive_rom.autoencoder_rom.autoencoder_koopman.autoencoder_koopman import AutoencoderKoopman
class AEKoopmanPan2020(AutoencoderKoopman):
"""Class implementing the continuous-time variant of the autoencoder Koopman method by Pan (2020)."""
| 268 | 94 |
from tkinter import *
from PIL import ImageTk,Image
import matlab.engine
eng = matlab.engine.start_matlab()
font11 = "-family Arial -size 19 -weight normal -slant roman " \
"-underline 0 -overstrike 0"
font12 = "-family Arial -size 12 -weight normal -slant roman " \
"-underline 0 -overstrike 0"
font14 = "-family Arial -size 15 -weight normal -slant roman " \
"-underline 0 -overstrike 0"
font15 = "-family Arial -size 12 -weight bold -slant roman " \
"-underline 0 -overstrike 0"
root = Tk()
TFrame1 = Frame(root)
TFrame1.place(relx=0.01, rely=0.02, relheight=0.94, relwidth=0.48)
TFrame1.configure(relief=GROOVE)
TFrame1.configure(borderwidth="2")
TFrame1.configure(relief=GROOVE)
TFrame1.configure(width=465)
TLabel = Label(TFrame1)
TLabel.place(relx=0.3, rely=0.04, height=38, width=350)
TLabel.configure(background="#d9d9d9")
TLabel.configure(foreground="#000000")
TLabel.configure(font=font11)
TLabel.configure(relief=FLAT)
TLabel.configure(text='''Enter Patient's data ''')
#--------------------------------INPUT 1-------------------------------
TLabel1 = Label(TFrame1)
TLabel1.place(relx=0.02, rely=0.15, height=39, width=150)
TLabel1.configure(background="#d9d9d9")
TLabel1.configure(foreground="#000000")
TLabel1.configure(font=font12)
TLabel1.configure(relief=FLAT)
TLabel1.configure(text='''Clump Thickness''')
TEntry_Clump = Entry(TFrame1)
TEntry_Clump.place(relx=0.24, rely=0.15, relheight=0.05, relwidth=0.53)
TEntry_Clump.configure(width=246)
TEntry_Clump.configure(takefocus="")
TEntry_Clump.configure(cursor="ibeam")
#---------------------------------INPUT 2-------------------------------
TLabel2 = Label(TFrame1)
TLabel2.place(relx=0.02, rely=0.24, height=39, width=150)
TLabel2.configure(background="#d9d9d9")
TLabel2.configure(foreground="#000000")
TLabel2.configure(font=font12)
TLabel2.configure(relief=FLAT)
TLabel2.configure(text='''Uniformity Cell Size''')
TEntry_UCellSize = Entry(TFrame1)
TEntry_UCellSize.place(relx=0.24, rely=0.24, relheight=0.05, relwidth=0.53)
TEntry_UCellSize.configure(width=246)
TEntry_UCellSize.configure(takefocus="")
TEntry_UCellSize.configure(cursor="ibeam")
#---------------------------------INPUT 3-------------------------------
TLabel3 = Label(TFrame1)
TLabel3.place(relx=0.02, rely=0.33, height=39, width=150)
TLabel3.configure(background="#d9d9d9")
TLabel3.configure(foreground="#000000")
TLabel3.configure(font=font12)
TLabel3.configure(relief=FLAT)
TLabel3.configure(text='''Uniformity Cell Shape''')
TEntry_UCellShape = Entry(TFrame1)
TEntry_UCellShape.place(relx=0.24, rely=0.33, relheight=0.05, relwidth=0.53)
TEntry_UCellShape.configure(width=246)
TEntry_UCellShape.configure(takefocus="")
TEntry_UCellShape.configure(cursor="ibeam")
#----------------------------------------INPUT 4----------------------------------
TLabel4 = Label(TFrame1)
TLabel4.place(relx=0.02, rely=0.41, height=39, width=150)
TLabel4.configure(background="#d9d9d9")
TLabel4.configure(foreground="#000000")
TLabel4.configure(font=font12)
TLabel4.configure(relief=FLAT)
TLabel4.configure(text='''Marginal Adhesion''')
TEntry_MarAdh = Entry(TFrame1)
TEntry_MarAdh.place(relx=0.24, rely=0.41, relheight=0.05, relwidth=0.53)
TEntry_MarAdh.configure(width=246)
TEntry_MarAdh.configure(takefocus="")
TEntry_MarAdh.configure(cursor="ibeam")
#-----------------------------INPUT 5-----------------------------------------
TLabel5 = Label(TFrame1)
TLabel5.place(relx=0.02, rely=0.5, height=39, width=150)
TLabel5.configure(background="#d9d9d9")
TLabel5.configure(foreground="#000000")
TLabel5.configure(font=font12)
TLabel5.configure(relief=FLAT)
TLabel5.configure(text='''Single Epi Cell Size''')
TEntry_EpiCellSize =Entry(TFrame1)
TEntry_EpiCellSize.place(relx=0.24, rely=0.5, relheight=0.05, relwidth=0.53)
TEntry_EpiCellSize.configure(width=246)
TEntry_EpiCellSize.configure(takefocus="")
TEntry_EpiCellSize.configure(cursor="ibeam")
#-----------------------------INPUT 6--------------------------------------
TLabel6 = Label(TFrame1)
TLabel6.place(relx=0.02, rely=0.61, height=39, width=150)
TLabel6.configure(background="#d9d9d9")
TLabel6.configure(foreground="#000000")
TLabel6.configure(font=font12)
TLabel6.configure(relief=FLAT)
TLabel6.configure(text='''Bare Nuclei''')
TEntry_Bare = Entry(TFrame1)
TEntry_Bare.place(relx=0.24, rely=0.61, relheight=0.05, relwidth=0.53)
TEntry_Bare.configure(width=246)
TEntry_Bare.configure(takefocus="")
TEntry_Bare.configure(cursor="ibeam")
#-----------------------------INPUT 7------------------------------------
TLabel7 = Label(TFrame1)
TLabel7.place(relx=0.02, rely=0.70, height=39, width=150)
TLabel7.configure(background="#d9d9d9")
TLabel7.configure(foreground="#000000")
TLabel7.configure(font=font12)
TLabel7.configure(relief=FLAT)
TLabel7.configure(text='''Bland Chromatin''')
TEntry_Chromatin = Entry(TFrame1)
TEntry_Chromatin.place(relx=0.24, rely=0.70, relheight=0.05, relwidth=0.53)
TEntry_Chromatin.configure(width=246)
TEntry_Chromatin.configure(takefocus="")
TEntry_Chromatin.configure(cursor="ibeam")
#---------------------------INPUT 8----------------------------------------
TLabel8 = Label(TFrame1)
TLabel8.place(relx=0.02, rely=0.79, height=39, width=150)
TLabel8.configure(background="#d9d9d9")
TLabel8.configure(foreground="#000000")
TLabel8.configure(font=font12)
TLabel8.configure(relief=FLAT)
TLabel8.configure(text='''Normal Nucleoli''')
TEntry_Normal = Entry(TFrame1)
TEntry_Normal.place(relx=0.24, rely=0.79, relheight=0.05, relwidth=0.53)
TEntry_Normal.configure(width=246)
TEntry_Normal.configure(takefocus="")
TEntry_Normal.configure(cursor="ibeam")
#---------------------------------INPUT 9-------------------------------
TLabel9 = Label(TFrame1)
TLabel9.place(relx=0.02, rely=0.88, height=39, width=150)
TLabel9.configure(background="#d9d9d9")
TLabel9.configure(foreground="#000000")
TLabel9.configure(font=font12)
TLabel9.configure(relief=FLAT)
TLabel9.configure(text='''Mitosis''')
TEntry_Mitosis = Entry(TFrame1)
TEntry_Mitosis.place(relx=0.24, rely=0.88, relheight=0.05, relwidth=0.53)
TEntry_Mitosis.configure(width=246)
TEntry_Mitosis.configure(takefocus="")
TEntry_Mitosis.configure(cursor="ibeam")
# -----------------------------------------------------------------------
TButton_eval = Button(TFrame1,command=lambda w=TFrame1: get_all_entry_widgets_text_content(w))
TButton_eval.place(relx=0.34, rely=0.95, height=35, width=126)
TButton_eval.configure(takefocus="")
TButton_eval.configure(text='''Evaluate''')
# -----------------------------------------------------------------------
TLabel_Output = Label(root)
TLabel_Output.place(relx=0.60, rely=0.06, height=38, width=436)
TLabel_Output.configure(background="#d9d9d9")
TLabel_Output.configure(foreground="#000000")
TLabel_Output.configure(font=font11)
TLabel_Output.configure(relief=FLAT)
TLabel_Output.configure(anchor=CENTER)
TLabel_Output.configure(text='''Breast Cancer Stage :''')
TLabel_Output.configure(width=436)
Canvas_Graph = Canvas(root)
Canvas_Graph.place(relx=0.51, rely=0.16, relheight=0.66, relwidth=0.47)
Canvas_Graph.configure(background="white")
Canvas_Graph.configure(borderwidth="2")
Canvas_Graph.configure(highlightbackground="#e0ded1")
Canvas_Graph.configure(highlightcolor="black")
Canvas_Graph.configure(insertbackground="black")
Canvas_Graph.configure(relief=RIDGE)
Canvas_Graph.configure(selectbackground="#cac8bc")
Canvas_Graph.configure(selectforeground="black")
Canvas_Graph.configure(width=456)
TLabel_OutputText = Label(root)
TLabel_OutputText.place(relx=0.60, rely=0.87, height=38, width=500)
TLabel_OutputText.configure(background="#d9d9d9")
TLabel_OutputText.configure(foreground="#000000")
TLabel_OutputText.configure(font=font14)
TLabel_OutputText.configure(relief=FLAT)
TLabel_OutputText.configure(anchor=CENTER)
TLabel_OutputText.configure(width=500)
def get_all_entry_widgets_text_content(parent_widget):
args = []
children_widgets = parent_widget.winfo_children()
for child_widget in children_widgets:
if child_widget.winfo_class() == 'Entry':
args.append(child_widget.get())
#print(args)
#print(type(args[1]))
# check if all inputs are valid or not also non of the input field is null
doMATLABProcessing(args)
def doMATLABProcessing(data):
# contacting MATLAB using its API
for i in range(len(data)):
data[i] = float(data[i])
val = eng.evalFuzzy(data[0],data[1],data[2],data[3],data[4],data[5],data[6],data[7],data[8])
outputMsg(val)
eng.createOutputGraph(val,nargout=0)
tk_img = ImageTk.PhotoImage(Image.open("output.jpg"))
Canvas_Graph.create_image((150, 100), image=tk_img, anchor=NW)
Canvas_Graph.tk_img = tk_img
def outputMsg(val):
if val > 0.8:
text = "You are at high risk of Breast Cancer"
elif val <0.8 and val >0.6:
text = "You are at medium risk of Breast Cancer"
else:
text = "You are at low risk of Breast Cancer"
TLabel_OutputText['text'] = text
root.mainloop() | 9,071 | 3,509 |
#!/usr/bin/env python
# Based loosely on git://github.com/klightspeed/RS5-Extractor
# I wanted something a bit lower level that didn't convert the contained files
# so I could examine the format for myself. Don't expect this to be feature
# complete for a while
# Fix print function for Python 2 deficiency regarding non-ascii encoded text files:
from __future__ import print_function
import utf8file
print = utf8file.print
try:
from PySide import QtCore
except ImportError:
class RS5Patcher(object):
def tr(self, msg):
return msg
else:
# For PySide translations without being overly verbose...
class RS5Patcher(QtCore.QObject): pass
RS5Patcher = RS5Patcher()
import struct
import zlib
import sys
import os
import collections
import rs5file
chunk_extensions = {
('IMAG', 'DATA'): '.dds',
}
def progress(percent=None, msg=None):
if msg is not None:
print(msg)
# http://msdn.microsoft.com/en-us/library/system.datetime.fromfiletimeutc.aspx:
# A Windows file time is a 64-bit value that represents the number of
# 100-nanosecond intervals that have elapsed since 12:00 midnight,
# January 1, 1601 A.D. (C.E.) Coordinated Universal Time (UTC).
import calendar
win_epoch = calendar.timegm((1601, 1, 1, 0, 0, 0))
def from_win_time(win_time):
return win_time / 10000000 + win_epoch
def to_win_time(unix_time):
return (unix_time - win_epoch) * 10000000
def mkdir_recursive(path):
if path == '':
return
(head, tail) = os.path.split(path)
mkdir_recursive(head)
if not os.path.exists(path):
os.mkdir(path)
elif not os.path.isdir(path):
raise OSError(17, '%s exists but is not a directory' % path)
class NotAFile(Exception): pass
class Rs5CompressedFile(object):
def gen_dir_ent(self):
return struct.pack('<QIQ4sQQ',
self.data_off, self.compressed_size, self.u1,
self.type, self.uncompressed_size << 1 | self.u2,
to_win_time(self.modtime)) + self.filename + '\0'
def read(self):
self.fp.seek(self.data_off)
return self.fp.read(self.compressed_size)
def decompress(self):
return zlib.decompress(self.read())
class Rs5CompressedFileDecoder(Rs5CompressedFile):
def __init__(self, f, data):
self.fp = f
(self.data_off, self.compressed_size, self.u1, self.type, self.uncompressed_size,
modtime) \
= struct.unpack('<QIQ4sQQ', data[:40])
self.u2 = self.uncompressed_size & 0x1
if not self.u2:
raise NotAFile()
self.uncompressed_size >>= 1
filename_len = data[40:].find('\0')
self.filename = data[40:40 + filename_len]
self.modtime = from_win_time(modtime)
def extract(self, base_path, strip, overwrite):
dest = os.path.join(base_path, self.filename.replace('\\', os.path.sep))
if os.path.isfile(dest) and not overwrite: # and size != 0
print('Skipping %s - file exists.' % dest, file=sys.stderr)
return
(dir, file) = os.path.split(dest)
mkdir_recursive(dir)
f = open(dest, 'wb')
try:
data = self.decompress()
if strip:
contents = rs5file.Rs5FileDecoder(data)
assert(contents.magic == self.type)
assert(contents.filename == self.filename)
#assert(len(contents.data) == filesize) # Removed because it breaks --strip
f.write(contents.data)
else:
f.write(data)
except zlib.error as e:
print('ERROR EXTRACTING %s: %s. Skipping decompression!' % (dest, str(e)), file=sys.stderr)
f.write(self.read())
f.close()
os.utime(dest, (self.modtime, self.modtime))
def extract_chunks(self, base_path, overwrite):
dest = os.path.join(base_path, self.filename.replace('\\', os.path.sep))
dest = dest.rstrip() # "TEX/Rock_Set2_Moss " ends in a space, which Windows can't handle, so strip it.
data = self.decompress()
try:
chunks = rs5file.Rs5ChunkedFileDecoder(data)
except:
# print('NOTE: %s does not contain chunks, extracting whole file...' % dest, file=sys.stderr)
return self.extract(base_path, False, overwrite)
if os.path.exists(dest) and not os.path.isdir(dest):
print('WARNING: %s exists, but is not a directory, skipping!' % dest, file=sys.stderr)
return
mkdir_recursive(dest)
path = os.path.join(dest, '00-HEADER')
if os.path.isfile(path) and not overwrite: # and size != 0
print('Skipping %s - file exists.' % dest, file=sys.stderr)
else:
f = open(path, 'wb')
f.write(chunks.header())
f.close()
for (i, chunk) in enumerate(chunks.itervalues(), 1):
extension = (self.type, chunk.name)
path = os.path.join(dest, '%.2i-%s%s' % (i, chunk.name, chunk_extensions.get(extension, '')))
if os.path.isfile(path) and not overwrite: # and size != 0
print('Skipping %s - file exists.' % dest, file=sys.stderr)
continue
f = open(path, 'wb')
f.write(chunk.data)
f.close()
class Rs5CompressedFileEncoder(Rs5CompressedFile):
def __init__(self, fp, filename = None, buf = None, seek_cb = None):
if filename is not None:
self.modtime = os.stat(filename).st_mtime
uncompressed = open(filename, 'rb').read()
else:
import time
self.modtime = time.time()
uncompressed = buf
self.uncompressed_size = len(uncompressed)
contents = rs5file.Rs5FileDecoder(uncompressed)
(self.type, self.filename) = (contents.magic, contents.filename)
compressed = zlib.compress(uncompressed)
self.compressed_size = len(compressed)
self.u1 = 0x30080000000
self.u2 = 1
self.fp = fp
if seek_cb is not None:
seek_cb(self.compressed_size)
self.data_off = fp.tell()
fp.write(compressed)
class Rs5CompressedFileRepacker(Rs5CompressedFile):
def __init__(self, newfp, oldfile, seek_cb=None):
self.compressed_size = oldfile.compressed_size
self.u1 = oldfile.u1
self.type = oldfile.type
self.uncompressed_size = oldfile.uncompressed_size
self.u2 = oldfile.u2
self.modtime = oldfile.modtime
self.filename = oldfile.filename
self.fp = newfp
if seek_cb is not None:
seek_cb(self.compressed_size)
self.data_off = newfp.tell()
newfp.write(oldfile.read())
class Rs5CentralDirectory(collections.OrderedDict):
@property
def d_size(self):
return self.ent_len * (1 + len(self))
class Rs5CentralDirectoryDecoder(Rs5CentralDirectory):
def __init__(self, real_fp = None):
self.fp.seek(self.d_off)
data = self.fp.read(self.ent_len)
(d_off1, self.d_orig_len, flags) = struct.unpack('<QII', data[:16])
assert(self.d_off == d_off1)
if real_fp is None:
real_fp = self.fp
collections.OrderedDict.__init__(self)
for f_off in range(self.d_off + self.ent_len, self.d_off + self.d_orig_len, self.ent_len):
try:
entry = Rs5CompressedFileDecoder(real_fp, self.fp.read(self.ent_len))
self[entry.filename] = entry
except NotAFile:
# XXX: Figure out what these are.
# I think they are just deleted files
continue
class Rs5CentralDirectoryEncoder(Rs5CentralDirectory):
def write_directory(self):
self.d_off = self.fp.tell()
self.d_orig_len = self.d_size
dir_hdr = struct.pack('<QII', self.d_off, self.d_size, self.flags)
pad = '\0' * (self.ent_len - len(dir_hdr)) # XXX: Not sure if any data here is important
self.fp.write(dir_hdr + pad)
for file in self.itervalues():
ent = file.gen_dir_ent()
pad = '\0' * (self.ent_len - len(ent)) # XXX: Not sure if any data here is important
self.fp.write(ent + pad)
class Rs5ArchiveDecoder(Rs5CentralDirectoryDecoder):
def __init__(self, f):
self.fp = f
magic = f.read(8)
if magic != 'CFILEHDR':
raise ValueError('Invalid file header')
(self.d_off, self.ent_len, self.u1) = struct.unpack('<QII', f.read(16))
Rs5CentralDirectoryDecoder.__init__(self)
class Rs5ArchiveEncoder(Rs5CentralDirectoryEncoder):
header_len = 24
ent_len = 168
u1 = 0
flags = 0x80000000
def __init__(self, filename):
Rs5CentralDirectoryEncoder.__init__(self)
self.fp = open(filename, 'wb')
self.fp.seek(self.header_len)
def add(self, filename, seek_cb=None, progress=progress):
progress(msg=RS5Patcher.tr("Adding {0}...").format(filename))
entry = Rs5CompressedFileEncoder(self.fp, filename, seek_cb=seek_cb)
self[entry.filename] = entry
def add_from_buf(self, buf, seek_cb=None, progress=progress):
entry = Rs5CompressedFileEncoder(self.fp, buf=buf, seek_cb=seek_cb)
progress(msg=RS5Patcher.tr("Adding {0}...").format(entry.filename))
self[entry.filename] = entry
def add_chunk_dir(self, path, seek_cb=None):
print("Adding chunks from {0}...".format(path))
files = sorted(os.listdir(path))
files.remove('00-HEADER')
header = open(os.path.join(path, '00-HEADER'), 'rb')
header = rs5file.Rs5FileDecoder(header.read())
chunks = collections.OrderedDict()
for filename in files:
chunk_path = os.path.join(path, filename)
if not os.path.isfile(chunk_path) or '-' not in filename:
print('Skipping {0}: not a valid chunk'.format(chunk_path))
continue
chunk_name = filename.split('-', 1)[1]
print(' {0}'.format(filename))
chunk = open(chunk_path, 'rb')
chunk = rs5file.Rs5ChunkEncoder(chunk_name, chunk.read())
chunks[chunk.name] = chunk
chunks = rs5file.Rs5ChunkedFileEncoder(header.magic, header.filename, header.u2, chunks)
entry = Rs5CompressedFileEncoder(self.fp, buf=chunks.encode(), seek_cb=seek_cb)
self[entry.filename] = entry
def write_header(self, progress=progress):
progress(msg=RS5Patcher.tr("Writing RS5 header..."))
self.fp.seek(0)
self.fp.write(struct.pack('<8sQII', 'CFILEHDR', self.d_off, self.ent_len, self.u1))
def save(self, progress=progress):
progress(msg=RS5Patcher.tr("Writing central directory..."))
self.write_directory()
self.write_header(progress=progress)
self.fp.flush()
progress(msg=RS5Patcher.tr("RS5 Written"))
self.do_timestamp_workaround(progress)
    def do_timestamp_workaround(self, progress=progress):
        """Force the archive's atime/mtime to a known-good fixed date."""
        # Miasmata v2.0.0.4 has a bizarre bug where the menu is blank
        # other than the 'created by...' if the main.rs5 timestamp is
        # certain values. I do not yet fully understand what values it
        # can and cannot accept, so force everything to a known working
        # time
        import time
        fake_time = time.mktime((2015, 2, 16, 4, 5, 0, 0, 0, -1))
        # fake_time = time.time - (30 * 60)
        progress(msg=RS5Patcher.tr("Setting timestamp on %s to %s to workaround for v2.0.0.4 bug" % \
                (self.fp.name, time.asctime(time.localtime(fake_time)))))
        os.utime(self.fp.name, (fake_time, fake_time))
class Rs5ArchiveUpdater(Rs5ArchiveEncoder, Rs5ArchiveDecoder):
    """
    Opens an existing RS5 archive for in-place modification: decodes the
    current contents, then reuses the encoder methods to append/replace
    entries, always writing new data at the end of the file.
    """
    def __init__(self, fp):
        # Parse the existing archive; the encoder side needs no separate init.
        Rs5ArchiveDecoder.__init__(self, fp)

    def seek_eof(self):
        """Position the file pointer at the very end of the archive."""
        self.fp.seek(0, os.SEEK_END)

    def seek_find_hole(self, size):
        '''Safe fallback version - always seeks to the end of file'''
        return self.seek_eof()

    def add(self, filename, progress=progress):
        """Append a file from disk, placing it via seek_find_hole."""
        return Rs5ArchiveEncoder.add(self, filename, seek_cb=self.seek_find_hole, progress=progress)

    def add_chunk_dir(self, path):
        """Append a re-assembled chunk directory, placed via seek_find_hole."""
        return Rs5ArchiveEncoder.add_chunk_dir(self, path, seek_cb=self.seek_find_hole)

    def add_from_buf(self, buf, progress=progress):
        """Append an in-memory buffer, placed via seek_find_hole."""
        return Rs5ArchiveEncoder.add_from_buf(self, buf, seek_cb=self.seek_find_hole, progress=progress)

    def save(self, progress=progress):
        """Rewrite the central directory and header of the existing archive."""
        self.seek_find_hole(self.d_size)
        progress(msg=RS5Patcher.tr("Writing central directory..."))
        self.write_directory()
        # When updating an existing archive we use an extra flush
        # before writing the header to reduce the risk of writing a bad
        # header in case of an IO error, power failure, etc:
        self.fp.flush()
        self.write_header(progress=progress)
        self.fp.flush()
        progress(msg=RS5Patcher.tr("RS5 Written"))
        self.do_timestamp_workaround(progress)
# vi:noexpandtab:sw=8:ts=8
| 11,473 | 4,668 |
def isUnstablePair(filename1, filename2):
    '''
    Report whether two filenames sort differently under case-sensitive
    versus case-insensitive comparison.

    Comparison is plain lexicographic ordering; the case-insensitive view
    compares the names with their case folded to one side. The pair is
    "unstable" exactly when the two views strictly disagree. Names that are
    equal after case folding can appear in either order, so they are stable.

    Examples:
        isUnstablePair("aa", "AAB") -> True   ("AAB" < "aa", but "aa" < "aab")
        isUnstablePair("A", "z")    -> False  (both views agree: "A"/"a" < "z")
    '''
    if filename1 < filename2:
        # Case-sensitive order puts filename1 first; unstable if uppercasing
        # strictly reverses that.
        return filename1.upper() > filename2.upper()
    if filename1 > filename2:
        # Case-sensitive order puts filename2 first; unstable if lowercasing
        # strictly reverses that.
        return filename1.lower() < filename2.lower()
    # Identical strings sort the same either way.
    return False
# Copyright 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
import tempfile
import time
import unittest
from sawtooth_signing import pbct_nativerecover as signing
from sawtooth_validator.consensus.dev_mode.dev_mode_consensus \
import DevModeConsensus
from gossip import signed_object
from gossip.gossip_core import Gossip
from gossip.node import Node
from journal.journal_core import Journal
from journal.transaction import Status as tStatus
from journal.transaction import Transaction
from journal.transaction_block import Status as tbStatus
from journal.transaction_block import TransactionBlock
class TestingJournalTransaction(unittest.TestCase):
    """Unit tests for journal.transaction.Transaction (legacy Python 2 suite)."""
    def test_journal_transaction_init(self):
        # Test normal init of a transaction
        minfo = {'__SIGNATURE__': 'Test', '__NONCE__': time.time(),
                 'Dependencies': []}
        transaction = Transaction(minfo)
        self.assertEqual(transaction.Status, tStatus.unknown)
        self.assertFalse(transaction.InBlock)
        self.assertEqual(transaction.Dependencies, [])
    def test_journal_transaction_str(self):
        # Test str function for transaction
        minfo = {'__SIGNATURE__': 'Test', '__NONCE__': time.time(),
                 'Dependencies': []}
        transaction = Transaction(minfo)
        self.assertEqual(str(transaction), '/Transaction')
    def test_journal_transaction_apply(self):
        # Test Transaction apply, Does nothing at this point
        minfo = {'__SIGNATURE__': 'Test', '__NONCE__': time.time(),
                 'Dependencies': []}
        transaction = Transaction(minfo)
        transaction.apply("store")
    def test_journal_transaction_add_to_pending(self):
        # Test transaction add_to_pending, should always return true
        minfo = {'__SIGNATURE__': 'Test', '__NONCE__': time.time(),
                 'Dependencies': []}
        transaction = Transaction(minfo)
        self.assertTrue(transaction.add_to_pending())
    def test_journal_transaction_build_message(self):
        # Test that build_message returns a message of MessageType
        # TransactionMessage and that msg is linked to the transaction
        minfo = {'__SIGNATURE__': 'Test', '__NONCE__': time.time(),
                 'Dependencies': []}
        transaction = Transaction(minfo)
        msg = transaction.build_message()
        self.assertEqual(msg.MessageType,
                         "/journal.messages.TransactionMessage/Transaction")
        self.assertEqual(msg.Transaction, transaction)
    def test_journal_transaction_dump(self):
        # Test that transactions dump the correct info
        now = time.time()
        minfo = {'__SIGNATURE__': 'Test', '__NONCE__': now,
                 'Dependencies': []}
        transaction = Transaction(minfo)
        # Sleep so the dump's Nonce is strictly older than the time sampled below.
        time.sleep(0.5)
        t_dict = transaction.dump()
        new = time.time()
        self.assertLess(t_dict["Nonce"], new)
        self.assertEqual(t_dict["Dependencies"], [])
        self.assertEqual(t_dict["TransactionType"], '/Transaction')
    def test_is_valid_pub_key(self):
        # Sign a minimal transaction dict, then verify that tampering with a
        # signed field invalidates the signature check.
        pubkey = signing.generate_pubkey("5KQ4iQQGgbQX9MmfiPUwwHBL1R"
                                         "GPa86NwFbqrWoodjuzruqFVDd")
        pub = signing.encode_pubkey(pubkey, "hex")
        minfo = {'Nonce': 100, 'PublicKey': pub,
                 'TransactionType': '/Transaction', 'Dependencies': []}
        sig = signing.sign(
            signed_object.dict2cbor(minfo),
            "5KQ4iQQGgbQX9MmfiPUwwHBL1RGPa86NwFbqrWoodjuzruqFVDd"
        )
        # Create valid transaction
        minfo["Signature"] = sig
        temp = Transaction(minfo)
        self.assertTrue(temp.is_valid("unused"))
        # Change transaction after it was signed
        minfo["Nonce"] = time.time()
        temp = Transaction(minfo)
        self.assertFalse(temp.is_valid("unused"))
class TestingJournalTransactionBlock(unittest.TestCase):
    """
    Unit tests for journal.transaction_block.TransactionBlock (legacy
    Python 2 suite -- note the `except X, e` and `cmp` usage below).
    """
    # Next local port handed out to test nodes; bumped per created node so
    # each node in a test run binds a distinct address.
    _next_port = 10000
    def _create_node(self):
        # Build a gossip Node with a fresh signing key on a unique port.
        signingkey = signed_object.generate_signing_key()
        ident = signed_object.generate_identifier(signingkey)
        node = Node(identifier=ident, signingkey=signingkey,
                    address=("localhost", self._next_port))
        self.__class__._next_port = self._next_port + 1
        return node
    def _create_journal(self, node=None):
        node = node or self._create_node()
        gossip = Gossip(node)
        # Takes a journal, create a temporary directory to use with the journal
        path = tempfile.mkdtemp()
        journal = Journal(
            gossip.LocalNode,
            gossip,
            gossip.dispatcher,
            consensus=DevModeConsensus(),
            data_directory=path)
        return (gossip, journal)
    def test_journal_transaction_block_init(self):
        # Test normal init of a transaction
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block = TransactionBlock(minfo)
        self.assertEqual(trans_block.BlockNum, 0)
        self.assertEqual(trans_block.TransactionIDs, [])
        self.assertEqual(trans_block.Status, tbStatus.incomplete)
        self.assertEqual(trans_block.TransactionDepth, 0)
    def test_journal_transaction_block_str(self):
        # Test str function for a signed transaction block
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block = TransactionBlock(minfo)
        node = self._create_node()
        # Need to sign TransactionBlock, use sign_from_node form signed object
        trans_block.sign_from_node(node)
        self.assertEqual(str(trans_block), "{0}, {1}, {2}, {3:0.2f}"
                         .format(trans_block.BlockNum,
                                 trans_block.Identifier[:8],
                                 len(trans_block.TransactionIDs),
                                 trans_block.CommitTime))
    def test_journal_transaction_block_str_unsigned(self):
        # Test that an assertion error is caused if the Block is not signed
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block = TransactionBlock(minfo)
        try:
            str(trans_block)
            self.fail("This should cause an AssertError")
        except AssertionError, e:
            self.assertIsInstance(e, AssertionError)
    def test_journal_transaction_block_cmp_valid_blocks(self):
        # Test the overridden cmp function
        # Needs the Blocks to be signed and valid
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block1 = TransactionBlock(minfo)
        trans_block2 = TransactionBlock(minfo)
        node = self._create_node()
        # Need to sign TransactionBlock, use sign_from_node form signed object
        trans_block1.sign_from_node(node)
        trans_block2.sign_from_node(node)
        trans_block1.Status = tbStatus.valid
        trans_block2.Status = tbStatus.valid
        # Test Equal Transaction Blocks
        self.assertEquals(cmp(trans_block2, trans_block1), 0)
        # Test a Transaction Block with greater Transaction Depth
        trans_block2.TransactionDepth = 10
        self.assertEquals(cmp(trans_block2, trans_block1), 1)
        # Test a Transaction Block with lesser Transaction Depth
        trans_block1.TransactionDepth = 20
        self.assertEquals(cmp(trans_block2, trans_block1), -1)
    def test_journal_transaction_block_cmp_nonvalid_blocks(self):
        # Test that a ValueError is raised when a trans_block is not valid
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block1 = TransactionBlock(minfo)
        trans_block2 = TransactionBlock(minfo)
        node = self._create_node()
        # Need to sign TransactionBlock, use sign_from_node form signed object
        trans_block1.sign_from_node(node)
        trans_block2.sign_from_node(node)
        try:
            cmp(trans_block2, trans_block1)
            self.fail("This should cause a ValueError")
        except ValueError, e1:
            self.assertIsInstance(e1, ValueError)
    def test_journal_transaction_block_cmp_unsigned(self):
        # Test AssertionError is raised if TransactionBlock are not signed
        # Need a signature to use Identifier
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block1 = TransactionBlock(minfo)
        trans_block2 = TransactionBlock(minfo)
        trans_block1.Status = tbStatus.valid
        trans_block2.Status = tbStatus.valid
        try:
            cmp(trans_block2, trans_block1)
            self.fail("This should cause an AssertionError")
        except AssertionError, e2:
            self.assertIsInstance(e2, AssertionError)
    def test_journal_transaction_block_is_valid(self):
        # Test whether or not a transblock is valid
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block = TransactionBlock(minfo)
        (gossip, journal) = self._create_journal()
        # Need to sign TransactionBlock, use sign_from_node
        # from signed object
        trans_block.sign_from_node(gossip.LocalNode)
        self.assertTrue(trans_block.is_valid(journal))
    def test_journal_transaction_block_not_is_valid(self):
        # Test that an invalid transaction block does not get verified as valid
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block = TransactionBlock(minfo)
        (gossip, journal) = self._create_journal()
        # Need to sign TransactionBlock, use sign_from_node from signed object
        try:
            trans_block.is_valid(journal)
        except AssertionError, e:
            self.assertIsInstance(e, AssertionError)
        finally:
            if gossip is not None:
                gossip.shutdown()
    def test_journal_transaction_block_missing_transactions(self):
        # Test missing transactions, should return list of missing
        # transactions
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block = TransactionBlock(minfo)
        (gossip, journal) = self._create_journal()
        node = gossip.LocalNode
        trans_block.sign_from_node(node)
        missing = trans_block.missing_transactions(journal)
        # No missing transactions
        self.assertEqual(missing, [])
        minfo = {'__SIGNATURE__': 'Test', '__NONCE__': time.time(),
                 'Dependencies': []}
        transaction = Transaction(minfo)
        transaction.sign_from_node(node)
        trans_block.TransactionIDs += [transaction.Identifier]
        missing = trans_block.missing_transactions(journal)
        # One missing transactions
        self.assertEqual(missing, [transaction.Identifier])
        journal.transaction_store[transaction.Identifier] = transaction
        missing = trans_block.missing_transactions(journal)
        # Back to no missing transactions
        self.assertEqual(missing, [])
    def test_journal_transaction_block_update_block_weight(self):
        # Test block update weight
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block = TransactionBlock(minfo)
        trans_block.Status = tbStatus.valid
        (gossip, journal) = self._create_journal()
        node = gossip.LocalNode
        trans_block.sign_from_node(gossip.LocalNode)
        trans_block.update_block_weight(journal)
        # No transactions
        self.assertEqual(trans_block.TransactionDepth, 0)
        minfo = {'__SIGNATURE__': 'Test', '__NONCE__': time.time(),
                 'Dependencies': []}
        transaction = Transaction(minfo)
        transaction.sign_from_node(node)
        trans_block.TransactionIDs += [transaction.Identifier]
        trans_block.update_block_weight(journal)
        # One transaction
        self.assertEqual(trans_block.TransactionDepth, 1)
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 1,
                 'PreviousBlockID': trans_block.Identifier}
        new_trans_block = TransactionBlock(minfo)
        new_trans_block.Status = tbStatus.valid
        journal.block_store[trans_block.Identifier] = trans_block
        new_trans_block.update_block_weight(journal)
        # Get depth from previous block
        self.assertEqual(new_trans_block.TransactionDepth, 1)
    def test_journal_transaction_block_build_message(self):
        # Test build_message, returns a TransactionBlockMessage
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block = TransactionBlock(minfo)
        node = self._create_node()
        trans_block.sign_from_node(node)
        trans_block.Status = tbStatus.valid
        msg = trans_block.build_message()
        self.assertEqual(msg.MessageType,
                         "/journal.messages.TransactionBlockMessage" +
                         "/TransactionBlock")
        self.assertEqual(msg.TransactionBlock, trans_block)
    def test_journal_transaction_block_dump(self):
        # Test that transactions dump the correct info
        minfo = {'__SIGNATURE__': 'Test', "BlockNum": 0}
        trans_block = TransactionBlock(minfo)
        node = self._create_node()
        trans_block.sign_from_node(node)
        trans_block.Status = tbStatus.valid
        tb_dic = trans_block.dump()
        self.assertEqual(tb_dic["TransactionIDs"], [])
        self.assertEqual(tb_dic["TransactionBlockType"], "/TransactionBlock")
        self.assertEqual(tb_dic["BlockNum"], 0)
        self.assertIsNotNone(tb_dic["Signature"])
        self.assertNotEqual(tb_dic["Signature"], "")
| 14,018 | 4,074 |
import logging
from typing import Dict
from src.database import queries
from src.exceptions.exceptions import UnrecognizedEventOperation
def _get_user(message: Dict) -> Dict:
return {
'id': message.get('user_id'),
'first_name': message.get('first_name'),
'last_name': message.get('last_name'),
'birth_date': message.get('birth_date'),
}
def _handle_create_user(user: Dict):
    # Thin wrapper over the DB layer; kept so the event dispatch stays symmetric.
    queries.create_user(user)
def _handle_update_user(user: Dict):
    # Update the stored user record keyed by its id with the new payload.
    queries.update_user(user.get('id'), user)
def _handle_delete_user(user: Dict):
    # Only the id is needed for deletion; the rest of the payload is ignored.
    queries.delete_user(user.get('id'))
def handle_users_events(message: Dict):
    """
    Dispatch a user CRUD event to the matching handler.

    Expects ``event_type`` == 'users' and ``operation`` in
    {'create', 'update', 'delete'}; raises UnrecognizedEventOperation for
    any other event type or operation.
    """
    event_type: str = message.get('event_type')
    operation: str = message.get('operation')
    if event_type != 'users':
        raise UnrecognizedEventOperation(event_type)
    dispatch = {
        'create': _handle_create_user,
        'update': _handle_update_user,
        'delete': _handle_delete_user,
    }
    handler = dispatch.get(operation)
    if handler is None:
        raise UnrecognizedEventOperation(operation)
    user = _get_user(message)
    handler(user)
    # 'create'/'update'/'delete' + 'd' yields created/updated/deleted,
    # matching the original per-branch log messages exactly.
    logging.info(f"user id={user.get('id')} {operation}d")
| 1,433 | 442 |
import json, subprocess
from ... pyaz_utils import get_cli_name, get_params
def show(resource_group, account_name, pool_name, volume_name):
    '''
    Run `az netappfiles volume show` for the given volume.

    Returns the CLI's JSON output parsed into Python objects; raises
    Exception carrying the CLI's stderr when no stdout was produced.
    '''
    params = get_params(locals())
    command = "az netappfiles volume show " + params
    print(command)
    # NOTE(review): shell=True with a string command -- get_params is assumed
    # to quote values safely; verify no argument can carry shell metacharacters.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # The original had unreachable print() calls after return/raise; removed.
    raise Exception(stderr)
def list(resource_group, account_name, pool_name):
    '''
    Run `az netappfiles volume list` for the given capacity pool.

    Returns the CLI's JSON output parsed into Python objects; raises
    Exception carrying the CLI's stderr when no stdout was produced.
    Note: this intentionally mirrors the CLI verb and shadows builtins.list
    within this module.
    '''
    params = get_params(locals())
    command = "az netappfiles volume list " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # The original had unreachable print() calls after return/raise; removed.
    raise Exception(stderr)
def delete(resource_group, account_name, pool_name, volume_name):
    '''
    Run `az netappfiles volume delete` for the given volume.

    Returns parsed JSON if the CLI printed any; raises Exception with the
    CLI's stderr otherwise (delete often prints nothing on success, so an
    empty stdout reaches the raise path -- a quirk of the generated wrapper).
    '''
    params = get_params(locals())
    command = "az netappfiles volume delete " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # The original had unreachable print() calls after return/raise; removed.
    raise Exception(stderr)
def create(account_name, pool_name, volume_name, resource_group, location, file_path, usage_threshold, vnet, subnet=None, service_level=None, protocol_types=None, volume_type=None, endpoint_type=None, replication_schedule=None, remote_volume_resource_id=None, tags=None, snapshot_id=None, snapshot_policy_id=None, backup_policy_id=None, backup_enabled=None, backup_id=None, policy_enforced=None, vault_id=None, kerberos_enabled=None, security_style=None, throughput_mibps=None, kerberos5_r=None, kerberos5_rw=None, kerberos5i_r=None, kerberos5i_rw=None, kerberos5p_r=None, kerberos5p_rw=None, has_root_access=None, snapshot_dir_visible=None, smb_encryption=None, smb_continuously_avl=None, encryption_key_source=None, rule_index=None, unix_read_only=None, unix_read_write=None, cifs=None, allowed_clients=None, ldap_enabled=None, chown_mode=None, cool_access=None, coolness_period=None, unix_permissions=None):
    '''
    Run `az netappfiles volume create`; every keyword mirrors the CLI flag of
    the same name (None values are skipped by get_params).

    Returns the CLI's JSON output parsed into Python objects; raises
    Exception carrying the CLI's stderr when no stdout was produced.
    '''
    params = get_params(locals())
    command = "az netappfiles volume create " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # The original had unreachable print() calls after return/raise; removed.
    raise Exception(stderr)
def update(resource_group, account_name, pool_name, volume_name, usage_threshold=None, service_level=None, tags=None, vault_id=None, backup_enabled=None, backup_policy_id=None, policy_enforced=None, throughput_mibps=None, snapshot_policy_id=None, set=None, add=None, remove=None, force_string=None):
    '''
    Run `az netappfiles volume update`; keywords mirror the CLI flags
    (None values are skipped by get_params).

    Returns the CLI's JSON output parsed into Python objects; raises
    Exception carrying the CLI's stderr when no stdout was produced.
    '''
    params = get_params(locals())
    command = "az netappfiles volume update " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # The original had unreachable print() calls after return/raise; removed.
    raise Exception(stderr)
def revert(resource_group, account_name, pool_name, volume_name, snapshot_id):
    '''
    Run `az netappfiles volume revert`, reverting the volume to a snapshot.

    Returns parsed JSON if the CLI printed any; raises Exception with the
    CLI's stderr when no stdout was produced.
    '''
    params = get_params(locals())
    command = "az netappfiles volume revert " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # The original had unreachable print() calls after return/raise; removed.
    raise Exception(stderr)
def pool_change(resource_group, account_name, pool_name, volume_name, new_pool_resource_id):
    '''
    Run `az netappfiles volume pool-change`, moving the volume to another
    capacity pool.

    Returns parsed JSON if the CLI printed any; raises Exception with the
    CLI's stderr when no stdout was produced.
    '''
    params = get_params(locals())
    command = "az netappfiles volume pool-change " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # The original had unreachable print() calls after return/raise; removed.
    raise Exception(stderr)
| 4,673 | 1,517 |
# -*- coding: utf-8 -*-
import xml.etree.cElementTree as ET
class qgsLayerTree:
    """
    Builds the <layer-tree-group> XML element describing a QGIS layer tree.

    Layers and groups are staged in nested dicts (a group dict carries a
    'layers' list, a leaf layer dict does not) and serialized by node().
    """
    def __init__(self, rootName=""):
        """
        :param rootName: the name of the root layer group
        """
        self.tree = dict(name=rootName, layers=[], checked='Qt::Checked', expanded='1')

    def addLayer(self, layerId, name="", level=None, checked=True, expanded=True):
        """
        Add a regular (leaf) layer.
        :param layerId: layer id; converted to a string for the XML attribute
        :param name: display name of the layer
        :param level: group dict to attach to; defaults to the root group
        :param checked: whether the layer is checked in the tree
        :param expanded: whether the layer node is expanded
        """
        if not level:
            level = self.tree
        checkedStr = "Qt::Checked" if checked else "Qt::Unchecked"
        expandedStr = "1" if expanded else "0"
        layer = dict(id=str(layerId), name=name, checked=checkedStr, expanded=expandedStr)
        level['layers'].append(layer)

    def addGroup(self, name='', level=None, checked=True, expanded=True):
        """
        Add a group layer and return its dict so callers can nest further
        layers/groups under it (pass it as ``level`` to addLayer/addGroup).
        :param name: display name of the group
        :param level: parent group dict; defaults to the root group
        :param checked: whether the group is checked in the tree
        :param expanded: whether the group node is expanded
        :raises Exception: if ``level`` is a leaf layer rather than a group
        """
        if not level:
            level = self.tree
        elif 'layers' not in level:
            # BUGFIX: the original condition was inverted
            # ("if 'layers' in level.keys(): raise"), which rejected every
            # valid group and silently accepted leaf layers.
            raise Exception("layer is not a group layer")
        checkedStr = "Qt::Checked" if checked else "Qt::Unchecked"
        expandedStr = "1" if expanded else "0"
        layer = dict(name=name, layers=[], checked=checkedStr, expanded=expandedStr)
        level['layers'].append(layer)
        # Returning the new group (previously None) enables nested trees.
        return layer

    def _appendNodes(self, treeRecord, node):
        # Group dicts carry a 'layers' list; everything else is a leaf layer.
        if 'layers' in treeRecord:
            # Python 3 compatible (the original used the removed '<>' operator).
            attribs = {k: n for k, n in treeRecord.items() if k != 'layers'}
            group = ET.SubElement(node, 'layer-tree-group', attribs)
            for layer in treeRecord['layers']:
                self._appendNodes(layer, group)
            return group
        else:
            return ET.SubElement(node, 'layer-tree-layer', treeRecord)

    def node(self):
        """Serialize the staged tree into a <layer-tree-group> Element."""
        rootName = self.tree['name']
        rootGroup = ET.Element('layer-tree-group', expanded="1", checked="Qt::Checked", name=rootName)
        for layer in self.tree['layers']:
            self._appendNodes(layer, rootGroup)
        return rootGroup
| 2,178 | 630 |
"""
Subpackage for all built-in Snorkel data models.
To ensure correct behavior, this subpackage should always be treated as a single module (with one exception
described below). This rule means that all data models should be imported from this subpackage,
not directly from individual submodules. For example, the correct way to import the Corpus class is
.. code-block:: python
from snorkel.models import Corpus
The only exception is importing SnorkelBase or other classes in order to extend Snorkel's data models.
To ensure that any additional data models are included in the storage backend, these must be imported
and the extending subtypes defined before importing `snorkel.models`. For example, the correct way to
define a new type of Context is:
.. code-block:: python
from snorkel.models.context import Context
from sqlalchemy import Column, String, ForeignKey
class NewType(Context):
# Declares name for storage table
__tablename__ = 'newtype'
# Connects NewType records to generic Context records
id = Column(String, ForeignKey('context.id', ondelete='CASCADE'), primary_key=True)
# Polymorphism information for SQLAlchemy
__mapper_args__ = {
'polymorphic_identity': 'newtype',
}
# Rest of class definition here
# The entire storage schema, including NewType, can now be initialized with the following import
import snorkel.models
"""
from .meta import SnorkelBase, SnorkelSession, snorkel_engine, snorkel_postgres
from .context import Context, Document, Sentence, TemporarySpan, Span
from .context import construct_stable_id, split_stable_id
from .candidate import Candidate, candidate_subclass
from .annotation import Feature, FeatureKey, Label, LabelKey, GoldLabel, GoldLabelKey, StableLabel, Prediction, PredictionKey
from .parameter import Parameter
# This call must be performed after all classes that extend SnorkelBase are
# declared to ensure the storage schema is initialized
# (create_all only creates tables that do not already exist, so re-import is safe).
SnorkelBase.metadata.create_all(snorkel_engine)
| 2,056 | 528 |
#!/usr/bin/env python3
import logging
import plistlib
import struct
import tempfile
import typing
from datetime import datetime
from tarfile import TarFile
from construct import Struct, Bytes, Int32ul, Optional, Enum, Byte, Adapter, Int16ul, this, Computed, \
RepeatUntil
from pymobiledevice3.exceptions import PyMobileDevice3Exception
from pymobiledevice3.lockdown import LockdownClient
from pymobiledevice3.services.base_service import BaseService
from pymobiledevice3.utils import try_decode
# Buffer size in bytes for chunked transfers (presumably used by callers of
# this module -- not referenced in this file's visible code).
CHUNK_SIZE = 4096
# strftime format for rendering syslog timestamps (hours:minutes:seconds).
TIME_FORMAT = '%H:%M:%S'
# Delimiter between raw syslog lines in the relay's stream (newline + NUL).
SYSLOG_LINE_SPLITTER = '\n\x00'
class TimestampAdapter(Adapter):
    """Converts the wire timestamp struct (seconds + microseconds) to datetime."""
    def _decode(self, obj, context, path):
        # Combine the two integer fields into a single local-time datetime.
        return datetime.fromtimestamp(obj.seconds + (obj.microseconds / 1000000))
    def _encode(self, obj, context, path):
        # NOTE(review): expects a "seconds.microseconds"-style string, not the
        # datetime produced by _decode -- asymmetric; confirm before encoding.
        return list(map(int, obj.split(".")))
# Wire timestamp: epoch seconds, 4 unknown bytes, then microseconds.
timestamp_t = Struct(
    'seconds' / Int32ul,
    Bytes(4),
    'microseconds' / Int32ul
)
# Binary layout of one syslog entry from os_trace_relay. The anonymous
# Bytes(...) runs are unknown/padding fields (the layout appears to be
# reverse-engineered); string fields are NUL-terminated, hence the [:-1]
# slices in the Computed decoders.
syslog_t = Struct(
    Bytes(9),
    'pid' / Int32ul,
    Bytes(42),
    'timestamp' / TimestampAdapter(timestamp_t),
    Bytes(1),
    'level' / Enum(Byte, Notice=0, Info=0x01, Debug=0x02, Error=0x10, Fault=0x11),
    Bytes(38),
    'image_name_size' / Int16ul,
    'message_size' / Int16ul,
    Bytes(6),
    '_subsystem_size' / Int32ul,
    '_category_size' / Int32ul,
    Bytes(4),
    '_filename' / RepeatUntil(lambda x, lst, ctx: lst[-1] == 0, Byte),
    'filename' / Computed(lambda ctx: try_decode(bytearray(ctx._filename[:-1]))),
    '_image_name' / Bytes(this.image_name_size),
    'image_name' / Computed(lambda ctx: try_decode(ctx._image_name[:-1])),
    '_message' / Bytes(this.message_size),
    'message' / Computed(lambda ctx: try_decode(ctx._message[:-1])),
    'label' / Optional(Struct(
        '_subsystem' / Bytes(this._._subsystem_size),
        'subsystem' / Computed(lambda ctx: try_decode(ctx._subsystem[:-1])),
        '_category' / Bytes(this._._category_size),
        'category' / Computed(lambda ctx: try_decode(ctx._category[:-1])),
    )),
)
class OsTraceService(BaseService):
    """
    Provides API for the following operations:
    * Show process list (process name and pid)
    * Stream syslog lines in binary form with optional filtering by pid.
    * Get old stored syslog archive in PAX format (can be extracted using `pax -r < filename`).
    * The archive contains the contents of the `/var/db/diagnostics` directory.
    """
    SERVICE_NAME = 'com.apple.os_trace_relay'
    def __init__(self, lockdown: LockdownClient):
        super().__init__(lockdown, self.SERVICE_NAME)
        self.logger = logging.getLogger(__name__)
    def get_pid_list(self):
        """Return the device's process list, decoded from the plist reply."""
        self.service.send_plist({'Request': 'PidList'})
        # ignore first received unknown byte
        self.service.recvall(1)
        response = self.service.recv_prefixed()
        return plistlib.loads(response)
    def create_archive(self, out: typing.IO, size_limit: int = None, age_limit: int = None, start_time: int = None):
        """
        Stream a stored-syslog archive from the device into ``out``.

        :param out: writable binary stream receiving the raw archive bytes
        :param size_limit: optional device-side size cap for the archive
        :param age_limit: optional device-side age cap for included logs
        :param start_time: optional earliest timestamp to include
        """
        request = {'Request': 'CreateArchive'}
        if size_limit is not None:
            request.update({'SizeLimit': size_limit})
        if age_limit is not None:
            request.update({'AgeLimit': age_limit})
        if start_time is not None:
            request.update({'StartTime': start_time})
        self.service.send_plist(request)
        # The reply starts with a single 0x01 status byte, then a plist.
        assert 1 == self.service.recvall(1)[0]
        assert plistlib.loads(self.service.recv_prefixed()).get('Status') == 'RequestSuccessful', 'Invalid status'
        # Each archive chunk is prefixed by a 0x03 magic byte; the device
        # closing the connection signals end of archive.
        while True:
            try:
                assert 3 == self.service.recvall(1)[0], 'invalid magic'
            except ConnectionAbortedError:
                break
            out.write(self.service.recv_prefixed(endianity='<'))
    def collect(self, out: str, size_limit: int = None, age_limit: int = None, start_time: int = None):
        """
        Collect the system logs into a .logarchive that can be viewed later with tools such as log or Console.
        """
        with tempfile.NamedTemporaryFile() as tar:
            self.create_archive(tar, size_limit=size_limit, age_limit=age_limit, start_time=start_time)
            TarFile(tar.name).extractall(out)
    def syslog(self, pid=-1):
        """Yield parsed syslog entries as they stream in; pid=-1 means all."""
        self.service.send_plist({'Request': 'StartActivity', 'MessageFilter': 65535, 'Pid': pid, 'StreamFlags': 60})
        # The initial response length is itself length-prefixed, and the
        # length bytes arrive in reversed order -- hence the [::-1].
        length_length, = struct.unpack('<I', self.service.recvall(4))
        length = int(self.service.recvall(length_length)[::-1].hex(), 16)
        response = plistlib.loads(self.service.recvall(length))
        if response.get('Status') != 'RequestSuccessful':
            raise PyMobileDevice3Exception(f'got invalid response: {response}')
        # Each entry is a 0x02 marker, a little-endian length, then the
        # binary record described by syslog_t.
        while True:
            assert b'\x02' == self.service.recvall(1)
            length, = struct.unpack('<I', self.service.recvall(4))
            line = self.service.recvall(length)
            entry = syslog_t.parse(line)
            yield entry
# -*- coding: utf-8 -*-
# Exercise: read_csv
# Strip the leading '$' from every price column while parsing.
conv = dict.fromkeys(['open', 'close', 'high', 'low', 'next_weeks_open', 'next_weeks_close'],
                     lambda x: float(x.strip("$")))
df = pd.read_csv("dow_jones_index.data", sep=',', header=0, index_col='date',
                 converters=conv, parse_dates=[2])
display(df.head(),
        df.dtypes)
# OHLC columns for the AA ticker between March and June 2011.
AA_df = df[df["stock"] == "AA"].loc["2011-03-01":"2011-06-01"][["open", "high", "low", "close"]]
# Optional: plotting the stock values
import matplotlib.dates as mdates
fig, ax = plt.subplots(figsize=(7, 4))
aa_dates_mpl = mdates.date2num(AA_df.index.values)
# One arrow per session, pointing from the open price to the close price.
for date, stock_open, stock_close in zip(aa_dates_mpl, AA_df['open'].values, AA_df['close'].values):
    ax.arrow(x=date,
             y=stock_open,
             dx=0.,
             dy=stock_close - stock_open,
             head_width=2, head_length=0.1, fc='k', ec='k')
# Shade each day's low-high range behind the arrows.
ax.fill_between(AA_df.index.values, AA_df['low'].values, AA_df['high'].values, alpha=0.5);
ax.set_ylabel("Precio de las acciones de AA")
fig.autofmt_xdate()
# Exercise: MultiIndex
df = pd.read_excel("Cantidad-de-Viviendas-por-Tipo.xlsx",
                   sheet_name=1, # Import the second sheet (housing)
                   usecols=list(range(1, 20)), # Import columns 1 to 20
                   header=1, # The header is on the second row
                   skiprows=[2], # Drop row 2 since it is invalid
                   index_col='ORDEN' # Use the commune order as the index
                   ).dropna() # Drop rows containing NaN
# Build a region/province/commune hierarchical index.
df.set_index(["NOMBRE REGIÓN", "NOMBRE PROVINCIA", "NOMBRE COMUNA"], inplace=True)
display(df.head())
idx = pd.IndexSlice
# Slice by region, by province list, and by commune list respectively.
display(df.loc[("LOS RÍOS")],
        df.loc[idx[:, ["RANCO", "OSORNO"], :], :],
        df.loc[idx[:, :, ["VALDIVIA", "FRUTILLAR"]], :])
col_mask = df.columns[4:-1]
display(col_mask)
display(df.loc[idx[:, "VALDIVIA", :], col_mask].head(),
        df.loc[idx[:, "VALDIVIA", :], col_mask].sum())
# Reference output of the sums above:
"""
Viviendas Particulares Ocupadas con Moradores Presentes 94771.0
Viviendas Particulares Ocupadas con Moradores Ausentes 5307.0
Viviendas Particulares Desocupadas (en Venta, para arriendo, Abandonada u otro) 6320.0
Viviendas Particulares Desocupadas\n(de Temporada) 6910.0
Viviendas Colectivas 386.0
"""
# Exercise: groupby
df = pd.read_excel("Cantidad-de-Viviendas-por-Tipo.xlsx",
                   sheet_name=1, # Import the second sheet (housing)
                   usecols=list(range(1, 20)), # Import columns 1 to 20
                   header=1, # The header is on the second row
                   skiprows=[2], # Drop row 2 since it is invalid
                   index_col='ORDEN' # Use the commune order as the index
                   ).dropna() # Drop rows containing NaN
df.set_index(["NOMBRE REGIÓN", "NOMBRE PROVINCIA", "NOMBRE COMUNA"], inplace=True)
mask = ["Viviendas Particulares Ocupadas con Moradores Presentes",
        "Viviendas Particulares Ocupadas con Moradores Ausentes"]
display(df.groupby(by="NOMBRE REGIÓN", sort=False)[mask].aggregate([np.mean, np.std]).head(5))
def responsables(x):
    # Regions where, on average, the communes' occupied(present)/total ratio
    # exceeds 98%. (The original comment said 85%, but the code uses 0.98.)
    return x[mask[0]]/(x[mask[0]] + x[mask[1]]) > 0.98
display(df.groupby("NOMBRE COMUNA", sort=False).filter(responsables)[mask])
def normalizar(x):
    """
    Z-score a grouped column: floating-point columns are centered and scaled
    to unit standard deviation (sample std, ddof=1); any other dtype is
    returned unchanged.

    BUGFIX: the original tested ``x.dtype == np.float`` -- the ``np.float``
    alias was removed in NumPy 1.20+, raising AttributeError. The check now
    also generalizes to every floating dtype (float32, float64, ...).
    """
    if np.issubdtype(x.dtype, np.floating):
        return (x - x.mean())/x.std()
    else:
        return x
display(df.groupby(by="NOMBRE REGIÓN", sort=False)[mask].transform(normalizar).head(10))
| 3,783 | 1,394 |
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
from ..functional.elemwise import _elwise
from ..tensor import Tensor
from .module import Module
class Elemwise(Module):
    r"""A :class:`~.Module` wrapping an :mod:`~.functional.elemwise` operator.

    Can be replaced with its :class:`~.QATModule` counterpart
    :class:`~.qat.Elemwise` via :func:`~.quantize.quantize_qat`.

    :param method: the elemwise method, support the following string.
        It will do the normal elemwise operator for float.

    * "add": a + b
    * "fuse_add_relu": max(x+y, 0)
    * "mul": x * y
    * "min": min(x, y)
    * "max": max(x, y)
    * "sub": x - y
    * "true_div": x / y
    * "fuse_add_sigmoid": sigmoid(x + y)
    * "fuse_add_tanh": tanh(x + y)
    * "relu": x > 0 ? x : 0
    * "silu": silu(x)
    * "gelu": gelu(x)
    * "abs": x > 0 ? x : -x
    * "sigmoid": sigmoid(x)
    * "exp": exp(x)
    * "tanh": tanh(x)
    * "fuse_mul_add3": x * y + z
    * "fast_tanh": x * (27. + x * x) / (27. + 9. * x * x)
    * "negate": -x
    * "acos": acos(x)
    * "asin": asin(x)
    * "ceil": ceil(x)
    * "cos": cos(x)
    * "expm1": expm1(x)
    * "floor": floor(x)
    * "log": log(x)
    * "log1p": log1p(x)
    * "sin": sin(x)
    * "round": round(x)
    * "erf": erf(x)
    * "erfinv": erfinv(x)
    * "erfc": erfc(x)
    * "erfcinv": erfcinv(x)
    * "abs_grad": abs_grad
    * "floor_div": floor_div
    * "mod": mod
    * "sigmoid_grad": sigmoid_grad
    * "switch_gt0": switch_gt0
    * "tanh_grad": tanh_grad
    * "lt": less
    * "leq": leq
    * "eq": equal
    * "pow": pow
    * "log_sum_exp": log_sum_exp
    * "fast_tanh_grad": fast_tanh_grad
    * "atan2": atan2
    * "cond_leq_mov": cond_leq_mov
    * "h_swish": h_swish
    * "fuse_add_h_swish": h_swish(x+y)
    * "h_swish_grad": h_swish_grad
    * "and": bool binary: x && y
    * "or": bool binary: x || y
    * "xor": bool binary: x ^ y
    * "not": bool unary: ~x
    """

    def __init__(self, method, **kwargs):
        super().__init__(**kwargs)
        # Stored verbatim; resolved by _elwise at call time.
        self.method = method

    def forward(self, *inputs):
        """Apply the configured elemwise operator to the inputs."""
        return _elwise(*inputs, mode=self.method)
| 2,639 | 1,011 |
# -*- coding: utf-8 -*-
"""Quickly open Flameshot to make a screenshot."""
from albert import *
import os
# Albert extension metadata.
__title__ = "Flameshot shortcut"
__version__ = "0.4.1"
__triggers__ = "fs"
__authors__ = "Benjamin Altpeter"
# Resolve the Flameshot icon from the current icon theme.
iconPath = iconLookup("flameshot")
def handleQuery(query):
    """Return a single Albert result item that launches `flameshot gui`."""
    if not query.isTriggered:
        return

    # Sleep briefly so the Albert prompt can disappear before the
    # screenshot is taken; 0.2 seconds seems long enough in practice,
    # though there may be a cleaner way to do this.
    # A ProcAction is not used because the delayed shell pipeline cannot
    # be expressed as a subprocess.run-style argument list.
    launch = FuncAction(
        "Open Flameshot",
        lambda: os.system("(sleep 0.2 && flameshot gui)&"),
    )
    return [
        Item(
            id=__title__,
            icon=iconPath,
            text="Open Flameshot in GUI mode",
            subtext="This will run `flameshot gui`.",
            completion=query.rawString,
            actions=[launch],
        )
    ]
| 1,114 | 326 |
import os
import shlex
repository = "origin"
branch = "master"

# Path fragments that are always skipped.
defaultIgnoreList = [
    "__pycache__",
    "migrations",
    ".git",
    ".log",
    ".vscode",
]
# Project-specific path fragments to skip.
ignoreList = [
    "uwsgi",
    "config",
]


def _is_ignored(path):
    """Return True if *path* contains any fragment from either ignore list."""
    return any(fragment in path for fragment in defaultIgnoreList + ignoreList)


def _collect_files():
    """Breadth-first walk of the working directory.

    Returns every file path that does not match an ignore fragment.
    Directories are always descended into (matching the original
    behavior); filtering applies to files only.
    """
    pending = os.listdir()
    files = []
    i = 0
    while i < len(pending):
        path = pending[i]
        if os.path.isdir(path):
            pending.extend(os.path.join(path, entry) for entry in os.listdir(path))
        elif not _is_ignored(path):
            files.append(path)
        i += 1
    return files


os.system("git pull " + repository + " " + branch)
for path in _collect_files():
    # BUG FIX: paths were concatenated into the shell command unquoted,
    # which breaks on spaces and allows shell metacharacters through.
    os.system("git add " + shlex.quote(path))
os.system("git commit")
os.system("git push " + repository + " " + branch)
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
from pytouch import PyTouchZoo, sensors
def main():
    """Demonstrate the three ways of loading a PyTouch model."""
    zoo = PyTouchZoo()

    # Show which pretrained models the zoo exposes.
    print(zoo.list_models())

    # Pull the DIGIT-sensor "touch detect" model from the zoo.
    detect_model = zoo.load_model_from_zoo(  # noqa: F841
        "touchdetect_resnet18", sensors.DigitSensor
    )

    # Load a locally saved PyTorch-Lightning checkpoint.
    pl_model = zoo.load_model("/path/to/pl/model")  # noqa: F841

    # Build a custom ONNX session for inference.
    onnx_session = zoo.load_onnx_session("/path/to/onnx/model")  # noqa: F841


if __name__ == "__main__":
    main()
| 766 | 279 |
from .prepare import make_train_test
import os
import tempfile
import scipy.io as sio
from hashlib import sha256
try:
import urllib.request as urllib_request # for Python 3
except ImportError:
import urllib2 as urllib_request # for Python 2
# Download table keyed by target filename; each entry is
# (url, expected sha256 hex digest) so the payload can be verified.
urls = {
    "chembl-IC50-346targets.mm" :
    (
        "http://homes.esat.kuleuven.be/~jsimm/chembl-IC50-346targets.mm",
        "10c3e1f989a7a415a585a175ed59eeaa33eff66272d47580374f26342cddaa88",
    ),
    "chembl-IC50-compound-feat.mm" :
    (
        "http://homes.esat.kuleuven.be/~jsimm/chembl-IC50-compound-feat.mm",
        "f9fe0d296272ef26872409be6991200dbf4884b0cf6c96af8892abfd2b55e3bc",
    ),
}
def load_one(filename):
    """Download *filename*, verify its SHA-256 checksum, and parse it.

    Raises AssertionError when the downloaded bytes do not match the
    checksum pinned in ``urls``.
    """
    (url, expected_sha) = urls[filename]
    with tempfile.TemporaryDirectory() as tmpdirname:
        output = os.path.join(tmpdirname, filename)
        urllib_request.urlretrieve(url, output)
        # BUG FIX: the file handle was previously opened inline and never
        # closed; use a context manager so it is released promptly.
        with open(output, "rb") as fh:
            actual_sha = sha256(fh.read()).hexdigest()
        assert actual_sha == expected_sha
        matrix = sio.mmread(output)
    return matrix
def load_chembl():
    """Downloads a small subset of the ChEMBL dataset.

    Returns
    -------
    ic50_train: sparse matrix
        sparse train matrix
    ic50_test: sparse matrix
        sparse test matrix
    feat: sparse matrix
        sparse row features
    """
    # Fetch the bioactivity matrix and the compound feature matrix.
    activity = load_one("chembl-IC50-346targets.mm")
    features = load_one("chembl-IC50-compound-feat.mm")
    # Hold out 20% of the activity entries for testing.
    train, test = make_train_test(activity, 0.2)
    return (train, test, features)
| 1,663 | 653 |
import requests
from bs4 import BeautifulSoup
from datetime import date
class Seasonal:
    """Scrapes anime titles from MyAnimeList's seasonal listing.

    The class body computes the current year and season once, at import
    time, and uses them as the constructor defaults.
    """

    # str(date.today()) is ISO "YYYY-MM-DD"; keep only the year part.
    year = date.today()
    year = str(year).split('-')[0]
    doy = date.today().timetuple().tm_yday

    # "day of year" ranges for the northern hemisphere
    spring = range(80, 172)
    summer = range(172, 264)
    fall = range(264, 355)
    # winter = everything else
    if doy in spring:
        season = 'spring'
    elif doy in summer:
        season = 'summer'
    elif doy in fall:
        season = 'fall'
    else:
        season = 'winter'

    def __init__(self, limit='3', years=year, seasons=season):
        """Fetch up to *limit* titles for the given year/season.

        :raises KeyError: when limit is 9 or more (type kept for
            backward compatibility with existing callers).
        """
        # BUG FIX: validate *before* the network request -- the original
        # made the HTTP round trip even when the limit was going to be
        # rejected anyway.
        limit = int(limit)
        if limit >= 9:
            raise KeyError('too many requests, limit max is 9')
        url = requests.get(f'https://myanimelist.net/anime/season/{years}/{seasons}')
        soup = BeautifulSoup(url.content, 'html.parser')
        anchors = soup.find_all('a', {"class": "link-title"})[:limit]
        names = [anchor.text for anchor in anchors]
        # Empty result -> None rather than an empty string.
        self.name = ', '.join(names) or None
| 1,115 | 372 |
# Generated by Django 3.2.6 on 2021-11-29 00:48
from django.db import migrations
class Migration(migrations.Migration):
    """Drops the no-longer-used ``receta`` field from the ``Bitacora`` model."""
    dependencies = [
        ('bitacoras', '0004_remove_bitacora_area'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='bitacora',
            name='receta',
        ),
    ]
| 334 | 121 |
#!/usr/bin/env python3
# --*-- encoding=utf-8 --*--
import datetime as dt
import sys
from . import addition
from . import subtraction
from ..eval import quiz
def main():
    """Run an arithmetic quiz from the command line.

    Usage: python3 -m papamath.calc [limit] [times]
      limit -- largest operand value (default 100)
      times -- number of questions (default 50)
    """
    argv = sys.argv
    limit = int(argv[1]) if len(argv) > 1 else 100
    times = int(argv[2]) if len(argv) > 2 else 50

    summary = quiz.repeat(
        [addition.add_ints(limit), subtraction.sub_ints(limit)], times=times)

    num = len(summary['question'].unique())
    if num > 0:
        total_time = summary['spent'].sum()
        average_time = total_time / num
        # User-facing Chinese summary message (kept verbatim).
        print(f'你一共做了{num}道数学题,用时{str(dt.timedelta(seconds=total_time))},'
              f'每题平均{str(dt.timedelta(seconds=average_time))},继续加油!')


if __name__ == '__main__':
    main()
| 919 | 372 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration creating the Country, State, Locality and
    Address tables plus their uniqueness constraints."""
    def forwards(self, orm):
        """Create the four address tables and their unique constraints."""
        # Adding model 'Country'
        db.create_table(u'address_country', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=40, blank=True)),
            ('code', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
        ))
        db.send_create_signal(u'address', ['Country'])
        # Adding model 'State'
        db.create_table(u'address_state', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=165, blank=True)),
            ('code', self.gf('django.db.models.fields.CharField')(max_length=3, blank=True)),
            ('country', self.gf('django.db.models.fields.related.ForeignKey')(related_name='states', to=orm['address.Country'])),
        ))
        db.send_create_signal(u'address', ['State'])
        # Adding unique constraint on 'State', fields ['name', 'country']
        db.create_unique(u'address_state', ['name', 'country_id'])
        # Adding model 'Locality'
        db.create_table(u'address_locality', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=165, blank=True)),
            ('postal_code', self.gf('django.db.models.fields.CharField')(max_length=10, blank=True)),
            ('state', self.gf('django.db.models.fields.related.ForeignKey')(related_name='localities', to=orm['address.State'])),
        ))
        db.send_create_signal(u'address', ['Locality'])
        # Adding unique constraint on 'Locality', fields ['name', 'state']
        db.create_unique(u'address_locality', ['name', 'state_id'])
        # Adding model 'Address'
        db.create_table(u'address_address', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('street_address', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
            ('locality', self.gf('django.db.models.fields.related.ForeignKey')(related_name='addresses', to=orm['address.Locality'])),
            ('formatted', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
            ('latitude', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
            ('longitude', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
        ))
        db.send_create_signal(u'address', ['Address'])
    def backwards(self, orm):
        """Reverse of forwards(): drop constraints first, then tables."""
        # Removing unique constraint on 'Locality', fields ['name', 'state']
        db.delete_unique(u'address_locality', ['name', 'state_id'])
        # Removing unique constraint on 'State', fields ['name', 'country']
        db.delete_unique(u'address_state', ['name', 'country_id'])
        # Deleting model 'Country'
        db.delete_table(u'address_country')
        # Deleting model 'State'
        db.delete_table(u'address_state')
        # Deleting model 'Locality'
        db.delete_table(u'address_locality')
        # Deleting model 'Address'
        db.delete_table(u'address_address')
    # Frozen ORM snapshot used by South while running this migration.
    models = {
        u'address.address': {
            'Meta': {'ordering': "('locality', 'street_address')", 'object_name': 'Address'},
            'formatted': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'latitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'locality': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'addresses'", 'to': u"orm['address.Locality']"}),
            'longitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'street_address': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'address.country': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Country'},
            'code': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'blank': 'True'})
        },
        u'address.locality': {
            'Meta': {'ordering': "('state', 'name')", 'unique_together': "(('name', 'state'),)", 'object_name': 'Locality'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '165', 'blank': 'True'}),
            'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
            'state': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'localities'", 'to': u"orm['address.State']"})
        },
        u'address.state': {
            'Meta': {'ordering': "('country', 'name')", 'unique_together': "(('name', 'country'),)", 'object_name': 'State'},
            'code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
            'country': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'states'", 'to': u"orm['address.Country']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '165', 'blank': 'True'})
        }
    }
    complete_apps = ['address']
"""
Define nodes of the detection tree. Nodes are responsible for how they
list their children, detecting if they are pw-protected, and in some
cases how they can be identified (e.g. by a heuristic on their mimetype
and extension)
Also implements the module-level function: `node_from_file()` which
determines what node to assign an input file.
"""
import io
import os
import zipfile
from fnmatch import fnmatch
from typing import Dict, IO, Iterator, List, Optional, Tuple
import magic
from msoffcrypto import OfficeFile # type: ignore
from msoffcrypto.__main__ import is_encrypted
from fables.constants import OS_PATTERNS_TO_SKIP, NUM_BYTES_FOR_MIMETYPE_DETECTION
from fables.errors import ExtractError
# Format string for errors raised when decryption fails for a reason other
# than a wrong password: {0} = file name, {1} = original exception text.
UNEXPECTED_DECRYPTION_EXCEPTION_MESSAGE = (
    "Unexpected exception occured when decrypting: {0}. Exception: {1}."
)
class IncorrectPassword(Exception):
    """Raised when a supplied password fails to decrypt an Office file."""
    pass
class StreamManager:
    """Context manager that yields a byte stream for a node.

    If the caller supplied an open stream, that stream is rewound and
    reused -- and never closed here, so we don't step on the caller's
    handle.  Otherwise the named file is opened on entry and closed on
    exit.  Either way, the user of the stream gets the same interface
    and never has to care which case applies.
    """

    def __init__(self, name: Optional[str], stream: Optional[IO[bytes]]) -> None:
        self.name = name
        self.stream = stream
        if self.stream is not None:
            self.stream.seek(0)
        self.opened_stream: Optional[IO[bytes]] = None

    def __enter__(self) -> IO[bytes]:
        if self.stream is not None:
            # Rewind so every entry starts from the beginning.
            self.stream.seek(0)
            return self.stream
        if self.name is None:
            raise RuntimeError(
                "Cannot compute 'io.BufferedIOBase' for without "
                + "'name' or 'stream' property"
            )
        self.opened_stream = open(self.name, "rb")
        return self.opened_stream

    def __exit__(self, *exc) -> None:  # type: ignore
        # Only close a stream we opened ourselves.
        managed = self.opened_stream
        if managed is not None and hasattr(managed, "close"):
            managed.close()
class FileNode:
    """Base node of the detection tree.

    Couples a file (name and/or open byte stream) with its detected
    mimetype/extension and the password table used for decryption.
    Subclasses override ``children`` and ``encrypted``.
    """

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        stream: Optional[IO[bytes]] = None,
        mimetype: Optional[str] = None,
        extension: Optional[str] = None,
        passwords: Optional[Dict[str, str]] = None,
    ) -> None:
        self.name = name or getattr(stream, "name", None)
        self._stream = stream
        self.mimetype = mimetype
        self.extension = extension
        # BUG FIX: ``passwords={}`` was a shared mutable default, so
        # add_password() on one default-constructed node leaked the entry
        # into every other one.  A caller-supplied dict is still held by
        # reference so related nodes (an archive and its children) keep
        # sharing the same table.
        self.passwords: Dict[str, str] = {} if passwords is None else passwords
        self.extract_errors: List[ExtractError] = []
        self._decrypted_stream: Optional[IO[bytes]] = None

    @property
    def empty(self) -> bool:
        """True when libmagic reported a zero-byte file."""
        return self.mimetype == "application/x-empty"

    @property
    def stream(self) -> StreamManager:
        """Usage:
        >>> with node.stream as bytesio:
        >>> bytes = bytesio.read()
        """
        return StreamManager(name=self.name, stream=self._stream)

    @property
    def children(self) -> Iterator["FileNode"]:
        """Child nodes; leaf node types yield nothing."""
        yield from []

    @property
    def encrypted(self) -> bool:
        """Whether the file is password-protected; base nodes never are."""
        return False

    def add_password(self, name: str, password: str) -> None:
        """Register *password* for files whose path matches *name*."""
        self.passwords[name] = password

    @property
    def password(self) -> Optional[str]:
        """Assigns to this node property, the password that is key'd by the
        longest os-normalized sub-path of the node file name.
        E.g. for node.name == sub_dir_1/encrypted.xlsx, the following file-paths
        1. sub_dir_1/encrypted.xlsx
        2. encrypted.xlsx
        3. sub_dir_2/encrypted.xlsx
        are ordered by best key to use for the password.
        """
        if self.name is None:
            return None
        match_metrics: List[Tuple[bool, int, str]] = []
        for path, password in self.passwords.items():
            is_match = fnmatch(self.name, f"*{path}")
            match_metrics.append((is_match, len(path), password))
        if not match_metrics:
            return None
        # sort on match truthiness first then on file-path length
        return sorted(match_metrics, reverse=True)[0][-1]

    def __str__(self) -> str:
        return f"{self.__class__.__name__}(name={self.name}, mimetype={self.mimetype})"
class MimeTypeFileNode(FileNode):
    """FileNode identified by a heuristic over mimetype and extension.

    Subclasses declare MIMETYPES (best match first), EXTENSIONS, and
    EXTENSIONS_TO_EXCLUDE.
    """

    MIMETYPES: List[str] = []
    EXTENSIONS: List[str] = []
    EXTENSIONS_TO_EXCLUDE: List[str] = []

    @classmethod
    def is_my_mimetype_or_extension(
        cls, mimetype: Optional[str], extension: Optional[str]
    ) -> bool:
        """True when (mimetype, extension) plausibly identify this node type."""
        mime = mimetype or "_"
        ext = (extension or "_").lower()
        if mime not in cls.MIMETYPES:
            return False
        # Trust the best mimetype match unless the extension is explicitly
        # excluded; otherwise any listed extension confirms a weaker
        # mimetype match.
        if mime == cls.MIMETYPES[0] and ext not in cls.EXTENSIONS_TO_EXCLUDE:
            return True
        return ext in cls.EXTENSIONS
class Zip(MimeTypeFileNode):
    """Zip archive node; its children are the archive members."""
    MIMETYPES = ["application/zip"]
    EXTENSIONS = ["zip"]
    # xlsx/xlsb are themselves zip containers but have their own node
    # types, so exclude them here.
    EXTENSIONS_TO_EXCLUDE = ["xlsx", "xlsb"]
    @property
    def _bytes_password(self) -> Optional[bytes]:
        # zipfile wants the password as bytes, not str.
        str_password = self.password
        if str_password is not None:
            return str_password.encode("utf-8")
        return None
    @staticmethod
    def _encrypted_from_bit_signature(zf: zipfile.ZipFile) -> bool:
        """Stdlib impl here:
        https://github.com/python/cpython/blob/3.7/Lib/zipfile.py#L1514
        to understand why, search for "bit 0: If set" in:
        https://www.iana.org/assignments/media-types/application/zip
        basically we are checking if the 0th bit is 0 or 1
        """
        return bool(zf.infolist()[0].flag_bits & 0x1)
    def _password_decrypts(self) -> bool:
        # True when the stored password opens the first archive member.
        if self.password is None:
            return False
        with self.stream as node_stream:
            with zipfile.ZipFile(node_stream) as zf:
                first_child_file = zf.namelist()[0]
                try:
                    zf.open(first_child_file, pwd=self._bytes_password)
                    return True
                except RuntimeError as e:
                    # zipfile signals a wrong password via RuntimeError text.
                    if "Bad password for file" in str(e):
                        return False
                    else:
                        raise RuntimeError(
                            UNEXPECTED_DECRYPTION_EXCEPTION_MESSAGE.format(
                                self.name, str(e)
                            )
                        )
    @property
    def encrypted(self) -> bool:
        # Encrypted iff the bit signature says so AND no stored password
        # decrypts the archive.
        with self.stream as node_stream:
            with zipfile.ZipFile(node_stream) as zf:
                if self._encrypted_from_bit_signature(zf):
                    return not self._password_decrypts()
        return False
    @property
    def children(self) -> Iterator[FileNode]:
        """Yield one node per archive member; RuntimeErrors (e.g. a bad
        password) are recorded in ``extract_errors`` instead of raised."""
        try:
            with self.stream as node_stream:
                with zipfile.ZipFile(node_stream) as zf:
                    for child_file in zf.namelist():
                        with zf.open(
                            child_file, pwd=self._bytes_password
                        ) as child_stream:
                            # TODO(Thomas: 3/5/2019):
                            # Reading the zipfile bytes into a BytesIO stream
                            # instead of using the default zipfile stream because
                            # our usage of zipfile trips the cyclic redundancy
                            # checks (bad CRC-32) of the zipfile.ZipExtFile.
                            # Similar issue: https://stackoverflow.com/questions/5624669/strange-badzipfile-bad-crc-32-problem/5626098 # noqa: E501
                            # I don't think passing this along to the BytesIO instance
                            # is too expensive to worry about fixing this issue now.
                            bytes_stream = io.BytesIO(child_stream.read())
                            if self.name is not None:
                                # Qualify the member path with the archive name.
                                child_file_path = os.path.join(
                                    os.path.basename(self.name), child_file
                                )
                                yield node_from_file(
                                    name=child_file_path,
                                    stream=bytes_stream,
                                    passwords=self.passwords,
                                )
                            else:
                                yield node_from_file(
                                    name=child_file,
                                    stream=bytes_stream,
                                    passwords=self.passwords,
                                )
        except RuntimeError as e:
            extract_error = ExtractError(
                message=str(e), exception_type=type(e), name=self.name
            )
            self.extract_errors.append(extract_error)
class ExcelEncryptionMixin(FileNode):
    """Adds msoffcrypto-based password decryption to Excel node types."""
    def __init__(self, **kwargs) -> None:  # type: ignore
        super().__init__(**kwargs)
        # Keep a handle on the raw (possibly encrypted) stream manager.
        self._raw_stream_mgr: StreamManager = super().stream
        self._decrypted_stream: Optional[IO[bytes]] = None
    @staticmethod
    def decrypt(encrypted_stream: IO[bytes], password: str) -> IO[bytes]:
        """Return a decrypted in-memory copy of *encrypted_stream*.
        Raises IncorrectPassword when the password is wrong and
        RuntimeError for any other decryption failure.
        """
        try:
            office_file = OfficeFile(encrypted_stream)
            decrypted_stream = io.BytesIO()
            office_file.load_key(password=password)
            office_file.decrypt(decrypted_stream)
            decrypted_stream.seek(0)
            return decrypted_stream
        except Exception as e:
            # xlsx exception message: 'The file could not be decrypted with this password'
            # xls exception message: 'Failed to verify password'
            if "password" in str(e):
                raise IncorrectPassword()
            else:
                raise RuntimeError(
                    UNEXPECTED_DECRYPTION_EXCEPTION_MESSAGE.format(
                        getattr(encrypted_stream, "name", None), str(e)
                    )
                )
    @property
    def encrypted(self) -> bool:
        # NOTE: this property has a side effect -- a successful decryption
        # caches the plaintext stream for the ``stream`` property below.
        with self._raw_stream_mgr as raw_stream:
            if is_encrypted(raw_stream):
                if self.password is not None:
                    try:  # to see if the password works
                        self._decrypted_stream = self.decrypt(raw_stream, self.password)
                        return False
                    except IncorrectPassword:
                        pass
                return True
        return False
    @property
    def stream(self) -> StreamManager:
        # Prefer the decrypted stream cached by ``encrypted`` when available.
        if not self.encrypted and self._decrypted_stream is not None:
            return StreamManager(self.name, self._decrypted_stream)
        else:
            return self._raw_stream_mgr
class Xlsx(MimeTypeFileNode, ExcelEncryptionMixin):
    """Modern Excel workbook (.xlsx)."""
    # "application/encrypted" and "application/zip" cover encrypted
    # workbooks and the plain OOXML zip container respectively.
    MIMETYPES = [
        "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        "application/encrypted",
        "application/zip",
    ]
    EXTENSIONS = ["xlsx"]
    EXTENSIONS_TO_EXCLUDE = ["xlsb"]
class Xlsb(MimeTypeFileNode, ExcelEncryptionMixin):
    """Binary Excel workbook (.xlsb)."""
    MIMETYPES = [
        "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        "application/vnd.ms-excel.sheet.binary.macroEnabled.12",
        "application/encrypted",
        "application/zip",
    ]
    EXTENSIONS = ["xlsb"]
    EXTENSIONS_TO_EXCLUDE = ["xlsx"]
class Xls(MimeTypeFileNode, ExcelEncryptionMixin):
    """Legacy Excel workbook (.xls); CDFV2 is the OLE2 container mimetype."""
    MIMETYPES = ["application/vnd.ms-excel", "application/CDFV2"]
    EXTENSIONS = ["xls"]
class Csv(MimeTypeFileNode):
    """Delimited text file (.csv/.tsv/.txt)."""
    MIMETYPES = ["application/csv", "text/plain"]
    EXTENSIONS = ["csv", "tsv", "txt"]
class Directory(FileNode):
    """A filesystem directory; its children are the nodes of its entries."""

    @property
    def children(self) -> Iterator[FileNode]:
        for entry in os.listdir(self.name):
            yield node_from_file(
                name=os.path.join(self.name, entry), passwords=self.passwords
            )
class Skip(FileNode):
    """Node for files fables does not parse (unmatched type or OS artifact)."""
    pass
def mimetype_from_stream(stream: Optional[IO[bytes]]) -> Optional[str]:
    """Sniff the mimetype from the first bytes of *stream* (rewinds it)."""
    if stream is None:
        return None
    head = stream.read(NUM_BYTES_FOR_MIMETYPE_DETECTION)
    stream.seek(0)
    return magic.from_buffer(head, mime=True)
def extension_from_name(name: str) -> Optional[str]:
    """Return the extension of *name* without its leading dot, or None."""
    ext = os.path.splitext(name)[1]
    return ext.lstrip(".") if ext else None
def mimetype_and_extension(
    *, name: Optional[str] = None, stream: Optional[IO[bytes]] = None
) -> Tuple[Optional[str], Optional[str]]:
    """Detect (mimetype, extension) from an open stream or a named file."""
    if stream is None and name is not None:
        # No caller stream: open the named file just long enough to sniff.
        with open(name, "rb") as byte_stream:
            mimetype = mimetype_from_stream(byte_stream)
    else:
        mimetype = mimetype_from_stream(stream)
    extension = extension_from_name(name) if name is not None else None
    return mimetype, extension
def node_from_file(
    *,
    name: Optional[str] = None,
    stream: Optional[IO[bytes]] = None,
    passwords: Optional[Dict[str, str]] = None,
) -> FileNode:
    """Build the FileNode subclass appropriate for a file.

    Skips OS artifacts, dispatches directories, and otherwise picks the
    first MimeTypeFileNode subclass whose heuristic matches; files with
    no match become Skip nodes.
    """
    # BUG FIX: the previous ``passwords={}`` default was a shared mutable
    # default argument; create a fresh dict per call instead.
    if passwords is None:
        passwords = {}
    if name is not None and any(pattern in name for pattern in OS_PATTERNS_TO_SKIP):
        return Skip(name=name, stream=stream)
    if name is not None and os.path.isdir(name):
        return Directory(name=name, stream=stream, passwords=passwords)
    mimetype, extension = mimetype_and_extension(name=name, stream=stream)
    # First subclass whose mimetype/extension heuristic matches wins.
    for node_type in MimeTypeFileNode.__subclasses__():
        if node_type.is_my_mimetype_or_extension(mimetype, extension):
            node: FileNode = node_type(
                name=name,
                stream=stream,
                mimetype=mimetype,
                extension=extension,
                passwords=passwords,
            )
            return node
    return Skip(name=name, stream=stream, mimetype=mimetype, extension=extension)
| 14,096 | 4,098 |
import json
import requests
# Base URL of the local Rasa server and its REST webhook endpoint.
RASA = "http://localhost:5005"
RASA_API = RASA + "/webhooks/rest/webhook"
def helloworld():
    """Return the Rasa server's root-page body (basic reachability check)."""
    return requests.get(RASA).text
def version():
    """Return the body of the Rasa server's /version endpoint."""
    headers = {'Content-Type': "application/json"}
    return requests.get(RASA + "/version", headers=headers).text
def message(message, sender="TheLegend27", debug=0):
    """Send *message* to the Rasa REST webhook and return the parsed reply.

    Returns {"text": "ERROR 1"} when the server is unreachable and
    {"text": "ERROR 0"} when the server replies with invalid JSON.
    """
    data = {
        "sender": sender,
        "message": str(message)
    }
    headers = {
        'Content-Type': "application/json",
        'X-Requested-With': 'XMLHttpRequest',
        'Connection': 'keep-alive',
    }
    try:
        response = requests.post(RASA_API, data=json.dumps(data), headers=headers)
    # BUG FIX: a bare ``except:`` also swallowed SystemExit and
    # KeyboardInterrupt; catch only network-level failures.
    except requests.exceptions.RequestException:
        # no response from rasa server, is it running?
        return {"text": "ERROR 1"}
    if debug == 1:
        print(response.status_code)
        print(response.content)
        print(json.loads(response.text))
    try:
        return json.loads(response.text)
    # json.JSONDecodeError subclasses ValueError.
    except ValueError:
        # something wrong with rasa server, it's running, but not working
        return {"text": "ERROR 0"}
from typing import List
import math
import heapq
class Solution:
    def minimumEffortPath(self, heights: List[List[int]]) -> int:
        """Dijkstra variant: minimize, over all paths from the top-left to
        the bottom-right cell, the maximum absolute height difference
        between consecutive cells."""
        rows, cols = len(heights), len(heights[0])
        # effort[r][c] = smallest max-step cost found so far to reach (r, c).
        effort = [[math.inf] * cols for _ in range(rows)]
        effort[0][0] = 0
        seen = [[False] * cols for _ in range(rows)]
        frontier = [(0, 0, 0)]
        while frontier:
            _, r, c = heapq.heappop(frontier)
            seen[r][c] = True
            for nr, nc in ((r, c + 1), (r + 1, c), (r, c - 1), (r - 1, c)):
                if 0 <= nr < rows and 0 <= nc < cols and not seen[nr][nc]:
                    step = abs(heights[nr][nc] - heights[r][c])
                    candidate = max(step, effort[r][c])
                    if candidate < effort[nr][nc]:
                        effort[nr][nc] = candidate
                        heapq.heappush(frontier, (candidate, nr, nc))
        return effort[-1][-1]
| 1,246 | 385 |
from os.path import join as j
# Path to the project root, relative to this file's location.
relative_path_to_root = j("..","..")
# use and abuse from os.path.join() (here aliased as "j") it ensures cross OS compatible paths
data_folder = j(relative_path_to_root, "data")
figures_folder = j(relative_path_to_root, "report", "figures")
# STEP 0 download data
# ===================================
s0_folder = j(data_folder, "s0_downloaded_data")
s0_balzac_books = j(s0_folder, "balzac_books.json")
s0_figure_sinusoid = j(figures_folder, "s0_sinusoid.png")
# STEP 1 train model
# ===================================
# ...
import random
from bernoulliarm import *
from thompsonbernoulli import *
from bandittestframe import *
# Deterministic runs: seed before shuffling the arm means.
random.seed(1)

means = [0.1, 0.1, 0.1, 0.1, 0.9]
n_arms = len(means)
random.shuffle(means)
arms = [BernoulliArm(mu) for mu in means]
print("Best arm is " + str(means.index(max(means))))

algo = ThompsonBernoulli([], [], [], [])
algo.initialize(n_arms)
results = test_algorithm(algo, arms, 5000, 250)

# FIX: use a context manager so the results file is always closed (and
# flushed) even if writing fails part-way; plain "w" suffices since the
# file is only written, never read back.
with open("thompson_bernoulli_results.tsv", "w") as f:
    # One TSV row per trial, one column per results series.
    for i in range(len(results[0])):
        f.write("\t".join(str(results[j][i]) for j in range(len(results))) + "\n")
| 578 | 249 |
from collections import Counter
from typing import List
from datastructures.trees.heaps.min_heap import MinHeap
def top_k_frequent(nums: List[int], k: int) -> List[int]:
    """Return the k most common values in *nums*, most frequent first."""
    return [value for value, _ in Counter(nums).most_common(k)]
def top_k_frequent_with_min_heap(nums: List[int], k: int) -> List[int]:
    """
    Uses a Min Heap to get the top k frequent elements.

    Counts are negated so the smallest heap entry corresponds to the
    most frequent value.
    """
    pairs = [[-count, value] for value, count in Counter(nums).items()]
    heap = MinHeap(pairs)
    return [heap.remove_min()[1] for _ in range(k)]
| 653 | 239 |
#!/usr/bin/env python3
""" Filter delimited records """
import argparse
import csv
import re
import sys
# --------------------------------------------------
def get_args():
    """Get command-line arguments"""
    parser = argparse.ArgumentParser(
        description='Filter delimited records',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Required: delimited input file (opened for reading by argparse).
    parser.add_argument('-f',
                        '--file',
                        metavar='FILE',
                        type=argparse.FileType('rt'),
                        help='Input file',
                        required=True)
    # Required: value (regex) to search for.
    parser.add_argument('-v',
                        '--val',
                        help='Value for filter',
                        metavar='val',
                        type=str,
                        required=True)
    # Optional: restrict the search to one column (default: all columns).
    parser.add_argument('-c',
                        '--col',
                        help='Column for filter',
                        metavar='col',
                        type=str,
                        default='')
    # Output file is created/truncated by argparse.
    parser.add_argument('-o',
                        '--outfile',
                        help='Output filename',
                        type=argparse.FileType('wt'),
                        default='out.csv')
    parser.add_argument('-d',
                        '--delimiter',
                        help='Input delimiter',
                        metavar='delim',
                        type=str,
                        default=',')
    return parser.parse_args()
# --------------------------------------------------
def main():
    """Filter rows of a delimited file on a value, optionally in one column."""
    args = get_args()
    search_for = args.val
    search_col = args.col

    reader = csv.DictReader(args.file, delimiter=args.delimiter)
    # BUG FIX: fieldnames is None for an empty input file, which made the
    # ``in`` test below raise TypeError; normalize to an empty list.
    fieldnames = reader.fieldnames or []
    if search_col and search_col not in fieldnames:
        # FIX: both error lines now go to stderr (the second previously
        # went to stdout), keeping stdout clean for data.
        print(f'--col "{search_col}" not a valid column!', file=sys.stderr)
        print(f'Choose from {", ".join(fieldnames)}', file=sys.stderr)
        sys.exit(1)

    writer = csv.DictWriter(args.outfile, fieldnames=fieldnames)
    writer.writeheader()

    num_written = 0
    for rec in reader:
        # Search the chosen column, or all values joined when no column given.
        text = rec.get(search_col) if search_col else ' '.join(rec.values())
        if re.search(search_for, text, re.IGNORECASE):
            num_written += 1
            writer.writerow(rec)

    print(f'Done, wrote {num_written:,} to "{args.outfile.name}".')


# --------------------------------------------------
if __name__ == '__main__':
    main()
| 2,530 | 681 |
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
class ConvNet(nn.Module):
    """CNN with configurable conv and fully-connected stacks.

    Each conv "layer" is Conv2d -> ReLU -> MaxPool2d(2, 2); each fc
    "layer" is Linear -> ReLU -> Dropout.
    NOTE(review): ``classification_layer`` is created but never used in
    ``forward`` -- confirm whether callers apply it separately.
    """
    def __init__(self, n_conv_layers, n_fc_layers, kernel_size, n_conv_filters, hidden_size, dropout=0.5):
        super(ConvNet, self).__init__()
        self.n_conv_layers = n_conv_layers
        self.n_fc_layers = n_fc_layers
        self.kernel_size = kernel_size
        self.n_conv_filters = n_conv_filters
        self.hidden_size = hidden_size
        self.conv_layers = []
        self.fc_layers = []
        self.m = nn.MaxPool2d(2, stride=2)
        self.n = nn.Dropout(dropout)
        self.relu = nn.ReLU()
        # 3-channel (RGB) input.
        in_channels = 3
        for layer in range(self.n_conv_layers):
            self.conv_layers.append(nn.Conv2d(in_channels, self.n_conv_filters[layer], self.kernel_size[layer]))
            self.conv_layers.append(self.relu)
            self.conv_layers.append(self.m)
            in_channels = self.n_conv_filters[layer]
        # * 25 flattens the final feature map -- presumably a 5x5 spatial
        # output; TODO confirm against the expected input size.
        in_channels = in_channels * 25
        for layer in range(self.n_fc_layers):
            self.fc_layers.append(nn.Linear(in_channels, self.hidden_size[layer]))
            self.fc_layers.append(self.relu)
            self.fc_layers.append(self.n)
            in_channels = self.hidden_size[layer]
        self.conv = nn.Sequential(*self.conv_layers)
        self.fc = nn.Sequential(*self.fc_layers)
        self.classification_layer = nn.Linear(in_channels, 2)
    def forward(self, x):
        # Convolve, flatten per-sample, then run the fc stack.
        embed = self.conv(x)
        embed = embed.view(x.shape[0],-1)
        y = self.fc(embed)
        return y
class Attention(nn.Module):
    """(Optionally gated) tanh attention producing softmax weights over dim 0.

    :param input_size: feature size of each input row
    :param hidden_size: size of the attention hidden projection
    :param output_size: number of attention heads/columns produced
    :param gated: when True, multiply the tanh branch by a sigmoid gate
    """

    def __init__(self, input_size, hidden_size, output_size, gated=True):
        super(Attention, self).__init__()
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.gated = gated
        self.V = nn.Linear(input_size, hidden_size)
        self.U = nn.Linear(input_size, hidden_size)
        self.w = nn.Linear(hidden_size, output_size)
        self.sigm = nn.Sigmoid()
        self.tanh = nn.Tanh()
        self.sm = nn.Softmax(dim=0)

    def forward(self, h):
        """Return attention weights for h, softmax-normalized over dim 0."""
        # Idiom fix: truthiness test instead of comparison with True.
        if self.gated:
            a = self.sm(self.w(self.tanh(self.V(h)) * self.sigm(self.U(h))))
        else:
            a = self.sm(self.w(self.tanh(self.V(h))))
        return a
class pool(nn.Module):
    """Pool over dim 0: plain mean, or attention-weighted sum when an
    attention module is supplied."""

    def __init__(self, attn=None):
        super(pool, self).__init__()
        self.attn = attn

    def forward(self, x):
        # Idiom fix: identity comparison is the correct None test
        # (``== None`` invokes __eq__, which tensors/modules may override).
        if self.attn is None:
            return torch.mean(x, 0)
        # Weighted pooling: a^T @ x, then drop the singleton head dim.
        a = self.attn(x)
        v = torch.transpose(a, dim0=0, dim1=1).matmul(x)
        return v.squeeze(0)
class Generator(nn.Module):
    """CNN feature extractor followed by a bidirectional LSTM classifier.

    The batch of tiles is treated as a single sequence (the batch
    dimension becomes the time dimension) and every step is classified
    into 2 classes.
    """
    def __init__(self, n_conv_layers, kernel_size, n_conv_filters, hidden_size, n_rnn_layers, dropout=0.5):
        super(Generator, self).__init__()
        self.n_conv_layers = n_conv_layers
        self.kernel_size = kernel_size
        self.n_conv_filters = n_conv_filters
        self.hidden_size = hidden_size
        self.n_rnn_layers = n_rnn_layers
        self.conv_layers = []
        self.m = nn.MaxPool2d(2, stride=2)
        self.relu = nn.ReLU()
        # 3-channel (RGB) input.
        in_channels = 3
        for layer in range(self.n_conv_layers):
            self.conv_layers.append(nn.Conv2d(in_channels, self.n_conv_filters[layer], self.kernel_size[layer]))
            self.conv_layers.append(self.relu)
            self.conv_layers.append(self.m)
            in_channels = self.n_conv_filters[layer]
        self.conv = nn.Sequential(*self.conv_layers)
        # * 25 flattens the final feature map -- presumably a 5x5 spatial
        # output; TODO confirm against the expected input size.
        in_channels = in_channels * 25
        self.lstm = nn.LSTM(in_channels, self.hidden_size, self.n_rnn_layers, batch_first=True,
                            dropout=dropout, bidirectional=True)
        # Bidirectional LSTM doubles the feature size fed to the classifier.
        in_channels = hidden_size * 2
        self.classification_layer = nn.Linear(in_channels, 2)
    def forward(self, x):
        # Extract per-tile features, then view the batch as one sequence
        # of flattened embeddings (sequence batch size 1).
        embed = self.conv(x)
        embed = embed.view(1,x.shape[0],-1)
        self.lstm.flatten_parameters()
        output, hidden = self.lstm(embed)
        y = self.classification_layer(output)
        return y
    def zero_grad(self):
        """Sets gradients of all model parameters to zero."""
        for p in self.parameters():
            if p.grad is not None:
                p.grad.data.zero_()
def update_tile_shape(H_in, W_in, kernel_size, dilation = 1., padding = 0., stride = 1.):
    """Track the (H, W) output size of a Conv2d/MaxPool2d stage.

    Implements the standard PyTorch size formula
    ``floor((dim + 2*padding - dilation*(kernel-1) - 1) / stride + 1)``
    applied identically to both spatial dimensions.
    """
    def _out_dim(dim):
        return int(np.floor((dim + 2. * padding - dilation * (kernel_size - 1) - 1) / stride + 1))
    return _out_dim(H_in), _out_dim(W_in)
class Neighborhood_Generator(nn.Module):
    """Conv encoder classifying a tile jointly with its neighbour tiles.

    Each tile is embedded by the conv stack; the embedding is concatenated
    with its neighbours' embeddings before the fully-connected head.
    """
    def __init__(self, n_conv_layers, n_fc_layers, kernel_size, n_conv_filters, hidden_size, dropout=0.5,
                 dilation = 1., padding = 0, H_in = 27, W_in = 27):
        """Args:
            n_conv_layers: number of conv/relu/maxpool stages.
            n_fc_layers: number of Linear/ReLU/Dropout stages in the head.
            kernel_size: per-conv-layer kernel sizes (indexable).
            n_conv_filters: per-conv-layer channel counts (indexable).
            hidden_size: per-FC-layer widths (indexable).
            dropout: dropout probability in the FC head.
            dilation, padding: accepted but NOT forwarded to the Conv2d
                layers; the shape tracking below uses them only implicitly
                via update_tile_shape defaults — TODO confirm intent.
            H_in, W_in: spatial size of the input tiles.
        """
        super(Neighborhood_Generator, self).__init__()
        # set class attributes
        self.n_conv_layers = n_conv_layers
        self.kernel_size = kernel_size
        self.n_conv_filters = n_conv_filters
        self.hidden_size = hidden_size
        self.n_fc_layers = n_fc_layers
        self.conv_layers = []
        self.fc_layers = []
        self.n = nn.Dropout(dropout)
        self.m = nn.MaxPool2d(2, stride=2)
        self.relu = nn.ReLU()
        self.H_in, self.W_in = H_in, W_in
        # perform the encoding
        in_channels = 3  # RGB input
        for layer in range(self.n_conv_layers):
            self.conv_layers.append(nn.Conv2d(in_channels, self.n_conv_filters[layer], self.kernel_size[layer]))
            self.conv_layers.append(self.relu)
            self.conv_layers.append(self.m)
            # convolution
            self.H_in, self.W_in = update_tile_shape(self.H_in, self.W_in, kernel_size[layer])
            # max pooling
            self.H_in, self.W_in = update_tile_shape(self.H_in, self.W_in, 2, stride = 2)
            in_channels = self.n_conv_filters[layer]
        # compute concatenation size
        # x5: the tile itself plus (presumably) 4 neighbours — the forward
        # pass concatenates each embedding with its neighbour embeddings.
        in_channels = in_channels * self.H_in * self.W_in * 5
        # infer the z
        for layer in range(self.n_fc_layers):
            self.fc_layers.append(nn.Linear(in_channels, self.hidden_size[layer]))
            self.fc_layers.append(self.relu)
            self.fc_layers.append(self.n)
            in_channels = self.hidden_size[layer]
        self.conv = nn.Sequential(*self.conv_layers)
        self.fc = nn.Sequential(*self.fc_layers)
        self.classification_layer = nn.Linear(in_channels, 2)
    def forward(self, x, neighbors):
        """Classify tiles ``x`` (N, 3, H, W) given per-tile neighbour indices.

        Args:
            x: batch of tiles.
            neighbors: iterable of index tensors, one per tile, selecting
                that tile's neighbour rows from the embedded batch.

        Returns:
            (N, 2) logits.
        """
        embed = self.conv(x)
        embed = embed.view(x.shape[0],-1)  # flatten each tile's feature map
        # Gather neighbour embeddings for every tile.
        e_neighbors = [torch.index_select(embed,0,n) for n in neighbors]
        # Concatenate self + neighbours into one flat vector per tile.
        embed_n = torch.stack([torch.cat([e.unsqueeze(0),n],0).view(-1) for e,n in zip(embed,e_neighbors)])
        output = self.fc(embed_n)
        logits = self.classification_layer(output)
        return logits
"""
cepelblog/core/views.py
"""
from cepelblog.models import BlogPost, Tag
from flask import render_template, request, Blueprint
# Blueprint collecting the public site views; registered by the app factory.
core = Blueprint('core', __name__)
# -----------------------------------------------------------------------------
# INDEX VIEW
# -----------------------------------------------------------------------------
@core.route('/')
def index():
    """Landing page: the four most recent posts plus the full tag list."""
    recent_posts = BlogPost.query.order_by(BlogPost.date.desc()).limit(4).all()
    all_tags = Tag.query.all()
    return render_template('index.html', blog_posts=recent_posts, tags=all_tags)
# -----------------------------------------------------------------------------
# BLOG VIEW
# -----------------------------------------------------------------------------
@core.route('/blog')
def blog():
    """Paginated blog listing: 10 posts per page, newest first."""
    current_page = request.args.get('page', 1, type=int)
    posts_page = BlogPost.query.order_by(BlogPost.date.desc()).paginate(page=current_page, per_page=10, error_out=False)
    all_tags = Tag.query.all()
    return render_template('blog.html', blog_posts=posts_page, tags=all_tags)
# -----------------------------------------------------------------------------
# ABOUT VIEW
# -----------------------------------------------------------------------------
@core.route('/about')
def about():
    """Static about page."""
    return render_template('about.html')
from invoke import (
Collection,
task,
)
from . import (
go,
java,
js,
python,
release,
rewrite,
swift,
)
@task(post=[python.compile, swift.compile, js.compile])
def compile():
    """Compile protobufs, then trigger the per-language compile tasks."""
    # print() call form works on both Python 2 and 3; the bare print
    # statement used before is a SyntaxError on any Python 3 interpreter.
    print('compiling protobufs...')
@task(pre=[compile], post=[release.release], aliases=('release',))
def full_release():
    """Compile everything, then run the release task; no work of its own."""
# Root task namespace: per-language sub-collections plus the top-level
# compile / full_release entry points.
ns = Collection()
ns.add_collection(go)
ns.add_collection(java)
ns.add_collection(python)
ns.add_collection(release)
ns.add_collection(swift)
ns.add_collection(rewrite)
ns.add_collection(js)
ns.add_task(compile)
ns.add_task(full_release)
| 590 | 210 |
from sc2 import Race, Difficulty
from sc2.player import Computer
def main():
    """Launch a Protoss cannon-rush bot against a medium Protoss AI."""
    # `sc2` and `Bot` were used but never imported anywhere in this file,
    # so the script crashed with NameError; import them locally (the sc2
    # package is already a dependency of this file's top-level imports).
    import sc2
    from sc2.player import Bot
    # NOTE(review): CannonRushBot is still undefined in this file — confirm
    # where it lives and add the matching import.
    sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
        Bot(Race.Protoss, CannonRushBot()),
        Computer(Race.Protoss, Difficulty.Medium)
    ], realtime=True)


if __name__ == '__main__':
    main()
| 286 | 112 |
from machine import Pin
import utime
#button = Pin(6,Pin.OUT,Pin.PULL_DOWN)
# Read GP4 under each pull configuration, two seconds apart, to show how the
# internal pull resistor changes the idle level (actual readings depend on
# the external wiring — confirm on the board).
button = Pin(4,Pin.IN,Pin.PULL_UP)
print(button.value())  # pull-up: expected idle reading 1
utime.sleep(2)
button.init(button.IN, button.PULL_DOWN)
print(button.value())  # pull-down: expected idle reading 0
utime.sleep(2)
button.init(button.IN, button.PULL_UP)
print(button.value())
| 288 | 123 |
A_0203_9 = {0: {'A': 0.10204317968289706, 'C': -0.13691712115186927, 'E': -0.801906817305166, 'D': -0.8225967811119382, 'G': 0.11878824300296573, 'F': 0.9807481728927135, 'I': -0.08975052529873016, 'H': -0.46728943474294626, 'K': -0.01635051504420257, 'M': 0.13635490849258533, 'L': 0.29716182243116734, 'N': -0.1286542528765269, 'Q': -0.22532226558777754, 'P': -0.8157353525058761, 'S': 0.04830718247501479, 'R': -0.4139337673856644, 'T': -0.24763654544865543, 'W': -0.021527526172074474, 'V': -0.03137707240956574, 'Y': 0.6643862587620629}, 1: {'A': -0.54949356374825, 'C': -0.8117744894320461, 'E': -1.0743765866104174, 'D': -1.061760905698326, 'G': -0.9308617213690403, 'F': -0.932347794466434, 'I': 0.323520392704971, 'H': -1.1273491231541024, 'K': -1.1879585122200325, 'M': 1.1481619061051433, 'L': 1.5407733671618127, 'N': -0.7055969164687933, 'Q': -0.1890056209633594, 'P': -1.2571661594664847, 'S': -0.87614894680028, 'R': -1.109929073073095, 'T': -0.5318477027332926, 'W': -0.9675691383410863, 'V': -0.16187691341759808, 'Y': -1.2398545581322644}, 2: {'A': 0.24290299933218903, 'C': 0.04723930441586608, 'E': -0.6496701326245561, 'D': -0.4946524897631653, 'G': -0.41022468732501594, 'F': 0.2905502247349859, 'I': 0.12012242513926548, 'H': 0.01064708381446897, 'K': -0.21789481823035867, 'M': 0.4338393474707017, 'L': -0.08077762024804472, 'N': 0.3216772052094605, 'Q': -0.16162942542270664, 'P': 0.03696701843183928, 'S': 0.31994843965675096, 'R': -0.4981739332080992, 'T': -0.13299870072639222, 'W': -0.11307072353574531, 'V': 0.020445108310871943, 'Y': 0.4967935293649042}, 3: {'A': 0.20699761767603636, 'C': 0.48842025345696677, 'E': 0.19404838402004712, 'D': 0.45033075416711604, 'G': 0.19440196578838168, 'F': -0.24497841826451822, 'I': -0.05917739556527642, 'H': -0.20982550013390275, 'K': -0.23616575677187762, 'M': -0.2977624141600084, 'L': -0.2683806660697341, 'N': 0.08404468367645503, 'Q': -0.14857218716372908, 'P': -0.21397471181100178, 'S': 0.34199621146412756, 'R': 
-0.2631053807694868, 'T': -0.05308558508059852, 'W': -0.3300697932490994, 'V': -0.4707400600782509, 'Y': 0.15754531045307651}, 4: {'A': -0.10212301390856257, 'C': 0.06536695915267256, 'E': -0.1886608639881943, 'D': 0.06484362870301151, 'G': 0.005010623789248809, 'F': 0.12300943645851675, 'I': -0.004004490492481512, 'H': 0.21015501986123333, 'K': -0.09724305996957416, 'M': 0.08494093216578519, 'L': 0.09811065269205965, 'N': -0.1965332152613238, 'Q': -0.15518521233824856, 'P': -0.3816905157559997, 'S': -0.08932565945163161, 'R': -0.40727115109856377, 'T': 0.2784626404936263, 'W': 0.200773012824807, 'V': 0.06145451483780586, 'Y': 0.635717398722066}, 5: {'A': -0.2580044361911012, 'C': 0.17942452883353013, 'E': -0.4443287705395302, 'D': -0.41075658381696095, 'G': -0.2607903518786664, 'F': 0.16947698794235327, 'I': 0.5634868047540258, 'H': -0.36343574273467755, 'K': -0.5852253370849595, 'M': 0.29088306160708877, 'L': 0.6443176398328498, 'N': -0.28451514587707233, 'Q': -0.09060691523811505, 'P': -0.2894914381485486, 'S': 0.08350992967587026, 'R': -0.5521680944493348, 'T': -0.030859956901080756, 'W': -0.5146675121506715, 'V': 0.41617534690626434, 'Y': 0.037858704640756996}, 6: {'A': 0.5232920403386511, 'C': 0.2058508297174797, 'E': -0.25219796859268373, 'D': -0.1234154740200415, 'G': -0.12939740070395714, 'F': 0.11263107182513618, 'I': -0.13598800943358744, 'H': 0.12253261744142137, 'K': -0.33235717584736746, 'M': 0.13345788573190864, 'L': -0.009249204948599784, 'N': -0.24132226603454482, 'Q': -0.16250846821916767, 'P': 0.014394516273535085, 'S': 0.21337296803616956, 'R': -0.5746846121007303, 'T': 0.413108330497925, 'W': -0.18770217396592426, 'V': -0.1752219121738958, 'Y': -0.033673771208459144}, 7: {'A': 0.036566500965322574, 'C': -0.27974891711982197, 'E': -0.051651718593687286, 'D': -0.26228426010481537, 'G': 0.1287677133627596, 'F': 0.15999363902553837, 'I': -0.31835436041766746, 'H': -0.006466141376958506, 'K': -0.29841280452954366, 'M': -0.02642297637225856, 'L': 
0.2334394485403842, 'N': -0.06299828223004916, 'Q': -0.17268540496120088, 'P': 0.012357211703997149, 'S': 0.15124585787160916, 'R': -0.1879296582440065, 'T': 0.2968258933516108, 'W': 0.30133389544879363, 'V': -0.5142472089384342, 'Y': 0.21903442086984154}, 8: {'A': 0.639050023202892, 'C': -0.7028491322053141, 'E': -0.9914677282497425, 'D': -1.2717919380745935, 'G': -0.6582341134954846, 'F': -1.0439932301792096, 'I': 0.47329427574977373, 'H': -1.1339509382702664, 'K': -1.2024078639898832, 'M': -0.4415333100600978, 'L': 0.16990162993385377, 'N': -0.9872677354495493, 'Q': -0.9695839752794148, 'P': -0.6781766520609745, 'S': -0.16931392427765923, 'R': -1.0583443697619022, 'T': -0.17970374194511513, 'W': -1.1640627432346589, 'V': 1.5395564940661135, 'Y': -1.1731219124757624}, -1: {'slope': 0.11621320737700856, 'intercept': -0.3854461610547183}} | 4,837 | 4,330 |
from src.main import loadConfig
import argparse
import base64
import logging as logs
import coloredlogs
if __name__ == "__main__":
# Basic Logs Config
logs.basicConfig()
coloredlogs.install(level=logs.INFO)
# Basic CLI config
parser = argparse.ArgumentParser(
prog="launcher.py",
description=base64.b64decode(
"ICAgLi0tLS0tLS0tLS0tLS0uICAgICAgIC4gICAgLiAgICogICAgICAgKgogIC9"
"fL18vXy9fL18vXy9fLyBcICAgICAgICAgKiAgICAgICAuICAgKSAgICAuCiAvL18"
"vXy9fL18vXy9fLy8gXyBcIF9fICAgICAgICAgIC4gICAgICAgIC4KL18vXy9fL1"
"8vXy9fL18vfC8gXC4nIC5gLW8KIHwgICAgICAgICAgICAgfHwtJygvICwtLScKIH"
"wgICAgICAgICAgICAgfHwgIF8gfAogfCAgICAgICAgICAgICB8fCcnIHx8ICAgI"
"CAgICAgICBNYWRlIGJ5OgogfF9fX19fX19fX19fX198fCB8X3xMICAgICAgICAgI"
"CBpbGx1c2lvbgogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIFNoYWRv"
"d0RvZwogICAgICAgICAgICAgICAgICAgICBBdmF0YXI6IENsb3VkIFN0cmlmZQog"
"ICAgICAgICAgICgoKCUjIyUoLygoKCgoIyUlICAgICAgICAgICAgICAgICAKICA"
"gICAgICAgICAlKCgvIy8lJSMvKCUoIygmJSUlJSAgICAgICAgICAgICAgCiAgICA"
"gICAgICAgIC8vKCUmJSMlLyUvJSUmQEBAQEAmJiAgICAgICAgICAgIAogICAgIC"
"AgICAoKC8qLyMlIyUlJiMvLy8vJiZAQCYmJiUmLiAgICAgICAgICAKICAgICAgIC"
"AgIC4vLyMlIy8jJSojLy8oIyglJiYmJiYmJiYmICAgICAgICAgCiAgICAgICAgI"
"C8jIyovKCglIyUjIygvIyUmJSZAJiUlJiYmJiYgICAgICAgIAogICAgICAgLyUvL"
"yovKCMjIyglJSUmJiUvKCYmJUAmJiYmJiAgICAgICAgICAKICAgICAgICAvLyoo"
"LyUqKioqJSMlKC8uLi8sKiUmJiUlIyYgICAgICAgICAgCiAgICAgICAoIC8gKigo"
"KioqKiovQCYmJSUmQCMlKCUmLyAgICAgICAgICAgIAogICAgICAoICAgICAoKC8"
"vLy8qLyYmJiYmJiYmJSUmJiMsICAgICAgICAgICAKICAgICAgICAgICAgICggLiw"
"vLyovJiYmJiYmJiMmIy4uLCAgICAgICAgICAgCiAgICAgICAgICAgICAvICAgKi"
"oqKi8jJiYmJiMuJi4uLi4sICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgIC"
"4vIyYmLi4uLi8uLiAuLi4uLi4gICAgLCAKICAgICAgICAgICAgICAsIC4gICAgI"
"CAuICAgLiAgICAgLiAgLiwsLiAuLi4uCiAgICAgICAgICAqKiogICAgICAgICAgI"
"CAgICAgICAgICosLC4uLi4uLi4uLgogICAgICAgICAqKiouICAgICAgICAgICAg"
"ICAgICAgKi4uLi4uLi4uLi4uLiwKICAgICAgICAqKiAuICAgICAgICAgICAgICAg"
"Li4uICAuLi4qLi4uLi4uLCwsCiAgICAgICAgKiAuICAgICAgICAgICAgICAgIC4"
"uLiAgLi4sIC8uLi4uLi4uLAo=").decode('utf-8')
, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-f',
'--file',
required=False,
help='Specify file to be patched.')
parser.add_argument('-p',
'--patch',
required=False,
help='Specify patch file.')
parser.add_argument('-v',
'--verbose',
required=False,
action="store_true",
help='Enable Verbose Mode.')
parser.add_argument('-od',
'--output_date',
required=False,
action="store_true",
help='Append date and time to output directory.')
parser.add_argument('-o',
'--output_path',
required=False,
help='Specify output file path.')
parser.add_argument('-ci',
'--ci_build',
required=False,
action="store_true",
help='For running tests on buildbot.')
parser.add_argument('-y',
'--always_yes',
required=False,
action="store_false",
help='Always skip confirmation prompts.')
parser.add_argument('-dl',
'--download_patch',
required=False,
action="store_true",
help='Download/Update patch files.')
args = parser.parse_args()
# Load the config file, and patch the ELF file
loadConfig(args.file, args.patch, args.verbose, args.output_date, args.output_path, args.ci_build, args.always_yes, args.download_patch)
| 4,241 | 1,775 |
import json
import numpy as np
import matplotlib.pyplot as plt
from tensorboard.backend.event_processing import event_accumulator
def load_data_from_tensorboard(path):
    """Read the 'loss' scalar series from a TensorBoard event file.

    Returns the raw scalar values in recorded order (one per event).
    """
    accumulator = event_accumulator.EventAccumulator(path)
    accumulator.Reload()
    loss_events = accumulator.scalars.Items('loss')
    return [event.value for event in loss_events]
# CVAE
# Event-file paths are hard-coded to one training run; comment this block and
# uncomment the VAE block below to plot the other model.
BCE_loss = load_data_from_tensorboard("runs/Mar17_05-00-30_06bed19cdc6a/loss_BCE/events.out.tfevents.1615957245.06bed19cdc6a.20271.2")
KL_loss = load_data_from_tensorboard("runs/Mar17_05-00-30_06bed19cdc6a/loss_KLD/events.out.tfevents.1615957245.06bed19cdc6a.20271.3")
loss = load_data_from_tensorboard("runs/Mar17_05-00-30_06bed19cdc6a/loss_loss/events.out.tfevents.1615957245.06bed19cdc6a.20271.1")
# VAE
# BCE_loss = load_data_from_tensorboard("runs/Mar17_05-00-23_06bed19cdc6a/loss_BCE/events.out.tfevents.1615957234.06bed19cdc6a.20225.2")
# KL_loss = load_data_from_tensorboard("runs/Mar17_05-00-23_06bed19cdc6a/loss_KLD/events.out.tfevents.1615957234.06bed19cdc6a.20225.3")
# loss = load_data_from_tensorboard("runs/Mar17_05-00-23_06bed19cdc6a/loss_loss/events.out.tfevents.1615957234.06bed19cdc6a.20225.1")
# One scalar event per epoch; plot the three curves on a single axis.
x = list(range(len(KL_loss)))
ax1 = plt.subplot(1,1,1)
ax1.plot(x, BCE_loss, color="red",linewidth=1, label = "BCE loss")
ax1.plot(x, KL_loss, color="blue",linewidth=1, label = "KL loss")
ax1.plot(x, loss, color="yellow",linewidth=1, label = "total loss")
plt.xlabel("epoch")
plt.ylabel("loss")
plt.title("loss with respect to epoch(CVAE)")
ax1.legend()
plt.show()
from django.conf import settings
from django.conf.urls import url
from django.conf.urls.static import static
from api.views import SecretView, AngularTemplateView, SecretImageView, SecretImageFileView
# Route table: the Angular catch-all (r'^.*') must stay LAST so the API
# endpoints above it keep precedence.
urlpatterns = [
    url(r'^api/secret/$', SecretView.as_view(), name='secret'),
    url(r'^api/secret-image/$', SecretImageView.as_view(), name='secret-image'),
    url(r'^api/secret-image.img$', SecretImageFileView.as_view(), name='secret-image-file'),
    url(r'^.*', AngularTemplateView.as_view(), name="home")
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| 585 | 183 |
import logging
import time
import numpy as np
import os
import rsopt.conversion
from libensemble import message_numbers
from libensemble.executors.executor import Executor
from collections import Iterable
# TODO: This should probably be in libe_tools right?
_POLL_TIME = 1 # seconds
_PENALTY = 1e9
def get_x_from_H(H, sim_specs):
    """Pull the first input vector out of a libEnsemble history row.

    The field name ('x' by convention, but configurable) is whatever
    sim_specs['in'] lists first; the stored vector is returned as a plain
    Python list.
    """
    field = sim_specs['in'][0]
    return H[field][0].tolist()
def get_signature(parameters, settings):
    """Return settings merged with a None placeholder for every parameter.

    # TODO: "signature" just means a dict with settings and params; rename
    # if kept. Parameter values are unknown at this point, hence None.
    """
    placeholders = dict.fromkeys(parameters, None)
    return {**settings, **placeholders}
def _parse_x(x, parameters):
    """Map the leading entries of ``x`` onto the parameter names.

    Side effect: the consumed entries are pop()'d off the front of ``x`` so
    the caller's list is advanced for the next job's parameters. When ``x``
    is a scalar it is wrapped in a NEW list first, so in that case the
    caller's object is not mutated.

    NOTE(review): ``Iterable`` comes from ``from collections import
    Iterable`` at the top of this file, which breaks on Python 3.10+;
    it should be imported from collections.abc.
    """
    x_struct = {}
    if not isinstance(x, Iterable):
        x = [x, ]
    for val, name in zip(x, parameters.keys()):
        x_struct[name] = val
    # Remove used parameters
    for _ in parameters.keys():
        x.pop(0)
    return x_struct
def compose_args(x, parameters, settings):
    """Build (args, kwargs) for a job call from the optimizer vector ``x``.

    Consumes this job's leading entries of ``x`` (via _parse_x) and fills
    the matching parameter slots of the settings/parameter signature.
    """
    args = None  # positional args are currently unused
    x_struct = _parse_x(x, parameters)
    kwargs = get_signature(parameters, settings)
    for name in kwargs:
        if name in x_struct:
            kwargs[name] = x_struct[name]
    return args, kwargs
def format_evaluation(sim_specs, container):
    """Pack computed value(s) into a 1-row structured array per sim_specs['out'].

    A bare scalar is wrapped so both single- and multi-field output specs
    are handled uniformly.
    """
    if not hasattr(container, '__iter__'):
        container = (container,)
    # FUTURE: Type check for container values against spec
    out_spec = sim_specs['out']
    result = np.zeros(1, dtype=out_spec)
    names = result.dtype.names
    if len(out_spec) == 1:
        # Single output field: assign the whole container at once.
        result[names[0]] = container
        return result
    for field, value in zip(names, container):
        result[field] = value
    return result
class SimulationFunction:
    """Callable sim_f for libEnsemble: runs the configured Job chain once.

    A worker invokes the instance with (H, persis_info, sim_specs,
    libE_info); each Job is executed in order and the packed evaluation is
    returned together with the worker status code.
    """
    def __init__(self, jobs: list, objective_function: callable):
        """Args:
            jobs: ordered Job objects to execute per evaluation.
            objective_function: optional callable receiving the shared
                context dict ``self.J``; its return value becomes the
                recorded objective.
        """
        # Received from libEnsemble during function evaluation
        self.H = None
        self.J = {}  # shared context handed to pre/post processors and the objective
        self.persis_info = None
        self.sim_specs = None
        self.libE_info = None
        self.log = logging.getLogger('libensemble')
        self.jobs = jobs
        self.objective_function = objective_function
        self.switchyard = None  # carries distributions between consecutive jobs
    def __call__(self, H, persis_info, sim_specs, libE_info):
        """Evaluate one history row; return (output, persis_info, sim_status).

        NOTE(review): if ``self.jobs`` is empty, ``sim_status`` (and
        ``output``) are never bound and the final lines raise NameError —
        confirm jobs is always non-empty upstream.
        """
        self.H = H
        self.persis_info = persis_info
        self.sim_specs = sim_specs
        self.libE_info = libE_info
        x = get_x_from_H(H, self.sim_specs)
        halt_job_sequence = False
        for job in self.jobs:
            # Generate input values.
            # compose_args consumes this job's leading entries of x, so each
            # job in the chain picks up where the previous one stopped.
            _, kwargs = compose_args(x, job.parameters, job.settings)
            self.J['inputs'] = kwargs
            # Call preprocessors
            if job.pre_process:
                for f_pre in job._setup._preprocess:
                    f_pre(self.J)
            # Generate input files for simulation
            job._setup.generate_input_file(kwargs, '.')
            # Hand the previous job's output distribution to this job.
            if self.switchyard and job.input_distribution:
                if os.path.exists(job.input_distribution):
                    os.remove(job.input_distribution)
                self.switchyard.write(job.input_distribution, job.code)
            job_timeout_sec = job.timeout
            if job.executor:
                # MPI Job or non-Python executable
                exctr = Executor.executor
                task = exctr.submit(**job.executor_args)
                # Poll until the task finishes, fails, or exceeds its timeout.
                while True:
                    time.sleep(_POLL_TIME)
                    task.poll()
                    if task.finished:
                        if task.state == 'FINISHED':
                            sim_status = message_numbers.WORKER_DONE
                            self.J['status'] = sim_status
                            f = None  # executor jobs produce no Python return value
                            break
                        elif task.state == 'FAILED':
                            sim_status = message_numbers.TASK_FAILED
                            self.J['status'] = sim_status
                            halt_job_sequence = True
                            break
                        else:
                            self.log.warning("Unknown task failure")
                            sim_status = message_numbers.TASK_FAILED
                            self.J['status'] = sim_status
                            halt_job_sequence = True
                            break
                    elif task.runtime > job_timeout_sec:
                        self.log.warning('Task Timed out, aborting Job chain')
                        sim_status = message_numbers.WORKER_KILL_ON_TIMEOUT
                        self.J['status'] = sim_status
                        task.kill()  # Timeout
                        halt_job_sequence = True
                        break
            else:
                # Serial Python Job
                f = job.execute(**kwargs)
                sim_status = message_numbers.WORKER_DONE
            # NOTE: Right now f is not passed to the objective function. Would need to go inside J. Or pass J into
            # function job.execute(**kwargs)
            if halt_job_sequence:
                break
            if job.output_distribution:
                self.switchyard = rsopt.conversion.create_switchyard(job.output_distribution, job.code)
                self.J['switchyard'] = self.switchyard
            if job.post_process:
                for f_post in job._setup._postprocess:
                    f_post(self.J)
        if sim_status == message_numbers.WORKER_DONE and not halt_job_sequence:
            # Use objective function is present
            if self.objective_function:
                val = self.objective_function(self.J)
                output = format_evaluation(self.sim_specs, val)
                self.log.info('val: {}, output: {}'.format(val, output))
            else:
                # If only serial python was run then then objective_function doesn't need to be defined
                # (f is only bound on the serial path; executor path sets it to None)
                try:
                    output = format_evaluation(self.sim_specs, f)
                except NameError as e:
                    print(e)
                    print("An objective function must be defined if final Job is is not Python")
        else:
            # TODO: Temporary penalty. Need to add a way to adjust this.
            self.log.warning('Penalty was used because result could not be evaluated')
            output = format_evaluation(self.sim_specs, _PENALTY)
        return output, persis_info, sim_status
"""Microbe Directory display models."""
from app.extensions import mongoDB as mdb
class MicrobeDirectoryResult(mdb.EmbeddedDocument): # pylint: disable=too-few-public-methods
    """Set of microbe directory results."""
    # Free-form mapping of sample name -> result payload; schema is not
    # enforced here (presumably validated upstream — confirm).
    samples = mdb.DictField(required=True)
| 267 | 83 |
# In this example we will use the concept of RECURSION
# to the the Power and the Factorial of a given Number
def calculatePower(number, power):
    """Return ``number ** power`` computed recursively.

    Args:
        number: the base.
        power: non-negative integer exponent.

    Raises:
        ValueError: if power is negative — the original recursion would
            never reach the base case and overflow the stack.
    """
    if power < 0:
        raise ValueError("power must be a non-negative integer")
    if power == 0:
        return 1  # base case: anything to the 0th power is 1
    return number * calculatePower(number, power - 1)
def calculateFactorial(number):
    """Return number! computed recursively.

    Args:
        number: non-negative integer.

    Raises:
        ValueError: if number is negative — the original recursion would
            never reach the base case and overflow the stack.
    """
    if number < 0:
        raise ValueError("factorial is undefined for negative numbers")
    if number == 0:
        return 1  # base case: 0! == 1
    return number * calculateFactorial(number - 1)
def main():
    """Demonstrate the recursive helpers on a few sample values."""
    print("{0} to the power of {1} is: {2}".format(2, 3, calculatePower(2, 3)))
    print("{0} to the power of {1} is: {2}".format(10, 2, calculatePower(10, 2)))
    print("Factorial of {0} is: {1}".format(0, calculateFactorial(0)))
    print("Factorial of {0} is: {1}".format(5, calculateFactorial(5)))


if __name__ == "__main__":
    main()
# -*- coding: utf-8 -*-
"""Executor
These functions are for execution.
"""
from configs.supported_info import SUPPORTED_TRAINER
from trainers.default_trainer import DefaultTrainer
def get_trainer(cfg: object) -> object:
    """Get trainer

    Args:
        cfg: Config of the project.

    Returns:
        Trainer object.

    Raises:
        NotImplementedError: If the trainer you want to use is not supported,
            or is listed as supported but has no construction branch here.
    """
    trainer_name = cfg.train.trainer.name
    if trainer_name not in SUPPORTED_TRAINER:
        raise NotImplementedError('The trainer is not supported.')
    if trainer_name == "default":
        return DefaultTrainer(cfg)
    # Previously this fell off the end and silently returned None for any
    # supported trainer other than "default"; fail loudly instead.
    raise NotImplementedError(
        'Trainer "{}" is in SUPPORTED_TRAINER but not constructed here.'.format(trainer_name))
import numpy as np
import matplotlib.pyplot as plt
# Borderless figure with one row per demonstrated marker property.
fig = plt.figure(figsize=(4.25,8*.55))
ax = fig.add_axes([0,0,1,1], xlim=[0,11], ylim=[0.5,8.5], frameon=False,
                  xticks=[], yticks=[]) #, aspect=1)
# Ten sample x positions per row; y counts down from the top row (8).
X = np.linspace(1,10,10)
Y = np.zeros(len(X))
y = 8
# Marker edge color
# ----------------------------------------------------------------------------
C = ["C%d" % i for i in range(10)]
plt.scatter(X, y+Y, s=200, facecolor="white", edgecolor=C, linewidth=1.5)
for x,c in zip(X,C):
    plt.text(x, y-0.25, '"%s"' % c,
             size="x-small", ha="center", va="top", family="monospace")
plt.text(X[0]-0.25, y+0.25, "Marker edge color",
         size="small", ha="left", va="baseline")
plt.text(X[-1]+0.25, y+0.25, "mec / ec", color="blue",
         size="small", ha="right", va="baseline", family="monospace")
y -= 1
# Marker face color
# ----------------------------------------------------------------------------
C = ["C%d" % i for i in range(10)]
plt.scatter(X, y+Y, s=200, facecolor=C, edgecolor="None")
for x,c in zip(X,C):
    plt.text(x, y-0.25, '"%s"' % c,
             size="x-small", ha="center", va="top", family="monospace")
plt.text(X[0]-0.25, y+0.25, "Marker face color",
         size="small", ha="left", va="baseline")
plt.text(X[-1]+0.25, y+0.25, "mfc / fc", color="blue",
         size="small", ha="right", va="baseline", family="monospace")
y -= 1
# Marker edge width
# ----------------------------------------------------------------------------
LW = (1+np.arange(10)/2)
plt.scatter(X, y+Y, s=100, facecolor="white", edgecolor="black", linewidth=LW)
for x,lw in zip(X,LW):
    plt.text(x, y-0.25, "%.1f" % lw,
             size="x-small", ha="center", va="top", family="monospace")
plt.text(X[0]-0.25, y+0.25, "Marker edge width",
         size="small", ha="left", va="baseline")
plt.text(X[-1]+0.25, y+0.25, "mew / lw", color="blue",
         size="small", ha="right", va="baseline", family="monospace")
y -= 1
# Marker size (header previously repeated "Marker edge width" by mistake)
# ----------------------------------------------------------------------------
S = (1+np.arange(10))*25
plt.scatter(X, y+Y, s=S, facecolor="black", edgecolor="None")
for x,s in zip(X,S):
    plt.text(x, y-0.25, '%d' % s,
             size="x-small", ha="center", va="top", family="monospace")
plt.text(X[0]-0.25, y+0.25, "Marker size",
         size="small", ha="left", va="baseline")
plt.text(X[-1]+0.25, y+0.25, "ms / s", color="blue",
         size="small", ha="right", va="baseline", family="monospace")
y -= 1
X = np.linspace(1,10,12)  # twelve markers per row from here on
# Filled markers
# -----------------------------------------------------------------------------
M = [".", "o", "s", "P", "X", "*", "p", "D", "<", ">", "^", "v"]
for x, marker in zip(X,M):
    # Grey square behind each sample to make the marker extent visible.
    plt.scatter(x, y, s=256, color="black", marker="s",fc=".9", ec="none")
    plt.scatter(x, y, s=100, color="black", marker=marker,
                fc="white", ec="black",linewidth=0.75)
    plt.text(x, y-0.25, '"%s"' % marker,
             size="x-small", ha="center", va="top", family="monospace")
plt.text(X[0]-0.25, y+0.25, "Filled markers", size="small", ha="left", va="baseline")
plt.text(X[-1]+0.25, y+0.25, "marker", color="blue",
         size="small", ha="right", va="baseline", family="monospace")
y -= 1
# Unfilled markers
# -----------------------------------------------------------------------------
# Integer marker codes (4-7) are valid marker specs too, hence the str check
# when building each label.
M = ["1", "2", "3", "4", "+", "x", "|", "_", 4, 5, 6, 7]
for x, marker in zip(X,M):
    if isinstance(marker,str): text = '"%s"' % marker
    else: text = '%s' % marker
    plt.scatter(x, y, s=256, color="black", marker="s",fc=".9", ec="none")
    plt.scatter(x, y, s=100, color="black", marker=marker,
                fc="none", ec="black",linewidth=0.75)
    plt.text(x, y-0.25, text,
             size="x-small", ha="center", va="top", family="monospace")
plt.text(X[0]-0.25, y+0.25, "Unfilled markers", size="small", ha="left", va="baseline")
plt.text(X[-1]+0.25, y+0.25, "marker", color="blue",
         size="small", ha="right", va="baseline", family="monospace")
y -= 1
# Unicode markers
# -----------------------------------------------------------------------------
# Any glyph can serve as a marker via mathtext: marker="$<glyph>$".
M = ["♠","♣","♥","♦", "→","←","↑","↓", "◐","◑","◒","◓"]
for x, marker in zip(X,M):
    ax.scatter(x, y, s=256, color="black", marker="s",fc=".9", ec="none")
    ax.scatter(x, y, s=100, color="black", marker="$"+marker+"$",
               fc="black", ec="none", linewidth=0.5)
    # Raw string: '\$' is an invalid escape sequence in a normal literal
    # (SyntaxWarning on modern Python); the rendered label is unchanged.
    ax.text(x, y-0.25, r'"\$%s\$"' % marker,
            size="x-small", ha="center", va="top", family="monospace")
ax.text(X[0]-0.25, y+0.25, "Unicode markers", size="small", ha="left", va="baseline")
ax.text(X[-1]+0.25, y+0.25, "marker", color="blue",
        size="small", ha="right", va="baseline", family="monospace")
y -= 1
# Spacing (markevery)
# -----------------------------------------------------------------------------
# Four line segments demonstrating different markevery forms:
# int stride, index list, (start, stride) tuple, and slice-style list.
n_segment = 4
width = 9
segment_width = 0.75*(width/n_segment)
segment_pad = (width - n_segment*segment_width)/(n_segment-1)
X0 = 1+np.arange(n_segment)*(segment_width+segment_pad)
marks = [ 10, [0,-1], (25, 5), [0,25,-1] ]
for x0, mark in zip(X0,marks):
    X = np.linspace(x0, x0+segment_width, 50)
    Y = y*np.ones(len(X))
    ax.plot(X, Y, linewidth=1, color="black",
            marker=".", mfc="white", mec="black", mew="1", markevery=mark)
    ax.text((X[0]+X[-1])/2, y-0.1, '%s' % str(mark),
            size="x-small", ha="center", va="top")
ax.text(1-0.25, y+0.25, "Marker spacing", size="small", ha="left", va="baseline")
ax.text(X[-1]+0.25, y+0.25, "markevery", color="blue",
        size="small", ha="right", va="baseline", family="monospace")
plt.savefig("reference-markers.pdf", dpi=600)
plt.show()
#!/usr/bin/env python
"""
A script to build a little example netcdf file for trajectories
"""
import numpy as np
import datetime
from post_gnome import nc_particles
def write_sample_file(filename):
    """Write a small, hand-built nc_particles trajectory file.

    Three timesteps, 30 minutes apart, with a varying particle count
    (3, 4, then 2) to exercise the ragged-array layout; particle identity
    across steps is carried by the 'id' field.
    """
    # NOTE(review): start_time is unused — superseded by the explicit
    # timesteps list below.
    start_time = datetime.datetime(2010, 11, 3, 12)
    #timesteps = [T.start_time + datetime.timedelta(hour=1) for i in range(10)]
    timesteps = [datetime.datetime(2010, 11, 3, 12, 0),
                 datetime.datetime(2010, 11, 3, 12, 30),
                 datetime.datetime(2010, 11, 3, 13, 0),
                 ]
    trajectory = []
    # first timestep: three particles
    trajectory.append( {'longitude': np.array( [-88.0, -88.1, -88.1, ], dtype=np.float64 ),
                        'latitude': np.array( [28.0, 28.0, 28.1, ], dtype=np.float64 ),
                        'depth': np.array( [0.0, 0.1, 0.2, ], dtype=np.float64 ),
                        'mass': np.array( [0.01, 0.005, 0.007, ], dtype=np.float64 ),
                        'id': np.array( [0, 1, 2, ], dtype=np.int32 ),
                        'status_code': np.array( [1, 2, 3, ], dtype=np.int16,)
                        } )
    # second timestep: four particles
    trajectory.append( {'longitude': np.array( [-88.0, -88.1, -88.1, -87.9, ], dtype=np.float64 ),
                        'latitude': np.array( [28.0, 28.0, 28.1, 27.9], dtype=np.float64 ),
                        'depth': np.array( [0.0, 0.1, 0.2, 0.1], dtype=np.float64 ),
                        'mass': np.array( [0.01, 0.005, 0.007, 0.006, ], dtype=np.float64 ),
                        'id': np.array( [0, 1, 2, 3, ], dtype=np.int32 ),
                        'status_code': np.array( [1, 2, 3, 4, ], dtype=np.int16,)
                        } )
    # third timestep: two particles (ids 1 and 3 survive)
    trajectory.append( {'longitude': np.array( [-88.0, -88.1, ], dtype=np.float64 ),
                        'latitude': np.array( [28.0, 28.0, ], dtype=np.float64 ),
                        'depth': np.array( [0.0, 0.1, ], dtype=np.float64 ),
                        'mass': np.array( [0.01, 0.005, ], dtype=np.float64 ),
                        'id': np.array( [1, 3, ], dtype=np.int32 ),
                        'status_code': np.array( [2, 3, ], dtype=np.int16,)
                        } )
    writer = nc_particles.Writer(filename,
                                 num_timesteps=len(timesteps),
                                 ref_time=timesteps[0],
                                 )
    # One ragged record per timestep.
    for i, time in enumerate(timesteps):
        writer.write_timestep(time, trajectory[i])
    writer.close()


if __name__ == "__main__":
    write_sample_file('test_particles.nc')
| 5,117 | 1,236 |
from subprocess import Popen, PIPE
import re
import glob
import os
import pickle
import json
import time
import logging
import argparse
import datetime
import hashlib
__author__ = "Paolo Di Prodi"
__copyright__ = "Copyright 2018, Paolo Di Prodi"
__license__ = "Apache 2.0"
__version__ = "0.99"
__email__ = "contact [AT] logstotal.com"
# On-disk scan state: remembers which files were already processed so that
# reruns skip them.
STATE_FOLDER = os.path.join('progress','defender')
os.makedirs(STATE_FOLDER,exist_ok=True)
LOG_FOLDER = 'logs'
os.makedirs(LOG_FOLDER,exist_ok=True)
# Log to both a file and stderr.
logging.basicConfig(
    format="%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s",
    handlers=[
        logging.FileHandler("{0}/{1}.log".format(LOG_FOLDER, 'defenderscan')),
        logging.StreamHandler()
    ],level=logging.DEBUG)
class WinDefenderProcessor():
    """Drives Windows Defender (MpCmdRun) scans over a folder, yielding a
    per-file report keyed by the file's hash."""
    # End-anchored hex-digest patterns used to pull a hash out of a filename
    # (e.g. VirusShare_<md5>).
    md5rex = re.compile(r"[0-9a-f]{32}$",re.IGNORECASE)
    sha1rex = re.compile(r"[0-9a-f]{40}$",re.IGNORECASE)
    sha256rex = re.compile(r"[0-9a-f]{64}$",re.IGNORECASE)
    # Line parsers for MpCmdRun's textual report.
    threatrex = re.compile(r"^Threat\s+\:\s(.+)$")
    resrex = re.compile(r"^Resources\s+\:\s(\d+)\stotal$")
    filerex = re.compile(r"^\s+file\s+\:\s(.*)$")
    headrex = re.compile(r"found\s(\d+)\sthreats\.$")
    def __init__(self,hashtype='md5'):
        """Load previous scan state (if any) from disk.

        Args:
            hashtype: which digest ('md5'/'sha1'/'sha256') files are
                reported by.
        """
        self.preferred_hash = hashtype
        self.state_path = os.path.join(STATE_FOLDER,'state.pk')
        self.get_version()
        if os.path.exists(self.state_path):
            with open(self.state_path, 'rb') as handle:
                self.state = pickle.load(handle)
        else:
            # NOTE(review): creates LOG_FOLDER here — looks like a copy/paste;
            # STATE_FOLDER was probably intended (both already exist by now).
            os.makedirs(LOG_FOLDER,exist_ok=True)
            self.state = []  # list of file paths already scanned
@staticmethod
def hash_file(path):
with open(path, 'rb') as file:
data = file.read()
info = {
'md5': hashlib.md5(data).hexdigest(),
'sha1': hashlib.sha1(data).hexdigest(),
'sha256': hashlib.sha256(data).hexdigest()}
return info
    def get_version(self):
        """Record Defender engine/platform/signature versions on self.

        Currently hard-coded placeholders; the TODO is to query the real
        values via WMI.
        """
        # use WMI to get all versions
        self.engine = '1.1.14800.3'
        # NOTE(review): the leading space in the platform string looks
        # accidental — confirm before fixing (it flows into reports).
        self.platform = ' 4.14.17613.18039'
        self.signature = '1.267.196.0'
def get_hash(self,filename):
match = re.findall(self.md5rex, filename)
if match:
return ('md5',match[0].lower())
match = re.findall(self.sha1rex, filename)
if match:
return ('sha1', match[0].lower())
match = re.findall(self.sha256rex, filename)
if match:
return ('sha256', match[0].lower())
return (None,None)
def scan_folder(self,path,recursive = False, batch = 10):
''' Scan file one by one '''
if path.endswith(os.path.sep):
self.files = list(glob.iglob(path + '*', recursive=recursive))
else:
self.files = list(glob.iglob(path + os.path.sep + '*', recursive=recursive))
logging.info("Preparing to scan {0} total files".format(len(self.files)))
# which files were not scanned from last time
notscanned = [path for path in self.files if path not in self.state]
if len(notscanned) == 0:
logging.warn("No new files to scan")
return
interrupted = False
for filepath in notscanned:
if interrupted == True:
break
try:
[hashtype, value] = self.get_hash(os.path.basename(filepath))
defender_process = Popen(['mpcmdrun', '-scan', '-scantype', '3', '-DisableRemediation', '-file', filepath],
stdout=PIPE,stderr = PIPE)
out, err = defender_process.communicate()
if len(err.decode('utf-8')) > 0:
logging.error(err.decode('utf-8'))
summary = self.parse_defender_out(out.decode('utf-8'))
if hashtype is None or hashtype!=self.preferred_hash:
#compute the hash of the file
all_hash = WinDefenderProcessor.hash_file()
hashtype = self.preferred_hash
value = all_hash[self.preferred_hash]
if 'Found' not in summary:
report = {hashtype: value, "defender": '',
"engine": self.engine, "platform": self.platform,
"signature": self.signature,
'scanTime': datetime.datetime.utcnow().isoformat()}
elif summary['Found'] > 0 :
for threat in summary['Threats']:
report = {hashtype: value, "defender": threat['Threat'],
"engine": self.engine, "build": self.platform,
"signature": self.signature,
'scanTime': datetime.datetime.utcnow().isoformat()}
self.state += [filepath]
except KeyboardInterrupt:
# remove the last chunk just in case
del self.state[-1:]
# kill the clamscan process
if defender_process.poll() is not None:
defender_process.kill()
logging.warning("Terminating batch process....")
interrupted = True
finally:
yield report
with open(self.state_path, 'wb') as handle:
pickle.dump(self.state, handle, protocol=pickle.HIGHEST_PROTOCOL)
def parse_defender_out(self,report):
# 'Threat : Virus:DOS/Svir'
# 'Resources : 1 total'
# ' file : F:\\VirusShare_xxxxx\\VirusShare_000a50c55a2f4517d2e27b21f4b27e3b'
lines = report.split('\r\n')
header = False
begin_manifest = False
end_manifest = False
summary = {}
detection = {}
for line in lines:
if 'LIST OF DETECTED THREATS' in line or 'Scan starting...' in line or 'Scan finished.' in line:
header = True
continue
elif line.startswith("Scanning"):
match = re.findall(self.headrex, line)
if match:
summary["Found"] = int(match[0])
summary["Threats"] = []
header = False
elif 'Threat information' in line:
begin_manifest = True
continue
elif line.count('-') == len(line):
end_manifest = True
# time to flush!
if len(detection.keys())>0:
summary["Threats"].append(detection)
detection = {}
begin_manifest = False
elif begin_manifest == True:
match = re.findall(self.threatrex, line)
if match:
detection['Threat'] = match[0]
match = re.findall(self.resrex, line)
if match:
detection['Resources'] = match[0]
match = re.findall(self.filerex, line)
if match:
if 'Files' in detection:
detection['Files'].append(match[0])
else:
detection['Files'] = [match[0]]
return summary
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Scan an entire folder with ClamAv')
parser.add_argument('--version',action='store_true',
help='Display Clam version')
parser.add_argument('--scan',action='store_true',
help='Scan a folder')
parser.add_argument('--folder',dest='folder', type=str,
help='Folder with virus samples')
parser.add_argument('--detections',dest='detections', type=str,
help='Folder with the output of the scan')
parser.add_argument('--merge', action='store_true',
help='Merge previous scans')
parser.add_argument('--recursive', action='store_true',
help='Scan all files with nested folders')
parser.add_argument('--batchsize', default = 140, type = int,
help='Batch scanning in groups')
parser.add_argument('--newline', action='store_true', default= True,
help='Use new lines in json output')
args = parser.parse_args()
if args.merge:
processor = WinDefenderProcessor()
merged = processor.merge_scans(args.detections,args.newline)
if args.version:
processor = WinDefenderProcessor()
version = processor.get_version()
print("Version {0} Build {1}".format(version[0],version[1]))
sigs = processor.get_definition_time()
print("Signature date {0}".format(sigs.isoformat()))
if args.scan:
if args.folder:
processor = WinDefenderProcessor()
os.makedirs(args.detections,exist_ok=True)
reports = []
for report in processor.scan_folder(args.folder,recursive=args.recursive):
reports.append(report)
if len(reports) >= args.batchsize:
output_file = os.path.join(args.detections, "%s.json" % int(time.time()))
with open(output_file, "w") as file:
json.dump(reports, file)
logging.info("Saved %d " % len(reports))
reports = []
if len(reports) > 0:
output_file = os.path.join(args.detections, "%s.json" % int(time.time()))
with open(output_file, "w") as file:
json.dump(reports, file)
logging.info("Saved %d " % len(reports)) | 9,779 | 2,914 |
# imports
import json
import requests
import logging
import sys
# Configure root logging once at import time.
# FIX: the format string contains only %-style logging placeholders, so the
# stray f-string prefix did nothing and was removed (behavior unchanged).
logging.basicConfig(level=logging.INFO, format='[%(asctime)s] - %(levelname)s - %(name)s %(threadName)s : %(message)s')
# NOTE(review): this handler is created but never attached to any logger —
# kept so the module's public names are unchanged; confirm whether it can go.
handler = logging.StreamHandler(sys.stdout)
logger = logging.getLogger(__name__)
# constants
# TRAPI endpoint of the SPARQL ontology knowledge provider.
URL_ONTOLOGY_KP = "https://stars-app.renci.org/sparql-kp/query"
# methods
def build_query(predicate, subject_category, subject_id, object_category, object_id):
    ''' will build a trapi v1.1 query

    Falsy category/id arguments are omitted from the node; scalar ids are
    wrapped into single-element lists as TRAPI expects.
    '''
    def _listify(value):
        # TRAPI wants a list of ids; accept either a list or a scalar.
        return value if isinstance(value, list) else [value]

    subject_node = {}
    object_node = {}
    if subject_category:
        subject_node["categories"] = [subject_category]
    if object_category:
        object_node["categories"] = [object_category]
    if subject_id:
        subject_node["ids"] = _listify(subject_id)
    if object_id:
        object_node["ids"] = _listify(object_id)
    query_graph = {
        "edges": {"e00": {"predicates": [predicate], "subject": "n00", "object": "n01"}},
        "nodes": {"n00": subject_node, "n01": object_node},
    }
    # return
    return {"message": {"query_graph": query_graph}}
def get_node_list(json_response):
    ''' will extract the nodes from the trapi v1.1 response

    Returns the list of node ids from the response's knowledge graph, or []
    for None/malformed responses.

    BUG FIX: the original guarded on "query_graph" being present but then
    dereferenced "knowledge_graph", raising AttributeError whenever the
    knowledge graph was absent from the response.
    '''
    result = []
    if json_response and json_response.get("message"):
        knowledge_graph = json_response.get("message").get("knowledge_graph")
        # only iterate when a knowledge graph with nodes actually came back
        if knowledge_graph and knowledge_graph.get("nodes"):
            result = list(knowledge_graph.get("nodes").keys())
    # return result
    return result
def query_service(url, query):
    ''' will do a post call to a service with a trapi v1.1 query

    Returns the decoded JSON response, or None when the request or JSON
    decoding fails.

    BUG FIX: network failures raise requests.exceptions.RequestException
    (e.g. ConnectionError, Timeout), which the original except tuple did
    not cover, so any unreachable service crashed the caller.
    '''
    response = None
    # call
    try:
        response = requests.post(url, json=query).json()
    except (requests.exceptions.RequestException, RuntimeError, TypeError, NameError, ValueError):
        # json decoding errors are ValueError subclasses, so they stay covered
        print('ERROR: query_service - REST query or decoding JSON has failed')
    # return
    return response
def get_disease_descendants(disease_id, category=None, debug=False):
    ''' will query the trapi v1.1 ontology kp and return the descendant diseases

    The returned list is de-duplicated and always contains *disease_id*
    itself, so callers still get a usable list when the remote call fails.
    '''
    trapi_query = build_query(predicate="biolink:subclass_of",
                              subject_category=category,
                              object_category=category,
                              subject_id=None,
                              object_id=disease_id)
    if debug:
        print("the query is: \n{}".format(json.dumps(trapi_query, indent=2)))
    # ask the KP, then pull the node ids out of the knowledge graph
    kp_response = query_service(URL_ONTOLOGY_KP, trapi_query)
    descendants = get_node_list(kp_response)
    # always add itself back in in case there was error and empty list returned
    descendants.append(disease_id)
    # get unique elements in the list
    descendants = list(set(descendants))
    if debug:
        print("got the child disease list: {}".format(descendants))
    return descendants
def get_disease_descendants_from_list(list_curie_id, category=None, log=False):
    ''' will query the trapi v1.1 ontology kp and return the descendant diseases, will return list of (original, new) tuples '''
    pairs = []
    # only EFO and MONDO curies are resolvable by the ontology KP
    efo_mondo_ids = [curie for curie in list_curie_id if curie.split(':')[0] in ['EFO', 'MONDO']]
    trapi_query = build_query(predicate="biolink:subclass_of", subject_category=category, object_category=category, subject_id=None, object_id=efo_mondo_ids)
    if log:
        logger.info("reduced efo/mondo input descendant list from: {} to: {}".format(list_curie_id, efo_mondo_ids))
        if len(efo_mondo_ids) > 0:
            logger.info("the query is: \n{}".format(json.dumps(trapi_query, indent=2)))
    # query the KP and get the results
    json_response = query_service(URL_ONTOLOGY_KP, trapi_query)
    if json_response and json_response.get("message") and json_response.get("message").get("knowledge_graph"):
        knowledge_graph = json_response.get("message").get("knowledge_graph")
        logger.info("edges: {}".format(knowledge_graph.get("edges")))
        edges = knowledge_graph.get("edges")
        if edges:
            for edge in edges.values():
                # each subclass_of edge links descendant (subject) to ancestor (object)
                pairs.append((edge.get("object"), edge.get("subject")))
    # get unique elements in the list
    pairs = list(set(pairs))
    if log:
        for pair in pairs:
            logger.info("got the web descendant disease entry: {}".format(pair))
    return pairs
# test
if __name__ == "__main__":
    # Smoke-test driver: exercises the descendant lookup against the live
    # ontology KP (requires network access; nothing is asserted).
    disease_id = "MONDO:0007972" # meniere's disease
    # NOTE(review): the assignment above is a dead store — it is immediately
    # overwritten, so only the ehler's danlos id is used below.
    disease_id = "MONDO:0020066" # ehler's danlos
    # disease_id = "MONDO:0005267" # heart disease
    get_disease_descendants(disease_id=disease_id, category="biolink:DiseaseOrPhenotypicFeature", debug=True)
    get_disease_descendants(disease_id=disease_id, debug=True)
    # json_query = build_query(predicate="biolink:subclass_of", subject_category="biolink:Disease", object_category="biolink:Disease", subject_id=None, object_id=)
    # test server error catching
    # A gene curie: the KP should return nothing useful, exercising the
    # "add itself back in" fallback path.
    disease_id = "NCBIGene:1281"
    get_disease_descendants(disease_id=disease_id, debug=True)
    # # print result
    # print("the query is: \n{}".format(json.dumps(json_query, indent=2)))
    # # query the KP and get the results
    # response = query_service(URL_ONTOLOGY_KP, json_query)
    # list_diseases = get_node_list(response)
    # print("got the child disease list: {}".format(list_diseases))
import traceback
import time
import threading
from zutils.zrpc.client.task_template import TaskTemplateWithFrame, TaskTemplateWithoutFrame
class WorkThread(threading.Thread):
    """Worker thread that repeatedly drives a task-template instance.

    Every constructed WorkThread registers itself in the class-level
    ``thread_dict`` (keyed by a stringified counter) together with a
    one-slot "mailbox" used to hand a frame to 'withframe' tasks.
    ``start_all``/``stop_all`` control every registered thread at once.

    NOTE(review): ``status`` and the registry are shared class state, so all
    WorkThread instances in a process form one group.
    """
    status = 'init'        # lifecycle: 'init' -> 'run' -> 'exit'; run() watches it
    thread_num = 0         # monotonically increasing id source
    thread_dict = dict()   # thread_id -> [thread, pending_frame]
    cur_frame = None       # NOTE(review): never used in this class; confirm callers
    def __init__(self, run_instance):
        """Register *run_instance*, which must inherit a TaskTemplate base."""
        threading.Thread.__init__(self)
        self.run_instance = run_instance
        self.thread_id = str(WorkThread.thread_num)
        WorkThread.thread_num += 1
        WorkThread.thread_dict[self.thread_id] = [self, None]
        self.run_type = None
        if isinstance(run_instance, TaskTemplateWithFrame):
            self.run_type = 'withframe'
        elif isinstance(run_instance, TaskTemplateWithoutFrame):
            self.run_type = 'withoutframe'
        else:
            raise Exception('run_instance 不是继承 TaskTemplate')
    def run(self):
        """Main loop: consume the mailbox frame (withframe) or just run, then sleep."""
        instance_frame = WorkThread.thread_dict[self.thread_id]
        while WorkThread.status == 'run':
            try:
                if self.run_type == 'withframe':
                    # take-and-clear the pending frame; skip when none arrived
                    frame = instance_frame[1]
                    instance_frame[1] = None
                    if frame is not None:
                        self.run_instance.run(frame)
                else:
                    self.run_instance.run()
            except Exception:
                # BUG FIX: was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt; log the error and back off.
                traceback.print_exc()
                time.sleep(1)
            self.run_instance.sleep()
    @staticmethod
    def set_frame(frame):
        """Put *frame* into every registered thread's mailbox."""
        thread_dict = WorkThread.thread_dict
        for key in thread_dict:
            thread_dict[key][1] = frame
    @staticmethod
    def start_all():
        """Flip status to 'run' and start every registered thread."""
        WorkThread.status = 'run'
        thread_dict = WorkThread.thread_dict
        for key in thread_dict:
            thread_dict[key][0].start()
            print(type(thread_dict[key][0].run_instance), 'start')
        print('start all thread')
    @staticmethod
    def stop_all():
        """Flip status to 'exit' and join every registered thread."""
        WorkThread.status = 'exit'
        thread_dict = WorkThread.thread_dict
        for key in thread_dict:
            thread_dict[key][0].join()
            print(type(thread_dict[key][0].run_instance), 'stop')
        print('stop all thread')
if __name__ == '__main__':
    # Manual smoke test: spin up three worker threads for ~15 seconds.
    # NOTE(review): TaskTemplate is imported from zutils.task.client here,
    # while the module header imports the bases from zutils.zrpc.client —
    # confirm the two paths expose compatible classes, otherwise the
    # isinstance checks in WorkThread.__init__ will reject these instances.
    from zutils.task.client.task_template import TaskTemplate
    class XXX(TaskTemplate):
        # only customizes the idle pause; run() comes from the base class
        def sleep(self):
            print('xxxxxx')
            time.sleep(1)
    class YYY(TaskTemplate):
        # same as XXX but with a longer idle pause
        def sleep(self):
            print('yyyyyy')
            time.sleep(3)
    class ZZZ(TaskTemplate):
        # frame consumer: increments the frame and pushes it back to every
        # thread's mailbox, so the counter keeps circulating
        def run(self, frame):
            WorkThread.set_frame(frame + 1)
        def sleep(self):
            print('zzzzzz')
            time.sleep(3)
    # constructing a WorkThread registers it in the class-level registry
    WorkThread(XXX())
    WorkThread(YYY())
    WorkThread(ZZZ())
    # seed the first frame, run the group for 15 seconds, then shut down
    WorkThread.set_frame(0)
    WorkThread.start_all()
    time.sleep(15)
    WorkThread.stop_all()
#
# a = {
#     'a':1,
#     'b':2
# }
#
#
#
#
#
# v = a['a']
# a['a'] = None
# print(v)
# print(a)
#
#! /usr/bin/env python
import yaml
# Location of the YAML configuration file (expected in the working directory).
config_file = "config.yml"
# Load the configuration into ``cfg`` with the safe loader, which does not
# construct arbitrary Python objects from the document.
with open(config_file, "r") as stream:
    cfg = yaml.safe_load(stream)
import logging
from urllib.parse import urlsplit
import requests
import xmltodict
from .cmd_input import GetInputsListCommand, GetCurrentInputCommand, ChangeInputCommand
from .cmd_pmode import GetPmodesListCommand, GetCurrentPmodeCommand, ChangePmodeCommand
from .cmd_pair import BeginPairCommand, CancelPairCommand, PairChallengeCommand
from .cmd_power import GetPowerStateCommand
from .cmd_remote import EmulateRemoteCommand
from .cmd_settings import GetCurrentAudioCommand
from .discovery import discover
from .protocol import invoke_api, invoke_api_auth, KeyCodes
# Module-level logger passed into the protocol helpers.
_LOGGER = logging.getLogger(__name__)
# Hardware-specific maximum volume level, keyed by device type.
MAX_VOLUME = {
    'tv': 100,
    'soundbar': 31
}
class DeviceDescription(object):
    """Plain value object describing a Vizio device found during discovery."""
    def __init__(self, ip, name, model, udn):
        # keep the four discovery attributes together in one assignment
        self.ip, self.name, self.model, self.udn = ip, name, model, udn
class c_Vizio(object):
    """Client for the Vizio SmartCast REST API on a TV or soundbar.

    Commands are sent either as (optionally authenticated) API calls or as
    emulated remote-control key presses. Soundbars do not require an auth
    token; TVs do for most operations.
    """
    def __init__(self, device_id, ip, name, auth_token="", device_type="tv"):
        """Store connection parameters; *device_type* is 'tv' or 'soundbar'."""
        self._device_type = device_type.lower()
        if self._device_type != "tv" and self._device_type != "soundbar":
            # ValueError is more precise than the bare Exception raised before
            # and stays catchable by existing `except Exception` callers.
            raise ValueError("Invalid device type specified. Use either 'tv' or 'soundbar'")
        self._ip = ip
        self._name = name
        self._device_id = device_id
        self._auth_token = auth_token
    def __invoke_api(self, cmd):
        # Unauthenticated API call (used for pairing).
        return invoke_api(self._ip, cmd, _LOGGER)
    def __invoke_api_may_need_auth(self, cmd):
        # Authenticated call; soundbars are allowed through without a token.
        if self._auth_token is None or "" == self._auth_token:
            if self._device_type == "soundbar":
                return invoke_api(self._ip, cmd, _LOGGER)
            else:
                raise ValueError("Empty auth token. To target a soundbar and bypass auth requirements, pass 'soundbar' as device_type")
        return invoke_api_auth(self._ip, cmd, self._auth_token, _LOGGER)
    def __remote(self, key_list):
        """Send one key press or a list of key presses; True on success."""
        if not isinstance(key_list, list):
            key_list = [key_list]
        key_codes = []
        for key in key_list:
            if key not in KeyCodes.CODES[self._device_type]:
                _LOGGER.error("Key Code of '%s' not found for device type of '%s'", key, self._device_type)
                return False
            key_codes.append(KeyCodes.CODES[self._device_type][key])
        cmd = EmulateRemoteCommand(key_codes, self._device_type)
        result = self.__invoke_api_may_need_auth(cmd)
        return result is not None
    def __remote_multiple(self, key_code, num):
        # Repeat the same key press *num* times in a single command.
        return self.__remote([key_code] * num)
    @staticmethod
    def discovery():
        """Discover Vizio devices via SSDP/DIAL; returns DeviceDescription list."""
        results = []
        devices = discover("urn:dial-multiscreen-org:device:dial:1")
        for dev in devices:
            # NOTE(review): verify=False disables TLS certificate checks —
            # the devices use self-signed certs, but be aware this is insecure.
            data = xmltodict.parse(requests.get(dev.location, verify=False).text)
            if "root" not in data or "device" not in data["root"]:
                continue
            root = data["root"]["device"]
            manufacturer = root["manufacturer"]
            # A plain != also rejects a missing (None) manufacturer.
            if manufacturer != "VIZIO":
                continue
            split_url = urlsplit(dev.location)
            device = DeviceDescription(split_url.hostname, root["friendlyName"], root["modelName"], root["UDN"])
            results.append(device)
        return results
    def start_pair(self):
        """Begin the pairing handshake with the device."""
        return self.__invoke_api(BeginPairCommand(self._device_id, self._name, self._device_type))
    def stop_pair(self):
        """Cancel an in-progress pairing handshake."""
        return self.__invoke_api(CancelPairCommand(self._device_id, self._name, self._device_type))
    def pair(self, ch_type, token, pin):
        """Complete pairing with the on-screen PIN; yields the auth token."""
        return self.__invoke_api(PairChallengeCommand(self._device_id, ch_type, token, pin, self._device_type))
    def get_inputs(self):
        """Return the device's available inputs."""
        return self.__invoke_api_may_need_auth(GetInputsListCommand(self._device_type))
    def get_current_input(self):
        """Return the currently selected input."""
        return self.__invoke_api_may_need_auth(GetCurrentInputCommand(self._device_type))
    def get_pmodes(self):
        """Return the available picture modes."""
        return self.__invoke_api_may_need_auth(GetPmodesListCommand(self._device_type))
    def get_current_pmode(self):
        """Return the currently active picture mode."""
        return self.__invoke_api_may_need_auth(GetCurrentPmodeCommand(self._device_type))
    def get_power_state(self):
        """Return whether the device is powered on."""
        return self.__invoke_api_may_need_auth(GetPowerStateCommand(self._device_type))
    def pow_on(self):
        """Power the device on."""
        return self.__remote("POW_ON")
    def pow_off(self):
        """Power the device off."""
        return self.__remote("POW_OFF")
    def pow_toggle(self):
        """Toggle the device's power state."""
        return self.__remote("POW_TOGGLE")
    def vol_up(self, num=1):
        """Raise the volume by *num* steps."""
        return self.__remote_multiple("VOL_UP", num)
    def vol_down(self, num=1):
        """Lower the volume by *num* steps."""
        return self.__remote_multiple("VOL_DOWN", num)
    def get_current_volume(self):
        """Return the current audio settings/volume."""
        return self.__invoke_api_may_need_auth(GetCurrentAudioCommand(self._device_type))
    def get_max_volume(self):
        """Return the hardware maximum volume for this device type."""
        return MAX_VOLUME[self._device_type]
    def ch_up(self, num=1):
        """Go up *num* channels."""
        return self.__remote_multiple("CH_UP", num)
    def ch_down(self, num=1):
        """Go down *num* channels."""
        return self.__remote_multiple("CH_DOWN", num)
    def ch_prev(self):
        """Return to the previous channel."""
        return self.__remote("CH_PREV")
    def mute_on(self):
        """Mute the audio."""
        return self.__remote("MUTE_ON")
    def mute_off(self):
        """Unmute the audio."""
        return self.__remote("MUTE_OFF")
    def mute_toggle(self):
        """Toggle audio mute."""
        return self.__remote("MUTE_TOGGLE")
    def input_next(self):
        """Cycle to the next input."""
        # HACK: Single call just invoking overlay menu with current input
        return self.__remote_multiple("INPUT_NEXT", 2)
    def input_switch(self, name):
        """Switch to the input called *name*; False if current input unknown."""
        cur_input = self.get_current_input()
        if cur_input is None:
            _LOGGER.error("Couldn't detect current input")
            return False
        return self.__invoke_api_may_need_auth(ChangeInputCommand(cur_input.id, name, self._device_type))
    def pmode_switch(self, name):
        """Switch to the picture mode called *name*; False if unknown."""
        cur_pmode = self.get_current_pmode()
        if cur_pmode is None:
            _LOGGER.error("Couldn't detect current picture mode")
            return False
        return self.__invoke_api_may_need_auth(ChangePmodeCommand(cur_pmode.id, name, self._device_type))
    def play(self):
        """Send the PLAY media key."""
        return self.__remote("PLAY")
    def pause(self):
        """Send the PAUSE media key."""
        return self.__remote("PAUSE")
    def remote(self, key):
        """Send an arbitrary named remote key."""
        return self.__remote(key)
    def get_device_keys(self):
        """Return all remote key names valid for this device type."""
        return KeyCodes.CODES[self._device_type].keys()
| 6,376 | 1,956 |