text stringlengths 957 885k |
|---|
<gh_stars>0
import cv2
import numpy as np
import keras
from keras.layers import *
from keras.models import Model , load_model
from keras.preprocessing import image
from keras.utils import np_utils
from keras.applications.resnet50 import ResNet50
from keras.optimizers import Adam
import matplotlib.pyplot as plt
import random
import os
import playsound
# Python code to illustrate Sending mail from
# your Gmail account
import smtplib
# import reverse_geocoder as rg
import pprint
import smtplib
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
import reverse_geocoder as rg
import pprint
import os
import os
import googlemaps
from twilio.rest import Client
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from ipregistry import IpregistryClient
# --- Real-time violence detection demo ---
# Streams frames from Test.mp4, classifies every 9th frame with a pre-trained
# Keras model and, after 7 consecutive "Violence" predictions, fires local and
# remote alerts: siren sound, geolocated alert e-mail with a frame snapshot,
# plus a Twilio voice call and SMS.
# NOTE(review): this paste lost its indentation; the nesting below is a
# best-effort reconstruction of the original control flow -- verify upstream.
count = 0              # frames seen since the last model prediction
continous_viol = 0     # consecutive "Violence" predictions (sic: "continuous")
pred = "Non-Violence"  # label drawn on each displayed frame
model = load_model("using_batch_128_notebook_new_data_gray_new_1_epochs.h5")
cam = cv2.VideoCapture("Test.mp4")
while True:
    ret,frame = cam.read()
    mail_send_frame = frame  # keep the raw frame for the alert e-mail snapshot
    if ret==False:
        # End of stream / read failure terminates the loop.
        print("Something Went Wrong!")
        break
    key_pressed = cv2.waitKey(1) & 0xFF
    if key_pressed == ord('q'):  # manual quit
        break
    # The model expects a single 224x224x3 image batch.
    test_data = cv2.resize(frame, (224,224))
    count = count + 1
    test_data = np.array(test_data)
    test_data.shape = (1,224,224,3)
    if count == 9:  # predict only every 9th frame to save compute
        zz = model.predict(test_data)
        print(zz[0][0])
        count = 0
        if zz[0][0]<0.24:  # score below threshold => "Violence" class
            pred = "Violence"
            continous_viol = continous_viol + 1
            if continous_viol == 7:  # require 7 detections in a row before alerting
                playsound.playsound('chase_siren.wav')
                continous_viol = 0
                # Geolocate via the machine's public IP (placeholder API key).
                client = IpregistryClient("API KEY OF USER")
                ipInfo = client.lookup()
                zzz_lat = ipInfo.location["latitude"]
                zzz_long = ipInfo.location["longitude"]
                print("live location is latitude {0} and longitude {1}".format(zzz_lat,zzz_long))
                # Save the offending frame and e-mail it as an attachment.
                filename ="DANGER.jpg"
                cv2.imwrite("%s"%filename, mail_send_frame)
                message = "ALERT VIOLENCE DETECTED and live location is latitude {0} and longitude {1}".format(zzz_lat,zzz_long)
                img_data = open("DANGER.jpg", 'rb').read()
                msg = MIMEMultipart()
                msg['Subject'] = 'ALERT VIOLENCE'
                msg['From'] = "SENDER'S EMAIL ADDRESS"
                msg['To'] = "RECIEVER's EMAIL ADDRESS"
                text = MIMEText(message)
                msg.attach(text)
                image = MIMEImage(img_data, name=os.path.basename("DANGER.jpg"))
                msg.attach(image)
                # Gmail SMTP over STARTTLS; credentials are placeholders and
                # should come from configuration, never source code.
                s = smtplib.SMTP('smtp.gmail.com', 587)
                s.ehlo()
                s.starttls()
                s.ehlo()
                s.login("SENDER'S EMAIL ADDRESS", "SENDER'S EMAIL ADDRESS PASSWORD")
                s.sendmail("SENDER'S EMAIL ADDRESS", "RECIEVER's EMAIL ADDRESS", msg.as_string())
                s.quit()
                print("MAIL SENT")
                # Twilio voice call alert (placeholder credentials).
                account_sid = 'API KEY OF USER (Eg- <KEY>) '
                auth_token = 'API KEY (Eg- 337c5e5465e1bd8f31417602d2) '
                client = Client(account_sid, auth_token)
                call = client.calls.create(
                    url='http://demo.twilio.com/docs/voice.xml',
                    to=' RECIEVERs PHONE NUMBER (Eg- +919211733317)',
                    from_='SENDERs PHONE NUMBER (Eg- +919211733317)'
                )
                # Your Account Sid and Auth Token from twilio.com/console
                # DANGER! This is insecure. See http://twil.io/secure
                account_sid = 'API KEY OF USER (Eg- <KEY>)'
                auth_token = 'API KEY OF USER (Eg- <KEY>)'
                client = Client(account_sid, auth_token)
                # SMS alert reusing the e-mail body text.
                message = client.messages \
                    .create(
                        body=message,
                        from_='SENDERs PHONE NUMBER (Eg- +919211733317)',
                        to='RECIEVERs PHONE NUMBER (Eg- +919211733317)'
                    )
                print(message.sid)
                cv2.putText(frame, "Mail and Call SENT",(100,500), cv2.FONT_HERSHEY_SIMPLEX,1 ,(255, 0 ,0), 2,cv2.LINE_AA)
        else:
            # Non-violent prediction resets the consecutive counter.
            pred = "Non-Violence"
            continous_viol = 0
    # Overlay the current label and show the frame.
    cv2.putText(frame, pred,(50,50), cv2.FONT_HERSHEY_SIMPLEX,1 ,(255, 0 ,0), 2,cv2.LINE_AA)
    cv2.imshow("Video frame",frame)
cam.release()
cv2.destroyAllWindows()
|
<reponame>jzabl/mpdaf<gh_stars>0
"""
Copyright (c) 2010-2018 CNRS / Centre de Recherche Astrophysique de Lyon
Copyright (c) 2019 <NAME> <<EMAIL>>
Copyright (c) 2019 <NAME> <<EMAIL>>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import numpy as np
import pytest
from astropy.io import fits
from mpdaf.obj import Cube
from mpdaf.MUSE import get_FSF_from_cube_keywords, FSFModel
from mpdaf.MUSE.fsf import find_model_cls, OldMoffatModel, MoffatModel2
from mpdaf.MUSE.fsf import combine_fsf
from mpdaf.tools import MpdafWarning
from mpdaf.tests.utils import get_data_file
from numpy.testing import assert_allclose
def test_fsf_model_errors():
    """FSFModel.read and the model-class helpers reject invalid inputs."""
    # This cube has no FSF info
    with pytest.raises(ValueError):
        FSFModel.read(get_data_file('sdetect', 'minicube.fits'))
    # FSFMODE must name a known model class
    with pytest.raises(ValueError):
        find_model_cls(fits.Header({'FSFMODE': 5}))
    # OldMoffatModel cannot be built from an empty header
    with pytest.raises(ValueError):
        OldMoffatModel.from_header(fits.Header(), 0)
    # MoffatModel2 rejects headers with missing or inconsistent FSF keywords
    for hdr in [fits.Header(),
                fits.Header({'FSFLB1': 5000, 'FSFLB2': 9000}),
                fits.Header({'FSFLB1': 9000, 'FSFLB2': 5000})]:
        with pytest.raises(ValueError):
            MoffatModel2.from_header(hdr, 0)
def test_fsf_model(tmpdir):
    """Round-trip the FSF model: old API, per-field read, header export, model2."""
    cubename = get_data_file('sdetect', 'subcub_mosaic.fits')
    cube = Cube(cubename)
    # Read FSF model with the old method (deprecated, should warn)
    with pytest.warns(MpdafWarning):
        PSF, fwhm_pix, fwhm_arcsec = get_FSF_from_cube_keywords(cube, 13)
    # Read FSF model from file
    fsf = FSFModel.read(cubename)
    assert len(fsf) == 9
    assert fsf[0].model == 'MOFFAT1'
    assert_allclose(fsf[0].get_fwhm(cube.wave.coord()), fwhm_arcsec[0])
    # Read FSF model from cube
    fsf = FSFModel.read(cube)
    assert len(fsf) == 9
    assert fsf[0].model == 'MOFFAT1'
    assert_allclose(fsf[0].get_fwhm(cube.wave.coord()), fwhm_arcsec[0])
    # Read FSF model from header and for a specific field
    hdr = cube.primary_header.copy()
    hdr.update(cube.data_header)
    fsf = FSFModel.read(hdr, field=2)
    assert fsf.model == 'MOFFAT1'
    assert_allclose(fsf.get_fwhm(cube.wave.coord()), fwhm_arcsec[1])
    # test to_header: exact card serialization with the default field index
    assert [str(x).strip() for x in fsf.to_header().cards] == [
        "FSFMODE = 'MOFFAT1 ' / Old model with a fixed beta",
        'FSF00BET= 2.8',
        'FSF00FWA= 0.825',
        'FSF00FWB= -3.01E-05'
    ]
    # ... and with an explicit base header plus field index
    hdr = fits.Header({'FOO': 1})
    outhdr = fsf.to_header(hdr=hdr, field_idx=2)
    assert [str(x).strip() for x in outhdr.cards] == [
        'FOO = 1',
        "FSFMODE = 'MOFFAT1 ' / Old model with a fixed beta",
        'FSF02BET= 2.8',
        'FSF02FWA= 0.825',
        'FSF02FWB= -3.01E-05'
    ]
    # Convert to model2 (polynomial fwhm/beta parameterization)
    fsf2 = fsf.to_model2()
    assert fsf2.get_beta(7000) == fsf.beta
    assert [str(x).strip() for x in fsf2.to_header().cards] == [
        'FSFMODE = 2 / Circular MOFFAT beta=poly(lbda) fwhm=poly(lbda)',
        'FSFLB1 = 5000 / FSF Blue Ref Wave (A)',
        'FSFLB2 = 9000 / FSF Red Ref Wave (A)',
        'FSF00FNC= 2 / FSF00 FWHM Poly Ncoef',
        'FSF00F00= -0.1204 / FSF00 FWHM Poly C00',
        'FSF00F01= 0.6143 / FSF00 FWHM Poly C01',
        'FSF00BNC= 1 / FSF00 BETA Poly Ncoef',
        'FSF00B00= 2.8 / FSF00 BETA Poly C00'
    ]
    # Write the model into a cube header, save, and re-read from disk
    testfile = str(tmpdir.join('test.fits'))
    outcube = cube.copy()
    fsf2.to_header(hdr=outcube.primary_header)
    outcube.write(testfile)
    fsf3 = FSFModel.read(testfile, field=0)
    assert fsf3.model == 2
    assert fsf3.get_beta(7000) == fsf.get_beta(7000)
    assert fsf3.get_fwhm(7000) == fsf.get_fwhm(7000)
    assert fsf3.get_fwhm(7000, unit='pix') == fsf.get_fwhm(7000, unit='pix')
def test_fsf_arrays():
    """get_2darray/get_image/get_cube reject list wavelengths, center the PSF."""
    cubename = get_data_file('sdetect', 'subcub_mosaic.fits')
    cube = Cube(cubename)
    fsf = FSFModel.read(cube, field=2)
    fsf2 = fsf.to_model2()
    # A list of wavelengths is rejected; a scalar is required.
    with pytest.raises(ValueError):
        fsf2.get_2darray([7000], (20, 20))
    with pytest.raises(ValueError):
        fsf2.get_image([7000], cube.wcs)
    # The generated PSF image must peak at the requested center pixel.
    ima = fsf2.get_image(7000, cube.wcs, center=(10, 10))
    assert np.unravel_index(ima.data.argmax(), ima.shape) == (10, 10)
    # Cube variant: one PSF plane per wavelength of a 5-plane slice.
    tcube = cube[:5, :, :]
    c = fsf2.get_cube(tcube.wave, cube.wcs, center=(10, 10))
    assert c.shape == (5, 30, 30)
    assert np.unravel_index(c[0].data.argmax(), c.shape[1:]) == (10, 10)
def test_fsf_convolve():
    """Convolving a MoffatModel2 with a Gaussian changes FWHM/beta as expected."""
    lbrange = [4750.0, 9350.0]
    # Polynomial coefficients for beta(lambda) and fwhm(lambda); values taken
    # from a fitted MUSE field.
    beta_pol = [0.425572268419153, -0.963126218379342, -0.0014311681713689742,
                -0.0064324103352929405, 0.09098701358534873, 2.0277399948419843]
    fwhm_pol = [0.6321570666462952, -0.06284858095522032, 0.04282359923274102,
                0.045673032671778586, -0.1864068502712748, 0.3693082688212182]
    fsf = MoffatModel2(fwhm_pol, beta_pol, lbrange, 0.2)
    fsf2 = fsf.convolve(cfwhm=0.1)
    # Reference values at 7000 A for the convolved model.
    assert_allclose(fsf2.get_fwhm(7000), 0.3919, rtol=1e-3)
    assert_allclose(fsf2.get_beta(7000), 2.1509, rtol=1e-3)
def test_combine_fsf():
    """combine_fsf: a model combined with itself is unchanged; two distinct
    models yield intermediate FWHM/beta."""
    lbrange = [4750.0, 9350.0]
    beta_pol = [0.425572268419153, -0.963126218379342, -0.0014311681713689742,
                -0.0064324103352929405, 0.09098701358534873, 2.0277399948419843]
    fwhm_pol = [0.6321570666462952, -0.06284858095522032, 0.04282359923274102,
                0.045673032671778586, -0.1864068502712748, 0.3693082688212182]
    fsf1 = MoffatModel2(fwhm_pol, beta_pol, lbrange, 0.2)
    fwhm_pol = [0.6539648695212446, -0.09803896219961082, 0.0768935513209841,
                0.13029884613164275, -0.30890727537189494, 0.4420737174631386]
    beta_pol = [1.3422018214910905, -1.0824007679002177, 0.0654899276450118,
                0.5566091154793532, -0.4488955513549307, 1.7496593644278122]
    fsf2 = MoffatModel2(fwhm_pol, beta_pol, lbrange, 0.2)
    # Sanity check: combining a model with itself reproduces it.
    fsf, cube = combine_fsf([fsf1, fsf1])
    assert_allclose(fsf.get_fwhm(7000), fsf1.get_fwhm(7000), rtol=1.e-6)
    assert_allclose(fsf.get_beta(7000), fsf1.get_beta(7000), rtol=1.e-6)
    # Combining two different models: reference values at 7000 A.
    fsf, cube = combine_fsf([fsf1, fsf2])
    assert_allclose(fsf.get_fwhm(7000), 0.397959, rtol=1.e-2)
    assert_allclose(fsf.get_beta(7000), 1.843269, rtol=1.e-2)
|
<reponame>korcsmarosgroup/ARN2DataBase
'''
miRNA-lncRNA interactions
'''
# Imports
from SLKlib.SQLiteDBApi.sqlite_db_api import PsimiSQL
# Defining constants
SQL_SEED = '../../../../../SLKlib/SQLiteDBApi/network-db-seed.sql'
DB_DESTINATION = '../../output/starbase'
FILE_TO_TAXID = {
'files/starbase_v3_miRNAlncRNA.txt': "taxid:9606",
'files/starbase_v3_ncRNA_degradome_human.txt': 'taxid:9606',
'files/starbase_v3_ncRNA_degradome_worm.txt': 'taxid:6239',
'files/starbase_v3_lncRNA_valid.txt': 'taxid:9606'
}
file_to_detmet = {
'files/starbase_v3_miRNAlncRNA.txt': "MI:1110(predicted interaction)",
'files/starbase_v3_ncRNA_degradome_human.txt': 'MI:0045(experimental interaction detection)',
'files/starbase_v3_ncRNA_degradome_worm.txt': 'MI:0045(experimental interaction detection)',
'files/starbase_v3_lncRNA_valid.txt': 'MI:2321(high throughput sequencing)'
}
def get_node_mirna(mirna_name, taxid, psi_mi_to_sql_object):
    """Return the node dict for *mirna_name*, reusing an existing DB row.

    The SQLite database is consulted first so that the same node is never
    inserted twice; a fresh node dict is built only when the lookup finds
    nothing.
    """
    existing = psi_mi_to_sql_object.get_node(mirna_name, node_tax_id=taxid)
    if existing:
        return existing
    # NOTE(review): the 'HPRD:' prefix looks odd for a miRNA node (the lncRNA
    # helper uses 'mirbase:') -- confirm the prefixes are not swapped upstream.
    return {
        "name": 'HPRD:' + mirna_name,
        "tax_id": taxid,
        "alt_accession": None,
        'pathways': None,
        "aliases": None,
        "topology": None,
    }
def get_node_lncrna(lncrna_name, taxid, psi_mi_to_sql_object):
    """Return the node dict for *lncrna_name*, reusing an existing DB row.

    The SQLite database is consulted first so that the same node is never
    inserted twice; a fresh node dict is built only when the lookup finds
    nothing.
    """
    existing = psi_mi_to_sql_object.get_node(lncrna_name, node_tax_id=taxid)
    if existing:
        return existing
    # NOTE(review): 'mirbase:' is a miRNA database prefix; for an lncRNA node
    # this looks swapped with get_node_mirna's 'HPRD:' -- confirm upstream.
    return {
        "name": 'mirbase:' + lncrna_name,
        "tax_id": taxid,
        "alt_accession": None,
        'pathways': None,
        "aliases": None,
        "topology": None,
    }
def main(logger):
    """Parse the starbase interaction files into the SLK SQLite database.

    For every input file in FILE_TO_TAXID: skip the 4 header lines, extract
    the miRNA/lncRNA name columns appropriate to that file's layout, insert
    any nodes not yet in the database, and insert one directed edge per row.
    The finished database is written to DB_DESTINATION.

    :param logger: unused here; accepted for interface parity with the other
        SLKlib parser scripts.
    """
    # Initiating the parser
    db_api = PsimiSQL(SQL_SEED)
    # BUG FIX: the original iterated an undefined DATA_FILE_LIST (NameError at
    # runtime) and compared `file` against 'lncRNA/databases/starbase/files/...'
    # paths that can never equal the 'files/...' keys actually opened, so every
    # row fell into the else branch with None names. Iterate the declared
    # FILE_TO_TAXID keys and compare against those same keys.
    for file in FILE_TO_TAXID:
        with open(file) as data:
            # Skipping the four header lines
            for _ in range(4):
                data.readline()
            taxid = FILE_TO_TAXID[file]
            for line in data:
                columns = line.split('\t')
                if len(columns) == 1:
                    continue
                # Column layout differs per source file.
                if file == 'files/starbase_v3_miRNAlncRNA.txt':
                    mirna_name = columns[1]
                    lncrna_name = columns[3]
                elif file in ('files/starbase_v3_ncRNA_degradome_human.txt',
                              'files/starbase_v3_ncRNA_degradome_worm.txt'):
                    mirna_name = columns[1]
                    lncrna_name = columns[2]
                elif file == 'files/starbase_v3_lncRNA_valid.txt':
                    mirna_name = columns[1]
                    lncrna_name = columns[4]
                else:
                    mirna_name = None
                    lncrna_name = None
                # NOTE(review): the miRNA name is passed to the lncRNA helper
                # and vice versa, as in the original -- kept to preserve the
                # existing database layout; confirm whether this is intended.
                source_dict = get_node_lncrna(mirna_name, taxid, db_api)
                target_dict = get_node_mirna(lncrna_name, taxid, db_api)
                # Nodes are inserted to the db only if they are not in it yet
                # (fetched rows already carry an 'id').
                if 'id' not in source_dict:
                    db_api.insert_node(source_dict)
                if 'id' not in target_dict:
                    db_api.insert_node(target_dict)
                interaction_types = "effect:%s|is_directed:%s|is_direct:%s" \
                    % ('MI:0256(rna interference)', 'directed', 'unknown')
                # Inserting edges
                edge_dict = {
                    'publication_ids': 'pubmed:24297251',
                    'layer': '7',
                    'source_db': 'starbase',
                    'interaction_identifiers': None,
                    'confidence_scores': None,
                    'interaction_detection_method': file_to_detmet[file],
                    'interaction_types': interaction_types,
                    'first_author': None
                }
                db_api.insert_edge(source_dict, target_dict, edge_dict)
    # Saving to a DB_TYPE.db file
    db_api.save_db_to_file(DB_DESTINATION)
if __name__ == '__main__':
    # Allow running the parser as a standalone script (no external logger).
    print("Parsing database...")
    main(logger = None)
    print("Parsing database is completed. SQLite database is saved to: " + DB_DESTINATION)
|
import os
import unittest
import sys
topdir = os.path.join(os.path.dirname(__file__), "..")
sys.path.append(topdir)
from Prescient import app, db
from Prescient.config import basedir
# Integration tests
class AuthorisationTests(unittest.TestCase):
    """Integration tests for the auth blueprint (register/login/logout).

    Each test runs against a throwaway SQLite database created in setUp and
    dropped in tearDown, exercised through Flask's test client.
    """

    def setUp(self):  # sets up the database
        app.config["TESTING"] = True  # tests for assertions or exceptions
        app.config["WTF_CSRF_ENABLED"] = False  # allow form posts without CSRF tokens
        app.config["SQLALCHEMY_DATABASE_URI"] = 'sqlite:///' + \
            os.path.join(basedir,
                         'test.db')
        self.app = app.test_client()  # this creates a test client for the app
        db.create_all()

    def tearDown(self):  # removes the database
        db.session.remove()
        db.drop_all()

    # Helper methods to create dummy post requests and store to test database
    def register_user(self, username, password, confirm):
        """POST the registration form; returns the redirect-followed response."""
        credentials = dict(username=username,
                           password=password,
                           confirm=confirm)
        return self.app.post("auth/register",
                             data=credentials,
                             follow_redirects=True)

    def login_user(self, username, password):
        """POST the login form; returns the redirect-followed response."""
        credentials = dict(username=username,
                           password=password)
        return self.app.post("auth/login",
                             data=credentials,
                             follow_redirects=True)

    def logout_user(self):
        """GET the logout endpoint; returns the redirect-followed response."""
        return self.app.get("auth/logout", follow_redirects=True)

    # tests
    def test_auth_urls(self):
        """All auth pages respond 200 after redirects."""
        response_login = self.app.get("auth/login", follow_redirects=True)
        response_register = self.app.get("auth/register", follow_redirects=True)
        response_logout = self.app.get("auth/logout", follow_redirects=True)
        self.assertEqual(response_login.status_code, 200)
        self.assertEqual(response_register.status_code, 200)
        self.assertEqual(response_logout.status_code, 200)

    def test_valid_user_registration(self):
        """Matching password/confirm registers successfully."""
        # NOTE(review): "Testing<PASSWORD>" is an anonymization placeholder and
        # does not match the "Testing123" confirm value -- restore the original
        # password or this test cannot pass.
        response = self.register_user("RandomUser1!",
                                      "Testing<PASSWORD>",
                                      "Testing123")
        self.assertIn(b"Your account has now been created!", response.data)

    def test_invalid_user_registration_wrong_confirm(self):
        """Mismatched password/confirm is rejected."""
        response = self.register_user("RandomUser1!", "Testing13", "Testing31")
        self.assertIn(b"Passwords must match", response.data)

    def test_invalid_user_registration_duplicate_username(self):
        """Registering the same username twice is rejected."""
        response = self.register_user("RandomUser2",
                                      "python99",
                                      "python99")
        response = self.register_user("RandomUser2",
                                      "python11",
                                      "python11")
        self.assertIn(b"That username is already registered.", response.data)

    def test_valid_login(self):
        """A registered user can log in with the right credentials."""
        self.register_user("RandomUser1!", "Testing123", "Testing123")
        response = self.login_user("RandomUser1!", "Testing123")
        self.assertIn(b"Welcome to Prescient Finance", response.data)

    def test_invalid_login_wrong_username(self):
        """Usernames are case-sensitive: wrong case must not log in."""
        self.register_user("RandomUser1!", "Testing123", "Testing123")
        response = self.login_user("randomuser1!", "Testing123")
        self.assertNotIn(b"Welcome to Prescient Finance", response.data)

    def test_invalid_login_wrong_password(self):
        """A wrong password must not log in."""
        # NOTE(review): "<PASSWORD>" is an anonymization placeholder for the
        # confirm value -- restore the original so registration succeeds.
        self.register_user("RandomUser1!", "Testing123", "<PASSWORD>")
        response = self.login_user("RandomUser1!", "Testing456")
        self.assertNotIn(b"Welcome to Prescient Finance", response.data)

    def test_logout(self):
        """Logging out returns the user to the sign-in page."""
        self.register_user("RandomUser1!", "Testing123", "Testing123")
        self.login_user("RandomUser1!", "Testing123")
        response = self.logout_user()
        self.assertIn(b"Sign In", response.data)


if __name__ == "__main__":
    # Run the suite directly: python this_file.py
    unittest.main()
|
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\statistics\tunable.py
# Compiled at: 2020-10-09 19:17:02
# Size of source mod 2**32: 5281 bytes
from sims4.tuning.tunable import TunableMapping, Tunable, TunableInterval, TunableReference, AutoFactoryInit, HasTunableSingletonFactory, TunableEnumEntry
from sims4.tuning.tunable_base import SourceQueries
from statistics.statistic_categories import StatisticCategory
import services
class TunableStatAsmParam(HasTunableSingletonFactory, AutoFactoryInit):
    """Tunable factory that maps a Sim statistic's value onto an ASM
    (animation state machine) parameter.

    Decompiled source (uncompyle6); the embedded tunable description strings
    are kept byte-for-byte as emitted.
    """
    FACTORY_TUNABLES = {'level_ranges':TunableMapping(description='\n The value mapping of the stat range to stat value or user value. If\n use_user_value is True, the range should be user value, otherwise\n stat value.\n ',
      key_type=Tunable(description="\n The asm parameter for Sim's stat level.\n ",
      tunable_type=str,
      default=None,
      source_query=(SourceQueries.SwingEnumNamePattern.format('statLevel'))),
      value_type=TunableInterval(description='\n Stat value fall into the range (inclusive).\n ',
      tunable_type=float,
      default_lower=1,
      default_upper=1)),
     'asm_param_name':Tunable(description='\n The asm param name.\n ',
      tunable_type=str,
      default='statLevel'),
     'use_user_value':Tunable(description='\n Whether use the user value or stat value to decide the asm_param.\n ',
      tunable_type=bool,
      default=True),
     'use_effective_skill_level':Tunable(description='\n If true, the effective skill level of the Sim will be used for \n the asm_param.\n ',
      tunable_type=bool,
      default=True),
     'always_apply':Tunable(description='\n If checked, this parameter is always applied on any ASM involving the\n owning Sim.\n ',
      tunable_type=bool,
      default=False)}

    def get_asm_param(self, stat):
        """Return the (param_name, param_value) pair for *stat*.

        Chooses the user value or raw value per use_user_value; for skills,
        the owner's effective skill level overrides when
        use_effective_skill_level is set.
        """
        stat_value = stat.get_user_value() if self.use_user_value else stat.get_value()
        if stat.is_skill:
            if self.use_effective_skill_level:
                stat_value = stat.tracker.owner.get_effective_skill_level(stat)
        return self.get_asm_param_for_value(stat_value)

    def get_asm_param_for_value(self, stat_value):
        """Map *stat_value* to the key of the first level_ranges interval that
        contains it (bounds inclusive); the value is None when no range matches.
        """
        asm_param_value = None
        for range_key, stat_range in self.level_ranges.items():
            if stat_value >= stat_range.lower_bound and stat_value <= stat_range.upper_bound:
                asm_param_value = range_key
                break
        return (
         self.asm_param_name, asm_param_value)
class CommodityDecayModifierMapping(TunableMapping):
    """Tunable mapping from a Commodity/RankedStatistic reference to a decay
    multiplier.

    Decompiled source (uncompyle6); the tunable description strings are kept
    byte-for-byte as emitted.
    """

    def __init__(self, description=''):
        # BUG FIX: the decompiler emitted `( super().__init__(...) ,)` -- the
        # call wrapped in a one-element tuple that was built and immediately
        # discarded. The plain call below is behaviorally identical.
        super().__init__(description=description, key_type=TunableReference((services.statistic_manager()),
          class_restrictions=('Commodity', 'RankedStatistic'),
          description='\n The stat the modifier will apply to.\n ',
          pack_safe=True),
          value_type=Tunable(float, 0, description='Multiply statistic decay by this value.'))

    @property
    def export_class(self):
        # Export under the base tunable-mapping class name for the tuning
        # schema, not this subclass's name.
        return 'TunableMapping'
class StatisticCategoryModifierMapping(TunableMapping):
    """Tunable mapping from a StatisticCategory to a decay multiplier.

    Decompiled source (uncompyle6); the tunable description strings are kept
    byte-for-byte as emitted.
    """

    def __init__(self, description=''):
        super().__init__(description=description, key_type=TunableEnumEntry(description='\n The category of statistics to add the modifier to.\n ',
          tunable_type=StatisticCategory,
          default=(StatisticCategory.INVALID)),
          value_type=Tunable(description='\n The value to multiply by the decay of the statistic by.\n ',
          tunable_type=float,
          default=1.0))
<reponame>ManjushreeRao/FinalProject-WebApplication
from __future__ import print_function
from __future__ import print_function
import datetime
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from typing import List, Dict
import simplejson as json
from flask import Flask, request, Response, redirect,json, jsonify
from flask import render_template
from flaskext.mysql import MySQL
from pymysql.cursors import DictCursor
from google_auth_oauthlib.flow import InstalledAppFlow
import json
# Flask application + MySQL connection setup. DictCursor makes fetchall()
# return dicts keyed by column name, which the templates rely on.
app = Flask(__name__)
mysql = MySQL(cursorclass=DictCursor)
app.config['MYSQL_DATABASE_HOST'] = 'db'  # presumably a docker-compose service name -- confirm
app.config['MYSQL_DATABASE_USER'] = 'root'
# NOTE(review): '<PASSWORD>' is a committed placeholder -- load credentials
# from environment/config, never source code.
app.config['MYSQL_DATABASE_PASSWORD'] = '<PASSWORD>'
app.config['MYSQL_DATABASE_PORT'] = 3306
app.config['MYSQL_DATABASE_DB'] = 'records'
mysql.init_app(app)
# --- Static page routes: each simply renders the matching Jinja template. ---
@app.route('/')
def index():
    """Landing page."""
    return render_template('index.html')


@app.route('/about/')
def about():
    """About page."""
    return render_template('about.html')


@app.route('/academics/')
def academics():
    """Academics page."""
    return render_template('academics.html')


@app.route('/gallery/')
def gallery():
    """Photo gallery page."""
    return render_template('gallery.html')


@app.route('/calendar/')
def calendar():
    """Calendar page."""
    return render_template('calendar.html')


@app.route('/covid/')
def covid():
    """COVID information page."""
    return render_template('covid.html')


@app.route('/Faculty/')
def Faculty():
    """Faculty page. (Route/function name capitalized to match the URL.)"""
    return render_template('Faculty.html')
# --- HTML CRUD routes over the tblrecords table. ---
@app.route('/records/', methods=['GET'])
def course():
    """List every row of tblrecords."""
    user = {'username': 'Rutgers'}
    cursor = mysql.get_db().cursor()
    cursor.execute('SELECT * FROM tblrecords')
    result = cursor.fetchall()
    return render_template('records.html', title='Home', user=user, records=result)


@app.route('/chart/')
def chart():
    """Chart of student names vs ages pulled from tblrecords."""
    legend = "Student Age Ratio"
    labels = list()
    values = list()
    cursor = mysql.get_db().cursor()
    #labels = [1,2,3,4,5,6]
    #values = [41, 30, 39, 30, 26, 30]
    cursor.execute('SELECT * from tblrecords')
    result1 = cursor.fetchall()
    for i in result1:
        labels.append(i['Name'])
        values.append(i['Age'])
    return render_template('chart.html', values=values, labels=labels, legend=legend)


@app.route('/view/<int:record_id>', methods=['GET'])
def view(record_id):
    """Show a single record (parameterized query -- no SQL injection)."""
    cursor = mysql.get_db().cursor()
    cursor.execute('SELECT * FROM tblrecords WHERE id=%s', record_id)
    result = cursor.fetchall()
    # NOTE(review): result[0] raises IndexError for an unknown id -- a 404
    # would be friendlier.
    return render_template('view.html', title='View Form', record=result[0])


@app.route('/edit/<int:record_id>', methods=['GET'])
def form_edit_get(record_id):
    """Render the edit form pre-filled with the record's current values."""
    cursor = mysql.get_db().cursor()
    cursor.execute('SELECT * FROM tblrecords WHERE id=%s', record_id)
    result = cursor.fetchall()
    return render_template('edit.html', title='Edit Form', record=result[0])


@app.route('/edit/<int:record_id>', methods=['POST'])
def form_update_post(record_id):
    """Apply the posted form values to the record, then redirect to the list."""
    cursor = mysql.get_db().cursor()
    inputData = (request.form.get('Name'), request.form.get('Sex'),
                 request.form.get('Age'), request.form.get('Height_in'),
                 request.form.get('Weight_lbs'), record_id)
    sql_update_query = """UPDATE tblrecords t SET t.Name = %s, t.Sex = %s,
    t.Age = %s, t.Height_in = %s, t.Weight_lbs = %s
    WHERE t.id = %s """
    cursor.execute(sql_update_query, inputData)
    mysql.get_db().commit()
    return redirect("/records/", code=302)


@app.route('/records/new', methods=['GET'])
def form_insert_get():
    """Blank form for creating a record."""
    return render_template('new.html', title='New Course Form')


@app.route('/records/new', methods=['POST'])
def form_insert_post():
    """Insert the posted form values as a new record, then redirect."""
    cursor = mysql.get_db().cursor()
    inputData = (request.form.get('Name'), request.form.get('Sex'),
                 request.form.get('Age'), request.form.get('Height_in'),
                 request.form.get('Weight_lbs'))
    sql_insert_query = """INSERT INTO tblrecords
    (Name,Sex,Age,Height_in,Weight_lbs)
    VALUES (%s, %s,%s,%s, %s) """
    cursor.execute(sql_insert_query, inputData)
    mysql.get_db().commit()
    return redirect("/records/", code=302)


@app.route('/delete/<int:record_id>', methods=['POST'])
def form_delete_post(record_id):
    """Delete the record, then redirect to the list."""
    cursor = mysql.get_db().cursor()
    sql_delete_query = """DELETE FROM tblrecords WHERE id = %s """
    cursor.execute(sql_delete_query, record_id)
    mysql.get_db().commit()
    return redirect("/records/", code=302)
# --- JSON REST API (v1) over the same tblrecords table. ---
@app.route('/api/v1/records', methods=['GET'])
def api_browse() -> str:
    """Return all records as a JSON array."""
    cursor = mysql.get_db().cursor()
    cursor.execute('SELECT * FROM tblrecords')
    result = cursor.fetchall()
    json_result = json.dumps(result);
    resp = Response(json_result, status=200, mimetype='application/json')
    return resp


@app.route('/api/v1/records/<int:record_id>', methods=['GET'])
def api_retrieve(record_id) -> str:
    """Return one record (wrapped in a JSON array; empty array if not found)."""
    cursor = mysql.get_db().cursor()
    cursor.execute('SELECT * FROM tblrecords WHERE id=%s', record_id)
    result = cursor.fetchall()
    json_result = json.dumps(result);
    resp = Response(json_result, status=200, mimetype='application/json')
    return resp


@app.route('/api/v1/records', methods=['POST'])
def api_add() -> str:
    """Create a record from a JSON body; 201 with empty payload on success."""
    content = request.json
    cursor = mysql.get_db().cursor()
    # Column order here is Name,Age,Sex (different from the HTML form route);
    # the tuple matches this INSERT's column list, so it is consistent.
    inputData = (content['Name'], content['Age'],
                 content['Sex'], content ['Height_in'],
                 content['Weight_lbs'])
    sql_insert_query = """INSERT INTO tblrecords
    (Name,Age,Sex,Height_in,Weight_lbs)
    VALUES (%s, %s,%s, %s, %s) """
    cursor.execute(sql_insert_query, inputData)
    mysql.get_db().commit()
    resp = Response(status=201, mimetype='application/json')
    return resp


@app.route('/api/v1/records/<int:record_id>', methods=['PUT'])
def api_edit(record_id) -> str:
    """Update a record from a JSON body; 200 with empty payload."""
    cursor = mysql.get_db().cursor()
    content = request.json
    inputData = (content['Name'], content['Sex'], content['Age'], content ['Height_in'],
                 content['Weight_lbs']
                 , record_id)
    sql_update_query = """UPDATE tblrecords t SET t.Name = %s,
    t.Sex = %s, t.Age = %s,t.Height_in = %s, t.Weight_lbs = %s
    WHERE t.id = %s """
    cursor.execute(sql_update_query, inputData)
    mysql.get_db().commit()
    resp = Response(status=200, mimetype='application/json')
    return resp


@app.route('/api/v1/records/<int:record_id>', methods=['DELETE'])
def api_delete(record_id) -> str:
    """Delete a record; 200 with empty payload (even if the id did not exist)."""
    cursor = mysql.get_db().cursor()
    sql_delete_query = """DELETE FROM tblrecords WHERE id = %s """
    cursor.execute(sql_delete_query, record_id)
    mysql.get_db().commit()
    resp = Response(status=200, mimetype='application/json')
    return resp
if __name__ == '__main__':
    # NOTE(review): app.run() blocks, so everything below (the Google Calendar
    # helper and its own __main__ guard) only executes after the dev server
    # stops. The calendar code likely belongs in its own module -- confirm.
    app.run(host='0.0.0.0', debug=True)


# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/calendar.readonly']


def main():
    """Shows basic usage of the Google Calendar API.
    Prints the start and name of the next 10 events on the user's calendar.
    """
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            # Interactive OAuth flow in a local browser window.
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.json', 'w') as token:
            token.write(creds.to_json())
    service = build('calendar', 'v3', credentials=creds)
    # Call the Calendar API
    now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
    print('Getting the upcoming 10 events')
    events_result = service.events().list(calendarId='primary', timeMin=now,
                                          maxResults=10, singleEvents=True,
                                          orderBy='startTime').execute()
    events = events_result.get('items', [])
    if not events:
        print('No upcoming events found.')
    for event in events:
        # All-day events carry 'date' instead of 'dateTime'.
        start = event['start'].get('dateTime', event['start'].get('date'))
        print(start, event['summary'])


if __name__ == '__main__':
    main()
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import csv
import datetime
import logging
import requests
from simplejson.scanner import JSONDecodeError
import subprocess
from urlparse import urlparse
import infra.tools.antibody.cloudsql_connect as csql
from infra.tools.antibody.static.third_party import gerrit_util
# https://chromium.googlesource.com/infra/infra/+/master/infra_libs/logs/README.md
# Module-scoped logger per the infra logging conventions (README link above).
LOGGER = logging.getLogger(__name__)
# strftime/strptime format for timestamps handled by this module.
time_format = '%Y-%m-%d %H:%M:%S'
# Hostnames of Rietveld code-review instances this tool knows how to query.
KNOWN_RIETVELD_INSTANCES = [
    'chromereviews.googleplex.com',
    'chromiumcodereview-hr.appspot.com',
    'chromiumcodereview.appspot.com',
    'codereview.appspot.com',
    'codereview.chromium.org',
    'skia-codereview-staging.appspot.com',
    'skia-codereview-staging2.appspot.com',
    'skia-codereview-staging3.appspot.com',
]
# Hostnames of Gerrit instances this tool knows how to query.
KNOWN_GERRIT_INSTANCES = [
    'chromium-review.googlesource.com',
]
def extract_code_review_json_data(review_url, cc, git_checkout_path):
    """Fetch review metadata for *review_url* from the matching backend.

    Dispatches to the Rietveld or Gerrit extractor based on which known
    instance hostname appears in the URL.

    Args:
        review_url(str): full code-review URL.
        cc(Cursor): database cursor (used by the Gerrit path only).
        git_checkout_path(str): local git checkout (Gerrit path only).
    Returns:
        The backend's JSON-derived dict, or None for an unknown instance.
    """
    if any(hostname in review_url for hostname in KNOWN_RIETVELD_INSTANCES):
        return _extract_json_data_from_rietveld(review_url)
    if any(hostname in review_url for hostname in KNOWN_GERRIT_INSTANCES):
        return _extract_json_data_from_gerrit(review_url, cc, git_checkout_path)
    # Lazy %-args instead of eager string formatting, and an explicit None so
    # the unknown-instance result is deliberate rather than a fall-through.
    LOGGER.error('unknown code review instance: %s', review_url)
    return None
def _extract_json_data_from_rietveld(rietveld_url):
    """Extracts json data from an issue of a rietveld instance
    Args:
        rietveld_url(str): rietveld url formatted 'https://hostname/issuenum'
    Return:
        Dictionary containing information from rietveld about the specific
        issue linked in rietveld_url.
        * It is important to note that not all fields existed at the time of a
          given CL's existence, so the data is not all 100% reliable. Known
          false cases are starred below.
        Most relevant keys:
          - 'owner_email': email address of the owner
          - 'created': timestamp issue was created
          - 'cc': list of cc'ed email addresses
          - 'reviewers': list of reviewers' email addresses
          - 'messages': dict with the following relevant keys:
            - *'patchset': latest patchset when message was sent. Note: for
              some older CLs, this value is null for all messages.
            - 'sender': message author email address
            - 'approval': boolean for whether or not the message contained
              'lgtm'
            - 'date': timestamp message was sent
            - 'text': str containing contents of the message
            - 'auto_generated': boolean for whether or not the message was
              autogenerated. Note: messages from commit-bot are not considered
              autogenerated.
            - 'disapproval': boolean for whether or not the message contained
              'not lgtm'
            - 'issue_was_closed': boolean for whether or not the issue was
              closed at the time it was sent. Note: this is true for 'Patchset
              committed' messages
          - 'patchsets': list of patchset IDs that still exist on Rietveld at
            time of request
          - 'modified': timestamp the CL was last modified
    """
    url_components = urlparse(rietveld_url)
    # Rietveld JSON endpoint; strip stray trailing commas from the path.
    json_data_url = '%s://%s/api%s?messages=true' % (url_components.scheme,
        url_components.netloc, url_components.path.strip(','))
    response = requests.get(json_data_url)
    if (response.status_code == requests.codes.ok):
        try:  # pragma: no cover
            return response.json()
        except JSONDecodeError:  # pragma: no cover
            LOGGER.error('json parse failed for url: %s' % rietveld_url)
            # Minimal fake payload so downstream code still finds 'messages'.
            return {'messages': [{'text': ''}]}
    else:
        LOGGER.info('unable to access: %s' % rietveld_url)
        if (response.status_code == 404):  # pragma: no cover
            # fake json response so the review url is still put in the review table
            # even though the relevant data is inaccessable
            return {'messages': [{'text': ''}]}
        else:
            # Any other HTTP error propagates to the caller.
            response.raise_for_status()
def _extract_json_data_from_gerrit(gerrit_url, cc, git_checkout_path):
  """Extracts json data from an issue of a gerrit instance

  Args:
    gerrit_url(str): gerrit url formatted 'https://hostname/issuenum'
    cc(Cursor): cursor to the database with extra info on the commit
    git_checkout_path(str): path to a local git checkout used to read the
        commit message and recover the Change-Id

  Return:
    Dictionary containing information from gerrit about the specific
    issue linked in gerrit_url, or None if the change detail could not
    be fetched.

  Most relevant keys:
    - 'owner': dict with name and email of owner
    - 'created': timestamp issue was created
    - 'updated': timestamp issue was last updated
    - 'labels': dict with the following relevant keys:
      - 'Code Review': dict with the following relevant keys:
        - 'approved': dict with name and email of approver
        - 'all': list of dicts with name and email of all users marked for
          review, as well as review result and time that user last
          updated review status or commented
        - 'values': dict of possible numerical rating values and meanings
    - 'messages': list of messages, each represented as dicts with the
      following relevant keys:
      - 'author': dict with name and email of the author of the comment
      - 'date': timestamp the message was posted
      - 'message': str containing contents of the message
  """
  LOGGER.debug('Fetching gerrit review %s', gerrit_url)
  url_components = urlparse(gerrit_url)
  # BUG FIX: parameterized query instead of '%' string interpolation, which
  # was vulnerable to SQL injection via a crafted review URL.
  cc.execute("""SELECT hash
      FROM git_commit
      WHERE review_url = %s""", (gerrit_url,))
  git_hash = cc.fetchone()[0]
  # Read only the commit body (%b) to scan for the Change-Id trailer.
  commit_message = subprocess.check_output(['git', 'show', git_hash,
                                            '--format=%b'],
                                           cwd=git_checkout_path)
  # NOTE(review): on Python 3 check_output returns bytes and
  # startswith(str) would raise TypeError — confirm this module runs under
  # Python 2, or decode the output before scanning.
  change_id = None
  match_count = 0
  for line in commit_message.splitlines():
    if line.startswith('Change-Id: '):
      match_count += 1
      # Keep only the first Change-Id; extras are reported below.
      if not change_id:
        change_id = line[len('Change-Id: '):]
  if match_count > 1:
    # LOGGER.warn is a deprecated alias of LOGGER.warning.
    LOGGER.warning('Multiple lines beginning with Change-Id for %s'
                   % gerrit_url)
  change_detail = gerrit_util.GetChangeDetail(url_components.netloc, change_id)
  if change_detail:
    return change_detail
  else:
    LOGGER.error('could not get change detail for: %s' % gerrit_url)
def to_canonical_review_url(review_url):
  """Maps known alias hostnames of the Chromium Rietveld instance onto the
  canonical codereview.chromium.org hostname.

  Args:
    review_url(str): a code review url, possibly with stray dots at either
        end (stripped before matching)

  Return:
    The url with any known alias host replaced by codereview.chromium.org;
    otherwise the (dot-stripped) url unchanged.
  """
  url = review_url.strip('.')
  # The two aliases are disjoint substrings, so a first-match loop is
  # equivalent to the original chain of if-statements.
  known_aliases = (
      'chromiumcodereview.appspot.com',
      'chromiumcodereview-hr.appspot.com',
  )
  for alias in known_aliases:
    if alias in url:
      return url.replace(alias, 'codereview.chromium.org')
  return url
def _get_rietveld_data_for_review(rietveld_url, json_data): # pragma: no cover
curr_time = datetime.datetime.utcnow()
committed_timestamp = None
patchset_still_exists = 0
url_exists = 0
for message in json_data['messages']:
if ('committed' in message['text'].lower() and
(json_data['closed'] or message['issue_was_closed'])):
url_exists = 1
committed_timestamp = message['date'].split('.')[0]
if 'patchset' in message:
patchset = message['patchset']
elif message['text'] and 'patchset' in message['text']:
for word in message['text'].split():
if word.startswith('(id:'):
patchset = word[4:].strip(')')
if patchset in json_data['patchsets']:
patchset_still_exists = 1
# TODO(ksho): insert data for reverts/project id (currently set to default
# to False for reverted and 0 for project id, because can't write None to
# a Cloud SQL table with LOAD DATA LOCAL INFILE)
db_data = (rietveld_url, url_exists, curr_time, committed_timestamp,
patchset_still_exists, 0, 0)
return db_data
def _get_gerrit_data_for_review(gerrit_url, json_data): # pragma: no cover
curr_time = datetime.datetime.utcnow()
committed_timestamp = None
# merged patchsets cannot be deleted in gerrit
patchset_still_exists = 1
for message in json_data['messages']:
# TODO(ksho): write better check to detect generated committed message
# by using more explicit checks for bots, checking all requirements
# for commit and all changes due to commit,
if (
json_data['status'] == 'MERGED'
and
('author' not in message.keys() or
'bot' in message['author']['name'].lower())
and
any(commit_text in message['message'].lower() for commit_text in
(
'cherry-picked',
'cherry picked',
'merged',
'pushed',
)
)
):
committed_timestamp = message['date'].split('.')[0]
# TODO(ksho): insert data for reverts/project id (currently set to default
# to False for reverted and 0 for project id, because can't write None to
# a Cloud SQL table with LOAD DATA LOCAL INFILE)
db_data = (gerrit_url, 1, curr_time, committed_timestamp,
patchset_still_exists, 0, 0)
return db_data
def _get_rietveld_data_for_review_people(rietveld_url,
json_data): # pragma: no cover
curr_time = datetime.datetime.utcnow()
db_data_all = []
# only try to add review_people data if the review url is accessable
if json_data['messages'] != [{'text': ''}] and json_data[
'all_required_reviewers_approved']:
people = []
people.append([json_data['cc'], 'cc'])
people.append([json_data['reviewers'], 'reviewer'])
people.append([[json_data['owner_email'],], 'owner'])
time_submitted = json_data['created'].split('.')[0]
for person_list, typ in people:
for person_email in person_list:
db_data = (person_email.split('@')[0], rietveld_url, time_submitted,
curr_time, typ)
db_data_all.append(db_data)
for message in json_data['messages']:
if message['approval']:
time_commented = message['date'].split('.')[0]
db_data = (message['sender'].split('@')[0], rietveld_url,
time_commented, curr_time, 'lgtm')
db_data_all.append(db_data)
elif message['disapproval']:
time_commented = message['date'].split('.')[0]
db_data = (message['sender'].split('@')[0], rietveld_url,
time_commented, curr_time, 'not lgtm')
db_data_all.append(db_data)
return db_data_all
def _get_gerrit_data_for_review_people(gerrit_url,
json_data): # pragma: no cover
curr_time = datetime.datetime.utcnow()
db_data_all = []
time_submitted = json_data['created'].split('.')[0]
db_data_all.append((json_data['owner']['email'], gerrit_url,
time_submitted, curr_time, 'owner'))
people = json_data['labels']['Code-Review']['all']
for person in people:
person_email = person['email']
if '<EMAIL>' in person_email: # pragma: no cover
continue
# in gerrit "+2" means "Looks good to me, approved"
if person['value'] == 2:
db_data_all.append((person_email, gerrit_url,
person['date'].split('.')[0], curr_time, 'lgtm'))
# in gerrit "-2" means "Do not submit"
elif person['value'] == -2:
db_data_all.append((person_email, gerrit_url,
person['date'].split('.')[0], curr_time, 'not lgtm'))
db_data = (person_email, gerrit_url, time_submitted, curr_time, 'reviewer')
db_data_all.append(db_data)
return db_data_all
def get_urls_from_git_commit(cc):  # pragma: no cover
  """Accesses the Cloud SQL instance to find review urls of stored commits
  that have at least one TBR.

  Arg:
    cc: a cursor for the Cloud SQL connection

  Return:
    List of review urls (str) for every commit in the db that has a TBR
    and a non-empty review url.
  """
  cc.execute("""SELECT git_commit.review_url,
      commit_people.people_email_address, commit_people.type
      FROM commit_people
      INNER JOIN (
        SELECT git_commit_hash, COUNT(*)
        AS c
        FROM commit_people
        WHERE type='tbr'
        GROUP BY git_commit_hash) tbr_count
      ON commit_people.git_commit_hash = tbr_count.git_commit_hash
      INNER JOIN git_commit
      ON commit_people.git_commit_hash = git_commit.hash
      WHERE tbr_count.c <> 0
      AND git_commit.review_url != ''
      AND commit_people.type='author'""")
  matching_rows = cc.fetchall()
  # Only the review_url column (index 0) is needed by callers.
  return [row[0] for row in matching_rows]
def primary_key_uniquifier(seq, idfun=lambda x: x):
  """Returns items of seq in order, keeping only the first item seen for
  each key produced by idfun (identity by default)."""
  seen_keys = set()
  unique_items = []
  for element in seq:
    key = idfun(element)
    if key not in seen_keys:
      seen_keys.add(key)
      unique_items.append(element)
  return unique_items
def get_code_review_data(cc, git_checkout_path):  # pragma: no cover
  """Collects 'review' and 'review_people' rows for all commits that have
  both a TBR and a review url.

  Args:
    cc: a cursor for the Cloud SQL connection
    git_checkout_path: path to a local git checkout used to resolve gerrit
        Change-Ids

  Returns:
    Tuple (review_data, unique_review_people_data): lists of row tuples
    ready to be written to CSV.
  """
  review_data, review_people_data = [], []
  git_commits_with_tbr_and_review_url = get_urls_from_git_commit(cc)
  for num, review_url in enumerate(git_commits_with_tbr_and_review_url):
    url = to_canonical_review_url(review_url)
    # cannot get access into chromereview.googleplex.com
    if not any(host in url for host in (
        'chromereviews.googleplex',
    )):
      try:
        json_data = extract_code_review_json_data(url, cc, git_checkout_path)
      except JSONDecodeError:  # pragma: no cover
        # BUG FIX: previously this was `return`, which returned None,
        # discarding all data collected so far and crashing the caller's
        # two-value unpack. Skip only the url that failed to parse.
        LOGGER.error('json parse failed for url: %s' % url)
        continue
      if any(hostname in url for hostname in KNOWN_RIETVELD_INSTANCES) \
          and json_data:
        db_data = _get_rietveld_data_for_review(url, json_data)
        review_data.append(db_data)
        db_data_all = _get_rietveld_data_for_review_people(url, json_data)
        for data_row in db_data_all:
          review_people_data.append(data_row)
      elif any(hostname in url for hostname in KNOWN_GERRIT_INSTANCES) \
          and json_data:
        db_data = _get_gerrit_data_for_review(url, json_data)
        review_data.append(db_data)
        db_data_all = _get_gerrit_data_for_review_people(url, json_data)
        for data_row in db_data_all:
          review_people_data.append(data_row)
      else:
        LOGGER.error('unknown code review instance: %s' % url)
    # Periodic progress logging.
    if num % 100 == 0:
      cc.execute("""SELECT COUNT(*) FROM git_commit""")
      LOGGER.debug("Rows in git_commit: %s", str(cc.fetchall()))
  # Deduplicate on the table's primary key (email, url, timestamp, type).
  unique_review_people_data = primary_key_uniquifier(
      review_people_data, lambda x: (x[0], x[1], x[2], x[4]))
  return review_data, unique_review_people_data
def write_code_review_data_to_csv(cc, git_checkout_path, review_filename,
                                  review_people_filename):  # pragma: no cover
  """Collects review data and writes it to two CSV files for bulk loading.

  Args:
    cc: a cursor for the Cloud SQL connection
    git_checkout_path: path to a local git checkout used to resolve reviews
    review_filename: output CSV path for 'review' rows
    review_people_filename: output CSV path for 'review_people' rows
  """
  LOGGER.debug('Starting write_code_review_data_to_csv() for %s and %s',
               review_filename, review_people_filename)
  csv_review_data, csv_review_people_data = get_code_review_data(
      cc, git_checkout_path)
  LOGGER.debug('Writing %s ...', review_filename)
  with open(review_filename, 'w') as f:
    # Create the writer once instead of once per row (was csv.writer(f)
    # inside the loop).
    writer = csv.writer(f)
    for row in csv_review_data:
      # review_url|url_exists|request_timestamp|patchset_committed|
      # patchset_still_exists|reverted|project_prj_id
      # VARCHAR(200)|TINYINT|TIMESTAMP|TIMESTAMP|TINYINT|TINYINT|INT
      writer.writerow(row)
  LOGGER.debug('Done writing %s', review_filename)
  LOGGER.debug('Writing %s ...', review_people_filename)
  with open(review_people_filename, 'w') as f:
    writer = csv.writer(f)
    for row in csv_review_people_data:
      # people_email_address|review_url|timestamp|request_timestamp|type
      # VARCHAR(200)|VARCHAR(200)|TIMESTAMP|TIMESTAMP|VARCHAR(10)
      # type: author, reviewer, cc, lgtm, or not lgtm
      writer.writerow(row)
  LOGGER.debug('Done writing %s', review_people_filename)
def upload_to_sql(cc, git_checkout_path, review_filename,
                  review_people_filename):  # pragma: no cover
  """Writes review information on suspicious commits to a Cloud SQL database

  Args:
    cc: a cursor for the Cloud SQL connection
    git_checkout_path: path to a local git checkout used to resolve reviews
    review_filename: path of the CSV file holding 'review' rows
    review_people_filename: path of the CSV file holding 'review_people' rows
  """
  LOGGER.debug('Starting upload_to_sql().')
  # Dump the data to CSV first, then bulk-load each file into its table.
  write_code_review_data_to_csv(cc, git_checkout_path, review_filename,
                                review_people_filename)
  csql.write_to_sql_table(cc, review_filename, 'review')
  csql.write_to_sql_table(cc, review_people_filename, 'review_people')
  LOGGER.debug('Finished upload_to_sql().')
def get_tbr_no_lgtm(cc, commit_people_type):
  """Returns commits whose review received no 'lgtm', newest first.

  Args:
    cc: a cursor for the Cloud SQL connection
    commit_people_type(str): commit_people.type value to filter rows on
        (e.g. 'author')

  Returns:
    List of [review_url, timestamp_str, subject, email, hash] rows sorted
    by timestamp, newest first.
  """
  # BUG FIX: the WHERE clause is now parenthesized — SQL evaluates AND
  # before OR, so the type filter previously applied only to the IS NULL
  # branch. Also parameterized to avoid SQL injection via
  # commit_people_type.
  cc.execute("""SELECT review.review_url, git_commit.timestamp,
      git_commit.subject, commit_people.people_email_address, git_commit.hash
      FROM review
      INNER JOIN git_commit
      ON review.review_url = git_commit.review_url
      INNER JOIN commit_people
      ON commit_people.git_commit_hash = git_commit.hash
      LEFT JOIN (
        SELECT review_url, COUNT(*)
        AS c
        FROM review_people
        WHERE type = 'lgtm'
        GROUP BY review_url) lgtm_count
      ON review.review_url = lgtm_count.review_url
      WHERE (lgtm_count.c = 0 OR lgtm_count.c IS NULL)
      AND commit_people.type = %s""", (commit_people_type,))
  data_all = cc.fetchall()
  formatted_data = []
  for data in data_all:
    subject = data[2]
    formatted_data.append([data[0], data[1].strftime("%Y-%m-%d %H:%M:%S"),
                           subject.replace('-', ' '), data[3], data[4]])
  # Sort on the formatted timestamp string (lexicographic == chronological
  # for this format), newest first.
  return sorted(formatted_data, key=lambda x: x[1], reverse=True)
|
<reponame>M155K4R4/Tensorflow
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for state management."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy
from tensorflow.contrib.timeseries.python.timeseries import feature_keys
from tensorflow.contrib.timeseries.python.timeseries import input_pipeline
from tensorflow.contrib.timeseries.python.timeseries import math_utils
from tensorflow.contrib.timeseries.python.timeseries import model
from tensorflow.contrib.timeseries.python.timeseries import state_management
from tensorflow.contrib.timeseries.python.timeseries import test_utils
from tensorflow.python.estimator import estimator_lib
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import coordinator as coordinator_lib
from tensorflow.python.training import queue_runner_impl
from tensorflow.python.training import training as train
from tensorflow.python.util import nest
class StubTimeSeriesModel(model.TimeSeriesModel):
  """A minimal single-feature TimeSeriesModel for exercising state chaining.

  Only `per_step_batch_loss` is implemented; `get_batch_loss` raises, so the
  model can only be driven through a state manager.
  """

  def __init__(self, correct_offset=False):
    # When True, per_step_batch_loss shifts the posteriors so the first
    # batch element's final posterior matches its observed end value.
    self._correct_offset = correct_offset
    super(StubTimeSeriesModel, self).__init__(1)

  def initialize_graph(self, input_statistics=None):
    """Creates the scalar `prior` variable used as the model's start state."""
    super(StubTimeSeriesModel, self).initialize_graph(
        input_statistics=input_statistics)
    self.prior_var = variable_scope.get_variable(
        "prior", [], initializer=init_ops.constant_initializer(0.))

  def generate(self, *args):
    # Not exercised by these tests.
    pass

  def predict(self, *args):
    # Not exercised by these tests.
    pass

  def get_start_state(self):
    # State is (time the prior refers to, prior value).
    return (array_ops.zeros([], dtype=dtypes.int64), self.prior_var)

  def get_batch_loss(self, features, mode, state):
    raise NotImplementedError("This stub only supports managed state.")

  def per_step_batch_loss(self, features, mode, state):
    """Returns (loss, posteriors, predictions) for one chunk.

    The posterior grows linearly with time from the time-corrected prior,
    so with chained state the model reproduces a linear series exactly.
    """
    times = features[feature_keys.TrainEvalFeatures.TIMES]
    values = features[feature_keys.TrainEvalFeatures.VALUES]
    (priors_from_time, prior) = state
    # Advance the prior by the elapsed time since it was saved.
    time_corrected_priors = prior + math_ops.cast(
        math_utils.batch_start_time(times) - priors_from_time, dtypes.float32)
    posterior = time_corrected_priors[:, None] + math_ops.cast(
        times - math_utils.batch_start_time(times)[:, None], dtypes.float32)
    # Last observed value of each batch element.
    batch_end_values = array_ops.squeeze(
        array_ops.slice(values, [0, array_ops.shape(times)[1] - 1, 0],
                        [-1, 1, -1]),
        squeeze_dims=[1, 2])
    # A pretty odd but easy to think about loss: L1 loss on the batch end
    # values.
    loss = math_ops.reduce_sum(
        math_ops.abs(
            array_ops.reshape(posterior[:, -1], [-1]) - batch_end_values))
    if self._correct_offset:
      posterior += batch_end_values[0] - posterior[0, -1]
    posteriors = (times, posterior)
    return loss, posteriors, {"dummy_predictions": array_ops.zeros_like(values)}
class ChainingStateManagerTest(test.TestCase):
  """Tests that ChainingStateManager passes model state between chunks."""

  def _make_test_data(self, length, cut_start, cut_end, offset, step=1):
    """Builds a linear series; optionally removes [cut_start, cut_end).

    Times are step * [0..length); values are offset + step * [0..length),
    so value == time + offset everywhere.
    """
    times_full = step * numpy.arange(length, dtype=numpy.int64)
    values_full = offset + step * numpy.arange(length, dtype=numpy.float32)
    if cut_start is not None:
      times = numpy.concatenate((times_full[:cut_start],
                                 times_full[cut_end:]))
      values = numpy.concatenate((values_full[:cut_start],
                                  values_full[cut_end:]))
    else:
      times = times_full
      values = values_full
    return {
        feature_keys.TrainEvalFeatures.TIMES: times,
        feature_keys.TrainEvalFeatures.VALUES: values
    }

  def _test_initialization(self, warmup_iterations, batch_size):
    """Evaluates the chained loss after `warmup_iterations` warm-up steps.

    batch_size == -1 selects the deterministic all-window input; otherwise
    a random-window input with the given batch size is used.
    """
    stub_model = StubTimeSeriesModel()
    data = self._make_test_data(length=20, cut_start=None, cut_end=None,
                                offset=0.)
    if batch_size == -1:
      input_fn = test_utils.AllWindowInputFn(
          input_pipeline.NumpyReader(data), window_size=10)
    else:
      input_fn = input_pipeline.RandomWindowInputFn(
          input_pipeline.NumpyReader(data),
          window_size=10,
          batch_size=batch_size)
    chainer = state_management.ChainingStateManager(
        state_saving_interval=1)
    features, _ = input_fn()
    stub_model.initialize_graph()
    chainer.initialize_graph(model=stub_model)
    model_outputs = chainer.define_loss(
        model=stub_model, features=features, mode=estimator_lib.ModeKeys.TRAIN)
    with self.test_session() as session:
      variables.global_variables_initializer().run()
      coordinator = coordinator_lib.Coordinator()
      queue_runner_impl.start_queue_runners(session, coord=coordinator)
      for _ in range(warmup_iterations):
        # Warm up saved state
        model_outputs.loss.eval()
      outputs = model_outputs.loss.eval()
      coordinator.request_stop()
      coordinator.join()
      return outputs

  def test_zero_initializations(self):
    # Even with no initialization, we are imputing values up to each chunk,
    # which in this case gives exact values.
    self.assertEqual(0., self._test_initialization(
        warmup_iterations=0, batch_size=-1))

  def test_one_initializations(self):
    # Further initialization should still be correct, if redundant
    self.assertEqual(0., self._test_initialization(
        warmup_iterations=1, batch_size=-1))

  def test_stochastic_batch(self):
    # It shouldn't matter whether we're using a full deterministic batch or a
    # smaller stochastic batch.
    self.assertEqual(0., self._test_initialization(
        warmup_iterations=1, batch_size=5))

  def _test_pass_to_next(self, read_offset, step, correct_offset):
    """Trains on a prefix, then evaluates a shifted copy of the series.

    Returns the loss on the shifted data, which reflects how well saved
    state carried over from the first dataset to the second.
    """
    stub_model = StubTimeSeriesModel(correct_offset=correct_offset)
    data = self._make_test_data(
        length=100 + read_offset, cut_start=None, cut_end=None, offset=100.,
        step=step)
    init_input_fn = input_pipeline.WholeDatasetInputFn(
        input_pipeline.NumpyReader(
            {k: v[:-read_offset] for k, v in data.items()}))
    result_input_fn = input_pipeline.WholeDatasetInputFn(
        input_pipeline.NumpyReader(
            {k: v[read_offset:] for k, v in data.items()}))
    chainer = state_management.ChainingStateManager(
        state_saving_interval=1)
    stub_model.initialize_graph()
    chainer.initialize_graph(model=stub_model)
    init_model_outputs = chainer.define_loss(
        model=stub_model, features=init_input_fn()[0],
        mode=estimator_lib.ModeKeys.TRAIN)
    result_model_outputs = chainer.define_loss(
        model=stub_model, features=result_input_fn()[0],
        mode=estimator_lib.ModeKeys.TRAIN)
    with self.test_session() as session:
      variables.global_variables_initializer().run()
      coordinator = coordinator_lib.Coordinator()
      queue_runner_impl.start_queue_runners(session, coord=coordinator)
      # Evaluate the first dataset once to populate the saved state, then
      # measure the loss on the shifted dataset.
      init_model_outputs.loss.eval()
      returned_loss = result_model_outputs.loss.eval()
      coordinator.request_stop()
      coordinator.join()
      return returned_loss

  def test_pass_to_next_step_one_no_correction(self):
    self.assertEqual(100., self._test_pass_to_next(
        read_offset=1, step=1, correct_offset=False))

  def test_pass_to_next_step_one_with_correction(self):
    self.assertEqual(0., self._test_pass_to_next(
        read_offset=1, step=1, correct_offset=True))

  def test_pass_to_next_step_three_with_correction(self):
    self.assertEqual(0., self._test_pass_to_next(
        read_offset=1, step=3, correct_offset=True))

  def test_large_read_offset(self):
    self.assertEqual(0., self._test_pass_to_next(
        read_offset=50, step=20, correct_offset=True))

  def test_past_init_offset(self):
    self.assertEqual(100., self._test_pass_to_next(
        read_offset=100, step=20, correct_offset=True))

  def _test_missing_values(self, cut_start, cut_end, offset):
    """Evaluates the loss on a series with a gap cut out of the middle."""
    stub_model = StubTimeSeriesModel()
    data = self._make_test_data(
        length=100, cut_start=cut_start, cut_end=cut_end, offset=offset)
    input_fn = test_utils.AllWindowInputFn(
        input_pipeline.NumpyReader(data), window_size=10)
    chainer = state_management.ChainingStateManager(
        state_saving_interval=1)
    features, _ = input_fn()
    stub_model.initialize_graph()
    chainer.initialize_graph(model=stub_model)
    model_outputs = chainer.define_loss(
        model=stub_model, features=features, mode=estimator_lib.ModeKeys.TRAIN)
    with self.test_session() as session:
      variables.global_variables_initializer().run()
      coordinator = coordinator_lib.Coordinator()
      queue_runner_impl.start_queue_runners(session, coord=coordinator)
      for _ in range(10):
        model_outputs.loss.eval()
      returned_loss = model_outputs.loss.eval()
      coordinator.request_stop()
      coordinator.join()
      return returned_loss

  def test_missing_values_ten(self):
    # Each posterior should be off by 10 from the offset in the values. 90
    # values with a chunk size of 10 means 90 - 10 + 1 possible chunks.
    self.assertEqual((90 - 10 + 1) * 10, self._test_missing_values(
        cut_start=20, cut_end=30, offset=10.))

  def test_missing_values_five(self):
    self.assertEqual((95 - 10 + 1) * 10, self._test_missing_values(
        cut_start=15, cut_end=20, offset=10.))
class _StateOverrideModel(model.TimeSeriesModel):
  """Model whose loss passes its input state through unchanged.

  Used to verify that externally provided state (via STATE_TUPLE features)
  overrides the default start state and round-trips through a state manager.
  """

  def __init__(self):
    super(_StateOverrideModel, self).__init__(num_features=1)

  def generate(self, *args):
    # Not exercised by these tests.
    pass

  def predict(self, *args):
    # Not exercised by these tests.
    pass

  def get_start_state(self):
    # A nested default state, distinct from the test's override values so a
    # test can tell which one was actually used.
    return (constant_op.constant([20, 30, 40], dtype=dtypes.int64),
            (constant_op.constant(-10, dtype=dtypes.int64),
             constant_op.constant([30., 50.], dtype=dtypes.float64)))

  def get_batch_loss(self, features, mode, state):
    per_observation_loss, state, outputs = self.per_step_batch_loss(
        features, mode, state)
    # Keep only the final step of each state element.
    state = nest.map_structure(lambda element: element[:, -1], state)
    outputs["observed"] = features[feature_keys.TrainEvalFeatures.VALUES]
    return model.ModelOutputs(
        loss=per_observation_loss,
        end_state=state,
        predictions=outputs,
        prediction_times=features[feature_keys.TrainEvalFeatures.TIMES])

  def per_step_batch_loss(self, features, mode, state):
    """Returns a constant loss and echoes the input state back unchanged."""
    return (
        constant_op.constant(1.),
        # Assumes only one step: this is the per-step loss.
        nest.map_structure(
            lambda element: ops.convert_to_tensor(element)[:, None], state),
        {
            "dummy_predictions":
                array_ops.zeros_like(
                    features[feature_keys.TrainEvalFeatures.VALUES])
        })
class _StateOverrideTest(test.TestCase):
  """Checks that state supplied through STATE_TUPLE features is respected."""

  def test_state_override(self):
    # Override state with a structure matching get_start_state()'s nesting.
    test_start_state = (numpy.array([[2, 3, 4]]), (numpy.array([2]),
                                                   numpy.array([[3., 5.]])))
    data = {
        feature_keys.FilteringFeatures.TIMES: numpy.arange(5),
        feature_keys.FilteringFeatures.VALUES: numpy.zeros(shape=[5, 3])
    }
    features, _ = input_pipeline.WholeDatasetInputFn(
        input_pipeline.NumpyReader(data))()
    features[feature_keys.FilteringFeatures.STATE_TUPLE] = test_start_state
    stub_model = _StateOverrideModel()
    chainer = state_management.ChainingStateManager()
    stub_model.initialize_graph()
    chainer.initialize_graph(model=stub_model)
    model_outputs = chainer.define_loss(
        model=stub_model, features=features, mode=estimator_lib.ModeKeys.EVAL)
    with train.MonitoredSession() as session:
      end_state = session.run(model_outputs.end_state)
    # The pass-through model must return exactly the overridden state.
    nest.assert_same_structure(test_start_state, end_state)
    for expected, received in zip(
        nest.flatten(test_start_state), nest.flatten(end_state)):
      self.assertAllEqual(expected, received)
if __name__ == "__main__":
  # Run the test suite when this module is executed directly.
  test.main()
|
"""Load dataset and examples.
This module loads different datasets for unit-testing or tutorial purposes,
either theoretical-synthetic ones or real datasets from scikit-learn
https://scikit-learn.org/stable/datasets/index.html
* dataset = 1 load iris dataset
* dataset = 2 load Boston house prices
* dataset = 3 load DIABETES dataset
* dataset = 4 CAUSAL Inference data challenge
http://www.causality.inf.ethz.ch/data/LUCAS.html
* dataset = 5 Borromean
* dataset = 6 mnist digit dataset
"""
import matplotlib.pyplot as plt
def load_data_sets(dataset_type, plot_data=False):
    """Loading example dataset.

    Parameters
    ----------
    dataset_type : int
        Dataset type to load. Use either :

            * 1 : iris dataset
            * 2 : boston dataset
            * 3 : diabete dataset
            * 4 : causal inference data challenge
            * 5 : Borromean case I_1 are 1 bit (max: "random") I_2 are 0 bit
              (min: independent) I_3 is -1 bit
            * 6 : mnist digit dataset
    plot_data : bool | False
        Plot the data (True)

    Returns
    -------
    dataset : array_like
        Array of data
    nb_of_values : int
        Number of values
    """
    # BUG FIX: `np` was used below (Borromean case) but numpy was never
    # imported anywhere in this module.
    import numpy as np
    # Heavy third-party imports are done lazily inside each branch so that
    # e.g. the synthetic case (5) works without pandas/seaborn/scikit-learn.
    if dataset_type == 1:  # Iris
        import pandas as pd
        import seaborn as sns
        from sklearn.datasets import load_iris
        dataset = load_iris()
        nb_of_values = 9
        dataset_df = pd.DataFrame(dataset.data, columns=dataset.feature_names)
        dataset_df['species'] = pd.Series(dataset.target).map(
            dict(zip(range(3), dataset.target_names)))
        if plot_data:
            sns.pairplot(dataset_df, hue='species')
            plt.show()
        dataset = dataset.data
    elif dataset_type == 2:  # Boston
        # NOTE(review): load_boston was deprecated in scikit-learn 1.0 and
        # removed in 1.2 — confirm the pinned scikit-learn version.
        from sklearn.datasets import load_boston
        dataset = load_boston()
        nb_of_values = 9
        dataset = dataset.data
    elif dataset_type == 3:  # diabetes
        from sklearn.datasets import load_diabetes
        dataset = load_diabetes()
        nb_of_values = 9
        dataset = dataset.data
    elif dataset_type == 4:  # causal inference
        import pandas as pd
        # csv to download at http://www.causality.inf.ethz.ch/data/LUCAS.html
        # NOTE(review): hard-coded local path; parameterize if reused.
        dataset = pd.read_csv(r"/home/pierre/Documents/Data/lucas0_train.csv")
        print(dataset.columns)
        print(dataset.shape)
        dataset = dataset.to_numpy()
        nb_of_values = 2
    elif dataset_type == 5:  # borromean case
        # Only the 2-valued variant is active; the 3- and 4-valued tables
        # are kept for manual experimentation (edit nb_of_values to use).
        nb_of_values = 2
        if nb_of_values == 2:
            dataset = np.array([[0, 0, 1],
                                [0, 1, 0],
                                [1, 0, 0],
                                [1, 1, 1]])
        elif nb_of_values == 3:
            dataset = np.array([[0, 0, 0],
                                [1, 1, 0],
                                [2, 2, 0],
                                [0, 1, 1],
                                [1, 2, 1],
                                [2, 0, 1],
                                [0, 2, 2],
                                [1, 0, 2],
                                [2, 1, 2]])
        elif nb_of_values == 4:
            dataset = np.array([[3, 0, 0],
                                [2, 1, 0],
                                [1, 2, 0],
                                [0, 3, 0],
                                [0, 0, 1],
                                [1, 3, 1],
                                [2, 2, 1],
                                [3, 1, 1],
                                [1, 0, 2],
                                [0, 1, 2],
                                [2, 3, 2],
                                [3, 2, 2],
                                [0, 2, 3],
                                [1, 1, 3],
                                [2, 0, 3],
                                [3, 3, 3]])
    elif dataset_type == 6:  # mnist digit dataset
        from sklearn.datasets import load_digits
        dataset = load_digits()
        print(dataset.DESCR)
        fig, ax_array = plt.subplots(20, 20)
        axes = ax_array.flatten()
        for i, ax in enumerate(axes):
            ax.imshow(dataset.images[i], cmap='gray_r')
        plt.setp(axes, xticks=[], yticks=[], frame_on=False)
        plt.tight_layout(h_pad=0.5, w_pad=0.01)
        nb_of_values = 17
        dataset = dataset.data
        # plt.show() is only called on branches that actually draw a figure
        # (previously it ran unconditionally at the end).
        plt.show()
    return dataset, nb_of_values
|
<reponame>bfloch/tink<filename>python/tink/mac/mac_key_manager_test.py
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tink.python.tink.mac_key_manager."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from tink.proto import common_pb2
from tink.proto import hmac_pb2
from tink.proto import tink_pb2
from tink import core
from tink import tink_config
from tink.mac import mac
from tink.mac import mac_key_manager
def setUpModule():
  # Register all Tink key managers once before any test in this module runs.
  tink_config.register()
class MacKeyManagerTest(absltest.TestCase):
  """Tests for the HMAC key manager wrapped from the C++ registry."""

  def setUp(self):
    super(MacKeyManagerTest, self).setUp()
    self.key_manager = mac_key_manager.from_cc_registry(
        'type.googleapis.com/google.crypto.tink.HmacKey')

  def new_hmac_key_template(self, hash_type, tag_size, key_size):
    """Builds a KeyTemplate describing an HmacKey with the given params."""
    hmac_format = hmac_pb2.HmacKeyFormat()
    hmac_format.key_size = key_size
    hmac_format.params.hash = hash_type
    hmac_format.params.tag_size = tag_size
    template = tink_pb2.KeyTemplate()
    template.type_url = ('type.googleapis.com/google.crypto.tink.HmacKey')
    template.value = hmac_format.SerializeToString()
    return template

  def test_primitive_class(self):
    self.assertEqual(self.key_manager.primitive_class(), mac.Mac)

  def test_key_type(self):
    self.assertEqual(self.key_manager.key_type(),
                     'type.googleapis.com/google.crypto.tink.HmacKey')

  def test_new_key_data(self):
    template = self.new_hmac_key_template(common_pb2.SHA256, 24, 16)
    key_data = self.key_manager.new_key_data(template)
    self.assertEqual(key_data.type_url, self.key_manager.key_type())
    parsed_key = hmac_pb2.HmacKey()
    parsed_key.ParseFromString(key_data.value)
    self.assertEqual(parsed_key.version, 0)
    self.assertEqual(parsed_key.params.hash, common_pb2.SHA256)
    self.assertEqual(parsed_key.params.tag_size, 24)
    self.assertLen(parsed_key.key_value, 16)

  def test_invalid_params_throw_exception(self):
    # A 9-byte tag is below the minimum HmacParams allows.
    bad_template = self.new_hmac_key_template(common_pb2.SHA256, 9, 16)
    with self.assertRaisesRegex(core.TinkError, 'Invalid HmacParams'):
      self.key_manager.new_key_data(bad_template)

  def test_mac_success(self):
    hmac_primitive = self.key_manager.primitive(
        self.key_manager.new_key_data(
            self.new_hmac_key_template(common_pb2.SHA256, 24, 16)))
    message = b'data'
    tag = hmac_primitive.compute_mac(message)
    self.assertLen(tag, 24)
    # No exception raised, no return value.
    self.assertIsNone(hmac_primitive.verify_mac(tag, message))

  def test_mac_wrong(self):
    hmac_primitive = self.key_manager.primitive(
        self.key_manager.new_key_data(
            self.new_hmac_key_template(common_pb2.SHA256, 16, 16)))
    with self.assertRaisesRegex(core.TinkError, 'verification failed'):
      hmac_primitive.verify_mac(b'0123456789ABCDEF', b'data')
if __name__ == '__main__':
  # Run the test suite when this module is executed directly.
  absltest.main()
|
<reponame>ringw/reikna
"""
This example demonstrates how to implement a FFT frequency shift (``reikna.fft.FFTShift``)
as a transformation instead of a computation. A peculiarity of this transformation
is the repositioning of elements it performs (as opposed to more common
``load_same``/``store_same`` pair which keep the element order).
It makes this transformation unsafe to use for inplace kernels.
It also contains some performance tests
that compare the speed of FFT + shift as two separate computations and as a
single computation with a transformation against ``numpy`` implementation.
"""
import time
import numpy
from reikna.cluda import any_api
from reikna.fft import FFT, FFTShift
import reikna.cluda.dtypes as dtypes
from reikna.core import Transformation, Parameter, Annotation, Type
def fftshift(arr_t, axes=None):
    """
    Returns a frequency shift transformation (1 output, 1 input) that
    works as ``output = numpy.fft.fftshift(input, axes=axes)``.

    .. warning::

        Involves repositioning of the elements, so cannot be used on inplace kernels.
    """
    # Mirror numpy.fft.fftshift's default: shift over every axis.
    if axes is None:
        axes = tuple(range(len(arr_t.shape)))
    else:
        axes = tuple(sorted(axes))
    # The code is adapted from the FFTShift computation template
    # (at the moment of the writing); the %if branches below handle both
    # even and odd sizes along each shifted axis.
    # Note the use of ``idxs`` template parameter to get access to element indices.
    return Transformation(
        [Parameter('output', Annotation(arr_t, 'o')),
        Parameter('input', Annotation(arr_t, 'i'))],
        """
        <%
            dimensions = len(output.shape)
            new_idx_names = ['new_idx' + str(i) for i in range(dimensions)]
        %>
        %for dim in range(dimensions):
        VSIZE_T ${new_idx_names[dim]} =
            ${idxs[dim]}
            %if dim in axes:
            %if output.shape[dim] % 2 == 0:
            + (${idxs[dim]} < ${output.shape[dim] // 2} ?
                ${output.shape[dim] // 2} :
                ${-output.shape[dim] // 2})
            %else:
            + (${idxs[dim]} <= ${output.shape[dim] // 2} ?
                ${output.shape[dim] // 2} :
                ${-(output.shape[dim] // 2 + 1)})
            %endif
            %endif
            ;
        %endfor
        ${output.ctype} val = ${input.load_same};
        ${output.store_idx}(${', '.join(new_idx_names)}, val);
        """,
        connectors=['input'],
        render_kwds=dict(axes=axes))
def run_test(thr, shape, dtype, axes=None):
    """
    Benchmark FFT + frequency shift on CLUDA thread ``thr``.

    Times (1) FFT and FFTShift as two separate GPU computations,
    (2) a single FFT computation with the ``fftshift`` transformation
    attached to its output, and (3) the numpy reference, checks that all
    GPU results match numpy, and returns the wall-clock timings as a dict.
    """
    data = numpy.random.normal(size=shape).astype(dtype)

    fft = FFT(data, axes=axes)
    fftc = fft.compile(thr)
    shift = FFTShift(data, axes=axes)
    shiftc = shift.compile(thr)

    # FFT + shift as two separate computations

    # Individual timings of each computation (run inplace on the device array).
    data_dev = thr.to_device(data)
    t_start = time.time()
    fftc(data_dev, data_dev)
    thr.synchronize()  # kernel launches are asynchronous; wait before reading the clock
    t_gpu_fft = time.time() - t_start

    t_start = time.time()
    shiftc(data_dev, data_dev)
    thr.synchronize()
    t_gpu_shift = time.time() - t_start

    # Timing of the two computations back to back, starting from fresh input.
    data_dev = thr.to_device(data)
    t_start = time.time()
    fftc(data_dev, data_dev)
    shiftc(data_dev, data_dev)
    thr.synchronize()
    t_gpu_separate = time.time() - t_start

    data_gpu = data_dev.get()

    # FFT + shift as a computation with a transformation
    data_dev = thr.to_device(data)
    # a separate output array to avoid unsafety of the shift transformation
    res_dev = thr.empty_like(data_dev)

    shift_tr = fftshift(data, axes=axes)
    fft2 = fft.parameter.output.connect(shift_tr, shift_tr.input, new_output=shift_tr.output)
    fft2c = fft2.compile(thr)

    t_start = time.time()
    fft2c(res_dev, data_dev)
    thr.synchronize()
    t_gpu_combined = time.time() - t_start

    # Reference calculation with numpy
    t_start = time.time()
    numpy.fft.fftn(data, axes=axes)
    t_cpu_fft = time.time() - t_start

    t_start = time.time()
    numpy.fft.fftshift(data, axes=axes)
    t_cpu_shift = time.time() - t_start

    t_start = time.time()
    data_ref = numpy.fft.fftn(data, axes=axes)
    data_ref = numpy.fft.fftshift(data_ref, axes=axes)
    t_cpu_all = time.time() - t_start

    data_gpu2 = res_dev.get()

    # Checking that the results are correct
    # (note: this will require relaxing the tolerances
    # if complex64 is used instead of complex128)
    assert numpy.allclose(data_ref, data_gpu)
    assert numpy.allclose(data_ref, data_gpu2)

    return dict(
        t_gpu_fft=t_gpu_fft,
        t_gpu_shift=t_gpu_shift,
        t_gpu_separate=t_gpu_separate,
        t_gpu_combined=t_gpu_combined,
        t_cpu_fft=t_cpu_fft,
        t_cpu_shift=t_cpu_shift,
        t_cpu_all=t_cpu_all)
def run_tests(thr, shape, dtype, axes=None, attempts=10):
    """Run ``run_test`` several times and keep the best (minimum) of each timing."""
    all_timings = []
    for _ in range(attempts):
        all_timings.append(run_test(thr, shape, dtype, axes=axes))

    best = {}
    for key in all_timings[0]:
        best[key] = min(timing[key] for timing in all_timings)
    return best
if __name__ == '__main__':
    # Pick whichever CLUDA backend is available (CUDA or OpenCL)
    # and create a computation thread on it.
    api = any_api()
    thr = api.Thread.create()

    # complex128 keeps the default numpy.allclose tolerances in run_test valid
    # (see the note there about complex64).
    shape = (1024, 1024)
    dtype = numpy.complex128
    axes = (0, 1)

    results = run_tests(thr, shape, dtype, axes=axes)

    print('device:', thr._device.name)
    print('shape:', shape)
    print('dtype:', dtype)
    print('axes:', axes)

    # Raw best-of-N timings collected by run_tests.
    for key, val in results.items():
        print(key, ':', val)

    # GPU-vs-numpy speedup ratios for each measured configuration.
    print(
        "Speedup for a separate calculation:",
        results['t_cpu_all'] / results['t_gpu_separate'])
    print(
        "Speedup for a combined calculation:",
        results['t_cpu_all'] / results['t_gpu_combined'])
    print(
        "Speedup for fft alone:",
        results['t_cpu_fft'] / results['t_gpu_fft'])
    print(
        "Speedup for shift alone:",
        results['t_cpu_shift'] / results['t_gpu_shift'])
|
import os
import pandas as pd
import pyspark
from pyspark.ml.classification import LogisticRegression
from pyspark.ml.feature import VectorAssembler
from pyspark.ml.pipeline import Pipeline
from pyspark.version import __version__ as pyspark_version
import pytest
from sklearn import datasets
import shutil
from mlflow import pyfunc
from mlflow import spark as sparkm
from mlflow import tracking
from mlflow.utils.environment import _mlflow_conda_env
from tests.helper_functions import score_model_in_sagemaker_docker_container
@pytest.mark.large
def test_model_export(tmpdir):
    """Round-trip a Spark ML pipeline through the mlflow ``spark`` flavor.

    Trains a VectorAssembler + LogisticRegression pipeline on iris, saves it
    with ``sparkm.save_model``, then checks that predictions are identical when
    the model is reloaded (a) as a Spark model, (b) as a pyfunc, and
    (c) scored inside a SageMaker docker container.
    """
    conda_env = os.path.join(str(tmpdir), "conda_env.yml")
    # Pin pyspark in the model's conda env to the version used for training.
    _mlflow_conda_env(conda_env, additional_pip_deps=["pyspark=={}".format(pyspark_version)])
    iris = datasets.load_iris()
    X = iris.data  # all four iris features are used as model inputs
    y = iris.target
    pandas_df = pd.DataFrame(X, columns=iris.feature_names)
    pandas_df['label'] = pd.Series(y)
    spark_session = pyspark.sql.SparkSession.builder \
        .config(key="spark_session.python.worker.reuse", value=True) \
        .master("local-cluster[2, 1, 1024]") \
        .getOrCreate()
    spark_df = spark_session.createDataFrame(pandas_df)
    model_path = tmpdir.mkdir("model")
    assembler = VectorAssembler(inputCols=iris.feature_names, outputCol="features")
    lr = LogisticRegression(maxIter=50, regParam=0.1, elasticNetParam=0.8)
    pipeline = Pipeline(stages=[assembler, lr])
    # Fit the model
    model = pipeline.fit(spark_df)
    # Baseline predictions on the training data, before any save/load.
    preds_df = model.transform(spark_df)
    preds1 = [x.prediction for x in preds_df.select("prediction").collect()]
    sparkm.save_model(model, path=str(model_path), conda_env=conda_env)
    # (a) reload as a native Spark model
    reloaded_model = sparkm.load_model(path=str(model_path))
    preds_df_1 = reloaded_model.transform(spark_df)
    preds1_1 = [x.prediction for x in preds_df_1.select("prediction").collect()]
    assert preds1 == preds1_1
    # (b) reload through the generic pyfunc interface
    m = pyfunc.load_pyfunc(str(model_path))
    preds2 = m.predict(pandas_df)
    assert preds1 == preds2
    # (c) score inside the SageMaker docker container (requires docker;
    # hence the @pytest.mark.large marker)
    preds3 = score_model_in_sagemaker_docker_container(model_path=str(model_path), data=pandas_df)
    assert preds1 == preds3
@pytest.mark.large
def test_model_log(tmpdir):
    """Log a Spark ML pipeline to an mlflow tracking store and reload it.

    Exercises ``sparkm.log_model`` both inside an explicitly started run and
    without one (log_model is expected to start a run automatically), and
    verifies pyfunc and Spark reloads reproduce the original predictions.
    """
    conda_env = os.path.join(str(tmpdir), "conda_env.yml")
    # Pin pyspark in the model's conda env to the version used for training.
    _mlflow_conda_env(conda_env, additional_pip_deps=["pyspark=={}".format(pyspark_version)])
    iris = datasets.load_iris()
    X = iris.data  # all four iris features are used as model inputs
    y = iris.target
    pandas_df = pd.DataFrame(X, columns=iris.feature_names)
    pandas_df['label'] = pd.Series(y)
    spark_session = pyspark.sql.SparkSession.builder \
        .config(key="spark_session.python.worker.reuse", value=True) \
        .master("local-cluster[2, 1, 1024]") \
        .getOrCreate()
    spark_df = spark_session.createDataFrame(pandas_df)
    model_path = tmpdir.mkdir("model")
    assembler = VectorAssembler(inputCols=iris.feature_names, outputCol="features")
    lr = LogisticRegression(maxIter=50, regParam=0.1, elasticNetParam=0.8)
    pipeline = Pipeline(stages=[assembler, lr])
    # Fit the model
    model = pipeline.fit(spark_df)
    # Baseline predictions on the training data, before logging.
    preds_df = model.transform(spark_df)
    preds1 = [x.prediction for x in preds_df.select("prediction").collect()]
    old_tracking_uri = tracking.get_tracking_uri()
    # should_start_run tests whether or not calling log_model() automatically starts a run.
    for should_start_run in [False, True]:
        try:
            # Fresh file-based tracking store per iteration; removed again in
            # the finally block so the second mkdir("mlruns") succeeds.
            tracking_dir = os.path.abspath(str(tmpdir.mkdir("mlruns")))
            tracking.set_tracking_uri("file://%s" % tracking_dir)
            if should_start_run:
                tracking.start_run()
            sparkm.log_model(artifact_path="model", spark_model=model)
            run_id = tracking.active_run().info.run_uuid
            # Reload through pyfunc and through the Spark flavor and compare.
            x = pyfunc.load_pyfunc("model", run_id=run_id)
            preds2 = x.predict(pandas_df)
            assert preds1 == preds2
            reloaded_model = sparkm.load_model("model", run_id=run_id)
            preds_df_1 = reloaded_model.transform(spark_df)
            preds3 = [x.prediction for x in preds_df_1.select("prediction").collect()]
            assert preds1 == preds3
        finally:
            # Restore global tracking state so other tests are unaffected.
            tracking.end_run()
            tracking.set_tracking_uri(old_tracking_uri)
            shutil.rmtree(tracking_dir)
|
<reponame>tomerhekredis/RedisGraph
from base import FlowTestsBase
import os
import sys
from RLTest import Env
from redisgraph import Graph, Node, Edge
redis_con = None
class test_v7_encode_decode(FlowTestsBase):
    """RDB encode/decode (v7 format) tests for graphs split over multiple virtual keys.

    The module is loaded with VKEY_MAX_ENTITY_COUNT=10, so any graph with more
    than 10 entities is serialized across several virtual "meta" keys. Each
    test saves and reloads the RDB via DEBUG RELOAD and checks the graph
    contents (and entity-id layout) survive the round trip.
    """
    def __init__(self):
        self.env = Env(decodeResponses=True, moduleArgs='VKEY_MAX_ENTITY_COUNT 10')
        global redis_con
        redis_con = self.env.getConnection()

    def test01_nodes_over_multiple_keys(self):
        """Nodes spread over multiple meta keys survive an RDB reload."""
        graph_name = "nodes_over_multiple_keys"
        redis_graph = Graph(graph_name, redis_con)
        # Create 21 nodes; with VKEY_MAX_ENTITY_COUNT=10 they span 3 meta keys.
        redis_graph.query("UNWIND range(0,20) as i CREATE (:Node {val:i})")
        # Return all the nodes, before and after saving & loading the RDB, and check equality
        query = "MATCH (n:Node) return n"
        expected = redis_graph.query(query)
        # Save RDB & Load from RDB
        redis_con.execute_command("DEBUG", "RELOAD")
        actual = redis_graph.query(query)
        self.env.assertEquals(expected.result_set, actual.result_set)

    def test02_no_compaction_on_nodes_delete(self):
        """Deleted node ids are preserved across a reload (no id compaction) and reused."""
        graph_name = "no_compaction_on_nodes_delete"
        redis_graph = Graph(graph_name, redis_con)
        # Create 21 nodes; with VKEY_MAX_ENTITY_COUNT=10 they span 3 meta keys.
        redis_graph.query("UNWIND range(0,20) as i CREATE (:Node)")
        # Return all the nodes, before and after saving & loading the RDB, and check equality
        query = "MATCH (n:Node) WITH n ORDER by id(n) return COLLECT(id(n))"
        expected_full_graph_nodes_id = redis_graph.query(query)
        # Delete 3 nodes.
        redis_graph.query("MATCH (n:Node) WHERE id(n) IN [7,14,20] DELETE n")
        expected_nodes_id_after_delete = redis_graph.query(query)
        # Save RDB & Load from RDB
        redis_con.execute_command("DEBUG", "RELOAD")
        actual = redis_graph.query(query)
        # Validate no compaction, all IDs are the same
        self.env.assertEquals(expected_nodes_id_after_delete.result_set, actual.result_set)
        # Validate reuse of node ids - create 3 nodes.
        redis_graph.query("UNWIND range (0,2) as i CREATE (:Node)")
        actual = redis_graph.query(query)
        self.env.assertEquals(expected_full_graph_nodes_id.result_set, actual.result_set)

    def test03_edges_over_multiple_keys(self):
        """Edges spread over multiple meta keys survive an RDB reload."""
        graph_name = "edges_over_multiple_keys"
        redis_graph = Graph(graph_name, redis_con)
        # Create 21 src->dest edges (63 entities total), spanning multiple meta keys.
        redis_graph.query("UNWIND range(0,20) as i CREATE (:Src)-[:R {val:i}]->(:Dest)")
        # Return all the edges, before and after saving & loading the RDB, and check equality
        query = "MATCH (:Src)-[e:R]->(:Dest) return e"
        expected = redis_graph.query(query)
        # Save RDB & Load from RDB
        redis_con.execute_command("DEBUG", "RELOAD")
        actual = redis_graph.query(query)
        self.env.assertEquals(expected.result_set, actual.result_set)

    def test04_no_compaction_on_edges_delete(self):
        """Deleted edge ids are preserved across a reload (no id compaction) and reused."""
        graph_name = "no_compaction_on_edges_delete"
        redis_graph = Graph(graph_name, redis_con)
        # Create 21 src->dest edges, spanning multiple meta keys.
        redis_graph.query("UNWIND range(0,20) as i CREATE (:Src)-[:R]->(:Dest)")
        # Return all the edges, before and after saving & loading the RDB, and check equality
        query = "MATCH (:Src)-[e:R]->(:Dest) WITH e ORDER by id(e) return COLLECT(id(e))"
        expected_full_graph_nodes_id = redis_graph.query(query)
        # Delete 3 edges.
        redis_graph.query("MATCH (:Src)-[e:R]->(:Dest) WHERE id(e) IN [7,14,20] DELETE e")
        expected_nodes_id_after_delete = redis_graph.query(query)
        # Save RDB & Load from RDB
        redis_con.execute_command("DEBUG", "RELOAD")
        actual = redis_graph.query(query)
        # Validate no compaction, all IDs are the same
        self.env.assertEquals(expected_nodes_id_after_delete.result_set, actual.result_set)
        # Validate reuse of edges ids - create 3 edges.
        redis_graph.query("UNWIND range (0,2) as i CREATE (:Src)-[:R]->(:Dest)")
        actual = redis_graph.query(query)
        self.env.assertEquals(expected_full_graph_nodes_id.result_set, actual.result_set)

    def test05_multiple_edges_over_multiple_keys(self):
        """Many parallel edges between one node pair survive an RDB reload."""
        graph_name = "multiple_edges_over_multiple_keys"
        redis_graph = Graph(graph_name, redis_con)
        # Create 21 parallel edges between a single src/dest pair,
        # spanning multiple meta keys.
        redis_graph.query("CREATE (n1:Src {val:1}), (n2:Dest {val:2}) WITH n1, n2 UNWIND range(0,20) as i CREATE (n1)-[:R {val:i}]->(n2)")
        # Return all the edges, before and after saving & loading the RDB, and check equality
        query = "MATCH (:Src)-[e:R]->(:Dest) return e"
        expected = redis_graph.query(query)
        # Save RDB & Load from RDB
        redis_con.execute_command("DEBUG", "RELOAD")
        actual = redis_graph.query(query)
        self.env.assertEquals(expected.result_set, actual.result_set)

    def test06_no_compaction_on_multiple_edges_delete(self):
        """Parallel-edge id layout is preserved across a reload and ids are reused."""
        graph_name = "no_compaction_on_multiple_edges_delete"
        redis_graph = Graph(graph_name, redis_con)
        # Create 21 parallel edges between a single src/dest pair,
        # spanning multiple meta keys.
        redis_graph.query("CREATE (n1:Src {val:1}), (n2:Dest {val:2}) WITH n1, n2 UNWIND range(0,20) as i CREATE (n1)-[:R]->(n2)")
        # Return all the edges, before and after saving & loading the RDB, and check equality
        query = "MATCH (:Src)-[e:R]->(:Dest) WITH e ORDER by id(e) return COLLECT(id(e))"
        expected_full_graph_nodes_id = redis_graph.query(query)
        # Delete 3 edges.
        redis_graph.query("MATCH (:Src)-[e:R]->(:Dest) WHERE id(e) IN [7,14,20] DELETE e")
        expected_nodes_id_after_delete = redis_graph.query(query)
        # Save RDB & Load from RDB
        redis_con.execute_command("DEBUG", "RELOAD")
        actual = redis_graph.query(query)
        # Validate no compaction, all IDs are the same
        self.env.assertEquals(expected_nodes_id_after_delete.result_set, actual.result_set)
        # Validate reuse of edges ids - create 3 edges.
        redis_graph.query("MATCH (n1:Src {val:1}), (n2:Dest {val:2}) WITH n1, n2 UNWIND range (0,2) as i CREATE (n1)-[:R]->(n2)")
        actual = redis_graph.query(query)
        self.env.assertEquals(expected_full_graph_nodes_id.result_set, actual.result_set)

    def test07_index_after_encode_decode_in_v7(self):
        """An index created before the reload is still used afterwards."""
        graph_name = "index_after_encode_decode_in_v7"
        redis_graph = Graph(graph_name, redis_con)
        redis_graph.query("CREATE INDEX ON :N(val)")
        # Verify indices exists.
        plan = redis_graph.execution_plan(
            "MATCH (n:N {val:1}) RETURN n")
        self.env.assertIn("Index Scan", plan)
        # Save RDB & Load from RDB
        redis_con.execute_command("DEBUG", "RELOAD")
        # Verify indices exists after loading RDB.
        plan = redis_graph.execution_plan(
            "MATCH (n:N {val:1}) RETURN n")
        self.env.assertIn("Index Scan", plan)

    def test08_multiple_graphs_with_index(self):
        """Indexes survive a reload when multi-key and single-key graphs coexist."""
        # Create a multi-key graph.
        graph1_name = "v7_graph_1"
        graph1 = Graph(graph1_name, redis_con)
        graph1.query("UNWIND range(0,21) AS i CREATE (a:L {v: i})-[:E]->(b:L2 {v: i})")
        # Create a single-key graph.
        graph2_name = "v7_graph_2"
        graph2 = Graph(graph2_name, redis_con)
        graph2.query("CREATE (a:L {v: 1})-[:E]->(b:L2 {v: 2})")
        # Add an index to the multi-key graph.
        graph1.query("CREATE INDEX ON :L(v)")
        # Save RDB and reload from RDB
        redis_con.execute_command("DEBUG", "RELOAD")
        # The load should be successful and the index should still be built.
        query = "MATCH (n:L {v:1}) RETURN n.v"
        plan = graph1.execution_plan(query)
        self.env.assertIn("Index Scan", plan)
        expected = [[1]]
        actual = graph1.query(query)
        self.env.assertEquals(actual.result_set, expected)
|
# Type-checking "pass" cases for numpy comparison operators.
# Every expression below must be accepted by a static type checker (and is
# also executable at runtime); the results are intentionally discarded.
# Scalars of each numeric kind are compared against each other, against
# Python builtins, against a read-only array (AR) and a plain tuple (SEQ).
import numpy as np

c16 = np.complex128()
f8 = np.float64()
i8 = np.int64()
u8 = np.uint64()

c8 = np.complex64()
f4 = np.float32()
i4 = np.int32()
u4 = np.uint32()

dt = np.datetime64(0, "D")
td = np.timedelta64(0, "D")

b_ = np.bool_()

b = bool()
c = complex()
f = float()
i = int()

AR = np.array([0], dtype=np.int64)
AR.setflags(write=False)  # comparisons must not require a writable array

SEQ = (0, 1, 2, 3, 4)

# Time structures

dt > dt

td > td
td > i
td > i4
td > i8
td > AR
td > SEQ

# boolean

b_ > b
b_ > b_
b_ > i
b_ > i8
b_ > i4
b_ > u8
b_ > u4
b_ > f
b_ > f8
b_ > f4
b_ > c
b_ > c16
b_ > c8
b_ > AR
b_ > SEQ

# Complex

c16 > c16
c16 > f8
c16 > i8
c16 > c8
c16 > f4
c16 > i4
c16 > b_
c16 > b
c16 > c
c16 > f
c16 > i
c16 > AR
c16 > SEQ

c16 > c16
f8 > c16
i8 > c16
c8 > c16
f4 > c16
i4 > c16
b_ > c16
b > c16
c > c16
f > c16
i > c16
AR > c16
SEQ > c16

c8 > c16
c8 > f8
c8 > i8
c8 > c8
c8 > f4
c8 > i4
c8 > b_
c8 > b
c8 > c
c8 > f
c8 > i
c8 > AR
c8 > SEQ

c16 > c8
f8 > c8
i8 > c8
c8 > c8
f4 > c8
i4 > c8
b_ > c8
b > c8
c > c8
f > c8
i > c8
AR > c8
SEQ > c8

# Float

f8 > f8
f8 > i8
f8 > f4
f8 > i4
f8 > b_
f8 > b
f8 > c
f8 > f
f8 > i
f8 > AR
f8 > SEQ

f8 > f8
i8 > f8
f4 > f8
i4 > f8
b_ > f8
b > f8
c > f8
f > f8
i > f8
AR > f8
SEQ > f8

f4 > f8
f4 > i8
f4 > f4
f4 > i4
f4 > b_
f4 > b
f4 > c
f4 > f
f4 > i
f4 > AR
f4 > SEQ

f8 > f4
i8 > f4
f4 > f4
i4 > f4
b_ > f4
b > f4
c > f4
f > f4
i > f4
AR > f4
SEQ > f4

# Int

i8 > i8
i8 > u8
i8 > i4
i8 > u4
i8 > b_
i8 > b
i8 > c
i8 > f
i8 > i
i8 > AR
i8 > SEQ

u8 > u8
u8 > i4
u8 > u4
u8 > b_
u8 > b
u8 > c
u8 > f
u8 > i
u8 > AR
u8 > SEQ

i8 > i8
u8 > i8
i4 > i8
u4 > i8
b_ > i8
b > i8
c > i8
f > i8
i > i8
AR > i8
SEQ > i8

u8 > u8
i4 > u8
u4 > u8
b_ > u8
b > u8
c > u8
f > u8
i > u8
AR > u8
SEQ > u8

i4 > i8
i4 > i4
i4 > i
i4 > b_
i4 > b
i4 > AR
i4 > SEQ

u4 > i8
u4 > i4
u4 > u8
u4 > u4
u4 > i
u4 > b_
u4 > b
u4 > AR
u4 > SEQ

i8 > i4
i4 > i4
i > i4
b_ > i4
b > i4
AR > i4
SEQ > i4

i8 > u4
i4 > u4
u8 > u4
u4 > u4
b_ > u4
b > u4
i > u4
AR > u4
SEQ > u4
|
#
# Copyright Cloudlab URV 2020
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import uuid
import pkgutil
import logging
from multiprocessing import Process, Queue
from threading import Thread
from cloudbutton.version import __version__
from cloudbutton.engine.utils import version_str, is_unix_system
from cloudbutton.engine.agent import function_handler
from cloudbutton.config import STORAGE_FOLDER, LOGS_PREFIX
logger = logging.getLogger(__name__)
class LocalhostBackend:
    """
    A wrap-up around Localhost multiprocessing APIs.

    Jobs submitted through :meth:`invoke` are placed on a shared queue and
    consumed by a pool of daemon workers started in the constructor —
    processes on Unix systems, threads elsewhere.
    """

    def __init__(self, local_config):
        """
        :param local_config: configuration dict; must contain a 'workers'
            entry giving the number of workers to spawn.
        """
        self.log_level = os.getenv('CLOUDBUTTON_LOGLEVEL')
        self.config = local_config
        self.name = 'local'
        self.alive = True
        self.queue = Queue()
        self.logs_dir = os.path.join(STORAGE_FOLDER, LOGS_PREFIX)
        self.num_workers = self.config['workers']
        self.workers = []

        # Use threads on non-Unix systems and processes otherwise; both
        # classes expose the same target/daemon/start API, so one loop
        # suffices (the original duplicated the spawn loop per branch).
        worker_class = Thread if not is_unix_system() else Process
        for worker_id in range(self.num_workers):
            p = worker_class(target=self._process_runner, args=(worker_id,))
            self.workers.append(p)
            p.daemon = True
            p.start()

        log_msg = 'PyWren v{} init for Localhost - Total workers: {}'.format(__version__, self.num_workers)
        logger.info(log_msg)
        if not self.log_level:
            print(log_msg)

    def _local_handler(self, event):
        """
        Handler to run local functions.

        Runs the function payload via ``function_handler`` under a fresh
        activation id, silencing stdout when no log level is configured.
        """
        if not self.log_level:
            old_stdout = sys.stdout
            sys.stdout = open(os.devnull, 'w')

        event['extra_env']['__PW_LOCAL_EXECUTION'] = 'True'
        act_id = str(uuid.uuid4()).replace('-', '')[:12]
        os.environ['__PW_ACTIVATION_ID'] = act_id

        try:
            function_handler(event)
        finally:
            # Restore (and close) stdout even if the handler raised, so a
            # failed job neither leaks the devnull handle nor leaves the
            # worker permanently silenced.
            if not self.log_level:
                sys.stdout.close()
                sys.stdout = old_stdout

    def _process_runner(self, worker_id):
        """
        Worker loop: consume events from the shared queue until told to stop.

        A ``None`` event is the shutdown sentinel (see ``__del__``).
        """
        logger.debug('Localhost worker process {} started'.format(worker_id))

        while self.alive:
            try:
                event = self.queue.get(block=True)
                if event is None:
                    break
                self._local_handler(event)
            except KeyboardInterrupt:
                break

        logger.debug('Localhost worker process {} stopped'.format(worker_id))

    def _generate_python_meta(self):
        """
        Extracts installed Python modules from the local machine
        """
        logger.debug("Extracting preinstalled Python modules...")
        runtime_meta = dict()
        mods = list(pkgutil.iter_modules())
        runtime_meta["preinstalls"] = [entry for entry in sorted([[mod, is_pkg] for _, mod, is_pkg in mods])]
        runtime_meta["python_ver"] = version_str(sys.version_info)

        return runtime_meta

    def invoke(self, runtime_name, memory, payload):
        """
        Invoke the function with the payload. runtime_name and memory
        are not used since it runs in the local machine.

        :return: a 12-character hexadecimal activation id for the queued job.
        """
        self.queue.put(payload)
        act_id = str(uuid.uuid4()).replace('-', '')[:12]
        return act_id

    def invoke_with_result(self, runtime_name, memory, payload=None):
        """
        Invoke waiting for a result. Never called in this case.

        ``payload`` defaults to an empty dict. (``None`` replaces the
        original mutable default argument ``{}``; the effective behavior
        for callers is unchanged.)
        """
        return self.invoke(runtime_name, memory, {} if payload is None else payload)

    def create_runtime(self, runtime_name, memory, timeout):
        """
        Extracts local python metadata. No need to create any runtime
        since it runs in the local machine
        """
        runtime_meta = self._generate_python_meta()

        return runtime_meta

    def build_runtime(self, runtime_name, dockerfile):
        """
        Pass. No need to build any runtime since it runs in the local machine
        """
        pass

    def delete_runtime(self, runtime_name, memory):
        """
        Pass. No runtime to delete since it runs in the local machine
        """
        pass

    def delete_all_runtimes(self):
        """
        Pass. No runtimes to delete since it runs in the local machine
        """
        pass

    def list_runtimes(self, runtime_name='all'):
        """
        Pass. No runtimes to list since it runs in the local machine
        """
        return []

    def get_runtime_key(self, runtime_name, runtime_memory):
        """
        Method that creates and returns the runtime key.
        Runtime keys are used to uniquely identify runtimes within the storage,
        in order to know what runtimes are installed and what not.
        """
        return os.path.join(self.name, runtime_name)

    def __del__(self):
        # Push one shutdown sentinel per worker so every worker loop exits.
        if self.alive:
            self.alive = False
            for worker in self.workers:
                self.queue.put(None)
|
<gh_stars>100-1000
from __future__ import print_function
import unittest
import numpy as np
import tensorflow as tf
from parameterized import parameterized
from . import run_on_rpc_and_cpu, assertAllClose, gradients
class TestOpGradients(unittest.TestCase):
    """Check that op gradients computed on the RPC device match the CPU.

    Each test builds a small graph inside ``func`` and returns numeric
    gradients via ``gradients.compute_gradient``; ``run_on_rpc_and_cpu``
    runs the same closure on both devices and the results are compared
    with ``assertAllClose``.
    """
    def setUp(self):
        # Each test constructs its graph from scratch in a clean default graph.
        tf.reset_default_graph()

    @parameterized.expand([(tf.int8,), (tf.int16,), (tf.int32,), (tf.int64,)])
    def test_multiply_int(self, dtype):
        """Gradient of elementwise multiply for integer dtypes."""
        def func():
            # Fortran order matches what compute_gradient expects for x_init_value.
            x_init = np.asarray(
                [[-9, -7, -5, -3, -1], [1, 3, 5, 7, 9]],
                dtype=dtype.as_numpy_dtype, order="F")
            x = tf.constant(
                [-9, -7, -5, -3, -1, 1, 3, 5, 7, 9],
                shape=[2, 5], dtype=dtype,
                name="x")
            y = tf.constant(
                [-9, -7, -5, -3, -1, 1, 3, 5, 7, 9],
                shape=[2, 5], dtype=dtype,
                name="y")
            z = tf.multiply(x, y, name="mul_test")
            return gradients.compute_gradient(x, [2, 5], z, [2, 5], x_init_value=x_init)
        actual, expected = run_on_rpc_and_cpu(func)
        assertAllClose(actual, expected)

    @parameterized.expand([(tf.float32,), (tf.float64,), (tf.complex64,), (tf.complex128,)])
    def test_multiply(self, dtype):
        """Gradient of elementwise multiply for float/complex dtypes."""
        def func():
            x = tf.constant(
                [-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
                shape=[2, 5], dtype=dtype,
                name="x")
            y = tf.constant(
                [-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
                shape=[2, 5], dtype=dtype,
                name="y")
            z = tf.multiply(x, y, name="mul_test")
            x_init = np.asarray(
                [[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
                dtype=dtype.as_numpy_dtype, order="F")
            return gradients.compute_gradient(x, [2, 5], z, [2, 5], x_init_value=x_init)
        actual, expected = run_on_rpc_and_cpu(func)
        assertAllClose(actual, expected)

    @parameterized.expand([(tf.int8,), (tf.int16,), (tf.int32,), (tf.int64,)])
    def test_add_int(self, dtype):
        """Gradient of elementwise add for integer dtypes."""
        def func():
            x = tf.constant(
                [-9, -7, -5, -3, -1, 1, 3, 5, 7, 9],
                shape=[2, 5], dtype=dtype,
                name="x")
            y = tf.constant(
                [-9, -7, -5, -3, -1, 1, 3, 5, 7, 9],
                shape=[2, 5], dtype=dtype,
                name="y")
            z = tf.add(x, y, name="add_test")
            x_init = np.asarray(
                [[-9, -7, -5, -3, -1], [1, 3, 5, 7, 9]],
                dtype=dtype.as_numpy_dtype, order="F")
            return gradients.compute_gradient(
                x, [2, 5], z, [2, 5], x_init_value=x_init)
        actual, expected = run_on_rpc_and_cpu(func)
        assertAllClose(actual, expected)

    @parameterized.expand([(tf.float32,), (tf.float64,), (tf.complex64,), (tf.complex128,)])
    def test_add(self, dtype):
        """Gradient of elementwise add for float/complex dtypes."""
        def func():
            x = tf.constant(
                [-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
                shape=[2, 5], dtype=dtype,
                name="x")
            y = tf.constant(
                [-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
                shape=[2, 5], dtype=dtype,
                name="y")
            z = tf.add(x, y, name="add_test")
            x_init = np.asarray(
                [[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
                dtype=dtype.as_numpy_dtype, order="F")
            return gradients.compute_gradient(x, [2, 5], z, [2, 5], x_init_value=x_init)
        actual, expected = run_on_rpc_and_cpu(func)
        assertAllClose(actual, expected)

    @parameterized.expand([(tf.float32,), (tf.float64,), (tf.complex64,), (tf.complex128,)])
    def test_matmul(self, dtype):
        """Gradients w.r.t. both operands of a matrix multiplication."""
        def func():
            m1 = np.random.normal(size=(2, 5))
            m2 = np.random.normal(size=(5, 6))
            m3 = np.matmul(m1, m2)
            x = tf.constant(m1, dtype=dtype, name="x")
            y = tf.constant(m2, dtype=dtype, name="y")
            z = tf.matmul(x, y, name="matmul_test")
            dx = gradients.compute_gradient(x, m1.shape, z, m3.shape, x_init_value=m1)
            dy = gradients.compute_gradient(y, m2.shape, z, m3.shape, x_init_value=m2)
            return dx, dy
        actual, expected = run_on_rpc_and_cpu(func)
        assertAllClose(actual, expected)

    @parameterized.expand([(tf.float16,), (tf.float32,)])
    def test_conv2d(self, dtype):
        """Gradients w.r.t. the image and the filter of a SAME-padded conv2d."""
        def func():
            # NHWC image of shape (1, 4, 3, 1) and a 3x3 single-channel filter.
            mi = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
                          dtype=dtype.as_numpy_dtype).reshape((1, 4, 3, 1))
            mf = np.array([1, 4, 7, 2, 5, 8, 3, 6, 9],
                          dtype=dtype.as_numpy_dtype).reshape((3, 3, 1, 1))
            image = tf.constant(mi, dtype=dtype, name="image")
            filter = tf.constant(mf, dtype=dtype, name="filter")
            z = tf.nn.conv2d(image, filter, [1, 1, 1, 1], 'SAME')
            di = gradients.compute_gradient(image, mi.shape, z, mi.shape, x_init_value=mi)
            df = gradients.compute_gradient(filter, mf.shape, z, mi.shape, x_init_value=mf)
            return di, df
        actual, expected = run_on_rpc_and_cpu(func)
        assertAllClose(actual, expected)

    @parameterized.expand([(tf.float32,), (tf.float64,)])
    def test_grad_relu(self, dtype):
        """First-order gradient of relu."""
        def func():
            x = tf.constant([-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
                            shape=[2, 5], dtype=dtype, name="x")
            y = tf.nn.relu(x, name="relu")
            x_init = np.asarray([[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
                                dtype=dtype.as_numpy_dtype, order="F")
            return gradients.compute_gradient(x, [2, 5], y, [2, 5], x_init_value=x_init)
        actual, expected = run_on_rpc_and_cpu(func)
        assertAllClose(actual, expected)

    @parameterized.expand([(tf.float32,), (tf.float64,)])
    def test_grad_grad_relu(self, dtype):
        """Second-order gradient of relu (gradient of tf.gradients output)."""
        def func():
            x = tf.constant([-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
                            shape=[2, 5], dtype=dtype, name="x")
            y = tf.nn.relu(x, name="relu")
            z = tf.gradients(y, x)
            x_init = np.asarray([[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
                                dtype=dtype.as_numpy_dtype, order="F")
            return gradients.compute_gradient(x, [2, 5], z[0], [2, 5], x_init_value=x_init)
        actual, expected = run_on_rpc_and_cpu(func)
        assertAllClose(actual, expected)

    def test_grad_relu_scala(self):
        """One gradient-descent step on a scalar relu loss matches the CPU."""
        def func():
            x = tf.Variable(100.)
            y = tf.nn.relu(x)
            loss = y**2
            opt = tf.train.GradientDescentOptimizer(learning_rate=0.25)
            grads_and_vars = opt.compute_gradients(loss)
            train_op = opt.apply_gradients(grads_and_vars)

            sess = tf.get_default_session()
            sess.run(tf.global_variables_initializer())
            # Capture the variable before, the gradient, and the variable after
            # one optimizer step.
            xold = sess.run(x)
            g, _ = sess.run(grads_and_vars)[0]
            sess.run(train_op)
            xnew = sess.run(x)
            return xold, g, xnew
        actual, expected = run_on_rpc_and_cpu(func)
        assertAllClose(actual, expected)
if __name__ == '__main__':
    # Allow running this test module directly with the standard unittest runner.
    unittest.main()
|
""" openconfig_bfd
An OpenConfig model of Bi\-Directional Forwarding Detection (BFD)
configuration and operational state.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
# NOTE(review): ydk-generated binding for the openconfig-bfd YANG model —
# the numeric values are part of the model's wire contract; do not reorder
# or renumber by hand.
class BfdDiagnosticCode(Enum):
    """
    BfdDiagnosticCode (Enum Class)

    Diagnostic codes defined by BFD. These typically indicate the
    reason for a change of session state.

    .. data:: NO_DIAGNOSTIC = 0

        No diagnostic code was specified, or the session has not
        changed state.

    .. data:: DETECTION_TIMEOUT = 1

        The control detection time expired: no BFD packet was
        received within the required period.

    .. data:: ECHO_FAILED = 2

        The BFD echo function failed - echo packets have not been
        received for the required period of time.

    .. data:: FORWARDING_RESET = 3

        The forwarding plane in the local system was reset - such
        that the remote system cannot rely on the forwarding state of
        the device specifying this error code.

    .. data:: PATH_DOWN = 4

        Signalling outside of BFD specified that the path underlying
        this session has failed.

    .. data:: CONCATENATED_PATH_DOWN = 5

        When a BFD session runs over a series of path segments, this
        error code indicates that a subsequent path segment (i.e.,
        one in the transmit path between the source and destination
        of the session) has failed.

    .. data:: ADMIN_DOWN = 6

        The BFD session has been administratively disabled by the
        peer.

    .. data:: REVERSE_CONCATENATED_PATH_DOWN = 7

        In the case that a BFD session is running over a series of
        path segments, this error code indicates that a path segment
        on the reverse path (i.e., in the transmit direction from the
        destination to the source of the session) has failed.

    """

    NO_DIAGNOSTIC = Enum.YLeaf(0, "NO_DIAGNOSTIC")

    DETECTION_TIMEOUT = Enum.YLeaf(1, "DETECTION_TIMEOUT")

    ECHO_FAILED = Enum.YLeaf(2, "ECHO_FAILED")

    FORWARDING_RESET = Enum.YLeaf(3, "FORWARDING_RESET")

    PATH_DOWN = Enum.YLeaf(4, "PATH_DOWN")

    CONCATENATED_PATH_DOWN = Enum.YLeaf(5, "CONCATENATED_PATH_DOWN")

    ADMIN_DOWN = Enum.YLeaf(6, "ADMIN_DOWN")

    REVERSE_CONCATENATED_PATH_DOWN = Enum.YLeaf(7, "REVERSE_CONCATENATED_PATH_DOWN")
# NOTE(review): ydk-generated binding for the openconfig-bfd YANG model —
# the numeric values are part of the model's wire contract; do not reorder
# or renumber by hand.
class BfdSessionState(Enum):
    """
    BfdSessionState (Enum Class)

    The state of the BFD session according to the system referred
    to by the context of the leaf.

    .. data:: UP = 0

        The BFD session is perceived to be up by the system.

    .. data:: DOWN = 1

        The BFD session is perceived to be down by the system.

    .. data:: ADMIN_DOWN = 2

        The BFD session is administratively disabled.

    .. data:: INIT = 3

        The BFD session is perceived to be initialising by the
        system.

    """

    UP = Enum.YLeaf(0, "UP")

    DOWN = Enum.YLeaf(1, "DOWN")

    ADMIN_DOWN = Enum.YLeaf(2, "ADMIN_DOWN")

    INIT = Enum.YLeaf(3, "INIT")
class Bfd(_Entity_):
"""
Configuration and operational state parameters for BFD.
.. attribute:: interfaces
Interfaces on which BFD sessions are to be enabled
**type**\: :py:class:`Interfaces <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces>`
"""
_prefix = 'oc-bfd'
_revision = '2018-11-21'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Bfd, self).__init__()
self._top_entity = None
self.yang_name = "bfd"
self.yang_parent_name = "openconfig-bfd"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("interfaces", ("interfaces", Bfd.Interfaces))])
self._leafs = OrderedDict()
self.interfaces = Bfd.Interfaces()
self.interfaces.parent = self
self._children_name_map["interfaces"] = "interfaces"
self._segment_path = lambda: "openconfig-bfd:bfd"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Bfd, [], name, value)
class Interfaces(_Entity_):
"""
Interfaces on which BFD sessions are to be enabled.
.. attribute:: interface
Per\-interface configuration and state parameters for BFD
**type**\: list of :py:class:`Interface <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface>`
"""
_prefix = 'oc-bfd'
_revision = '2018-11-21'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Bfd.Interfaces, self).__init__()
self.yang_name = "interfaces"
self.yang_parent_name = "bfd"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("interface", ("interface", Bfd.Interfaces.Interface))])
self._leafs = OrderedDict()
self.interface = YList(self)
self._segment_path = lambda: "interfaces"
self._absolute_path = lambda: "openconfig-bfd:bfd/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Bfd.Interfaces, [], name, value)
class Interface(_Entity_):
"""
Per\-interface configuration and state parameters for BFD.
.. attribute:: id (key)
A reference to an identifier for the interface on which BFD is enabled
**type**\: str
**refers to**\: :py:class:`id <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.Config>`
.. attribute:: config
Configuration parameters for BFD on the specified interface
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.Config>`
.. attribute:: state
Operational state parameters for BFD on the specified interface
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.State>`
**config**\: False
.. attribute:: interface_ref
Reference to an interface or subinterface
**type**\: :py:class:`InterfaceRef <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.InterfaceRef>`
.. attribute:: micro_bfd_sessions
Parameters relating to micro\-BFD sessions associated with the interface
**type**\: :py:class:`MicroBfdSessions <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.MicroBfdSessions>`
.. attribute:: peers
Parameters relating to the BFD peers which are seen over this interface
**type**\: :py:class:`Peers <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.Peers>`
"""
_prefix = 'oc-bfd'
_revision = '2018-11-21'
def __init__(self):
    """Build one entry of the ``interface`` YANG list, keyed by ``id``."""
    # Python 2/3 compatible invocation of the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Bfd.Interfaces.Interface, self).__init__()
    self.yang_name = "interface"
    self.yang_parent_name = "interfaces"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['id']  # YANG list key
    # Map of YANG child name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("config", ("config", Bfd.Interfaces.Interface.Config)), ("state", ("state", Bfd.Interfaces.Interface.State)), ("interface-ref", ("interface_ref", Bfd.Interfaces.Interface.InterfaceRef)), ("micro-bfd-sessions", ("micro_bfd_sessions", Bfd.Interfaces.Interface.MicroBfdSessions)), ("peers", ("peers", Bfd.Interfaces.Interface.Peers))])
    # Leaf metadata: python name -> (YLeaf(YANG type, YANG name), [python types]).
    self._leafs = OrderedDict([
        ('id', (YLeaf(YType.str, 'id'), ['str'])),
    ])
    self.id = None
    # Instantiate and parent each child container.
    self.config = Bfd.Interfaces.Interface.Config()
    self.config.parent = self
    self._children_name_map["config"] = "config"
    self.state = Bfd.Interfaces.Interface.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self.interface_ref = Bfd.Interfaces.Interface.InterfaceRef()
    self.interface_ref.parent = self
    self._children_name_map["interface_ref"] = "interface-ref"
    self.micro_bfd_sessions = Bfd.Interfaces.Interface.MicroBfdSessions()
    self.micro_bfd_sessions.parent = self
    self._children_name_map["micro_bfd_sessions"] = "micro-bfd-sessions"
    self.peers = Bfd.Interfaces.Interface.Peers()
    self.peers.parent = self
    self._children_name_map["peers"] = "peers"
    # XPath segment carries the list-key predicate built from the current id.
    self._segment_path = lambda: "interface" + "[id='" + str(self.id) + "']"
    self._absolute_path = lambda: "openconfig-bfd:bfd/interfaces/%s" % self._segment_path()
    # NOTE: set last in every generated __init__ (YDK framework convention).
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate attribute writes to the YDK framework hook; 'id' is the
    # only leaf (and the list key) of this entity.
    self._perform_setattr(Bfd.Interfaces.Interface, ['id'], name, value)
class Config(_Entity_):
    """
    Configuration parameters for BFD on the specified
    interface.

    .. attribute:: id

        A unique identifier for the interface

        **type**\: str

    .. attribute:: enabled

        When this leaf is set to true then the BFD session is enabled on the specified interface \- if it is set to false, it is administratively disabled

        **type**\: bool

    .. attribute:: local_address

        The source IP address to be used for BFD sessions over this interface

        **type**\: union of str (an IPv4 dotted\-quad address) and str (an IPv6 address)

    .. attribute:: desired_minimum_tx_interval

        The minimum interval between transmission of BFD control packets that the operator desires. This value is advertised to the peer; the actual interval used is the maximum of desired\-minimum\-tx\-interval and the remote required\-minimum\-receive interval value

        **type**\: int

        **range:** 0..4294967295

        **units**\: microseconds

    .. attribute:: required_minimum_receive

        The minimum interval between received BFD control packets that this system should support. Advertised to the remote peer as the minimum acceptable inter\-packet interval

        **type**\: int

        **range:** 0..4294967295

        **units**\: microseconds

    .. attribute:: detection_multiplier

        The number of packets that must be missed to declare this session as down. The detection interval is the negotiated transmission interval multiplied by this value

        **type**\: int

        **range:** 1..65535

    .. attribute:: enable_per_member_link

        When this leaf is set to true \- BFD will be enabled on each member interface of the aggregated Ethernet bundle

        **type**\: bool

        **default value**\: false

    """

    _prefix = 'oc-bfd'
    _revision = '2018-11-21'

    def __init__(self):
        """Build the ``config`` container for a BFD interface entry."""
        # Python 2/3 compatible invocation of the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Bfd.Interfaces.Interface.Config, self).__init__()
        self.yang_name = "config"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        # Leaf metadata: python name -> (YLeaf(YANG type, YANG name), [python types]).
        # local_address lists ['str','str'] because it is a union of two string types.
        self._leafs = OrderedDict([
            ('id', (YLeaf(YType.str, 'id'), ['str'])),
            ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])),
            ('local_address', (YLeaf(YType.str, 'local-address'), ['str','str'])),
            ('desired_minimum_tx_interval', (YLeaf(YType.uint32, 'desired-minimum-tx-interval'), ['int'])),
            ('required_minimum_receive', (YLeaf(YType.uint32, 'required-minimum-receive'), ['int'])),
            ('detection_multiplier', (YLeaf(YType.uint16, 'detection-multiplier'), ['int'])),
            ('enable_per_member_link', (YLeaf(YType.boolean, 'enable-per-member-link'), ['bool'])),
        ])
        self.id = None
        self.enabled = None
        self.local_address = None
        self.desired_minimum_tx_interval = None
        self.required_minimum_receive = None
        self.detection_multiplier = None
        self.enable_per_member_link = None
        self._segment_path = lambda: "config"
        # NOTE: set last in every generated __init__ (YDK framework convention).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate attribute writes to the YDK framework hook, naming every leaf.
        self._perform_setattr(Bfd.Interfaces.Interface.Config, ['id', 'enabled', 'local_address', 'desired_minimum_tx_interval', 'required_minimum_receive', 'detection_multiplier', 'enable_per_member_link'], name, value)
class State(_Entity_):
    """
    Operational state parameters for BFD on the specified
    interface.

    All attributes mirror those of the sibling ``Config`` container and are
    **config**\: False (read\-only operational state).

    .. attribute:: id

        A unique identifier for the interface

        **type**\: str

    .. attribute:: enabled

        When this leaf is set to true then the BFD session is enabled on the specified interface \- if it is set to false, it is administratively disabled

        **type**\: bool

    .. attribute:: local_address

        The source IP address to be used for BFD sessions over this interface

        **type**\: union of str (an IPv4 dotted\-quad address) and str (an IPv6 address)

    .. attribute:: desired_minimum_tx_interval

        The minimum interval between transmission of BFD control packets that the operator desires. Advertised to the peer; the actual interval used is the maximum of this value and the remote required\-minimum\-receive interval

        **type**\: int

        **range:** 0..4294967295

        **units**\: microseconds

    .. attribute:: required_minimum_receive

        The minimum interval between received BFD control packets that this system should support, advertised to the remote peer

        **type**\: int

        **range:** 0..4294967295

        **units**\: microseconds

    .. attribute:: detection_multiplier

        The number of packets that must be missed to declare this session as down

        **type**\: int

        **range:** 1..65535

    .. attribute:: enable_per_member_link

        When this leaf is set to true \- BFD will be enabled on each member interface of the aggregated Ethernet bundle

        **type**\: bool

        **default value**\: false

    """

    _prefix = 'oc-bfd'
    _revision = '2018-11-21'

    def __init__(self):
        """Build the ``state`` container for a BFD interface entry."""
        # Python 2/3 compatible invocation of the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Bfd.Interfaces.Interface.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        # Leaf metadata; identical layout to the sibling Config container.
        self._leafs = OrderedDict([
            ('id', (YLeaf(YType.str, 'id'), ['str'])),
            ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])),
            ('local_address', (YLeaf(YType.str, 'local-address'), ['str','str'])),
            ('desired_minimum_tx_interval', (YLeaf(YType.uint32, 'desired-minimum-tx-interval'), ['int'])),
            ('required_minimum_receive', (YLeaf(YType.uint32, 'required-minimum-receive'), ['int'])),
            ('detection_multiplier', (YLeaf(YType.uint16, 'detection-multiplier'), ['int'])),
            ('enable_per_member_link', (YLeaf(YType.boolean, 'enable-per-member-link'), ['bool'])),
        ])
        self.id = None
        self.enabled = None
        self.local_address = None
        self.desired_minimum_tx_interval = None
        self.required_minimum_receive = None
        self.detection_multiplier = None
        self.enable_per_member_link = None
        self._segment_path = lambda: "state"
        # NOTE: set last in every generated __init__ (YDK framework convention).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate attribute writes to the YDK framework hook, naming every leaf.
        self._perform_setattr(Bfd.Interfaces.Interface.State, ['id', 'enabled', 'local_address', 'desired_minimum_tx_interval', 'required_minimum_receive', 'detection_multiplier', 'enable_per_member_link'], name, value)
class InterfaceRef(_Entity_):
    """
    Reference to an interface or subinterface

    .. attribute:: config

        Configured reference to interface / subinterface

        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.InterfaceRef.Config>`

    .. attribute:: state

        Operational state for interface\-ref

        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.InterfaceRef.State>`

        **config**\: False

    """

    _prefix = 'oc-bfd'
    _revision = '2018-11-21'

    def __init__(self):
        """Build the ``interface-ref`` container with its config/state children."""
        # Python 2/3 compatible invocation of the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Bfd.Interfaces.Interface.InterfaceRef, self).__init__()
        self.yang_name = "interface-ref"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Map of YANG child name -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("config", ("config", Bfd.Interfaces.Interface.InterfaceRef.Config)), ("state", ("state", Bfd.Interfaces.Interface.InterfaceRef.State))])
        self._leafs = OrderedDict()  # no leafs of its own
        self.config = Bfd.Interfaces.Interface.InterfaceRef.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self.state = Bfd.Interfaces.Interface.InterfaceRef.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._segment_path = lambda: "interface-ref"
        # NOTE: set last in every generated __init__ (YDK framework convention).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate attribute writes to the YDK framework hook; no leafs here.
        self._perform_setattr(Bfd.Interfaces.Interface.InterfaceRef, [], name, value)

    class Config(_Entity_):
        """
        Configured reference to interface / subinterface

        .. attribute:: interface

            Reference to a base interface. If a reference to a subinterface is required, this leaf must be specified to indicate the base interface

            **type**\: str

            **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`

        .. attribute:: subinterface

            Reference to a subinterface \-\- this requires the base interface to be specified using the interface leaf in this container. If only a reference to a base interface is required, this leaf should not be set

            **type**\: int

            **range:** 0..4294967295

            **refers to**\: :py:class:`index <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface>`

        """

        _prefix = 'oc-bfd'
        _revision = '2018-11-21'

        def __init__(self):
            """Build the interface-ref ``config`` container."""
            # Python 2/3 compatible invocation of the _Entity_ base initializer.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Bfd.Interfaces.Interface.InterfaceRef.Config, self).__init__()
            self.yang_name = "config"
            self.yang_parent_name = "interface-ref"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only container
            # Leaf metadata; 'subinterface' is a leafref, encoded by ydk-gen as
            # YType.str at the YANG layer with python type 'int'.
            self._leafs = OrderedDict([
                ('interface', (YLeaf(YType.str, 'interface'), ['str'])),
                ('subinterface', (YLeaf(YType.str, 'subinterface'), ['int'])),
            ])
            self.interface = None
            self.subinterface = None
            self._segment_path = lambda: "config"
            # NOTE: set last in every generated __init__ (YDK framework convention).
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Delegate attribute writes to the YDK framework hook, naming both leafs.
            self._perform_setattr(Bfd.Interfaces.Interface.InterfaceRef.Config, ['interface', 'subinterface'], name, value)

    class State(_Entity_):
        """
        Operational state for interface\-ref

        Attributes mirror the sibling ``Config`` container and are
        **config**\: False (read\-only operational state).

        .. attribute:: interface

            Reference to a base interface. If a reference to a subinterface is required, this leaf must be specified to indicate the base interface

            **type**\: str

            **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`

        .. attribute:: subinterface

            Reference to a subinterface \-\- this requires the base interface to be specified using the interface leaf in this container

            **type**\: int

            **range:** 0..4294967295

            **refers to**\: :py:class:`index <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface>`

        """

        _prefix = 'oc-bfd'
        _revision = '2018-11-21'

        def __init__(self):
            """Build the interface-ref ``state`` container."""
            # Python 2/3 compatible invocation of the _Entity_ base initializer.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Bfd.Interfaces.Interface.InterfaceRef.State, self).__init__()
            self.yang_name = "state"
            self.yang_parent_name = "interface-ref"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only container
            # Leaf metadata; identical layout to the sibling Config container.
            self._leafs = OrderedDict([
                ('interface', (YLeaf(YType.str, 'interface'), ['str'])),
                ('subinterface', (YLeaf(YType.str, 'subinterface'), ['int'])),
            ])
            self.interface = None
            self.subinterface = None
            self._segment_path = lambda: "state"
            # NOTE: set last in every generated __init__ (YDK framework convention).
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Delegate attribute writes to the YDK framework hook, naming both leafs.
            self._perform_setattr(Bfd.Interfaces.Interface.InterfaceRef.State, ['interface', 'subinterface'], name, value)
class MicroBfdSessions(_Entity_):
    """
    Parameters relating to micro\-BFD sessions associated
    with the interface.

    .. attribute:: micro_bfd_session

        This list contains configuration and state parameters relating to micro\-BFD session

        **type**\: list of :py:class:`MicroBfdSession <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession>`

    """

    _prefix = 'oc-bfd'
    _revision = '2018-11-21'

    def __init__(self):
        """Build the ``micro-bfd-sessions`` container and register its list."""
        # Python 2/3 compatible invocation of the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Bfd.Interfaces.Interface.MicroBfdSessions, self).__init__()
        self.yang_name = "micro-bfd-sessions"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child node: the keyed "micro-bfd-session" YANG list.
        self._child_classes = OrderedDict([("micro-bfd-session", ("micro_bfd_session", Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession))])
        self._leafs = OrderedDict()  # no leafs of its own
        self.micro_bfd_session = YList(self)
        self._segment_path = lambda: "micro-bfd-sessions"
        # NOTE: set last in every generated __init__ (YDK framework convention).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate attribute writes to the YDK framework hook; no leafs here.
        self._perform_setattr(Bfd.Interfaces.Interface.MicroBfdSessions, [], name, value)

    class MicroBfdSession(_Entity_):
        """
        This list contains configuration and state parameters
        relating to micro\-BFD session.

        .. attribute:: member_interface (key)

            A reference to the member interface of the link aggregate

            **type**\: str

            **refers to**\: :py:class:`member_interface <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.Config>`

        .. attribute:: config

            Configuration parameters for the micro\-BFD session

            **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.Config>`

        .. attribute:: state

            Operational state parameters for the micro\-BFD session

            **type**\: :py:class:`State <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.State>`

            **config**\: False

        """

        _prefix = 'oc-bfd'
        _revision = '2018-11-21'

        def __init__(self):
            """Build one ``micro-bfd-session`` list entry, keyed by ``member_interface``."""
            # Python 2/3 compatible invocation of the _Entity_ base initializer.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession, self).__init__()
            self.yang_name = "micro-bfd-session"
            self.yang_parent_name = "micro-bfd-sessions"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['member_interface']  # YANG list key
            # Map of YANG child name -> (python attribute name, binding class).
            self._child_classes = OrderedDict([("config", ("config", Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.Config)), ("state", ("state", Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.State))])
            # Leaf metadata: python name -> (YLeaf(YANG type, YANG name), [python types]).
            self._leafs = OrderedDict([
                ('member_interface', (YLeaf(YType.str, 'member-interface'), ['str'])),
            ])
            self.member_interface = None
            self.config = Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.Config()
            self.config.parent = self
            self._children_name_map["config"] = "config"
            self.state = Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.State()
            self.state.parent = self
            self._children_name_map["state"] = "state"
            # XPath segment carries the list-key predicate built from the current key.
            self._segment_path = lambda: "micro-bfd-session" + "[member-interface='" + str(self.member_interface) + "']"
            # NOTE: set last in every generated __init__ (YDK framework convention).
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Delegate attribute writes to the YDK framework hook, naming the key leaf.
            self._perform_setattr(Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession, ['member_interface'], name, value)

        class Config(_Entity_):
            """
            Configuration parameters for the micro\-BFD session.

            .. attribute:: local_address

                The local IP address used by the system for the micro\-BFD session specified

                **type**\: union of str (an IPv4 dotted\-quad address) and str (an IPv6 address)

            .. attribute:: remote_address

                The remote IP destination that should be used by the system for the micro\-BFD session specified

                **type**\: union of str (an IPv4 dotted\-quad address) and str (an IPv6 address)

            .. attribute:: member_interface

                Reference to a member link of the aggregate interface being described

                **type**\: str

                **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Config>`

            """

            _prefix = 'oc-bfd'
            _revision = '2018-11-21'

            def __init__(self):
                """Build the micro-BFD session ``config`` container."""
                # Python 2/3 compatible invocation of the _Entity_ base initializer.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.Config, self).__init__()
                self.yang_name = "config"
                self.yang_parent_name = "micro-bfd-session"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only container
                # Leaf metadata; address unions list ['str','str'] for their two members.
                self._leafs = OrderedDict([
                    ('local_address', (YLeaf(YType.str, 'local-address'), ['str','str'])),
                    ('remote_address', (YLeaf(YType.str, 'remote-address'), ['str','str'])),
                    ('member_interface', (YLeaf(YType.str, 'member-interface'), ['str'])),
                ])
                self.local_address = None
                self.remote_address = None
                self.member_interface = None
                self._segment_path = lambda: "config"
                # NOTE: set last in every generated __init__ (YDK framework convention).
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Delegate attribute writes to the YDK framework hook, naming every leaf.
                self._perform_setattr(Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.Config, ['local_address', 'remote_address', 'member_interface'], name, value)

        class State(_Entity_):
            """
            Operational state parameters for the micro\-BFD session.

            All attributes are **config**\: False (read\-only operational state).

            .. attribute:: local_address

                The local IP address used by the system for the micro\-BFD session specified

                **type**\: union of str (an IPv4 dotted\-quad address) and str (an IPv6 address)

            .. attribute:: remote_address

                The remote IP destination that should be used by the system for the micro\-BFD session specified

                **type**\: union of str (an IPv4 dotted\-quad address) and str (an IPv6 address)

            .. attribute:: member_interface

                Reference to a member link of the aggregate interface being described

                **type**\: str

                **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Config>`

            .. attribute:: session_state

                The state of the BFD session perceived by the local system

                **type**\: :py:class:`BfdSessionState <ydk.models.openconfig.openconfig_bfd.BfdSessionState>`

            .. attribute:: remote_session_state

                The reported state of the BFD session according to the remote system, as last reported in a BFD control packet

                **type**\: :py:class:`BfdSessionState <ydk.models.openconfig.openconfig_bfd.BfdSessionState>`

            .. attribute:: last_failure_time

                The time of the last transition of the BFD session out of the UP state, in nanoseconds since the Unix epoch

                **type**\: int

                **range:** 0..18446744073709551615

            .. attribute:: failure_transitions

                The number of times that the BFD session has transitioned out of the UP state

                **type**\: int

                **range:** 0..18446744073709551615

            .. attribute:: local_discriminator

                A unique identifier used by the local system to identify this BFD session

                **type**\: str

            .. attribute:: remote_discriminator

                A unique identifier used by the remote system to identify this BFD session

                **type**\: str

            .. attribute:: local_diagnostic_code

                The local BFD diagnostic code indicating the most recent reason for failure of this BFD session

                **type**\: :py:class:`BfdDiagnosticCode <ydk.models.openconfig.openconfig_bfd.BfdDiagnosticCode>`

            .. attribute:: remote_diagnostic_code

                The remote BFD diagnostic code indicating the remote system's reason for failure of the BFD session

                **type**\: :py:class:`BfdDiagnosticCode <ydk.models.openconfig.openconfig_bfd.BfdDiagnosticCode>`

            .. attribute:: remote_minimum_receive_interval

                The value of the minimum receive interval that was specified in the most recent BFD control packet received from the peer

                **type**\: int

                **range:** 0..4294967295

            .. attribute:: demand_mode_requested

                Set to true when the remote system has requested demand mode be run for this session

                **type**\: bool

            .. attribute:: remote_authentication_enabled

                Set to true when the remote system has specified that authentication is present for the BFD session

                **type**\: bool

            .. attribute:: remote_control_plane_independent

                Set to true when the remote system has specified that the hardware implementing this BFD session is independent of the control plane's liveliness

                **type**\: bool

            .. attribute:: async_

                Operational state parameters specifically relating to asynchronous mode of BFD

                **type**\: :py:class:`Async <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.State.Async>`

            """

            _prefix = 'oc-bfd'
            _revision = '2018-11-21'

            def __init__(self):
                """Build the micro-BFD session ``state`` container."""
                # Python 2/3 compatible invocation of the _Entity_ base initializer.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.State, self).__init__()
                self.yang_name = "state"
                self.yang_parent_name = "micro-bfd-session"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                # The YANG child "async" maps to python attribute "async_"
                # ('async' is a Python keyword since 3.7).
                self._child_classes = OrderedDict([("async", ("async_", Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.State.Async))])
                # Leaf metadata: python name -> (YLeaf(YANG type, YANG name), [python types]).
                self._leafs = OrderedDict([
                    ('local_address', (YLeaf(YType.str, 'local-address'), ['str','str'])),
                    ('remote_address', (YLeaf(YType.str, 'remote-address'), ['str','str'])),
                    ('member_interface', (YLeaf(YType.str, 'member-interface'), ['str'])),
                    ('session_state', (YLeaf(YType.enumeration, 'session-state'), [('ydk.models.openconfig.openconfig_bfd', 'BfdSessionState', '')])),
                    ('remote_session_state', (YLeaf(YType.enumeration, 'remote-session-state'), [('ydk.models.openconfig.openconfig_bfd', 'BfdSessionState', '')])),
                    ('last_failure_time', (YLeaf(YType.uint64, 'last-failure-time'), ['int'])),
                    ('failure_transitions', (YLeaf(YType.uint64, 'failure-transitions'), ['int'])),
                    ('local_discriminator', (YLeaf(YType.str, 'local-discriminator'), ['str'])),
                    ('remote_discriminator', (YLeaf(YType.str, 'remote-discriminator'), ['str'])),
                    ('local_diagnostic_code', (YLeaf(YType.enumeration, 'local-diagnostic-code'), [('ydk.models.openconfig.openconfig_bfd', 'BfdDiagnosticCode', '')])),
                    ('remote_diagnostic_code', (YLeaf(YType.enumeration, 'remote-diagnostic-code'), [('ydk.models.openconfig.openconfig_bfd', 'BfdDiagnosticCode', '')])),
                    ('remote_minimum_receive_interval', (YLeaf(YType.uint32, 'remote-minimum-receive-interval'), ['int'])),
                    ('demand_mode_requested', (YLeaf(YType.boolean, 'demand-mode-requested'), ['bool'])),
                    ('remote_authentication_enabled', (YLeaf(YType.boolean, 'remote-authentication-enabled'), ['bool'])),
                    ('remote_control_plane_independent', (YLeaf(YType.boolean, 'remote-control-plane-independent'), ['bool'])),
                ])
                self.local_address = None
                self.remote_address = None
                self.member_interface = None
                self.session_state = None
                self.remote_session_state = None
                self.last_failure_time = None
                self.failure_transitions = None
                self.local_discriminator = None
                self.remote_discriminator = None
                self.local_diagnostic_code = None
                self.remote_diagnostic_code = None
                self.remote_minimum_receive_interval = None
                self.demand_mode_requested = None
                self.remote_authentication_enabled = None
                self.remote_control_plane_independent = None
                self.async_ = Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.State.Async()
                self.async_.parent = self
                self._children_name_map["async_"] = "async"
                self._segment_path = lambda: "state"
                # NOTE: set last in every generated __init__ (YDK framework convention).
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Delegate attribute writes to the YDK framework hook, naming every leaf.
                self._perform_setattr(Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.State, ['local_address', 'remote_address', 'member_interface', 'session_state', 'remote_session_state', 'last_failure_time', 'failure_transitions', 'local_discriminator', 'remote_discriminator', 'local_diagnostic_code', 'remote_diagnostic_code', 'remote_minimum_receive_interval', 'demand_mode_requested', 'remote_authentication_enabled', 'remote_control_plane_independent'], name, value)

            class Async(_Entity_):
                """
                Operational state parameters specifically relating to
                asynchronous mode of BFD.

                All attributes are **config**\: False, **type**\: int,
                **range:** 0..18446744073709551615.

                .. attribute:: last_packet_transmitted

                    The date and time at which the last BFD packet was transmitted for this session, in nanoseconds since the Unix Epoch (January 1, 1970, 00\:00 UTC)

                .. attribute:: last_packet_received

                    The date and time at which the last BFD packet was received for this session, in nanoseconds since the Unix Epoch (January 1, 1970, 00\:00 UTC)

                .. attribute:: transmitted_packets

                    The number of packets that have been transmitted by the local system

                .. attribute:: received_packets

                    The number of packets that have been received by the local system from the remote neighbour

                .. attribute:: up_transitions

                    The number of times that the adjacency with the neighbor has transitioned into the up state

                """

                _prefix = 'oc-bfd'
                _revision = '2018-11-21'

                def __init__(self):
                    """Build the ``async`` operational-state container."""
                    # Python 2/3 compatible invocation of the _Entity_ base initializer.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.State.Async, self).__init__()
                    self.yang_name = "async"
                    self.yang_parent_name = "state"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])  # leaf-only container
                    # Leaf metadata: all five counters/timestamps are uint64.
                    self._leafs = OrderedDict([
                        ('last_packet_transmitted', (YLeaf(YType.uint64, 'last-packet-transmitted'), ['int'])),
                        ('last_packet_received', (YLeaf(YType.uint64, 'last-packet-received'), ['int'])),
                        ('transmitted_packets', (YLeaf(YType.uint64, 'transmitted-packets'), ['int'])),
                        ('received_packets', (YLeaf(YType.uint64, 'received-packets'), ['int'])),
                        ('up_transitions', (YLeaf(YType.uint64, 'up-transitions'), ['int'])),
                    ])
                    self.last_packet_transmitted = None
                    self.last_packet_received = None
                    self.transmitted_packets = None
                    self.received_packets = None
                    self.up_transitions = None
                    self._segment_path = lambda: "async"
                    # NOTE: set last in every generated __init__ (YDK framework convention).
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    # Delegate attribute writes to the YDK framework hook, naming every leaf.
                    self._perform_setattr(Bfd.Interfaces.Interface.MicroBfdSessions.MicroBfdSession.State.Async, ['last_packet_transmitted', 'last_packet_received', 'transmitted_packets', 'received_packets', 'up_transitions'], name, value)
class Peers(_Entity_):
"""
Parameters relating to the BFD peers which are seen
over this interface.
.. attribute:: peer
Parameters relating to the BFD peer specified by the remote address
**type**\: list of :py:class:`Peer <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.Peers.Peer>`
**config**\: False
"""
_prefix = 'oc-bfd'
_revision = '2018-11-21'
def __init__(self):
    """Build the ``peers`` container and register its ``peer`` list."""
    # Python 2/3 compatible invocation of the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Bfd.Interfaces.Interface.Peers, self).__init__()
    self.yang_name = "peers"
    self.yang_parent_name = "interface"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Single child node: the keyed "peer" YANG list.
    self._child_classes = OrderedDict([("peer", ("peer", Bfd.Interfaces.Interface.Peers.Peer))])
    self._leafs = OrderedDict()  # no leafs of its own
    self.peer = YList(self)
    self._segment_path = lambda: "peers"
    # NOTE: set last in every generated __init__ (YDK framework convention).
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate attribute writes to the YDK framework hook; no leafs here.
    self._perform_setattr(Bfd.Interfaces.Interface.Peers, [], name, value)
class Peer(_Entity_):
"""
Parameters relating to the BFD peer specified by the
remote address.
.. attribute:: local_discriminator (key)
The local discriminator, which is unique for the session on the system
**type**\: str
**refers to**\: :py:class:`local_discriminator <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.Peers.Peer.State>`
**config**\: False
.. attribute:: state
Operational state parameters for the BFD session
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.Peers.Peer.State>`
**config**\: False
"""
_prefix = 'oc-bfd'
_revision = '2018-11-21'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Bfd.Interfaces.Interface.Peers.Peer, self).__init__()
self.yang_name = "peer"
self.yang_parent_name = "peers"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['local_discriminator']
self._child_classes = OrderedDict([("state", ("state", Bfd.Interfaces.Interface.Peers.Peer.State))])
self._leafs = OrderedDict([
('local_discriminator', (YLeaf(YType.str, 'local-discriminator'), ['str'])),
])
self.local_discriminator = None
self.state = Bfd.Interfaces.Interface.Peers.Peer.State()
self.state.parent = self
self._children_name_map["state"] = "state"
self._segment_path = lambda: "peer" + "[local-discriminator='" + str(self.local_discriminator) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Bfd.Interfaces.Interface.Peers.Peer, ['local_discriminator'], name, value)
class State(_Entity_):
"""
Operational state parameters for the BFD session.
.. attribute:: local_address
The IP address used by the local system for this BFD session
**type**\: union of the below types:
**type**\: str
**pattern:** ^(([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])$
**type**\: str
**pattern:** ^(([0\-9a\-fA\-F]{1,4}\:){7}[0\-9a\-fA\-F]{1,4}\|([0\-9a\-fA\-F]{1,4}\:){1,7}\:\|([0\-9a\-fA\-F]{1,4}\:){1,6}\:[0\-9a\-fA\-F]{1,4}\|([0\-9a\-fA\-F]{1,4}\:){1,5}(\:[0\-9a\-fA\-F]{1,4}){1,2}\|([0\-9a\-fA\-F]{1,4}\:){1,4}(\:[0\-9a\-fA\-F]{1,4}){1,3}\|([0\-9a\-fA\-F]{1,4}\:){1,3}(\:[0\-9a\-fA\-F]{1,4}){1,4}\|([0\-9a\-fA\-F]{1,4}\:){1,2}(\:[0\-9a\-fA\-F]{1,4}){1,5}\|[0\-9a\-fA\-F]{1,4}\:((\:[0\-9a\-fA\-F]{1,4}){1,6})\|\:((\:[0\-9a\-fA\-F]{1,4}){1,7}\|\:))$
**config**\: False
.. attribute:: remote_address
The IP address used by the remote system for this BFD session
**type**\: union of the below types:
**type**\: str
**pattern:** ^(([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])$
**type**\: str
**pattern:** ^(([0\-9a\-fA\-F]{1,4}\:){7}[0\-9a\-fA\-F]{1,4}\|([0\-9a\-fA\-F]{1,4}\:){1,7}\:\|([0\-9a\-fA\-F]{1,4}\:){1,6}\:[0\-9a\-fA\-F]{1,4}\|([0\-9a\-fA\-F]{1,4}\:){1,5}(\:[0\-9a\-fA\-F]{1,4}){1,2}\|([0\-9a\-fA\-F]{1,4}\:){1,4}(\:[0\-9a\-fA\-F]{1,4}){1,3}\|([0\-9a\-fA\-F]{1,4}\:){1,3}(\:[0\-9a\-fA\-F]{1,4}){1,4}\|([0\-9a\-fA\-F]{1,4}\:){1,2}(\:[0\-9a\-fA\-F]{1,4}){1,5}\|[0\-9a\-fA\-F]{1,4}\:((\:[0\-9a\-fA\-F]{1,4}){1,6})\|\:((\:[0\-9a\-fA\-F]{1,4}){1,7}\|\:))$
**config**\: False
.. attribute:: subscribed_protocols
Indicates the set of protocols that currently use this BFD session for liveliness detection
**type**\: list of :py:class:`INSTALLPROTOCOLTYPE <ydk.models.openconfig.openconfig_policy_types.INSTALLPROTOCOLTYPE>`
**config**\: False
.. attribute:: session_state
The state of the BFD session perceived by the local system
**type**\: :py:class:`BfdSessionState <ydk.models.openconfig.openconfig_bfd.BfdSessionState>`
**config**\: False
.. attribute:: remote_session_state
The reported state of the BFD session according to the remote system. This state reflects the last state reported in a BFD control packet
**type**\: :py:class:`BfdSessionState <ydk.models.openconfig.openconfig_bfd.BfdSessionState>`
**config**\: False
.. attribute:: last_failure_time
The time of the last transition of the BFD session out of the UP state, expressed as the number of nanoseconds since the Unix epoch
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: failure_transitions
The number of times that the BFD session has transitioned out of the UP state
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: local_discriminator
A unique identifier used by the local system to identify this BFD session
**type**\: str
**config**\: False
.. attribute:: remote_discriminator
A unique identified used by the remote system to identify this BFD session
**type**\: str
**config**\: False
.. attribute:: local_diagnostic_code
The local BFD diagnostic code indicating the most recent reason for failure of this BFD session
**type**\: :py:class:`BfdDiagnosticCode <ydk.models.openconfig.openconfig_bfd.BfdDiagnosticCode>`
**config**\: False
.. attribute:: remote_diagnostic_code
The remote BFD diagnostic code indicating the remote system's reason for failure of the BFD session
**type**\: :py:class:`BfdDiagnosticCode <ydk.models.openconfig.openconfig_bfd.BfdDiagnosticCode>`
**config**\: False
.. attribute:: remote_minimum_receive_interval
The value of the minimum receive interval that was specified in the most recent BFD control packet received from the peer
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: demand_mode_requested
This leaf is set to true when the remote system has requested demand mode be run for this session
**type**\: bool
**config**\: False
.. attribute:: remote_authentication_enabled
This leaf is set to true when the remote system has specified that authentication is present for the BFD session
**type**\: bool
**config**\: False
.. attribute:: remote_control_plane_independent
This leaf is set to true when the remote system has specified that the hardware implementing this BFD session is independent of the control plane's liveliness
**type**\: bool
**config**\: False
.. attribute:: echo
Operational state parameters specifically relating to the echo mode of BFD
**type**\: :py:class:`Echo <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.Peers.Peer.State.Echo>`
**config**\: False
.. attribute:: async_
Operational state parameters specifically relating to asynchronous mode of BFD
**type**\: :py:class:`Async <ydk.models.openconfig.openconfig_bfd.Bfd.Interfaces.Interface.Peers.Peer.State.Async>`
**config**\: False
"""
_prefix = 'oc-bfd'
_revision = '2018-11-21'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Bfd.Interfaces.Interface.Peers.Peer.State, self).__init__()
self.yang_name = "state"
self.yang_parent_name = "peer"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("echo", ("echo", Bfd.Interfaces.Interface.Peers.Peer.State.Echo)), ("async", ("async_", Bfd.Interfaces.Interface.Peers.Peer.State.Async))])
self._leafs = OrderedDict([
('local_address', (YLeaf(YType.str, 'local-address'), ['str','str'])),
('remote_address', (YLeaf(YType.str, 'remote-address'), ['str','str'])),
('subscribed_protocols', (YLeafList(YType.identityref, 'subscribed-protocols'), [('ydk.models.openconfig.openconfig_policy_types', 'INSTALLPROTOCOLTYPE')])),
('session_state', (YLeaf(YType.enumeration, 'session-state'), [('ydk.models.openconfig.openconfig_bfd', 'BfdSessionState', '')])),
('remote_session_state', (YLeaf(YType.enumeration, 'remote-session-state'), [('ydk.models.openconfig.openconfig_bfd', 'BfdSessionState', '')])),
('last_failure_time', (YLeaf(YType.uint64, 'last-failure-time'), ['int'])),
('failure_transitions', (YLeaf(YType.uint64, 'failure-transitions'), ['int'])),
('local_discriminator', (YLeaf(YType.str, 'local-discriminator'), ['str'])),
('remote_discriminator', (YLeaf(YType.str, 'remote-discriminator'), ['str'])),
('local_diagnostic_code', (YLeaf(YType.enumeration, 'local-diagnostic-code'), [('ydk.models.openconfig.openconfig_bfd', 'BfdDiagnosticCode', '')])),
('remote_diagnostic_code', (YLeaf(YType.enumeration, 'remote-diagnostic-code'), [('ydk.models.openconfig.openconfig_bfd', 'BfdDiagnosticCode', '')])),
('remote_minimum_receive_interval', (YLeaf(YType.uint32, 'remote-minimum-receive-interval'), ['int'])),
('demand_mode_requested', (YLeaf(YType.boolean, 'demand-mode-requested'), ['bool'])),
('remote_authentication_enabled', (YLeaf(YType.boolean, 'remote-authentication-enabled'), ['bool'])),
('remote_control_plane_independent', (YLeaf(YType.boolean, 'remote-control-plane-independent'), ['bool'])),
])
self.local_address = None
self.remote_address = None
self.subscribed_protocols = []
self.session_state = None
self.remote_session_state = None
self.last_failure_time = None
self.failure_transitions = None
self.local_discriminator = None
self.remote_discriminator = None
self.local_diagnostic_code = None
self.remote_diagnostic_code = None
self.remote_minimum_receive_interval = None
self.demand_mode_requested = None
self.remote_authentication_enabled = None
self.remote_control_plane_independent = None
self.echo = Bfd.Interfaces.Interface.Peers.Peer.State.Echo()
self.echo.parent = self
self._children_name_map["echo"] = "echo"
self.async_ = Bfd.Interfaces.Interface.Peers.Peer.State.Async()
self.async_.parent = self
self._children_name_map["async_"] = "async"
self._segment_path = lambda: "state"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Bfd.Interfaces.Interface.Peers.Peer.State, ['local_address', 'remote_address', 'subscribed_protocols', 'session_state', 'remote_session_state', 'last_failure_time', 'failure_transitions', 'local_discriminator', 'remote_discriminator', 'local_diagnostic_code', 'remote_diagnostic_code', 'remote_minimum_receive_interval', 'demand_mode_requested', 'remote_authentication_enabled', 'remote_control_plane_independent'], name, value)
class Echo(_Entity_):
"""
Operational state parameters specifically relating to the
echo mode of BFD.
.. attribute:: active
This leaf is set to true when echo mode is running between the local and remote system. When it is set to false, solely asynchronous mode is active
**type**\: bool
**config**\: False
.. attribute:: last_packet_transmitted
The date and time at which the last BFD packet was transmitted for this session, expressed as the number of nanoseconds since the Unix Epoch (January 1, 1970, 00\:00 UTC)
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: last_packet_received
The date and time at which the last BFD packet was received for this session, expressed as the number of nanoseconds since the Unix Epoch (January 1, 1970, 00\:00 UTC)
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: transmitted_packets
The number of packets that have been transmitted by the local system
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: received_packets
The number of packets that have been received by the local system from the remote neighbour
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: up_transitions
The number of times that the adjacency with the neighbor has transitioned into the up state
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
"""
_prefix = 'oc-bfd'
_revision = '2018-11-21'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Bfd.Interfaces.Interface.Peers.Peer.State.Echo, self).__init__()
self.yang_name = "echo"
self.yang_parent_name = "state"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('active', (YLeaf(YType.boolean, 'active'), ['bool'])),
('last_packet_transmitted', (YLeaf(YType.uint64, 'last-packet-transmitted'), ['int'])),
('last_packet_received', (YLeaf(YType.uint64, 'last-packet-received'), ['int'])),
('transmitted_packets', (YLeaf(YType.uint64, 'transmitted-packets'), ['int'])),
('received_packets', (YLeaf(YType.uint64, 'received-packets'), ['int'])),
('up_transitions', (YLeaf(YType.uint64, 'up-transitions'), ['int'])),
])
self.active = None
self.last_packet_transmitted = None
self.last_packet_received = None
self.transmitted_packets = None
self.received_packets = None
self.up_transitions = None
self._segment_path = lambda: "echo"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Bfd.Interfaces.Interface.Peers.Peer.State.Echo, ['active', 'last_packet_transmitted', 'last_packet_received', 'transmitted_packets', 'received_packets', 'up_transitions'], name, value)
class Async(_Entity_):
"""
Operational state parameters specifically relating to
asynchronous mode of BFD.
.. attribute:: last_packet_transmitted
The date and time at which the last BFD packet was transmitted for this session, expressed as the number of nanoseconds since the Unix Epoch (January 1, 1970, 00\:00 UTC)
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: last_packet_received
The date and time at which the last BFD packet was received for this session, expressed as the number of nanoseconds since the Unix Epoch (January 1, 1970, 00\:00 UTC)
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: transmitted_packets
The number of packets that have been transmitted by the local system
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: received_packets
The number of packets that have been received by the local system from the remote neighbour
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: up_transitions
The number of times that the adjacency with the neighbor has transitioned into the up state
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
"""
_prefix = 'oc-bfd'
_revision = '2018-11-21'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Bfd.Interfaces.Interface.Peers.Peer.State.Async, self).__init__()
self.yang_name = "async"
self.yang_parent_name = "state"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('last_packet_transmitted', (YLeaf(YType.uint64, 'last-packet-transmitted'), ['int'])),
('last_packet_received', (YLeaf(YType.uint64, 'last-packet-received'), ['int'])),
('transmitted_packets', (YLeaf(YType.uint64, 'transmitted-packets'), ['int'])),
('received_packets', (YLeaf(YType.uint64, 'received-packets'), ['int'])),
('up_transitions', (YLeaf(YType.uint64, 'up-transitions'), ['int'])),
])
self.last_packet_transmitted = None
self.last_packet_received = None
self.transmitted_packets = None
self.received_packets = None
self.up_transitions = None
self._segment_path = lambda: "async"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Bfd.Interfaces.Interface.Peers.Peer.State.Async, ['last_packet_transmitted', 'last_packet_received', 'transmitted_packets', 'received_packets', 'up_transitions'], name, value)
def clone_ptr(self):
    # Create and cache a fresh top-level Bfd entity; YDK services use this
    # to obtain a clean copy of the model root.
    self._top_entity = Bfd()
    return self._top_entity
|
"""
Tests for the cooperator service calls.
"""
import json
from unittest import mock
from waterdata.services import sifta
# Canned SIFTA service payload (verbatim JSON) used by all tests below.
MOCK_RESPONSE = """
{"Site": "06864000", "Date": "6/19/2018", "Customers":[{"Name":"Kansas Water Office","URL":"http://www.kwo.org/","IconURL":"http://water.usgs.gov/customer/icons/6737.gif"},{"Name":"USGS - Cooperative Matching Funds","URL":"http://water.usgs.gov/coop/","IconURL":"http://water.usgs.gov/customer/icons/usgsIcon.gif"}]}
"""
# The cooperator list the service is expected to extract from that payload.
MOCK_CUSTOMER_LIST = json.loads(MOCK_RESPONSE)['Customers']
def test_sifta_response(config):
    """A globally-enabled lookup returns the customer list from the payload."""
    with mock.patch('waterdata.services.sifta.execute_get_request') as r_mock:
        sifta_response = mock.Mock(status_code=200, text=MOCK_RESPONSE)
        sifta_response.json.return_value = json.loads(MOCK_RESPONSE)
        r_mock.return_value = sifta_response
        config['COOPERATOR_LOOKUP_ENABLED'] = True

        assert sifta.get_cooperators('12345', 'district code ignored') == MOCK_CUSTOMER_LIST, 'Expected response'
def test_sifta_disabled(config):
    """No cooperators are returned when the lookup feature flag is off."""
    config['COOPERATOR_LOOKUP_ENABLED'] = False

    assert sifta.get_cooperators('12345', 'district code ignored') == [], 'Expected empty response'
def test_sifta_district_enabled(config):
    """The lookup runs when the site's district is in the enabled list."""
    with mock.patch('waterdata.services.sifta.execute_get_request') as r_mock:
        sifta_response = mock.Mock(status_code=200, text=MOCK_RESPONSE)
        sifta_response.json.return_value = json.loads(MOCK_RESPONSE)
        r_mock.return_value = sifta_response
        config['COOPERATOR_LOOKUP_ENABLED'] = ['10', '15']

        assert sifta.get_cooperators('12345', '10') == MOCK_CUSTOMER_LIST, 'Expected response'
def test_sifta_district_disabled(config):
    """The lookup is skipped for a district outside the enabled list."""
    with mock.patch('waterdata.services.sifta.execute_get_request') as r_mock:
        sifta_response = mock.Mock(status_code=200, text=MOCK_RESPONSE)
        sifta_response.json.return_value = json.loads(MOCK_RESPONSE)
        r_mock.return_value = sifta_response
        config['COOPERATOR_LOOKUP_ENABLED'] = ['10', '15']

        assert sifta.get_cooperators('12345', '20') == [], 'Expected empty response'
def test_sifta_handling_bad_status_code(config):
    """A non-200 response from SIFTA degrades to an empty cooperator list."""
    with mock.patch('waterdata.services.sifta.execute_get_request') as r_mock:
        response = mock.Mock()
        response.status_code = 500
        r_mock.return_value = response
        config['COOPERATOR_LOOKUP_ENABLED'] = ['10', '15']
        cooperators = sifta.get_cooperators('12345', '10')
        # Fix: the message previously read 'Expected response', contradicting
        # the assertion, which expects the empty-list fallback.
        assert cooperators == [], 'Expected empty response'
def test_unparsable_json(config):
    """A JSON decode failure from SIFTA degrades to an empty list."""
    with mock.patch('waterdata.services.sifta.execute_get_request') as r_mock:
        bad_json_response = mock.Mock(status_code=200)
        bad_json_response.json.side_effect = json.JSONDecodeError('mock message', '{"x", "A",}', 2)
        r_mock.return_value = bad_json_response
        config['COOPERATOR_LOOKUP_ENABLED'] = ['10', '15']

        assert sifta.get_cooperators('12345', '20') == [], 'Expected empty response'
|
import numpy
import cv2
from zipfile import ZipFile
from .EyetrackingUtilities import SaveNPY, ReadNPY
class VideoReader(object):
    """
    Base class that loads video rawFrames to an array
    """

    def __init__(self, videoFileName = None, other = None):
        """
        Constructor
        @param videoFileName: video file name
        @param other: use for copy constructing
        @type videoFileName: str?
        @type other: VideoReader?
        """
        self.fileName = videoFileName
        """
        @ivar: name of video file
        @type: str
        """

        self.fps = 0
        """
        @ivar: FPS of video
        @type: float
        """

        self.width = 0
        """
        @ivar: Width of video in pixels
        @type: int
        """

        self.height = 0
        """
        @ivar: height of video
        @type: int
        """

        self.duration = 0  # in seconds
        """
        @ivar: Duration of video in seconds
        @type: float
        """

        self.nFrames = 0
        """
        @ivar: Number of frames in video
        @type: int
        """

        self.rawFrames = None  # [t x w x h x 3] BGR video rawFrames
        """
        @ivar: Frames that have been read in
        @type: [time x w x h x 3] numpy.ndarray
        """

        self.frames = None  # [t x w x h] grayscale frames
        """
        @ivar: Grayscale frames
        @type: [time x w x h] numpy.ndarray
        """

        self.frame = None
        """
        @ivar: Current frame being processed
        @type: [w x h] numpy.ndarray
        """

        self.video = None  # object for reading files
        """
        @ivar: Object for reading video files
        @type: cv2.VideoCapture
        """

        self.isVidCap = None  # bool?
        """
        @ivar: Is this a VideoCap source from disk?
        @type: bool?
        """

        if (self.fileName):
            self.GetVideoInfo()
            self.LoadFrames()
            print('{}x{} (HxW) video at {} fps with {} frames'.format(self.height, self.width, self.fps, self.nFrames))
        elif (other is not None):
            self.InitFromOther(other)

    def InitFromOther(self, other):
        """
        A jank copy constructor
        @param other: Videoobject to init from
        @type other: VideoReader
        """
        if (other is not None):
            self.rawFrames = other.rawFrames
            self.fps = other.fps
            self.width = other.width
            self.height = other.height
            self.duration = other.duration
            self.nFrames = other.nFrames

    def LoadFrames(self):
        """
        Loads rawFrames to memory, either via cv2.VideoCapture or by piping
        the in-memory AVI buffer through ffmpeg.
        """
        if (self.isVidCap is None):
            self.GetVideoInfo()
        if self.isVidCap:
            frames = []
            success, frame = self.video.read()
            while success:
                frames.append(frame)
                success, frame = self.video.read()
            self.rawFrames = numpy.stack(frames)
            self.video.release()
        else:
            import subprocess

            bufferSize = self.width * self.height * 3
            command = ['ffmpeg',
                       '-y',
                       '-f', 'avi',
                       '-i', '-',
                       '-f', 'rawvideo',
                       '-pix_fmt', 'rgb24',
                       '-vcodec', 'rawvideo',
                       '-']
            pipe = subprocess.Popen(command, stdin = subprocess.PIPE, stdout = subprocess.PIPE, bufsize = bufferSize)
            rawFrameData = pipe.communicate(self.video)[0]
            # Fix: numpy.fromstring is deprecated/removed for binary data;
            # frombuffer is the zero-copy replacement.
            frames = numpy.frombuffer(rawFrameData, dtype = numpy.uint8)
            # NOTE(review): the (width, height) order here disagrees with the
            # "[t x w x h x 3]" convention only if width != height — confirm
            # against the recorded stream before relying on non-square video.
            self.rawFrames = frames.reshape(-1, self.width, self.height, 3)
            try:
                pipe.kill()
            except OSError:
                pass
            del pipe
            del self.video
        # self.rawFrames = self.rawFrames[:, :, :, 2] # only keep red channel since that's where the time info is and everything else is is b/w

    def GetVideoInfo(self):
        """
        Gets video info, from cv2 metadata when the file is a playable video
        on disk, otherwise from the custom gzipped-AVI header.
        """
        vc = cv2.VideoCapture(self.fileName)
        if (vc.isOpened()):
            self.video = vc
            self.width = int(self.video.get(cv2.CAP_PROP_FRAME_WIDTH))
            self.height = int(self.video.get(cv2.CAP_PROP_FRAME_HEIGHT))
            self.fps = self.video.get(cv2.CAP_PROP_FPS)
            self.nFrames = int(self.video.get(cv2.CAP_PROP_FRAME_COUNT))
            self.duration = self.nFrames / self.fps  # duration in seconds
            self.isVidCap = True
        else:  # not a video file on disk
            import cottoncandy

            self.isVidCap = False
            if self.fileName[:3] == 's3:':  # file is on s3; assumed to the gzipped
                fileName = self.fileName[5:]  # since s3 files being with 's3://'
                bucket = fileName.split('/')[0]
                cloud = cottoncandy.get_interface(bucket)
                zippedData = cloud.download_stream(fileName[(len(bucket) + 1):]).content
            else:  # zipped file on disk
                # Fix: the gzipped payload must be read as bytes; text mode
                # would decode (and corrupt) it. Also use a context manager
                # instead of shadowing the builtin `file`.
                with open(self.fileName, 'rb') as zippedFile:
                    zippedData = zippedFile.read()

            # Fix: accumulate bytes chunks in a list and join once — the old
            # str-typed `'' += chunk` both mixed str/bytes and was quadratic.
            chunks = []
            zipFile = cottoncandy.utils.GzipInputStream(zippedData, 20 * (2 ** 20))
            while True:
                chunk = zipFile.read(10 * (2 ** 20))
                if not chunk:
                    break
                chunks.append(chunk)
            self.video = b''.join(chunks)
            del zippedData, zipFile, chunks

            import struct

            # AVI main header: 14 little-endian int32 fields starting at byte 32.
            metadata = struct.unpack('i' * 14, self.video[32:(32 + 56)])
            self.width = metadata[8]
            self.height = metadata[9]
            self.nFrames = metadata[4]
            # metadata[0] is the per-frame period in microseconds (see the
            # duration computation below), so fps = 1e6 / period.  The old
            # expression int(1 / (metadata[0] * 1000000)) truncated to 0.
            self.fps = int(1000000 / metadata[0])
            self.duration = metadata[0] * metadata[4] / 1000000.0

    def WriteVideo(self, outFileName, frames = None):
        """
        Writes a video out to disk
        @param outFileName:
        @param frames:
        """
        # NOTE(review): not implemented — this currently only resolves the
        # default frames argument and writes nothing.
        if frames is None:
            frames = self.frames

    def Save(self, fileName = None, outFile = None):
        """
        Save out information
        @param fileName: name of file to save, must be not none if fileObject is None
        @param outFile: existing object to write to
        @type fileName: str?
        @type outFile: zipfile?
        """
        closeOnFinish = outFile is None  # we close the file only if this is the actual function that started the file
        if outFile is None:
            outFile = ZipFile(fileName, 'w')
        SaveNPY(numpy.array([self.fps,
                             self.width,
                             self.height,
                             self.duration,
                             self.nFrames]), outFile, 'VideoInformation.npy')
        if closeOnFinish:
            outFile.close()

    def Load(self, fileName = None, inFile = None):
        """
        Loads in information
        @param fileName: name of file to read, must not be none if infile is none
        @param inFile: existing object to read from
        @param fileName: str?
        @param inFile: zipfile?
        """
        closeOnFinish = inFile is None
        if inFile is None:
            inFile = ZipFile(fileName, 'r')
        try:
            info = ReadNPY(inFile, 'VideoInformation.npy')
            self.fps = info[0]
            self.width = int(info[1])
            self.height = int(info[2])
            self.duration = info[3]
            self.nFrames = int(info[4])
        except KeyError as e:
            print(e)
        if closeOnFinish:
            inFile.close()
import tensorflow as tf
import renderer
import helpers
def l1(x, y):
    """Mean absolute difference (L1 loss) between tensors x and y."""
    absolute_difference = tf.abs(x - y)
    return tf.reduce_mean(absolute_difference)
def SSIMLoss(x, y, scale):
    """One minus the mean multi-scale SSIM of x and y (scale = max value)."""
    mean_ssim = tf.reduce_mean(tf.image.ssim_multiscale(x, y, scale))
    return 1.0 - mean_ssim
# Small offsets added before tf.log in the losses below to keep log() away
# from zero.
epsilonL1 = 0.01
epsilonRender = 0.01
class Loss:
    """Builds the training loss graph (TF1 graph mode) for SVBRDF maps.

    Supports three loss types: "l1" on the log-mapped material maps,
    "render" comparing re-rendered images, and "mixed" combining both.
    """

    lossType = "render"
    batchSize = 8
    lossValue = 0
    materialLossValue = 0
    tile_size = 256
    material_size = 128
    lr = 0.00002
    beta1Adam = 0.5
    nbDiffuseRendering = 3
    nbSpecularRendering = 6
    includeDiffuse = False
    outputs = None
    targets = None
    surfaceArray = None
    outputsRenderings = None
    targetsRenderings = None
    trainOp = None

    def __init__(self, lossType, outputs, targets, tile_size, batchSize, lr, includeDiffuse, nbSpecularRendering, nbDiffuseRendering):
        self.lossType = lossType
        self.outputs = outputs
        self.targets = targets
        self.tile_size = tile_size
        self.batchSize = batchSize
        self.lr = lr
        self.includeDiffuse = includeDiffuse
        self.nbSpecularRendering = nbSpecularRendering
        self.nbDiffuseRendering = nbDiffuseRendering

    def __l1Loss(self, outputs, targets):
        """L1 over the four material maps; diffuse/specular compared in log space."""
        # outputs have shape [?, height, width, 12]
        # targets have shape [?, height, width, 12]
        outputsNormal = outputs[:, :, :, 0:3]
        outputsDiffuse = tf.log(epsilonL1 + helpers.deprocess(outputs[:, :, :, 3:6]))
        outputsRoughness = outputs[:, :, :, 6:9]
        outputsSpecular = tf.log(epsilonL1 + helpers.deprocess(outputs[:, :, :, 9:12]))
        targetsNormal = targets[:, :, :, 0:3]
        targetsDiffuse = tf.log(epsilonL1 + helpers.deprocess(targets[:, :, :, 3:6]))
        targetsRoughness = targets[:, :, :, 6:9]
        targetsSpecular = tf.log(epsilonL1 + helpers.deprocess(targets[:, :, :, 9:12]))
        return l1(outputsNormal, targetsNormal) + l1(outputsDiffuse, targetsDiffuse) + l1(outputsRoughness, targetsRoughness) + l1(outputsSpecular, targetsSpecular)

    def __generateRenderings(self, renderer, batchSize, targets, outputs, surfaceArray):
        """Render targets and outputs under matched diffuse + specular lightings."""
        diffuses = helpers.tf_generateDiffuseRendering(batchSize, self.nbDiffuseRendering, targets, outputs, renderer)
        speculars = helpers.tf_generateSpecularRendering(batchSize, self.nbSpecularRendering, surfaceArray, targets, outputs, renderer)
        targetsRendered = tf.concat([diffuses[0], speculars[0]], axis=1)
        outputsRendered = tf.concat([diffuses[1], speculars[1]], axis=1)
        return targetsRendered, outputsRendered

    def __renderLoss(self, tile_size, batchSize, targets, outputs):
        """Log-space L1 + MS-SSIM between renderings of targets and outputs."""
        surfaceArray = helpers.generateSurfaceArray(tile_size)
        rendererImpl = renderer.GGXRenderer(includeDiffuse=self.includeDiffuse)
        targetsRenderings, outputsRenderings = self.__generateRenderings(rendererImpl, batchSize, targets, outputs, surfaceArray)
        # Flatten the (batch, nbRenderings) leading dims into one for the metrics.
        reshapedTargetsRendering = tf.reshape(targetsRenderings, [-1, int(targetsRenderings.get_shape()[2]), int(targetsRenderings.get_shape()[3]), int(targetsRenderings.get_shape()[4])])
        reshapedOutputsRendering = tf.reshape(outputsRenderings, [-1, int(outputsRenderings.get_shape()[2]), int(outputsRenderings.get_shape()[3]), int(outputsRenderings.get_shape()[4])])
        currentLoss = l1(tf.log(reshapedTargetsRendering + epsilonRender), tf.log(reshapedOutputsRendering + epsilonRender))
        ssimLoss = SSIMLoss(tf.log(reshapedTargetsRendering + epsilonRender), tf.log(reshapedOutputsRendering + epsilonRender), 1.0)
        lossTotal = currentLoss + ssimLoss
        return lossTotal, targetsRenderings, outputsRenderings

    def __mixedLoss(self, tile_size, batchSize, targets, outputs, lossL1Factor, lossRenderFactor):
        """Weighted sum of the render loss and the L1 map loss.

        Fix: lossRenderFactor was previously accepted but ignored; it now
        scales the render term (the call site passes 1.0, so existing
        behavior is unchanged).
        """
        lossVal, targetsRenderings, outputsRenderings = self.__renderLoss(tile_size, batchSize, targets, outputs)
        return (lossRenderFactor * lossVal) + (lossL1Factor * self.__l1Loss(outputs, targets)), targetsRenderings, outputsRenderings

    def createLossGraph(self):
        """Instantiate the loss tensor (and renderings, when applicable)."""
        # Fix: these previously wrote self.targetRenderings (note missing 's'),
        # leaving the declared class attribute targetsRenderings always None.
        if self.lossType == "render":
            self.lossValue, self.targetsRenderings, self.outputsRenderings = self.__renderLoss(self.tile_size, self.batchSize, self.targets, self.outputs)
        elif self.lossType == "l1":
            self.lossValue = self.__l1Loss(self.outputs, self.targets)
        elif self.lossType == "mixed":
            self.lossValue, self.targetsRenderings, self.outputsRenderings = self.__mixedLoss(self.tile_size, self.batchSize, self.targets, self.outputs, 0.1, 1.0)
        else:
            raise ValueError('No such loss: ' + self.lossType)

    def createTrainVariablesGraph(self, reuse_bool=False):
        """Create the Adam train op, EMA-smoothed loss, and global step."""
        global_step = tf.train.get_or_create_global_step()
        tf.summary.scalar("lr", self.lr)
        with tf.name_scope("model_train"):
            with tf.variable_scope("model_train0", reuse=reuse_bool):
                gen_tvars = [var for var in tf.trainable_variables() if var.name.startswith("trainableModel/")]
                gen_optim = tf.train.AdamOptimizer(self.lr, self.beta1Adam)
                gen_grads_and_vars = gen_optim.compute_gradients(self.lossValue, var_list=gen_tvars)
                gen_train = gen_optim.apply_gradients(gen_grads_and_vars)

        # Report an exponentially smoothed loss rather than the raw batch loss.
        ema = tf.train.ExponentialMovingAverage(decay=0.99)
        update_losses = ema.apply([self.lossValue])
        self.lossValue = ema.average(self.lossValue)

        incr_global_step = tf.assign(global_step, global_step + 1)
        self.trainOp = tf.group(update_losses, incr_global_step, gen_train)
|
<gh_stars>1-10
import os
import pycuda.driver as cuda
import tensorrt as trt
import ctypes
import cv2
import numpy as np
from thanos.dataset import IPN
def GiB(val):
    """Return val * 2**30 — a gibi-multiplier used to size TensorRT workspaces."""
    return val << 30
class HostDeviceMem(object):
    """
    Simple helper class to store useful data of an engine's binding
    Attributes
    ----------
    host_mem: np.ndarray
        data stored in CPU
    device_mem: pycuda.driver.DeviceAllocation
        represent data pointer in GPU
    shape: tuple
    dtype: np dtype
    name: str
        name of the binding
    """

    def __init__(self, host_mem, device_mem, shape, dtype, name=""):
        self.host = host_mem
        self.device = device_mem
        self.shape = shape
        self.dtype = dtype
        self.name = name
        # Keep the raw device pointer; TensorRT consumes bindings as ints.
        self.binding = int(self.device)

    def __str__(self):
        return "Host:\n{}\nDevice:\n{}".format(self.host, self.device)

    def __repr__(self):
        return self.__str__()
def allocate_buffers(context, stream=None, sync_mode=True):
    """
    Read bindings' information in ExecutionContext, create pagelocked np.ndarray in CPU,
    allocate corresponding memory in GPU.
    Returns
    -------
    inputs: list[HostDeviceMem]
    outputs: list[HostDeviceMem]
    bindings: list[int]
        list of pointers in GPU for each bindings
    stream: pycuda.driver.Stream
        used for memory transfers between CPU-GPU
    """
    inputs, outputs, bindings = [], [], []
    # Async mode needs a CUDA stream for the transfers; create one on demand.
    if stream is None and not sync_mode:
        stream = cuda.Stream()

    engine = context.engine
    for binding in engine:
        binding_idx = engine.get_binding_index(binding)
        name = engine.get_binding_name(binding_idx)
        shape = context.get_binding_shape(binding_idx)
        size = trt.volume(shape) * engine.max_batch_size
        dtype = trt.nptype(engine.get_binding_dtype(binding))

        # Pagelocked host buffer plus a matching device allocation.
        host_mem = cuda.pagelocked_empty(size, dtype)
        device_mem = cuda.mem_alloc(host_mem.nbytes)
        bindings.append(int(device_mem))

        buffer_record = HostDeviceMem(host_mem, device_mem, shape, dtype, name)
        if engine.binding_is_input(binding):
            inputs.append(buffer_record)
        else:
            outputs.append(buffer_record)

    return inputs, outputs, bindings, stream
def execute_async(context, bindings, inputs, outputs, stream):
    """
    Execute an TensorRT engine.
    Parameters
    ----------
    context: tensorrt.IExecutionContext
    bindings: list[int]
        list of pointers in GPU for each bindings
    inputs: list[HostDeviceMem]
    outputs: list[HostDeviceMem]
    stream: pycuda.driver.Stream
        used for memory transfers between CPU-GPU
    Returns
    -------
    list : np.ndarray
        For each outputs of the engine
    """
    # Transfer input data to the GPU.
    # (Plain loops instead of the old side-effect-only list comprehensions,
    # which built and discarded a throwaway list.)
    for inp in inputs:
        cuda.memcpy_htod_async(inp.device, inp.host, stream)
    # Run inference.
    check = context.execute_async(bindings=bindings, stream_handle=stream.handle)
    assert check, "Kernel execution failed"
    # Transfer predictions back from the GPU.
    for out in outputs:
        cuda.memcpy_dtoh_async(out.host, out.device, stream)
    # Synchronize the stream
    stream.synchronize()
    # Return only the host outputs, reshaped to each binding's shape.
    for out in outputs:
        out.host = out.host.reshape(out.shape)
    return [out.host for out in outputs]
def execute_sync(context, bindings, inputs, outputs):
    """
    Execute an TensorRT engine.
    Parameters
    -----------
    context: tensorrt.IExecutionContext
    bindings: list[int]
        list of pointers in GPU for each bindings
    inputs: list[HostDeviceMem]
    outputs: list[HostDeviceMem]
    Returns
    -------
    list[np.ndarray] for each outputs of the engine
    """
    # Transfer input data to the GPU.
    # (Plain loops instead of the old side-effect-only list comprehensions.)
    for inp in inputs:
        cuda.memcpy_htod(inp.device, inp.host)
    # Run inference.
    check = context.execute_v2(bindings=bindings)
    assert check, "Kernel execution failed"
    # Transfer predictions back from the GPU.
    for out in outputs:
        cuda.memcpy_dtoh(out.host, out.device)
    # Return only the host outputs, reshaped to each binding's shape.
    for out in outputs:
        out.host = out.host.reshape(out.shape)
    return [out.host for out in outputs]
def draw_result_on_frame(frame:np.ndarray, gesture_id:int):
    """In-place draw the recognized gesture name on frame.

    Parameters
    ----------
    frame: np.ndarray
        dtype uint8, shape (H, W, 3); modified in place
    gesture_id: int
        gesture id; 0 means "no gesture" and nothing is drawn

    Returns
    -------
    np.ndarray
        The same frame object, for call chaining.
    """
    if gesture_id != 0:  # 0 = no gesture, leave the frame untouched
        cv2.putText(
            frame,
            f"{gesture_id} {IPN.ID_NAME_DICT[gesture_id]}", # text
            (10, int(frame.shape[0]*0.95)), # position near bottom-left
            cv2.FONT_HERSHEY_DUPLEX, # font
            1, # font scale
            (255, 0, 255) # font color
        )
    # BUGFIX: previously only the gesture_id == 0 branch returned the frame
    # (the drawing branch fell off the end and returned None); now both
    # paths return it so callers can rely on the return value.
    return frame
def draw_fps_on_frame(frame:np.ndarray, fps:int):
    """In-place draw the current frame rate on frame.

    Parameters
    ----------
    frame: np.ndarray
        dtype uint8, shape (H, W, 3); modified in place
    fps: int
        frames-per-second value to display
    """
    cv2.putText(
        frame,
        f"{fps} FPS", # text
        (10, int(frame.shape[0]*0.10)), # position near top-left
        cv2.FONT_HERSHEY_DUPLEX, # font
        1, # font scale
        (255, 0, 255) # font color
    )
from struct import pack, unpack
from binascii import hexlify
# ATT permission bits controlling access to an attribute's value.
GATT_PERMIT_READ = 0x01
GATT_PERMIT_WRITE = 0x02
GATT_PERMIT_AUTH_READ = 0x04   # read requires authentication
GATT_PERMIT_AUTH_WRITE = 0x08  # write requires authentication
# GATT characteristic property bits (first byte of a 0x2803 declaration value).
GATT_PROP_BCAST = 0x01
GATT_PROP_READ = 0x02
GATT_PROP_WRITE_NO_RSP = 0x04
GATT_PROP_WRITE = 0x08
GATT_PROP_NOTIFY = 0x10
GATT_PROP_INDICATE = 0x20
class GATT_Server:
    """ATT/GATT server: translates ATT protocol requests into lookups on an
    attribute database and formats the binary responses, truncating to the
    negotiated MTU.

    Each handler returns (True, payload) on success or (False, error_code)
    on failure; 0x0a is the ATT "Attribute Not Found" error code.

    NOTE(review): payloads are built by joining str/chr values, i.e.
    Python 2 byte-string semantics — a Python 3 port needs a bytes rework.
    """
    # NOTE(review): class-level list is shared across instances and appears
    # unused here — all lookups go through self.db.
    attributes = []
    mtu = 23  # ATT default MTU until a larger one is negotiated
    def __init__(self, db):
        self.db = db
    def set_mtu(self, mtu):
        # Record the negotiated MTU; every response below is truncated to it.
        self.mtu = mtu
    def read(self, handle):
        """ATT Read Request: return the value at handle, MTU-truncated."""
        value = self.db.read(handle)
        if value is None:
            return (False, 0x0a)
        return (True, value[:self.mtu])
    def read_by_type(self, start, end, uuid):
        """ATT Read By Type: (length byte, then handle+value pairs).

        All returned pairs must share one value length; stops at the first
        differing length or when the MTU would be exceeded.
        """
        resp = self.db.read_by_type(start, end, uuid)
        if len(resp) == 0:
            return (False, 0x0a)
        value_len = None
        total_len = 2
        response_body = []
        for r in resp:
            (handle, value) = r
            if value_len is not None and len(value) != value_len:
                break
            # TODO handle MTU larger than 256+4 (length is a single byte)
            value_len = min(len(value), self.mtu-4) # 4 = 2 + an extra 2 for the handle
            response_body.append(pack('<h', handle))
            response_body.append(value[:value_len])
            total_len += value_len+2
            if total_len >= self.mtu:
                break
        return (True, ''.join((chr(value_len+2), ''.join(response_body))))
    def find_information(self, start, end):
        """ATT Find Information: format byte, then handle+UUID pairs.

        Stops when the UUID size (16- vs 128-bit) changes, since one
        response can only carry a single format.
        """
        resp = self.db.find_information(start, end)
        if len(resp) == 0:
            return (False, 0x0a)
        response_body = []
        uuid_type = None
        total_len = 2
        for r in resp:
            (handle, uuid) = r
            if uuid_type is None:
                uuid_type = uuid.type
                # hack: we know that uuid_type is the value the spec expects
                response_body.append(chr(uuid_type))
            if uuid.type != uuid_type:
                break
            if total_len + 2 + len(uuid.packed) > self.mtu:
                break
            response_body.append(pack('<h', handle))
            response_body.append(uuid.packed)
            total_len += 2 + len(uuid.packed)
        return (True, ''.join(response_body))
    def find_by_type_value(self, start, end, uuid, value):
        """ATT Find By Type Value: pairs of (found handle, group end handle)."""
        resp = self.db.find_by_type_value(start, end, uuid, value)
        if len(resp) == 0:
            return (False, 0x0a)
        response_body = []
        total_len = 1
        for r in resp:
            (handle, end) = r
            if total_len + 4 > self.mtu:
                break
            response_body.append(pack('<h', handle))
            response_body.append(pack('<h', end))
            total_len += 4
        return (True, ''.join(response_body))
    def read_by_group_type(self, start, end, uuid):
        """ATT Read By Group Type: length byte, then
        (start handle, end handle, value) triples of uniform length."""
        resp = self.db.read_by_group_type(start, end, uuid)
        if len(resp) == 0:
            return (False, 0x0a)
        response_body = []
        total_len = 0
        value_len = None
        for r in resp:
            (start, end, value) = r
            if value_len is None:
                value_len = min(4 + len(value), self.mtu - 2)
                response_body.append(chr(value_len))
            this_len = min(4 + len(value), self.mtu - 2)
            if this_len != value_len or total_len + value_len > self.mtu:
                break
            response_body.append(pack('<h', start))
            response_body.append(pack('<h', end))
            response_body.append(value[:value_len-4])
            total_len += value_len
        return (True, ''.join(response_body))
class UUID:
    """Bluetooth UUID, 16-bit or 128-bit.

    Accepted constructor inputs:
      * another UUID          -- copy construction
      * int in [0, 0xFFFF]    -- 16-bit UUID
      * 4-char hex string     -- e.g. "2800" (16-bit)
      * 36-char dashed string -- canonical 128-bit form
      * 2 or 16 raw bytes     -- little-endian packed form

    NOTE(review): the hex-string branches use Python 2 only APIs
    (str.decode("hex"), str.translate(None, ...)).
    """
    TYPE_16 = 1
    TYPE_128 = 2
    uuid = None    # human-readable hex form
    packed = None  # little-endian wire form
    type = None    # TYPE_16 or TYPE_128
    def __init__(self, uuid):
        if isinstance(uuid, UUID):
            self.uuid = uuid.uuid
            self.packed = uuid.packed
            self.type = uuid.type
        # integer
        elif isinstance(uuid, int):
            # TODO 128 bit
            # BUGFIX: the upper bound was 65536 *inclusive*, one past the
            # largest 16-bit value; 65536 now falls through to the final
            # "Invalid UUID" check instead of crashing inside struct.pack.
            if 0 <= uuid <= 0xFFFF:
                self.uuid = '%04X' % uuid
                # BUGFIX: pack as unsigned ('<H'); the signed '<h' raised
                # struct.error for UUIDs >= 0x8000. Byte layout is unchanged
                # for all values that worked before.
                self.packed = pack('<H', uuid)
                self.type = UUID.TYPE_16
        elif len(uuid) == 4:
            self.uuid = uuid
            self.packed = uuid.decode("hex")[::-1]
            self.type = UUID.TYPE_16
        elif len(uuid) == 36:
            temp = uuid.translate(None, '-')
            if len(temp) == 32:
                self.uuid = uuid
                self.packed = temp.decode("hex")[::-1]
                self.type = UUID.TYPE_128
        # binary
        elif len(uuid) == 2:
            # BUGFIX: unpack as unsigned ('<H') so packed UUIDs >= 0x8000
            # don't format as negative numbers.
            self.uuid = '%04X' % unpack('<H', uuid)[0]
            self.packed = uuid
            self.type = UUID.TYPE_16
        elif len(uuid) == 16:
            r = uuid[::-1]
            self.uuid = '-'.join(map(lambda x: hexlify(x), (r[0:4], r[4:6], r[6:8], r[8:10], r[10:])))
            self.packed = uuid
            self.type = UUID.TYPE_128
        if self.uuid is None:
            raise Exception("Invalid UUID")
    def __eq__(self, other):
        # TODO expand 16 bit UUIDs
        return self.packed == other.packed
    def __repr__(self):
        return self.uuid
class GATT_Attribute:
    """A single entry in the attribute table: a typed, permissioned value.

    Handles are not stored here; an attribute's handle is implied by its
    1-based position in the Attribute_DB list that holds it.
    """
    uuid = None         # UUID: the attribute's type
    permissions = None  # bitwise OR of GATT_PERMIT_* flags
    handle = None       # NOTE(review): never assigned anywhere visible here
    value = None        # raw attribute value (byte string)
    def __init__(self, uuid, permissions, value):
        self.uuid = uuid
        self.permissions = permissions
        self.value = value
    def __repr__(self):
        # NOTE(review): str.encode('hex') is Python 2 only.
        return "%s: '%s'" % (self.uuid, ' '.join(x.encode('hex') for x in self.value))
class Attribute_DB:
    """In-memory GATT attribute table.

    Attribute handles are 1-based: handle h lives at self.attributes[h-1].
    The lookup helpers return lists of (handle, ...) tuples and treat an
    out-of-range handle as "no more attributes".
    """
    def __init__(self):
        # BUGFIX: 'attributes' used to be a class-level list, silently
        # shared by every Attribute_DB instance; make it per-instance.
        self.attributes = []
    def primary(self, uuid_str):
        """Append a primary-service declaration (type UUID 0x2800)."""
        uuid = UUID(uuid_str)
        attr = GATT_Attribute(UUID("2800"), GATT_PERMIT_READ, uuid.packed)
        self.attributes.append(attr)
    def characteristic(self, uuid_str, properties):
        """Append a characteristic declaration (type UUID 0x2803).

        The embedded value handle ('\\x00\\x00' placeholder) is patched in
        later by refresh_handles().
        """
        uuid = UUID(uuid_str)
        attr = GATT_Attribute(UUID("2803"), GATT_PERMIT_READ, ''.join((chr(properties), '\x00\x00', uuid.packed)))
        self.attributes.append(attr)
    def client_characteristic_configuration(self):
        """Append a CCC descriptor (UUID 0x2902), notifications disabled."""
        attr = GATT_Attribute(UUID("2902"), GATT_PERMIT_READ | GATT_PERMIT_WRITE, '\x00\x00')
        self.attributes.append(attr)
    def attribute(self, uuid_str, permissions, value):
        """Append an arbitrary attribute."""
        uuid = UUID(uuid_str)
        attr = GATT_Attribute(uuid, permissions, value)
        self.attributes.append(attr)
    # update handle in characteristic attributes
    def refresh_handles(self):
        """Rewrite every characteristic declaration so its embedded value
        handle points at the attribute that follows it.

        NOTE(review): relies on Python 2 byte-string semantics
        (attr.value[0] is a 1-char str, and str + str concatenation).
        """
        chr_uuid = UUID("2803")
        for i in range(0, len(self.attributes)):
            attr = self.attributes[i]
            if attr.uuid == chr_uuid:
                attr.value = attr.value[0] + pack('<h', i+2) + attr.value[3:]
    def __repr__(self):
        a = []
        for i in range(0, len(self.attributes)):
            a.append('%x - %s' % (i+1, self.attributes[i]))
        return '\n'.join(a)
    def read(self, handle):
        """Return the value at 1-based handle, or None if out of range."""
        # BUGFIX: handle 0 (or negative) used to hit Python's negative
        # indexing and silently return the *wrong* attribute.
        if handle < 1:
            return None
        try:
            return self.attributes[handle-1].value
        except IndexError:
            return None
    def read_by_type(self, start, end, uuid_str):
        """Return (handle, value) for attributes in [start, end] whose
        type matches uuid_str."""
        resp = []
        uuid = UUID(uuid_str)
        try:
            for i in range(start, end+1):
                attr = self.attributes[i-1]
                if attr.uuid == uuid:
                    resp.append((i, attr.value))
        except IndexError:  # ran past the end of the table
            pass
        return resp
    def find_information(self, start, end):
        """Return (handle, uuid) for every attribute in [start, end]."""
        resp = []
        # TODO check that start < end?
        try:
            for i in range(start, end+1):
                attr = self.attributes[i-1]
                resp.append((i, attr.uuid))
        except IndexError:
            pass
        return resp
    def find_by_type_value(self, start, end, uuid_str, value):
        """Return (handle, group_end_handle) for attributes matching both
        type and value; a group runs until the next attribute of the
        same type (or the end of the range)."""
        resp = []
        uuid = UUID(uuid_str)
        try:
            for i in range(start, end+1):
                attr = self.attributes[i-1]
                if attr.uuid == uuid and attr.value == value:
                    max_handle = i
                    try:
                        for j in range(i+1, end+1):
                            if self.attributes[j-1].uuid == uuid:
                                break
                            max_handle = j
                    except IndexError:
                        pass
                    resp.append((i, max_handle))
        except IndexError:
            pass
        return resp
    def read_by_group_type(self, start, end, uuid_str):
        """Return (start_handle, group_end_handle, value) triples for
        grouping attributes (e.g. primary services) matching uuid_str."""
        resp = []
        uuid = UUID(uuid_str)
        try:
            for i in range(start, end+1):
                attr = self.attributes[i-1]
                if attr.uuid == uuid:
                    max_handle = i
                    try:
                        for j in range(i+1, end+1):
                            if self.attributes[j-1].uuid == uuid:
                                break
                            max_handle = j
                    except IndexError:
                        pass
                    resp.append((i, max_handle, attr.value))
        except IndexError:
            pass
        return resp
|
#!/usr/bin/python3
# coding=utf-8
# Copyright 2020 getcarrier.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Processor: quality_gate
"""
from ruamel.yaml.comments import CommentedMap # pylint: disable=E0401
from dusty.tools import log
from dusty.models.module import DependentModuleModel
from dusty.models.processor import ProcessorModel
from dusty.constants import SEVERITIES
class Processor(DependentModuleModel, ProcessorModel):
    """ Process quality gate: count findings per severity, compare them
    against configured thresholds, and publish gate status + stats. """

    def __init__(self, context):
        """ Initialize processor instance """
        super().__init__()
        self.context = context
        self.config = \
            self.context.config["processing"][__name__.split(".")[-2]]

    def execute(self):
        """ Run the processor """
        log.info("Checking quality gate status")
        thresholds = self.config.get("thresholds", dict())
        # Count issues by severity, skipping findings flagged informational,
        # false-positive or excluded by earlier processors.
        results_by_severity = dict()
        for item in self.context.findings:
            if item.get_meta("information_finding", False) or \
                    item.get_meta("false_positive_finding", False) or \
                    item.get_meta("excluded_finding", False):
                continue
            severity = item.get_meta("severity", SEVERITIES[-1])
            if severity not in results_by_severity:
                results_by_severity[severity] = 0
            results_by_severity[severity] += 1
        # Prepare stats data
        stats_data = dict()
        # FIX: the overall status was redundantly (re)assigned on every
        # iteration of the loop below; set it once beforehand instead.
        stats_data["total"] = "OK"
        for severity in SEVERITIES:
            stats_data[severity] = {
                "findings": results_by_severity.get(severity, "-"),
                "threshold": thresholds.get(severity, "-"),
                "status": "OK"
            }
        # Check quality gate: any severity whose finding count exceeds its
        # configured threshold fails the whole gate.
        for severity in results_by_severity:
            if severity not in thresholds:
                continue
            #
            severity_results = results_by_severity[severity]
            policy_threshold = thresholds[severity]
            #
            if severity_results > policy_threshold:
                log.warning(
                    "Quality gate failed: %s -> %d > %d",
                    severity, severity_results, policy_threshold
                )
                self.context.set_meta("fail_quality_gate", True)
                stats_data[severity]["status"] = "FAIL"
                stats_data["total"] = "FAIL"
        # Prepare stats table for reporting.
        # NOTE: the format strings assume SEVERITIES has exactly 5 entries.
        stats = list()
        stats.append("============= Quality gate stats ============")
        stats.append("Severity : {:<9} {:<5} {:<7} {:<4} {:<4}".format(
            *SEVERITIES
        ))
        stats.append("Findings : {:<9} {:<5} {:<7} {:<4} {:<4}".format(
            *[stats_data[severity]["findings"] for severity in SEVERITIES]
        ))
        stats.append("Threshold : {:<9} {:<5} {:<7} {:<4} {:<4}".format(
            *[stats_data[severity]["threshold"] for severity in SEVERITIES]
        ))
        stats.append("Status : {:<9} {:<5} {:<7} {:<4} {:<4}".format(
            *[stats_data[severity]["status"] for severity in SEVERITIES]
        ))
        stats.append("============= Quality gate: {:<4} ============".format(stats_data["total"]))
        self.context.set_meta("quality_gate_stats", stats)

    @staticmethod
    def fill_config(data_obj):
        """ Make sample config """
        data_obj.insert(
            len(data_obj), "thresholds", CommentedMap(),
            comment="Quality gate thresholds by severity"
        )
        mapping_obj = data_obj["thresholds"]
        # Default thresholds, most severe first (same values/order as before).
        for key, default in (
                ("Critical", 3), ("High", 5), ("Medium", 7),
                ("Low", 9), ("Info", 11)):
            mapping_obj.insert(len(mapping_obj), key, default)

    @staticmethod
    def run_after():
        """ Return optional dependencies """
        return ["exclude_by_endpoint", "false_positive", "min_severity_filter"]

    @staticmethod
    def get_name():
        """ Module name """
        return "Quality gate"

    @staticmethod
    def get_description():
        """ Module description """
        return "Set and check quality gate for CI/CD process"
|
<filename>ipmi/console.py<gh_stars>1-10
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This represents the low layer message framing portion of IPMI
import fcntl
import os
import struct
import types
from ipmi.private import constants
from ipmi.private import session
class Console(object):
    """IPMI SOL class.
    This object represents an SOL channel, multiplexing SOL data with
    commands issued by ipmi.command.
    :param bmc: hostname or ip address of BMC
    :param userid: username to use to connect
    :param password: password to connect to the BMC
    :param iohandler: Either a function to call with bytes, a filehandle to
                      use for input and output, or a tuple of (input, output)
                      handles
    :param kg: optional parameter for BMCs configured to require it

    NOTE(review): Python 2 code (uses the `file` builtin and str payload
    buffers); a Python 3 port needs type checks and bytes handling reworked.
    """
    #TODO(jbjohnso): still need an exit and a data callin function
    def __init__(self, bmc, userid, password,
                 iohandler=None,
                 force=False, kg=None):
        if type(iohandler) == tuple:  # two file handles
            self.console_in = iohandler[0]
            self.console_out = iohandler[1]
        elif type(iohandler) == file:  # one full duplex file handle
            self.console_out = iohandler
            self.console_in = iohandler
        elif isinstance(iohandler, types.FunctionType):
            self.console_out = None
            self.console_in = None
            self.out_handler = iohandler
        else:
            raise(Exception('No IO handler provided'))
        if self.console_in is not None:
            # Make console input non-blocking so reads never stall the
            # session event loop.
            fcntl.fcntl(self.console_in.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)
        # SOL sequence-number / ack bookkeeping (4-bit sequence space).
        self.remseq = 0
        self.myseq = 0
        self.lastsize = 0
        self.sendbreak = 0
        self.ackedcount = 0
        self.ackedseq = 0
        self.retriedpayload = 0
        self.pendingoutput = ""   # bytes queued for the BMC, py2 str buffer
        self.awaitingack = False  # True while a sent packet is unacked
        self.force_session = force
        self.ipmi_session = session.Session(bmc=bmc,
                                            userid=userid,
                                            password=password,
                                            kg=kg,
                                            onlogon=self._got_session)
    def _got_session(self, response):
        """Private function to navigate SOL payload activation
        """
        if 'error' in response:
            self._print_data(response['error'])
            return
        #Send activate sol payload directive
        #netfn= 6 (application)
        #command = 0x48 (activate payload)
        #data = (1, sol payload type
        #        1, first instance
        #        0b11000000, -encrypt, authenticate,
        #                      disable serial/modem alerts, CTS fine
        #        0, 0, 0 reserved
        self.ipmi_session.raw_command(netfn=0x6, command=0x48,
                                      data=(1, 1, 192, 0, 0, 0),
                                      callback=self._payload_activated)
    def _payload_activated(self, response):
        """Check status of activate payload request
        """
        if 'error' in response:
            self._print_data(response['error'])
        #given that these are specific to the command,
        #it's probably best if one can grep the error
        #here instead of in constants
        sol_activate_codes = {
            0x81: 'SOL is disabled',
            0x82: 'Maximum SOL session count reached',
            0x83: 'Cannot activate payload with encryption',
            0x84: 'Cannot activate payload without encryption',
        }
        if response['code']:
            if response['code'] in constants.ipmi_completion_codes:
                self._print_data(
                    constants.ipmi_completion_codes[response['code']])
                return
            elif response['code'] == 0x80:
                if self.force_session and not self.retriedpayload:
                    # 0x80 = payload already active elsewhere: deactivate it
                    # (command 0x49) once, then retry activation via logon.
                    self.retriedpayload = 1
                    self.ipmi_session.raw_command(netfn=0x6, command=0x49,
                                                  data=(1, 1, 0, 0, 0, 0),
                                                  callback=self._got_session)
                    return
                else:
                    self._print_data('SOL Session active for another client\n')
                    return
            elif response['code'] in sol_activate_codes:
                self._print_data(sol_activate_codes[response['code']]+'\n')
                return
            else:
                self._print_data(
                    'SOL encountered Unrecognized error code %d\n' %
                    response['code'])
                return
        #data[0:3] is reserved except for the test mode, which we don't use
        data = response['data']
        self.maxoutcount = (data[5] << 8) + data[4]
        #BMC tells us this is the maximum allowed size
        #data[6:7] is the promise of how small packets are going to be, but we
        #don't have any reason to worry about it
        if (data[8] + (data[9] << 8)) != 623:
            raise Exception("TODO(jbjohnso): support atypical SOL port number")
        #ignore data[10:11] for now, the vlan detail, shouldn't matter to this
        #code anyway...
        self.ipmi_session.sol_handler = self._got_sol_payload
        if self.console_in is not None:
            self.ipmi_session.register_handle_callback(self.console_in,
                                                       self._got_cons_input)
    def _got_cons_input(self, handle):
        """Callback for handle events detected by ipmi session
        """
        self.pendingoutput += handle.read()
        if not self.awaitingack:
            self._sendpendingoutput()
    def _sendpendingoutput(self):
        """Wrap self.pendingoutput in a SOL packet and transmit it.

        Advances the 4-bit sequence number (0 is reserved, so wrap to 1)
        and marks the channel as awaiting an ack.
        """
        self.myseq += 1
        self.myseq &= 0xf
        if self.myseq == 0:
            self.myseq = 1
        payload = struct.pack("BBBB",
                              self.myseq,
                              self.ackedseq,
                              self.ackedseq,
                              self.sendbreak)
        payload += self.pendingoutput
        self.lasttextsize = len(self.pendingoutput)
        self.pendingoutput = ""
        self.awaitingack = True
        payload = struct.unpack("%dB" % len(payload), payload)
        self.lastpayload = payload
        self.ipmi_session.send_payload(payload, payload_type=1)
    def _print_data(self, data):
        """Convey received data back to caller in the format of their choice.
        Caller may elect to provide this class filehandle(s) or else give a
        callback function that this class will use to convey data back to
        caller.
        """
        if self.console_out is not None:
            self.console_out.write(data)
            self.console_out.flush()
        elif self.out_handler:  # callback style..
            self.out_handler(data)
    def _got_sol_payload(self, payload):
        """SOL payload callback
        """
        #TODO(jbjohnso) test cases to throw some likely scenarios at functions
        #for example, retry with new data, retry with no new data
        #retry with unexpected sequence number
        newseq = payload[0] & 0b1111
        ackseq = payload[1] & 0b1111
        ackcount = payload[2]
        nacked = payload[3] & 0b1000000
        poweredoff = payload[3] & 0b100000
        deactivated = payload[3] & 0b10000
        #for now, ignore overrun.  I assume partial NACK for this reason or for
        #no reason would be treated the same, new payload with partial data
        remdata = ""
        remdatalen = 0
        if newseq != 0:  # this packet at least has some data to send to us..
            if len(payload) > 4:
                remdatalen = len(payload[4:])  # store remote len before dupe
                #retry logic, we must ack *this* many even if it is
                #a retry packet with new partial data
                remdata = struct.pack("%dB" % remdatalen, *payload[4:])
            if newseq == self.remseq:  # it is a retry, but could have new data
                if remdatalen > self.lastsize:
                    remdata = remdata[4 + self.lastsize:]
                else:  # no new data...
                    remdata = ""
            else:  # TODO(jbjohnso) what if remote sequence number is wrong??
                self.remseq = newseq
            self.lastsize = remdatalen
            self._print_data(remdata)
            ackpayload = (0, self.remseq, remdatalen, 0)
            #Why not put pending data into the ack? because it's rare
            #and might be hard to decide what to do in the context of
            #retry situation
            self.ipmi_session.send_payload(ackpayload,
                                           payload_type=1, retry=False)
        if self.myseq != 0 and ackseq == self.myseq:  # the bmc has something
                                                      # to say about last xmit
            self.awaitingack = False
            if nacked > 0:  # the BMC was in some way unhappy
                if poweredoff:
                    self._print_data("Remote system is powered down\n")
                if deactivated:
                    self._print_data("Remote IPMI console disconnected\n")
                else:  # retry all or part of packet, but in a new form
                    # also add pending output for efficiency and ease
                    newtext = self.lastpayload[4 + ackcount:]
                    self.pendingoutput = newtext + self.pendingoutput
                    self._sendpendingoutput()
        elif self.awaitingack:  # session marked us as happy, but we are not
            #this does mean that we will occasionally retry a packet
            #sooner than retry suggests, but that's no big deal
            self.ipmi_session.send_payload(payload=self.lastpayload,
                                           payload_type=1)
    def main_loop(self):
        """Process all events until no more sessions exist.
        If a caller is a simple little utility, provide a function to
        eternally run the event loop.  More complicated usage would be expected
        to provide their own event loop behavior, though this could be used
        within the greenthread implementation of caller's choice if desired.
        """
        #wait_for_rsp promises to return a false value when no sessions are
        #alive anymore
        #TODO(jbjohnso): wait_for_rsp is not returning a true value for our own
        #session
        while (1):
            session.Session.wait_for_rsp(timeout=600)
|
from PIL import Image, ImageEnhance
import random
def GetPatches(im, patchSize, numPatches):
    """Cut numPatches tiles of size patchSize from im.

    The crop cursor scans left-to-right, then top-to-bottom, and wraps
    back to the origin when a tile would run off the image.

    Returns a list of RGBA-converted patches.
    """
    patches = []
    px, py = 0, 0
    for _ in range(numPatches):
        box = (px, py, px + patchSize[0], py + patchSize[1])
        patches.append(im.crop(box).convert("RGBA"))
        # Advance the scan cursor, wrapping at the right/bottom edges.
        px += patchSize[0]
        if px + patchSize[0] >= im.size[0]:
            px = 0
            py += patchSize[1]
            if py + patchSize[1] >= im.size[1]:
                px, py = 0, 0
    return patches
def GenMargin(patch, marginSize):
    """Return a copy of patch whose alpha channel fades linearly to zero
    within marginSize pixels of each edge; the input is left untouched."""
    result = patch.convert("RGBA") if patch.mode != "RGBA" else patch.copy()
    pixels = result.load()
    width, height = result.size
    for px in range(width):
        for py in range(height):
            r, g, b, a = pixels[px, py]
            # Scale alpha by the fractional distance to each nearby edge;
            # corners pick up both factors.
            if px < marginSize:
                a *= float(px) / marginSize
            if px > width - marginSize:
                a *= float(width - px) / marginSize
            if py < marginSize:
                a *= float(py) / marginSize
            if py > height - marginSize:
                a *= float(height - py) / marginSize
            pixels[px, py] = (int(round(r)), int(round(g)), int(round(b)), int(round(a)))
    return result
if __name__=="__main__":
print "Mountain"
im = Image.open("spiralgraphics/High Altitude.jpg")
patchSize = 120, 120
numPatches = 10
marginSize = 20
out = GetPatches(im, patchSize, numPatches)
for i, patch in enumerate(out):
patch.save("spiralgraphics-mountain-solid{0}.png".format(i))
#Generate transparent margins
for i, patch in enumerate(out):
pm = GenMargin(patch, marginSize)
pm.save("spiralgraphics-mountain-margin{0}.png".format(i))
print "Ice"
im = Image.open("spiralgraphics/Deep-Freeze.jpg")
numPatches = 10
out = GetPatches(im, patchSize, numPatches)
for i, patch in enumerate(out):
patch.save("spiralgraphics-ice-solid{0}.png".format(i))
#Generate transparent margins
for i, patch in enumerate(out):
pm = GenMargin(patch, marginSize)
pm.save("spiralgraphics-ice-margin{0}.png".format(i))
print "Sea"
im = Image.open("spiralgraphics/Port of Taganrog.jpg")
numPatches = 10
out = GetPatches(im, patchSize, numPatches)
for i, patch in enumerate(out):
patch.save("spiralgraphics-sea-solid{0}.png".format(i))
#Generate transparent margins
for i, patch in enumerate(out):
pm = GenMargin(patch, marginSize)
pm.save("spiralgraphics-sea-margin{0}.png".format(i))
print "Jungle"
im = Image.open("spiralgraphics/Amazonia.jpg")
im = im.crop((325, 0, im.size[0], im.size[1]))
numPatches = 10
out = GetPatches(im, patchSize, numPatches)
for i, patch in enumerate(out):
patch.save("spiralgraphics-jungle-solid{0}.png".format(i))
#Generate transparent margins
for i, patch in enumerate(out):
pm = GenMargin(patch, marginSize)
pm.save("spiralgraphics-jungle-margin{0}.png".format(i))
print "City"
im = Image.open("spiralgraphics/Urban Jungle.jpg")
im = im.crop((240, 0, 420, im.size[1]))
numPatches = 10
out = GetPatches(im, patchSize, numPatches)
for i, patch in enumerate(out):
patch.save("spiralgraphics-city-solid{0}.png".format(i))
#Generate transparent margins
for i, patch in enumerate(out):
pm = GenMargin(patch, marginSize)
pm.save("spiralgraphics-city-margin{0}.png".format(i))
print "Plain"
im = Image.open("spiralgraphics/Meadow Streams.jpg")
enhancer = ImageEnhance.Brightness(im)
im = enhancer.enhance(1.5)
im = im.crop((240, 0, im.size[0], im.size[1]))
numPatches = 10
out = GetPatches(im, patchSize, numPatches)
for i, patch in enumerate(out):
patch.save("spiralgraphics-plain-solid{0}.png".format(i))
#Generate transparent margins
for i, patch in enumerate(out):
pm = GenMargin(patch, marginSize)
pm.save("spiralgraphics-plain-margin{0}.png".format(i))
print "Desert"
im = Image.open("spiralgraphics/Sahara.jpg")
im = im.crop((240, 0, im.size[0], im.size[1]))
numPatches = 10
out = GetPatches(im, patchSize, numPatches)
for i, patch in enumerate(out):
patch.save("spiralgraphics-desert-solid{0}.png".format(i))
#Generate transparent margins
for i, patch in enumerate(out):
pm = GenMargin(patch, marginSize)
pm.save("spiralgraphics-desert-margin{0}.png".format(i))
|
<filename>whitepaper_extract.py
#Importing packages
import requests, bs4, re, json, os, copy
from collections import Counter
#Reusable Functions
def retrieve_links(url, href = re.compile('(?s).*'), string = re.compile('(?s).*')):
    """Fetch url and return the href of every <a> tag, with optional filters.
    ARGS:
        url: url of the site you want to retrieve links
        href: regex applied to the link target; matches everything by default
        string: regex applied to the text layered over the link (e.g. anchor
            text); matches everything by default
    """
    page = requests.get(url)
    parsed = bs4.BeautifulSoup(page.text, 'lxml')
    anchors = parsed.find_all('a', href = href, string = string)
    return [anchor.get('href') for anchor in anchors]
def retrieve_html(url):
    """Fetch url and print every <a> tag found on the page (debug helper).
    """
    parsed = bs4.BeautifulSoup(requests.get(url).text, 'lxml')
    for anchor in parsed.find_all('a'):
        print(anchor)
def store_dic(dic, name):
    """Serialize dic as a JSON file called name in the current directory.
    """
    serialized = json.dumps(dic)
    with open(name, 'w') as fh:
        fh.write(serialized)
def store_soup(soup, name):
    """Write soup's pretty-printed HTML to a text file called name in the
    current directory.
    """
    pretty = soup.prettify()
    with open(name, 'w') as fh:
        fh.write(pretty)
#Workflow
# One-shot scraping script: collect per-coin pages from
# allcryptowhitepapers.com, extract each coin's whitepaper download link,
# and prepare per-coin output directories.
#Obtain a list of individual links for each crypotcurrency from allcryptowhitepapers.com
url = 'https://www.allcryptowhitepapers.com/whitepaper-overview/'
temp_ls = retrieve_links(url)
#Our list has irrelevant links, find out where the relevant links are at
print(temp_ls[:20]) #check where the first relevant crypto paper starts
print(temp_ls[-20:]) #check where does the last relevant paper is at
#Find index of the bookends for the relevant links and filter them
first = 'https://www.allcryptowhitepapers.com/crypto-dictionary/'
last = 'https://www.allcryptowhitepapers.com/about-us/'
first_index = temp_ls.index(first)
last_index = temp_ls.index(last)
crypto_ls = temp_ls[first_index+1:last_index] #final crypto list
crypto_ls = list(set(crypto_ls))  # dedupe (order is not preserved)
assert len(crypto_ls) == 2826 #check if number of links tally up, accurate as of 7.8.2019
#Generate a list of all the names of cryptocurrencies listed on allcryptowhitepaper
crypto_names = []
res = requests.get(url)
soup = bs4.BeautifulSoup(res.text, 'lxml')
for name in soup.find_all("td", {"class":"column-1"}):
    crypto_names.append(name.text)
assert len(crypto_names) == len(crypto_ls)
#Combine both the name and link lists together into a single dictionary
link_name_dic = dict(zip(crypto_ls, crypto_names))
assert len(link_name_dic.values()) == 2817 #notice it is 2817 instead of 2828, i.e. there are 9 duplicates! Of note, bismuth is still duplicated in this dictionary!
store_dic(link_name_dic, 'link_name.json') #save this in case
#Find all links within the div class = entry-content tag to filter out redundant links, then search for the specific link with "whitepaper" in the .string
test_size = len(crypto_ls)
crypto_ls_test = crypto_ls[:test_size]
name_dl_dic = {}
problem_ls = []
for link in crypto_ls_test:
    req2 = requests.get(link)
    soup2 = bs4.BeautifulSoup(req2.text, 'lxml')
    name = link_name_dic[link]
    try:
        for div in soup2.find_all("div", {"class": "entry-content"}):
            for dl_link in div.find_all('a', string = re.compile("whitepaper", re.IGNORECASE)):
                name_dl_dic[name] = dl_link['href']
    # NOTE(review): broad except — any scraping error is recorded as a
    # missing link and the offending URL collected in problem_ls.
    except:
        name_dl_dic[name] = None
        problem_ls.append(link)
Success_rate = (test_size - len(problem_ls))/test_size * 100
print(Success_rate) #100%, no errors woohoo!
#Edit bitcoin key as it retrieved the comic link
name_dl_dic['Bitcoin'] = 'https://www.bitcoin.com/bitcoin.pdf'
#Count the number of cryptos that do not have a download link
counter = Counter(list(name_dl_dic.values()))
print(len(name_dl_dic)) #2112
print(counter['']) #66 links self referencing itself (i.e. point back to the allcryptowhitepaper site)
print(len(set(link_name_dic.values())- set(name_dl_dic.keys()))) #705 no links at all
#705 + 2112 = 2817 it tallies!
#Store the download dictionary into a json file
store_dic(name_dl_dic, 'crypto_dl.json')
#Remove the cryptocurrencies with the '' values
remove_ls = [key for key,value in name_dl_dic.items() if value == '']
for name in remove_ls:
    del name_dl_dic[name]
assert len(name_dl_dic) == 2112-66 #2046
#Store the final download dictionary into a json file
store_dic(name_dl_dic, 'crypto_dl_filtered.json')
#make a directory to store the files
# NOTE(review): '\w' below is a literal backslash-w (not an escape), so these
# paths are Windows-style.
current_dir = os.getcwd()
os.mkdir(current_dir + '\whitepapers')
for name in name_dl_dic.keys():
    name = name.replace('?', '-')
    name = name.replace('/', '-')
    try:
        os.mkdir(current_dir + '\whitepapers\\' + name)
    except Exception as e:
        print(name,e)
#One error was thrown, SafeCoin vs Safecoin, another duplicate!
del name_dl_dic['Safecoin'] #final is 2045
store_dic(name_dl_dic, 'crypto_final.json')
#Test out downloading a single link [WORK IN PROGRESS]
name = crypto_names[5]
test_url = name_dl_dic[name]
dl_req = requests.get(test_url)
content_type = dl_req.headers.get('content-type')
if 'application/pdf' in content_type:
    ext = '.pdf'
elif 'text/html' in content_type:
    ext = '.html'
else:
    ext = ''
    print('Unknown format')
#OLD CODES/IDEAS
#Extract names from the links in crypto_ls [NOT an ideal solution]
# def extract_names(link):
#     try:
#         m = re.search('(?<=.com/).*(?=-whitepaper)', link, re.IGNORECASE)
#         return m.group(0)
#     except AttributeError: #3 papers don't end with -whitepaper
#         m = re.search('(?<=.com/).*',link, re.IGNORECASE)
#         return m.group(0)
#     else:
#         print(link)
# crypto_names = [extract_names(link) for link in crypto_ls]
#Check whether each individual page has a .pdf, code takes too long to run so don't run it LOL
# no_pdf_ls = []
# for link in crypto_ls:
#     try:
#         res = requests.get(link)
#         soup = bs4.BeautifulSoup(res.text)
#         if not soup.find_all('a', href = re.compile('.pdf')):
#             no_pdf_ls.append(link)
#     except Exception as e:
#         print(e)
|
#!/usr/bin/env python
import re
import os
import socket
import urllib2
import xml.etree.ElementTree as ET
import logging
import time
from tvst import Tvst
def get_plex_access_token(config):
    """Read the Plex access token from the file configured at
    'plex_access_token_location'.

    :param config: ConfigParser-style object with a .get(section, option)
    :returns: stripped token string, or None when the token file is absent
    """
    token_path = config.get('plex-tvst-scrobbler', 'plex_access_token_location')
    if not os.path.exists(token_path):
        # Explicit None (the old code fell off the end and returned None
        # implicitly).
        return None
    # BUGFIX: use a context manager so the file handle is closed even if
    # read() raises.
    with open(token_path, 'r') as plexfp:
        return plexfp.read().strip()
def parse_line(log_line):
    ''' Match known TV-show playback log entries against input (log_line)
    :param log_line: plex media server log line
    :type log_line: string
    :returns: plex media server metadata id
    :rtype: integer (or None) '''
    logger = logging.getLogger(__name__)
    patterns = [
        re.compile('.*Updated play state for /library/metadata/([0-9]+).*')
    ]
    for pattern in patterns:
        match = pattern.match(log_line)
        if match is None:
            continue
        library_id = match.group(1)
        logger.info('Found played TV show and extracted library id \'{l_id}\' from plex log '.format(l_id=library_id))
        return library_id
def fetch_metadata(l_id, config, plex_access_token):
''' retrieves the metadata information from the Plex media Server api. '''
logger = logging.getLogger(__name__)
url = '{url}/library/metadata/{l_id}'.format(url=config.get('plex-tvst-scrobbler',
'mediaserver_url'), l_id=l_id)
logger.info('Fetching library metadata from {url}'.format(url=url))
headers = None
if plex_access_token:
headers = {'X-Plex-Token': plex_access_token}
# fail if request is greater than 2 seconds.
try:
request = urllib2.Request(url, None, headers)
metadata = urllib2.urlopen(request, timeout=2)
except urllib2.URLError, e:
logger.error('urllib2 error reading from {url} \'{error}\''.format(url=url,
error=e))
return False
except socket.timeout, e:
logger.error('Timeout reading from {url} \'{error}\''.format(url=url, error=e))
return False
tree = ET.fromstring(metadata.read())
video = tree.find('Video')
print video
if video is None:
logger.info('Ignoring played item library-id={l_id}, could not determine video library information.'.
format(l_id=l_id))
return False
if video.get('type') != 'episode':
logger.info('Ignoring played item library-id={l_id}, because it is not an episode.'.
format(l_id=l_id))
return False
# matching from the guid field, which should provide the agent TVDB result
episode = video.get('guid')
show_name = video.get('grandparentTitle')
regex = re.compile('com.plexapp.agents.thetvdb://([0-9]+)/([0-9]+)/([0-9]+)\?.*')
m = regex.match(episode)
if m:
episode_label = "{0} S{1}E{2}".format(show_name,
m.group(2).zfill(2),
m.group(3).zfill(2))
logger.info("Matched TV show {0}".format(episode_label))
else:
return False
return {
'show_id': m.group(1),
'season_number': m.group(2),
'number': m.group(3)
}
def monitor_log(config):
    """Tail the Plex media-server log forever and scrobble played episodes.

    Seeks to the end of the configured log file, polls for new lines, and
    for every newly played library item fetches its metadata and submits it
    to tvshowtime.com.  Returns only when the log file cannot be (re)opened.
    """
    logger = logging.getLogger(__name__)
    # st_mtime remembers the mtime we last reset for, so a quiet (but not
    # rotated) file does not trigger repeated reopens.
    st_mtime = False
    last_played = None
    plex_access_token = get_plex_access_token(config)
    try:
        f = open(config.get('plex-tvst-scrobbler', 'mediaserver_log_location'))
    except IOError:
        logger.error('Unable to read log-file {0}. Shutting down.'.format(config.get(
            'plex-tvst-scrobbler', 'mediaserver_log_location')))
        return
    # Start tailing from the current end of file.
    f.seek(0, 2)
    while True:
        time.sleep(.03)
        # reset our file handle in the event the log file was not written to
        # within the last 60 seconds. This is a very crude attempt to support
        # the log file i/o rotation detection cross-platform.
        if int(time.time()) - int(os.fstat(f.fileno()).st_mtime) >= 60:
            # Already reset for this mtime; nothing new happened since.
            if int(os.fstat(f.fileno()).st_mtime) == st_mtime: continue
            logger.debug('Possible log file rotation, resetting file handle (st_mtime={mtime})'.format(
                mtime=time.ctime(os.fstat(f.fileno()).st_mtime) ))
            f.close()
            try:
                f = open(config.get('plex-tvst-scrobbler', 'mediaserver_log_location'))
            except IOError:
                logger.error('Unable to read log-file {0}. Shutting down.'.format(config.get(
                    'plex-tvst-scrobbler', 'mediaserver_log_location')))
                return
            f.seek(0, 2)
            st_mtime = int(os.fstat(f.fileno()).st_mtime)
        line = f.readline()
        # read all new lines starting at the end. We attempt to match
        # based on a regex value. If we have a match, extract the media file
        # id and send it off to tvshowtime.com for scrobble.
        if line:
            played = parse_line(line)
            if not played: continue
            # when playing via a client, log lines are duplicated (seen via iOS)
            # this skips dupes. Note: will also miss songs that have been repeated
            if played == last_played:
                logger.warn('Dupe detection : {0}, not submitting'.format(last_played))
                continue
            metadata = fetch_metadata(played, config, plex_access_token)
            if not metadata: continue
            # submit to tvshowtime.com
            tvst = Tvst(config)
            a = tvst.scrobble(metadata['show_id'], metadata['season_number'],
                metadata['number'])
            # scrobble was not successful , FIXME: do something?
            # if not a:
            last_played = played
|
<reponame>timvink/flee
from flee import flee
from flee.datamanager import handle_refugee_data
from flee.datamanager import DataTable # DataTable.subtract_dates()
from flee import InputGeography
import numpy as np
import flee.postprocessing.analysis as a
import sys
def AddInitialRefugees(e, d, loc):
    """Seed ecosystem *e* with the initial refugee population of *loc*.

    The head count is read (interpolated) from the data table *d* at day 0,
    truncated to an int, and one agent is added per refugee.
    """
    initial_count = int(d.get_field(loc.name, 0, FullInterpolation=True))
    for _ in range(initial_count):
        e.addAgent(location=loc)
def date_to_sim_days(date):
    """Convert a calendar date to a simulation day count, with day 0 being
    the simulation start date 2010-01-01."""
    return DataTable.subtract_dates(date, "2010-01-01")
if __name__ == "__main__":
    # Default simulation length (days).  A numeric first argument overrides
    # it directly; any other argument is treated as a simsettings CSV whose
    # duration (if positive) wins.
    end_time = 50
    last_physical_day = 50
    if len(sys.argv) > 1:
        if (sys.argv[1]).isnumeric():
            end_time = int(sys.argv[1])
            last_physical_day = int(sys.argv[1])
        else:
            end_time = 100
            last_physical_day = 100
            duration = flee.SimulationSettings.ReadFromCSV(sys.argv[1])
            if duration > 0:
                end_time = duration
                last_physical_day = end_time
    e = flee.Ecosystem()
    ig = InputGeography.InputGeography()
    flee.SimulationSettings.FlareConflictInputFile = "test_data/test_input_csv/flare-out.csv"
    ig.ReadFlareConflictInputCSV(
        flee.SimulationSettings.FlareConflictInputFile)
    print(ig.conflicts)
    # Sanity checks on the parsed conflict table.
    assert ig.conflicts["C"][49] == 0
    assert ig.conflicts["C"][50] == 1
    assert ig.conflicts["A"][0] == 1
    assert ig.conflicts["C2"][94] == 0
    ig.ReadLocationsFromCSV("test_data/test_input_csv/locations.csv")
    ig.ReadLinksFromCSV("test_data/test_input_csv/routes.csv")
    ig.ReadClosuresFromCSV("test_data/test_input_csv/closures.csv")
    e, lm = ig.StoreInputGeographyInEcosystem(e)
    #print("Network data loaded")
    d = handle_refugee_data.RefugeeTable(
        csvformat="generic", data_directory="test_data/test_input_csv/refugee_data", start_date="2010-01-01", data_layout="data_layout.csv")
    output_header_string = "Day,"
    camp_locations = ["D", "E", "F"]
    # TODO: Add Camps from CSV based on their location type.
    for l in camp_locations:
        AddInitialRefugees(e, d, lm[l])
        output_header_string += "%s sim,%s data,%s error," % (
            lm[l].name, lm[l].name, lm[l].name)
    output_header_string += "Total error,refugees in camps (UNHCR),total refugees (simulation),raw UNHCR refugee count,refugees in camps (simulation),refugee_debt"
    print(output_header_string)
    # Set up a mechanism to incorporate temporary decreases in refugees
    refugee_debt = 0
    # raw (interpolated) data from TOTAL UNHCR refugee count only.
    refugees_raw = 0
    for t in range(0, end_time):
        ig.AddNewConflictZones(e, t)
        # Determine number of new refugees to insert into the system.
        # Fetch the daily difference once and reuse it (the old code queried
        # the table twice per time step).
        daily_diff = d.get_daily_difference(t, FullInterpolation=True)
        new_refs = daily_diff - refugee_debt
        refugees_raw += daily_diff
        if new_refs < 0:
            # More departures than arrivals: carry the deficit forward as debt.
            refugee_debt = -new_refs
            new_refs = 0
        elif refugee_debt > 0:
            refugee_debt = 0
        # Insert refugee agents
        e.add_agents_to_conflict_zones(new_refs)
        e.refresh_conflict_weights()
        e.enact_border_closures(t)
        e.evolve()
        # Calculation of error terms
        errors = []
        abs_errors = []
        camps = [lm[i] for i in camp_locations]
        loc_data = [d.get_field(i, t) for i in camp_locations]
        refugees_in_camps_sim = sum(c.numAgents for c in camps)
        # Per-camp relative and absolute errors vs the UNHCR data.
        for j, i in enumerate(camp_locations):
            errors += [a.rel_error(lm[i].numAgents, loc_data[j])]
            abs_errors += [a.abs_error(lm[i].numAgents, loc_data[j])]
        output = "%s" % t
        for i in range(0, len(errors)):
            output += ",%s,%s,%s" % (lm[camp_locations[i]
                                        ].numAgents, loc_data[i], errors[i])
        if refugees_raw > 0:
            output += ",%s,%s,%s,%s,%s,%s" % (float(np.sum(abs_errors)) / float(refugees_raw), int(
                sum(loc_data)), e.numAgents(), refugees_raw, refugees_in_camps_sim, refugee_debt)
        else:
            # Six placeholder columns, matching the six summary fields in the
            # header (the previous version emitted seven zeros and misaligned
            # the CSV whenever refugees_raw was still 0).
            output += ",0,0,0,0,0,0"
        print(output)
    print('Test successfully completed.')
|
<reponame>fperignon/sandbox<gh_stars>100-1000
"""Simulation of a slider-crank system.
Check examples manual for details.
"""
import numpy as np
import array
import siconos.mechanisms.mbtb as mbtb
WITH_CLEARANCE_ON_RODE = 1
"""if true, add clearance between rodes 1 and 2."""
# -- Slider-crank system parameters --
# NOTE(review): values appear to be SI units (metres, rad/s) — confirm
# against the examples manual referenced in the module docstring.
l1 = 0.153
"""Crank length"""
l2 = 0.306
"""Connecting rod length"""
a = 0.05
"""half-length of the slider"""
b = 0.025
"""half-height of the slider"""
c = 0.001
"""clearance between slider and guide"""
w10 = -150.
"""initial angular speed for the crank"""
w20 = 75.
"""initial angular speed for the connecting rod"""
w30 = 0.
"""initial angular speed for the slider"""
# -- Bodies ------------------------------------------------------------------
NBBODIES = 3
"""Number of bodies """
BATIE = -1
"""Identifier of the world, an object attached to the referential frame."""
PART1 = 0
"""First body id"""
PART2 = 1
"""2nd body id """
PISTON = 2
"""Third body id"""
body = np.array(['part1', 'part2', 'slider'])
""" Bodies names"""
# Each row: position (x, y, z) followed by an orientation quaternion.
initPos = np.array([(0, 0, 0, 0, 0, 1, 0),
                    (l1, 0.0, 0.0, 0, 0, 1, 0),
                    (l1 + l2 - a, 0, 0, 0, 1, 0, 0)])
"""initial positions of the bodies """
initVel = np.array([(0, 0, -0.5 * w10 * l1, 0, w10, 0),
                    (0, 0, -0.5 * w10 * l1, 0, w20, 0),
                    (0, 0, 0, 0, w30, 0)])
"""initial velocities of the bodies """
initCenterMass = np.array([(0.5 * l1, 0.0, 0.0),
                           (0.5 * l2, 0.0, 0.0),
                           (a, 0., 0.)])
"""initial positions of the centers of mass of the bodies
positions, ie with the position (0,0,0,1,0,0,0)"""
m = array.array('d', [0.038, 0.038, 0.076])
"""masses of the bodies"""
inertialMatrix = np.array([((1, 0, 0), (0, 7.4e-5, 0), (0, 0, 1)),
                           ((1, 0, 0), (0, 5.9e-4, 0), (0, 0, 1)),
                           ((1, 0, 0), (0, 2.7e-6, 0), (0, 0, 1))])
"""inertia matrix"""
afile = ['./CAD/body1.step',
         './CAD/body2.step',
         './CAD/Slider.step']
"""CAD files """
plugin = "SliderCrankPlugin.so"
"""plugins library name"""
fctfext = np.array(['externalForcesB1', 'externalForcesB2', 'externalForcesS'])
"""external forces"""
# Optional plugin hooks, disabled in this example:
# REQUIRED the external momentums.
#fctmext = np.array(['','',''])
# REQUIRED the internal forces.
#fctfint = np.array(['internalForcesB1','',''])
# REQUIRED the internal momentums.
#fctmint = np.array(['internalMomentsB1','',''])
# REQUIRED the internal forces.
#fctfintjacq = np.array(['internalForcesB1_Jacq','',''])
# REQUIRED the internal momentums.
#fctmintjacq = np.array(['internalMomentsB1_Jacq','',''])
# REQUIRED the internal forces.
#fctfintjacv = np.array(['','',''])
# REQUIRED the internal momentums.
#fctmintjacv = np.array(['','',''])
boundaryCondition = np.array(['prescribedvelocityB1', '', ''])
"""a boundary condition (given by the plugin)
is enforced for the first body"""
boundaryConditionIndex = np.array([np.array([4]),
                                   np.array([]),
                                   np.array([])])
"""we prescribe the 4th component of the velocity of the first body
i.e we prescribe an angular velocity around the y-axis."""
# --- JOINTS DESCRIPTION ---
NBJOINTS = 3
"""number of joints"""
if WITH_CLEARANCE_ON_RODE:
    # With clearance, the third joint ('Part1_2') is dropped; the rod/rod
    # connection is then handled by the third contact below instead.
    NBJOINTS = 2
else:
    NBJOINTS = 3
jointName = np.array(['Part1_0',
                      'Part2_Piston',
                      'Part1_2'])
"""joints' names"""
jointType = array.array('I', [mbtb.PIVOT_0,
                              mbtb.PIVOT_1,
                              mbtb.PIVOT_1])
"""joints' types"""
jointBody1 = array.array('I', [PART1,
                               PART2,
                               PART1])
"""index of the first body involved in the joint."""
jointBody2 = array.array('I', [0,
                               PISTON,
                               PART2])
"""index of the second body involved in the joint."""
jointPos = np.array([(0, 1, 0, 0.0, 0, 0.),
                     (0, 1, 0, l2, 0., 0.),
                     (0, 1, 0, l1, 0., 0)])
"""joints' positions"""
#--- CONTACTS DESCRIPTION ---
NBCONTACTS = 3
"""number of contacts"""
if WITH_CLEARANCE_ON_RODE:
    NBCONTACTS = 3
else:
    NBCONTACTS = 2
contactName = np.array([
    'contact_h',
    'contact_b',
    'contact_boby1_2'])
"""contacts' names"""
afileContact1 = [
    './CAD/contact_b_cyl.step',
    './CAD/contact_h_cyl.step',
    './CAD/RingBody1.stp']
"""CAD files attached to the first body involved in the contact"""
afileContact2 = [
    './CAD/chamber.step',
    './CAD/chamber.step',
    './CAD/AxisBody2.stp']
"""CAD files attached to the second body involved in the contact"""
contactBody1 = array.array('I', [
    PISTON,
    PISTON,
    PART1])
"""identifier of the first body involved in the contact."""
# Signed typecode 'i' here: BATIE is -1 (the world frame).
contactBody2 = array.array('i', [
    BATIE,
    BATIE,
    PART2])
"""identifier of the second body involved in the contact"""
contactOffset = array.array('d', [
    0.024,
    0.024,
    0.006])
"""the artificial offset sub to the geometrical computation."""
contactOffsetP1 = array.array('I', [
    0,
    0,
    0])
"""defining if the offset is applied to the first surface.
Useful to place the contact point."""
contactNormalFromFace1 = array.array('I', [
    0,
    0,
    0])
"""defining if the normal is computed from the first surface."""
contactType3D = array.array('I', [
    1,
    1,
    1])
"""for each contact, 1 if 3D friction, 0 if perfect unilateral constraints."""
contactmu = array.array('d', [
    0.01,
    0.01,
    0.01])
"""friction coeff, only useful in the case of friction."""
contacten = array.array('d', [
    0.4,
    0.4,
    0.0])
"""restitution coeff, for each contact"""
|
import traceback
import importlib
import pkgutil
from PyQt5.QtWidgets import QWidget, QVBoxLayout, QHBoxLayout, QFrame , QMenu
from PyQt5.QtCore import Qt, QThread, pyqtSignal
from gpi.widgets import terminalWidgets, simpleWidgets
from gpi.widgets.templateWidget import nodeHandler
from gpi.helpers import astHelpers, menuHelpers, dialogs , plugins
def getWidgets():
    """Return the built-in node-handler classes.

    The list order is the priority tie-break order: ASTWidget.getByPriority
    sorts stably, keeping the earliest entry among handlers reporting equal
    priority — do not reorder casually.
    """
    return [
        fallbackMulti,
        fallbackLE,
        assignmentCallWidget,
        callWidget,
        emptyLineCatch,
        commentCatch,
        forLoopCatch,
        ifelseCatch,
    ]
# ======================================================================================================
# Available widgets to follow
# ======================================================================================================
# ===========================================================================================
class assignmentCallWidget(nodeHandler):
    """
    Widget framework specialized for assignment-with-call statements
    (e.g. ``a, b = obj.func(x)``).

    It matches an assignment node whose right-hand side ends in a call,
    builds a vertical layout titled with the dotted callee name, then lays
    out one labelled line-edit terminal per output (assignment target) and
    one per input (call argument).
    """
    # Priority reported for matching nodes (see nH_getPriority).
    value = 4
    def __init__(self, trueNode, astTools):
        """
        :param trueNode: the AST node this widget edits
        :param astTools: shared helper object for whole-tree operations
        """
        nodeHandler.__init__(self , trueNode, astTools)
    @classmethod
    def nH_getPriority(cls,node, astTools):
        """Return cls.value when the node is an assignment whose value chain
        ends in a call; 0 (incompatible) otherwise."""
        condition1 = False
        condition2 = False
        try:
            condition1 = node.type == 'assignment'
            condition2 = node.value[-1].type == 'call'
        except AttributeError:
            # The node didn't have the values in the right places
            pass
        except IndexError:
            # The node didn't have enough arguments
            pass
        except TypeError:
            # Object doesn't support indexing
            pass
        if condition1 and condition2:
            return cls.value
        else:
            return 0
    @classmethod
    def nH_widgetBuilder(cls, node, astTools):
        """Build the frame for an assignment node.

        Returns (widget, terminalsDict), where terminalsDict maps
        id(terminal) -> terminal for every editable line-edit created here.
        """
        # Make the frame
        widget = QFrame()
        layout = QVBoxLayout()
        widget.setLayout(layout)
        terminalsDict = {}
        # Build the widget from the assignment node
        # Set up the title: dotted callee name, excluding the final call node.
        titleString = ''
        if len(node.value) == 1:
            titleString = node.value.value
        else:
            for i in range(len(node.value) - 1):
                if i==0:
                    pass
                else:
                    titleString += '.'
                titleString += node.value[i].value
        titleLabel = simpleWidgets.simpleLabel(titleString)
        titleLabel.setAlignment(Qt.AlignHCenter)
        # Full source of the node shown as the tooltip.
        titleLabel.setToolTip(node.dumps())
        layout.addWidget(titleLabel)
        # Add a horizontal widget to put the input and output widgets into
        # Output vertical layout
        input_output_widget = QWidget()
        input_output_layout = QHBoxLayout()
        input_output_widget.setLayout(input_output_layout)
        # Add the outputs
        # If there is just one target, put it in a list
        n = len(node.target)
        if n == 1:
            outputs = [node.target]
        else:
            outputs = node.target
        # Output vertical layout
        output_widget = QWidget()
        output_layout = QVBoxLayout()
        output_widget.setLayout(output_layout)
        outputTitleLabel= simpleWidgets.simpleLabel('Outputs')
        outputTitleLabel.setAlignment(Qt.AlignHCenter)
        output_layout.addWidget(outputTitleLabel)
        for i in range(len(outputs)):
            eachWidget , eachLayout = simpleWidgets.simpleWidget()
            eachLabel = simpleWidgets.simpleLabel('Output {} :'.format(i + 1))
            eachLE = terminalWidgets.LE_terminal()
            # eachLE = terminalWidgets.COMBO_terminal()
            eachLE.setup(outputs[i])
            eachLayout.addWidget(eachLabel)
            eachLayout.addWidget(eachLE,1)
            # eachLayout.addStretch(1)
            output_layout.addWidget(eachWidget,1)
            terminalsDict.update({id(eachLE):eachLE})
        input_output_layout.addWidget(output_widget,1)
        # Add the inputs (arguments of the trailing call node)
        inputs = node.value[-1]
        # input vertical layout
        input_widget = QWidget()
        input_layout = QVBoxLayout()
        input_widget.setLayout(input_layout)
        inputTitleLabel= simpleWidgets.simpleLabel('Inputs')
        inputTitleLabel.setAlignment(Qt.AlignHCenter)
        input_layout.addWidget(inputTitleLabel)
        for i, eachInput in enumerate(inputs):
            eachWidget , eachLayout = simpleWidgets.simpleWidget()
            if eachInput.target: # Check for keyword arguments
                input_label = '{} : '.format(eachInput.target)
            else:
                input_label = 'Argument {} : '.format(i+1) # No kwargs
            eachLabel = simpleWidgets.simpleLabel(input_label)
            eachLE = terminalWidgets.LE_terminal()
            # eachLE = terminalWidgets.COMBO_terminal()
            eachLE.setup(eachInput.value)
            eachLayout.addWidget(eachLabel)
            eachLayout.addWidget(eachLE,1)
            # eachLayout.addStretch(1)
            input_layout.addWidget(eachWidget,1)
            terminalsDict.update({id(eachLE):eachLE})
        input_output_layout.addWidget(input_widget,1)
        layout.addWidget(input_output_widget,1)
        return widget , terminalsDict
# ===========================================================================================
class callWidget(nodeHandler):
    """
    Widget framework specialized for bare call statements (an
    'atomtrailers' node ending in a call, e.g. ``obj.func(x)``).

    Builds a vertical layout titled with the dotted callee name and drops
    one labelled line-edit terminal per call argument.  Unlike
    assignmentCallWidget there are no assignment targets to render.
    """
    # Priority reported for matching nodes (see nH_getPriority).
    value = 4
    def __init__(self, trueNode, astTools):
        nodeHandler.__init__(self,trueNode, astTools)
    @classmethod
    def nH_getPriority(cls,node, astTools):
        """Return cls.value when the node is an atomtrailers chain ending in
        a call; 0 (incompatible) otherwise."""
        condition1 = False
        condition2 = False
        try:
            condition1 = node.type == 'atomtrailers'
            condition2 = node.value[-1].type == 'call'
        except AttributeError:
            # The node didn't have the values in the right places
            pass
        except IndexError:
            # The node didn't have enough arguments
            pass
        except TypeError:
            # Object doesn't support indexing
            pass
        if condition1 and condition2:
            return cls.value
        else:
            return 0
    @classmethod
    def nH_widgetBuilder(cls, node, astTools):
        """Build the frame for a call node.

        Returns (widget, terminalsDict), where terminalsDict maps
        id(terminal) -> terminal for every editable line-edit created here.
        """
        # Make the frame
        widget = QFrame()
        layout = QVBoxLayout()
        widget.setLayout(layout)
        terminalsDict = {}
        # Build the widget from the assignment node
        # Set up the title: dotted callee name, excluding the final call node.
        titleString = ''
        if len(node.value) == 1:
            titleString = node.value.value
        else:
            for i in range(len(node.value) - 1):
                if i==0:
                    pass
                else:
                    titleString += '.'
                titleString += node.value[i].value
        titleLabel = simpleWidgets.simpleLabel(titleString)
        titleLabel.setAlignment(Qt.AlignHCenter)
        titleLabel.setToolTip(node.dumps())
        layout.addWidget(titleLabel)
        # Add the inputs (arguments of the trailing call node)
        inputs = node.value[-1]
        if len(inputs) == 0:
            pass
        else:
            inputTitleLabel= simpleWidgets.simpleLabel('Inputs')
            inputTitleLabel.setAlignment(Qt.AlignHCenter)
            layout.addWidget(inputTitleLabel)
        for i, eachInput in enumerate(inputs):
            eachWidget , eachLayout = simpleWidgets.simpleWidget()
            if eachInput.target: # Check for keyword arguments
                input_label = '{} : '.format(eachInput.target)
            else:
                input_label = 'Argument {} : '.format(i+1) # No kwargs
            eachLabel = simpleWidgets.simpleLabel(input_label)
            eachLE = terminalWidgets.LE_terminal()
            # eachLE = terminalWidgets.COMBO_terminal()
            eachLE.setup(eachInput.value)
            eachLayout.addWidget(eachLabel)
            eachLayout.addWidget(eachLE,1)
            # eachLayout.addStretch(1)
            layout.addWidget(eachWidget,1)
            terminalsDict.update({id(eachLE):eachLE})
        return widget , terminalsDict
# ======================================================================================================
class fallbackMulti(nodeHandler):
    """Last-resort handler: shows the raw node text in a multi-line terminal.

    Priority (value) is the lowest of the built-in handlers; the
    compatibility check is inherited from nodeHandler.
    """
    value = 2

    def __init__(self, trueNode, astTools):
        nodeHandler.__init__(self, trueNode, astTools)

    @classmethod
    def nH_widgetBuilder(cls, node, astTools):
        """Return (widget, terminals) for the raw multi-line editor."""
        term = terminalWidgets.MULTI_terminal()
        term.setup(node)
        return term, {id(term): term}
# ======================================================================================================
class fallbackLE(nodeHandler):
    """Generic handler: shows the node in a single line-edit terminal.

    Slightly higher priority than fallbackMulti; the compatibility check is
    inherited from nodeHandler.
    """
    value = 3

    def __init__(self, trueNode, astTools):
        nodeHandler.__init__(self, trueNode, astTools)

    @classmethod
    def nH_widgetBuilder(cls, node, astTools):
        """Return (widget, terminals) for the single-line editor."""
        term = terminalWidgets.LE_terminal()
        term.setup(node)
        return term, {id(term): term}
# ======================================================================================================
class commentCatch(nodeHandler):
    """Handler for comment nodes; renders them with a COMMENT_terminal."""
    # High priority so comments never fall through to the generic editors.
    value = 10

    def __init__(self, trueNode, astTools):
        nodeHandler.__init__(self, trueNode, astTools)

    @classmethod
    def nH_getPriority(cls, node, astTools):
        """Claim the node only when it is a comment."""
        return cls.value if node.type == 'comment' else 0

    @classmethod
    def nH_widgetBuilder(cls, node, astTools):
        """Return (widget, terminals) for the comment editor."""
        term = terminalWidgets.COMMENT_terminal()
        term.setup(node)
        return term, {id(term): term}
# ======================================================================================================
class emptyLineCatch(nodeHandler):
    """Handler for blank-line ('endl') nodes."""
    # High priority so empty lines never fall through to the generic editors.
    value = 10

    def __init__(self, trueNode, astTools):
        nodeHandler.__init__(self, trueNode, astTools)

    @classmethod
    def nH_getPriority(cls, node, astTools):
        """Claim the node only when it is an end-of-line node."""
        return cls.value if node.type == 'endl' else 0

    @classmethod
    def nH_widgetBuilder(cls, node, astTools):
        """Return (widget, terminals); ENDL terminals need no node setup."""
        term = terminalWidgets.ENDL_terminal()
        return term, {id(term): term}
# ======================================================================================================
class forLoopCatch(nodeHandler):
    """Handler for 'for' nodes: renders the header ("for X in Y :") as a row
    of terminals and recurses into the loop body with a nested ASTWidget."""
    # The value of the widget dictates its priority
    value = 10
    def __init__(self, trueNode, astTools):
        nodeHandler.__init__(self , trueNode, astTools)
    @classmethod
    def nH_getPriority(cls,node, astTools):
        """Claim the node only when it is a for loop."""
        if node.type == 'for':
            return cls.value
        else:
            return 0
    @classmethod
    def nH_widgetBuilder(cls, node, astTools):
        """Build the loop widget; returns (widget, terminals-dict).

        NOTE(review): node.iterator is used as the loop variable and
        node.target as the iterable — this matches RedBaron's ForNode
        naming; confirm if the AST backend ever changes.
        """
        widget, layout = simpleWidgets.simpleWidget(vertical=True)
        logs = {}
        # "For" line
        For_widget, For_layout = simpleWidgets.simpleWidget()
        # for
        For_layout.addWidget(simpleWidgets.simpleLabel('for'))
        # Get the appropriate widget and add it to the layout
        sub_widget = terminalWidgets.LE_terminal()
        sub_widget.setup(node.iterator)
        For_layout.addWidget(sub_widget)
        logs.update( {id(sub_widget):sub_widget} )
        # in
        For_layout.addWidget(simpleWidgets.simpleLabel('in'))
        # Get the appropriate widget and add it to the layout
        sub_widget = terminalWidgets.LE_terminal()
        sub_widget.setup(node.target)
        For_layout.addWidget(sub_widget)
        logs.update( {id(sub_widget):sub_widget} )
        # :
        For_layout.addWidget(simpleWidgets.simpleLabel(':'))
        layout.addWidget(For_widget)
        # Draw up a new ASTWidget to handle the branch (the loop body),
        # indented by a 50px spacer.
        space_widget, space_layout = simpleWidgets.simpleWidget()
        space_layout.addSpacing(50)
        ASTWidget_widget = ASTWidget(node)
        space_layout.addWidget(ASTWidget_widget)
        layout.addWidget(space_widget)
        # TODO: EditAST needs a way to pass the terminals in a way that makes sense
        for branch in node:
            t = ASTWidget_widget.branchID_2_terminals[id(branch)]
            logs.update(t)
        return widget , logs
class ifelseCatch(nodeHandler):
    """Handler for 'ifelseblock' nodes: renders every if/elif/else clause as
    a labelled header row followed by an indented sub-tree widget."""
    # High priority so conditional blocks never fall through to the generic editors.
    value = 10

    def __init__(self, trueNode, astTools):
        nodeHandler.__init__(self, trueNode, astTools)

    @classmethod
    def nH_getPriority(cls, node, astTools):
        """Claim the node only when it is an if/elif/else block."""
        if node.type == 'ifelseblock':
            return cls.value
        else:
            return 0

    @classmethod
    def nH_widgetBuilder(cls, node, astTools):
        """Build the stacked clause widgets; returns (widget, terminals-dict).

        Fixes two defects of the previous version: the terminals returned by
        the clause builders are now merged into ``logs`` (they used to be
        discarded, so this block's terminals never reached the caller), and
        the trailing ``else`` clause is routed to ``else_func`` instead of
        calling ``if_elif_func`` without its required ``label`` argument
        (which raised TypeError whenever an ``else`` was present).
        """
        widget, layout = simpleWidgets.simpleWidget(vertical=True)
        logs = {}
        # Leading "if" clause.
        if_widget, space_widget, new_logs = cls.if_elif_func(node.value[0], 'if')
        layout.addWidget(if_widget)
        layout.addWidget(space_widget)
        logs.update(new_logs)
        # Remaining clauses: each is either an elif or the trailing else.
        for clause in node.value[1:]:
            if clause.type == 'elif':
                if_widget, space_widget, new_logs = cls.if_elif_func(clause, 'elif')
            else:
                if_widget, space_widget, new_logs = cls.else_func(clause)
            layout.addWidget(if_widget)
            layout.addWidget(space_widget)
            logs.update(new_logs)
        return widget, logs

    @staticmethod
    def if_elif_func(node, label):
        """Build the header row (*label* + condition + ':') and the indented
        body widget for one conditional clause.

        Returns (header_widget, body_widget, terminals-dict)."""
        logs = {}
        if_widget, if_layout = simpleWidgets.simpleWidget()
        if_layout.addWidget(simpleWidgets.simpleLabel(label))
        # Editable condition terminal.
        sub_widget = terminalWidgets.LE_terminal()
        sub_widget.setup(node.test)
        if_layout.addWidget(sub_widget)
        logs.update({id(sub_widget): sub_widget})
        if_layout.addWidget(simpleWidgets.simpleLabel(':'))
        # Indented sub-tree for the clause body.
        space_widget, space_layout = simpleWidgets.simpleWidget()
        space_layout.addSpacing(50)
        ASTWidget_widget = ASTWidget(node)
        space_layout.addWidget(ASTWidget_widget)
        for branch in node:
            t = ASTWidget_widget.branchID_2_terminals[id(branch)]
            logs.update(t)
        return if_widget, space_widget, logs

    @staticmethod
    def else_func(node):
        """Build the header row ("else" + ':') and the indented body widget
        for the trailing else clause.

        Returns (header_widget, body_widget, terminals-dict)."""
        logs = {}
        else_widget, else_layout = simpleWidgets.simpleWidget()
        else_layout.addWidget(simpleWidgets.simpleLabel('else'))
        else_layout.addWidget(simpleWidgets.simpleLabel(':'))
        # Indented sub-tree for the clause body.
        space_widget, space_layout = simpleWidgets.simpleWidget()
        space_layout.addSpacing(50)
        ASTWidget_widget = ASTWidget(node)
        space_layout.addWidget(ASTWidget_widget)
        for branch in node:
            t = ASTWidget_widget.branchID_2_terminals[id(branch)]
            logs.update(t)
        return else_widget, space_widget, logs
# ======================================================================================================
class ASTWidget(QFrame):
    """Frame that renders one editor widget per root-level AST branch.

    Maintains two maps keyed by id(branch): branchID_2_widget (for
    teardown) and branchID_2_terminals (for status queries).  The handler
    classes are collected once, at class-definition time.
    """
    # One Dictionary to have all the widgets with their branch id's as keys
    # Calls to 'getWidget' add widgets to this dictionary
    # Make a list of all the plugins
    widgetList = getWidgets()
    plugin_widgetList , intro_widgets = plugins.parse_plugin_widgets()
    # Add the plugin list to the main list
    widgetList.extend(plugin_widgetList)
    def __init__(self, AST):
        """Build the view for *AST* (the root node list of the tree)."""
        QFrame.__init__(self)
        # Bounding box stuff
        self.setFrameStyle(QFrame.StyledPanel | QFrame.Plain)
        # Set the widget information
        # NOTE: self.layout shadows QWidget.layout(); the rest of the class
        # relies on the attribute form.
        self.layout = QVBoxLayout()
        self.setLayout(self.layout)
        # This is the primary copy of the abstract syntax tree
        self.AST = AST
        # This dictionary can be used to make sure widgets are destroyed upon removal
        self.branchID_2_widget = {}
        self.branchID_2_terminals = {}
        # TODO: This is the place to include an object for tracking the AST as a whole
        self.astTools = astHelpers.tools(self.AST)
        self.populate()
    def populate(self, intro=True):
        """Create and lay out a widget for every root branch of self.AST.

        When *intro* is true and intro handlers are registered, the leading
        branches (up to last_intro_node) are rendered by a single
        introduction widget; the remaining branches each get their own
        handler widget via getWidget().
        """
        # =====================================================================
        # Include a section for checking the intro stuff ----------------------
        # Check to see if an intro is possible:
        if intro and len(self.intro_widgets):
            try:
                widget = self.getByPriority(self.AST, self.intro_widgets, self.astTools)
                self.last_intro_node = widget.widget.last_intro_node
                introAstID = id( self.AST[0] )
                # Set up the context menu for the widget
                widget.setContextMenuPolicy(Qt.CustomContextMenu)
                widget.customContextMenuRequested.connect( lambda pos , astID = introAstID : self.introLineNumCMenu( astID , pos ) )
                # Save the widget and the widgets terminals in two dictionaries
                # (every intro-covered branch maps to the same intro widget).
                for i , astBranch in enumerate(self.AST):
                    if i > self.last_intro_node:
                        pass
                    else:
                        astID = id( astBranch )
                        self.branchID_2_widget[astID] = widget
                        self.branchID_2_terminals[astID] = widget.terminals
                self.layout.addWidget(widget)
            except Exception as err:
                # dialogs.genDialog('Open Error', 'Trace:\n{}'.format(err))
                # Intro failed: log the trace and fall back to per-branch widgets.
                traceback.print_exc()
                last_intro_node = 0
                self.last_intro_node = last_intro_node
        else:
            last_intro_node = 0
            self.last_intro_node = last_intro_node
        # Handles for the rest of the AST -------------------------------------
        # Pick the right widget for the ast node
        # for node in rest_of_ast:
        for i , node in enumerate(self.AST):
            if i < self.last_intro_node:
                pass
            else:
                widget , terminals = self.getWidget(node,self.astTools)
                # print(widget)
                # input()
                self.layout.addWidget(widget)
        # =====================================================================
        # Add a stretch to the end so that the widgets will bunch up on the top of the window if there are few widgets drawn.
        # Without this the few widgets will expand and take up entire screen with lots of blank space inside widgets
        self.layout.addStretch(1)
        # Update the line numbers as nodes change
        self.updateLineNo()
# Right now it is defined because of how the gui is tested
    def slot(self):
        """No-op Qt slot placeholder (kept because of how the GUI is tested)."""
        pass
def testStatus(self):
# Check for failed gui elements
problemBranches = []
fail = 0
success = 1
# Look through each branch at the root list
for i , branch in enumerate(self.AST):
t = self.branchID_2_terminals[id(branch)]
# Check for failed statuses in the terminals
statuses = [t[key].status for key in t]
# Return the branch number on the root
if fail in statuses: problemBranches.append( self.AST.index(branch) )
return problemBranches
def branchID_2_branch(self):
# A list with all the branch ids in order
# Created at function call, it is intimately tied to the AST status at function call
return [ id(branch) for branch in self.AST ]
    def removeWidget(self, astId):
        """Detach and forget the widget for branch id *astId*; terminals are
        disabled first so stale slots cannot fire."""
        # Disconnect the context menu slot
        try:
            self.branchID_2_widget[astId].customContextMenuRequested.disconnect()
            # Disable the slots on the widget
            for key in self.branchID_2_terminals[astId]:
                self.branchID_2_terminals[astId][key].disable()
        except RuntimeError:
            # presumably guards a widget whose C++ side is already deleted —
            # TODO(review): confirm which call actually raises here.
            pass
        # Remove the widget from the view
        self.branchID_2_widget[astId].setParent(None)
        # Remove the widget from the widget dictionary
        del self.branchID_2_widget[astId]
    def insertWidget(self, astIndex, widget):
        """Insert *widget* at the layout slot matching AST index *astIndex*.

        When an intro widget is present, several leading branches share one
        layout slot, so the layout index is shifted accordingly.
        """
        # With intro, index of layout may not match index of ast
        if self.last_intro_node == 0:
            self.layout.insertWidget(astIndex ,widget)
        else:
            self.layout.insertWidget(astIndex - self.last_intro_node + 1,widget)
    def getWidget(self,node,astTools,simple=False):
        """Create, wire up and register the best handler widget for *node*.

        Returns (widget, terminals).  With simple=True the raw multi-line
        fallback is forced instead of the priority search.
        """
        # Get the widget that will hold the AST gui elements
        # If simple is called for, use a line edit
        # TODO: This call doesn't look for compatibility, should separate compatibility check from priority check
        if simple:
            widget = fallbackMulti(node, astTools)
        else:
            widget = self.getByPriority(node, self.widgetList, astTools)
        # Set up the context menu for the widget
        widget.setContextMenuPolicy(Qt.CustomContextMenu)
        widget.customContextMenuRequested.connect( lambda pos , astID = id(node) : self.lineNumCMenu( astID , pos ) )
        # Save the widget and the widgets terminals in two dictionaries
        self.branchID_2_widget[id(node)] = widget
        self.branchID_2_terminals[id(node)] = widget.terminals
        return widget , widget.terminals
def getByPriority(self, node, widgetSet, astTools):
# Test each available widget
# Return a widget representing the best widget available
# Highest priority means is the largest number
# Incompatability is a 0
# Make a list of the priorities
priorityMap = [w.nH_getPriority(node, astTools) for w in widgetSet]
# Make a dictionary where the id of the widgets are the keys and the priorities are stored
d = { id(w):p for p , w in zip(priorityMap,widgetSet)}
# Use that dictionary to sort the widgets
sortedWidgets = sorted(widgetSet, key=lambda w: d[id(w)] , reverse=True)
# Return the widget with the highest priority
return sortedWidgets[0](node, astTools)
    def lineNumCMenu(self , astID , pos ):
        """Show the per-branch context menu (new/duplicate/move/remove/
        regenerate/raw) at *pos*, local to the widget for branch *astID*."""
        # Create the menu
        CMenu = QMenu()
        CMenu.addAction(menuHelpers.newAction(self, 'New' , lambda ignore , astID = astID : self.new(astID) ) )
        CMenu.addAction(menuHelpers.newAction(self, 'Duplicate' , lambda ignore , astID = astID : self.duplicate(astID) ) )
        CMenu.addAction(menuHelpers.newAction(self, 'Move Up' , lambda ignore , astID = astID : self.move(astID, -1) ) )
        CMenu.addAction(menuHelpers.newAction(self, 'Move Down' , lambda ignore , astID = astID : self.move(astID, 1) ) )
        CMenu.addAction(menuHelpers.newAction(self, 'Remove' , lambda ignore , astID = astID : self.remove(astID) ) )
        CMenu.addAction(menuHelpers.newAction(self, 'Regenerate' , lambda ignore , astID = astID : self.regenerate(astID) ) )
        CMenu.addAction(menuHelpers.newAction(self, 'Raw' , lambda ignore , astID = astID : self.regenerate(astID, simple=True) ) )
        # Get the global position
        globalPos = self.branchID_2_widget[astID].mapToGlobal(pos)
        # Send out the menu
        CMenu.exec_(globalPos)
# Functions to be used only if there is an introductory widget =========================================================
    def introLineNumCMenu(self , astID , pos ):
        """Show the reduced context menu (only 'Split') for the introduction
        widget identified by *astID*."""
        # Create the menu
        CMenu = QMenu()
        CMenu.addAction( menuHelpers.newAction(self, 'Split' , lambda ignore : self.introSplit() ) )
        # Get the global position
        globalPos = self.branchID_2_widget[astID].mapToGlobal(pos)
        # Send out the menu
        CMenu.exec_(globalPos)
def introSplit(self):
# Remove everything from the gui
def deleteItems(layout):
if layout is not None:
while layout.count():
item = layout.takeAt(0)
widget = item.widget()
if widget is not None:
widget.deleteLater()
else:
deleteItems(item.layout())
deleteItems(self.layout)
# Rebuild the dictionaries
self.branchID_2_widget = {}
self.branchID_2_terminals = {}
# Add everything back, ignoring the introduction special treatment
self.populate(intro=False)
pass
# ======================================================================================================================
def new(self,astID):
# Find what widget was chosen
index = self.branchID_2_branch().index(astID)
# Insert a new line in the AST, a comment
newLine = '# New Line'
self.AST.insert(index,newLine)
# Add the widget to the GUI
widget , terminals = self.getWidget(self.AST[index], self.astTools)
self.insertWidget(index,widget)
# Update Line Numbers
self.updateLineNo()
def duplicate(self, astID):
    """Clone the branch *astID* and insert the copy at its position."""
    position = self.branchID_2_branch().index(astID)
    # Serialize the branch back to source text and re-insert it; the copy
    # lands at the same index, pushing the original down by one.
    self.AST.insert(position, self.AST[position].dumps())
    # Build and place the widget for the freshly inserted branch.
    clone_widget, _terminals = self.getWidget(self.AST[position], self.astTools)
    self.insertWidget(position, clone_widget)
    # Refresh the displayed line numbers.
    self.updateLineNo()
def regenerate(self,astID,simple=False):
    """Rebuild the widget for branch *astID* from its AST node.

    :param astID:  id of the branch to regenerate
    :param simple: when True, produce the raw (unstyled) widget form
    """
    # Find what widget was chosen
    index = self.branchID_2_branch().index(astID)
    # Remove the old widget
    self.removeWidget(astID)
    # Insert the new widget
    widget , terminals = self.getWidget(self.AST[index], self.astTools, simple=simple)
    # With intro, index of layout may not match index of ast
    self.insertWidget(index,widget)
    # Update Line Numbers for this widget
    self.updateLineNo()
def move(self, astID, step):
    """Move the branch *astID* by *step* positions (-1 = up, +1 = down).

    The move is refused when it would cross the introduction boundary or
    fall off either end of the AST.
    """
    # Find what widget was chosen
    index = self.branchID_2_branch().index(astID)
    # Branches belonging to the introduction occupy indices below
    # last_intro_node and must not be disturbed.  The original if/else here
    # assigned the same value in both arms (0 when last_intro_node == 0,
    # last_intro_node otherwise), so it reduces to a single assignment.
    introFudge = self.last_intro_node
    target = index + step
    if introFudge <= target < len(self.AST):
        # Remove the widget by id: with an intro, the layout index may not
        # match the AST index.
        self.removeWidget(astID)
        # Re-serialize the branch, delete it, and re-insert at the target.
        duplicateSyntax = self.AST[index].dumps()
        del self.AST[index]
        self.AST.insert(target, duplicateSyntax)
        # Make a new widget for the moved branch.
        widget, _terminals = self.getWidget(self.AST[target], self.astTools)
        # Add the widget to the layout where it is supposed to be.
        self.insertWidget(target, widget)
        # Update Line Numbers
        self.updateLineNo()
def remove(self, astID):
    """Delete the branch *astID* from the AST and drop its widget."""
    # Locate and delete the branch in the tree.
    position = self.branchID_2_branch().index(astID)
    del self.AST[position]
    # Remove the widget by id rather than by position: with an intro
    # widget the layout index may differ from the AST index.
    self.removeWidget(astID)
    # Refresh the displayed line numbers.
    self.updateLineNo()
def updateLineNo(self):
    """Recompute all line numbers on a background thread.

    The AST walk happens in getNumberingThread; results are delivered
    back to the GUI thread through the updateLineNumber slot.
    """
    # Loop through each widget in the gui
    # Keeping the thread on self prevents it from being garbage-collected
    # while it is still running.
    self.get_thread = getNumberingThread(self.branchID_2_widget, self.updateLineNumber)
    self.get_thread.start()
def updateLineNumber(self, line_numbers, nodeID):
    """Slot: write freshly computed line-number strings into their widgets.

    :param line_numbers: list of line-number strings, one per branch
    :param nodeID:       parallel list of branch ids as strings
    """
    # Pair each number with its node id directly instead of indexing
    # through range(len(...)).
    for number, node_id in zip(line_numbers, nodeID):
        self.branchID_2_widget[int(node_id)].line_number.setText(number)
class getNumberingThread(QThread):
    """
        Parse the abstract syntax tree to find all the line numbers in the
        background. When all the line numbers are found, pass them back out
        and populate the gui with them.

        :param branchID_2_widget: connect the ast branch id to the widget
        :type branchID_2_widget: dict
        :param slot: function to call when all the work is done
        :type slot: function
    """
    # Make a signal to do all the gui work when the line numbers are ready
    signal = pyqtSignal(list, list)
    def __init__(self, branchID_2_widget, slot):
        QThread.__init__(self)
        self.branchID_2_widget = branchID_2_widget
        # Connecting through a signal means the slot runs on the GUI
        # thread, not on this worker thread.
        self.signal.connect(slot)
    def run(self):
        # Collected line-number strings and their matching branch ids.
        linenumber_strings = []
        id_strings = []
        for key in self.branchID_2_widget.keys():
            w = self.branchID_2_widget[key]
            # Check to see if lineNumbers method is defined in the widget class
            # If not, use the standard one in astHelpers
            # (astHelpers is presumably imported at file top — not visible here)
            if 'lineNumbers' in dir(w):
                line_numbers = w.lineNumbers(w.node)
            else:
                line_numbers = astHelpers.lineNumbers(w.node)
            linenumber_strings.append(line_numbers)
            id_strings.append(str(key))
        # Hand everything back to the GUI thread in one shot.
        self.signal.emit(linenumber_strings, id_strings)
|
# -------------------------------------------------------------
# NDN Hydra MainLoop
# -------------------------------------------------------------
# @Project: NDN Hydra
# @Date: 2021-01-25
# @Authors: Please check AUTHORS.rst
# @Source-Code: https://github.com/justincpresley/ndn-hydra
# @Documentation: https://ndn-hydra.readthedocs.io
# @Pip-Library: https://pypi.org/project/ndn-hydra
# -------------------------------------------------------------
import asyncio as aio
import logging
import secrets
import time
import random
from typing import Dict, List
from ndn.app import NDNApp
from ndn.encoding import Name, Component
from ndn.types import InterestNack, InterestTimeout
from ndn.svs import SVSync
from ndn.storage import Storage, SqliteStorage
from ndn_hydra.repo.modules import *
from ndn_hydra.repo.group_messages import *
from ndn_hydra.repo.utils.concurrent_fetcher import concurrent_fetcher
class MainLoop:
    """Periodic main loop of an NDN Hydra repo node.

    Joins the repo group via state-vector sync (SVSync), publishes
    heartbeats, tracks peer liveness, re-fetches under-replicated files
    this node is responsible for backing up, and randomly claims backup
    responsibility for files it is not yet involved with.
    """
    def __init__(self, app:NDNApp, config:Dict, global_view:GlobalView, data_storage:Storage, svs_storage:Storage):
        # NDN application handle used for all network operations.
        self.app = app
        # Node configuration (node_name, repo_prefix, timing parameters, ...).
        self.config = config
        # Shared view of the whole repo group's state.
        self.global_view = global_view
        # Packet store for file data held by this node.
        self.data_storage = data_storage
        # Backing storage for the SVSync protocol state.
        self.svs_storage = svs_storage
        self.svs = None  # created in start()
        self.logger = logging.getLogger()
        self.node_name = self.config['node_name']
        # Heartbeat bookkeeping: when to beat, who has failed, who renewed.
        self.tracker = HeartbeatTracker(self.node_name, global_view, config['loop_period'], config['heartbeat_rate'], config['tracker_rate'], config['beats_to_fail'], config['beats_to_renew'])
        # Names of files currently being fetched (guards against duplicates).
        self.fetching = []
    async def start(self):
        """Join the sync group, then run the periodic cycle forever."""
        self.svs = SVSync(self.app, Name.normalize(self.config['repo_prefix'] + "/group"), Name.normalize(self.node_name), self.svs_missing_callback, storage=self.svs_storage)
        # Grace period so the sync group can settle before the first cycle.
        await aio.sleep(5)
        while True:
            # loop_period is configured in milliseconds.
            await aio.sleep(self.config['loop_period'] / 1000.0)
            self.periodic()
    def periodic(self):
        """One maintenance cycle: liveness, heartbeat, backups, claims."""
        self.tracker.detect()
        if self.tracker.beat():
            self.send_heartbeat()
            self.tracker.reset(self.node_name)
        self.backup_list_check()
        self.claim()
    def svs_missing_callback(self, missing_list):
        # The SVSync callback is synchronous; hand work to the event loop.
        aio.ensure_future(self.on_missing_svs_messages(missing_list))
    async def on_missing_svs_messages(self, missing_list):
        """Fetch and apply every group message this node has not yet seen."""
        # if missing list is greater than 100 messages, bootstrap
        for i in missing_list:
            if i.nid == self.config["node_name"]:
                # Our own messages: just refresh our own liveness entry.
                self.tracker.restart(self.config["node_name"])
                # bootstrap
                continue
            while i.lowSeqno <= i.highSeqno:
                message_bytes = await self.svs.fetchData(Name.from_str(i.nid), i.lowSeqno)
                # NOTE(review): 'is None' would be the idiomatic test here.
                # Also, when fetchData keeps returning None this loop retries
                # the same seqno forever — confirm that is intended.
                if message_bytes == None:
                    continue
                message = Message.specify(i.nid, i.lowSeqno, message_bytes)
                # Receiving any message from a peer proves it is alive.
                self.tracker.reset(i.nid)
                aio.ensure_future(message.apply(self.global_view, self.fetch_file, self.svs, self.config))
                i.lowSeqno = i.lowSeqno + 1
    def send_heartbeat(self):
        """Publish a heartbeat message announcing this node's favor."""
        # assumes favor is a fixed placeholder value — TODO confirm
        favor = 1.85
        heartbeat_message = HeartbeatMessageTlv()
        heartbeat_message.node_name = self.config['node_name'].encode()
        heartbeat_message.favor = str(favor).encode()
        message = Message()
        message.type = MessageTypes.HEARTBEAT
        message.value = heartbeat_message.encode()
        try:
            next_state_vector = self.svs.getCore().getStateTable().getSeqno(Name.to_str(Name.from_str(self.config['node_name']))) + 1
        except TypeError:
            # No state-table entry yet: this is the node's first message.
            next_state_vector = 0
        self.global_view.update_node(self.config['node_name'], favor, next_state_vector)
        self.svs.publishData(message.encode())
    def backup_list_check(self):
        """Start fetching any under-replicated file this node must back up."""
        underreplicated_files = self.global_view.get_underreplicated_files()
        for underreplicated_file in underreplicated_files:
            # How many additional copies the file still needs.
            deficit = underreplicated_file['desired_copies'] - len(underreplicated_file['stores'])
            for backuped_by in underreplicated_file['backups']:
                # Only the highest-ranked backups (rank < deficit) act.
                if (backuped_by['node_name'] == self.config['node_name']) and (backuped_by['rank'] < deficit):
                    self.fetch_file(underreplicated_file['file_name'], underreplicated_file['packets'], underreplicated_file['packet_size'], underreplicated_file['fetch_path'])
    def claim(self):
        """Randomly volunteer to back up files this node is uninvolved with."""
        # TODO: possibility based on # active sessions and period
        if random.random() < 0.618:
            return
        backupable_files = self.global_view.get_backupable_files()
        for backupable_file in backupable_files:
            # Second coin flip per file keeps claims spread across nodes.
            if random.random() < 0.618:
                continue
            # print(json.dumps(backupable_insertion['stores']))
            # print(json.dumps(backupable_insertion['backups']))
            # Skip files we already store or already back up.
            already_in = False
            for stored_by in backupable_file['stores']:
                if stored_by == self.config['node_name']:
                    already_in = True
                    break
            for backuped_by in backupable_file['backups']:
                if backuped_by['node_name'] == self.config['node_name']:
                    already_in = True
                    break
            if already_in == True:
                continue
            # A claim must be authorized by somebody already involved.
            if len(backupable_file['backups']) == 0 and len(backupable_file['stores']) == 0:
                continue
            authorizer = None
            if len(backupable_file['backups']) == 0:
                # No backups yet: the last storing node authorizes.
                authorizer = {
                    'node_name': backupable_file['stores'][-1],
                    'rank': -1,
                    'nonce': backupable_file['file_name']
                }
            else:
                # Otherwise chain off the most recent backup.
                authorizer = backupable_file['backups'][-1]
            # generate claim (request) msg and send
            # claim tlv
            # assumes favor is a fixed placeholder value — TODO confirm
            favor = 1.85
            claim_message = ClaimMessageTlv()
            claim_message.node_name = self.config['node_name'].encode()
            claim_message.favor = str(favor).encode()
            claim_message.file_name = Name.from_str(backupable_file['file_name'])
            claim_message.type = ClaimTypes.REQUEST
            claim_message.claimer_node_name = self.config['node_name'].encode()
            claim_message.claimer_nonce = secrets.token_hex(4).encode()
            claim_message.authorizer_node_name = authorizer['node_name'].encode()
            claim_message.authorizer_nonce = authorizer['nonce'].encode()
            # claim msg
            message = Message()
            message.type = MessageTypes.CLAIM
            message.value = claim_message.encode()
            self.svs.publishData(message.encode())
            self.logger.info(f"[MSG][CLAIM.R]* nam={self.config['node_name']};fil={backupable_file['file_name']}")
    def store(self, file_name: str):
        """Announce to the group that this node now stores *file_name*."""
        # assumes favor is a fixed placeholder value — TODO confirm
        favor = 1.85
        store_message = StoreMessageTlv()
        store_message.node_name = self.config['node_name'].encode()
        store_message.favor = str(favor).encode()
        store_message.file_name = Name.from_str(file_name)
        message = Message()
        message.type = MessageTypes.STORE
        message.value = store_message.encode()
        self.global_view.store_file(file_name, self.config['node_name'])
        self.svs.publishData(message.encode())
        self.logger.info(f"[MSG][STORE]* nam={self.config['node_name']};fil={file_name}")
    def fetch_file(self, file_name: str, packets: int, packet_size: int, fetch_path: str):
        """Schedule an asynchronous fetch of *file_name* (non-blocking)."""
        aio.ensure_future(self.fetch_file_helper(file_name, packets, packet_size, fetch_path))
    async def fetch_file_helper(self, file_name: str, packets: int, packet_size: int, fetch_path: str):
        """Fetch all packets of a file; on completion announce storage."""
        # Skip if another fetch of the same file is already in flight.
        if file_name in self.fetching:
            return
        self.fetching.append(file_name)
        self.logger.info(f"[ACT][FETCH]* fil={file_name};pcks={packets};fetch_path={fetch_path}")
        start = time.time()
        inserted_packets = 0
        # Fetch up to 15 packets concurrently (bounded by the semaphore).
        async for (_, _, content, data_bytes, key) in concurrent_fetcher(self.app, fetch_path, file_name, 0, packets-1, aio.Semaphore(15)):
            self.data_storage.put_packet(key, data_bytes) #TODO: check digest
            inserted_packets += 1
        if inserted_packets == packets:
            end = time.time()
            duration = end -start
            self.logger.info(f"[ACT][FETCHED]* pcks={packets};duration={duration}")
            # Only a complete file counts as stored.
            self.store(file_name)
        self.fetching.remove(file_name)
import math
import random
import requests
import abc
import itertools
"""[summary]
This file handles the logic for getting picture information from K-samsok.
Info such as item id,service organization,thumbnail,item type and kringla link
Author: <NAME> 2019-05-21
Sources: Some code is taken/based on Abbe98 code on github link:
https://gist.github.com/Abbe98/882a374350d20b980190c3148f787f5a
"""
# (This comment is from Abbe98) K-samsök supports JSON if given the following Accept header
headers = {
'Accept': 'application/json'
}
def random_ksamsok_pics(num_of_req, item_type, service_organizations):
    """[summary]
    Yield dictionaries for random start records across the given organizations.
    Each dictionary contains item id, service organization, thumbnail,
    item type and Kringla link.
    Args:
        num_of_req ([INT]): [How many requests to perform; decides how many random pictures we get]
        item_type ([STR]): [The type of item wanted from the request]
        service_organizations ([STR]): [The org(s) we want pictures from]
    """
    # Each request asks for two records (one alone returns nothing),
    # so halve the request count, rounding up, to hit the target amount.
    requests_per_org = math.ceil(num_of_req / 2)
    organizations = service_org_list(service_organizations)
    chained = []
    query = None
    for organization in organizations:
        # We request two from the query because one does not return anything.
        req_url, query = concat_url(2, item_type, organization)
        hits = get_totalt_result(req_url)
        per_org = loop_throught_req(req_url, requests_per_org, hits, "random_pics")
        chained = itertools.chain(chained, per_org)
    # Decorate every record with its Kringla link (uses the last query built).
    return add_kringla_link(chained, query)
def ksamsok_pics(num_of_pics, num_of_req, item_type, service_organizations):
    """[summary]
    Yield dictionaries paging sequentially through start records.
    Each dictionary contains item id, service organization, thumbnail,
    item type and Kringla link.
    Args:
        num_of_pics ([INT]): [How many pictures wanted per page; capped at 500 by the API]
        num_of_req ([INT]): [Number of page requests to run]
        item_type ([STR]): [The type of item wanted from the request]
        service_organizations ([STR]): [The org(s) we want pictures from]
    """
    organizations = service_org_list(service_organizations)
    chained = []
    query = None
    # The API refuses more than 500 hits per page.
    page_size = default_to_500_pics(num_of_pics)
    for organization in organizations:
        req_url, query = concat_url(page_size, item_type, organization)
        hits = get_totalt_result(req_url)
        per_org = loop_throught_req(req_url, num_of_req, hits, "pics")
        chained = itertools.chain(chained, per_org)
    # Decorate every record with its Kringla link (uses the last query built).
    return add_kringla_link(chained, query)
def default_to_500_pics(num_of_pics):
    """[summary]
    Clamp the requested picture count to the API maximum of 500.
    Args:
        num_of_pics ([INT]): [How many pictures wanted]
    Returns:
        INT: num_of_pics, or 500 when more than 500 were requested
    """
    # min() replaces the previous four-line if/else with the same result.
    return min(num_of_pics, 500)
def service_org_list(service_organizations):
    """[summary]
    Split the comma-separated organization string into a list.
    The special value "all" expands to every known organization.
    Args:
        service_organizations ([STR]): [The org(s) we want pictures from, comma separated, or "all"]
    Returns:
        LIST[STR]: one entry per organization code
    """
    # The original if/else returned the identical expression in both
    # branches; only the "all" substitution differs, so split once.
    if(service_organizations == "all"):
        service_organizations ='s-vlm,kbg,enk,smvk-mm,shm,hallwylska museet,aero,vgm,osmu,smvk-om,smm-mm,bhm,socken,lsh,vm,nomu,jm,Kortnamn,arkm,blm,skoklosters slott,pm,s-tek,s-hm,rsms,shfa,jlm,slm,mili,imvg,heo,smm-sm,mm,s-fv,tum,s-om,soc,livrustkammaren,smm-vm,smvk-em,kulturen,jfl,vax,gnm,hem,vbg,tes,upmu,smha,gfm,dramawebben,smvk-vkm,sm,sk,dfh,litografiska,s-xlm,raä,arme,ajtte,wws,ablm,fmb,s-fbm,gsm,s-olm'
    return service_organizations.split(',')
def add_kringla_link(result, ksamsok_query):
    """[summary]
    Yield each record dictionary with a 'kringlaLink' key added.
    Args:
        result ([GENERATOR]): [All our dictionaries in a generator]
        ksamsok_query ([STR]): [The query we searched for; currently unused,
            kept for interface compatibility]
    """
    base = "http://www.kringla.nu/kringla/objekt?referens="
    for record in result:
        # The Kringla reference is the item id minus the kulturarvsdata prefix.
        reference = record["itemId"].replace("http://kulturarvsdata.se/", "")
        record["kringlaLink"] = base + reference
        yield record
def get_totalt_result(req_url):
    """[summary]
    Return the total number of hits (INT) for the specified query.
    Args:
        req_url ([STR]): [The request query that decides the data]
    """
    r = requests.get(req_url, headers=headers)
    # Renamed from 'json': the old local name shadowed the stdlib module.
    payload = r.json()
    return payload['result']['totalHits']
def concat_url(num_of_pics, item_type, org):
    """[summary]
    Build the full K-samsok API URL (ending in 'startRecord=') and the raw query.
    Args:
        num_of_pics ([INT]): [How many pictures wanted, the hitsPerPage in our case]
        item_type ([STR]): [This is what type of item we want from request]
        org ([STR]): [The org we want pictures from]
    Returns:
        (STR, STR): the request URL and the query string embedded in it
    """
    base = 'http://www.kulturarvsdata.se/ksamsok/api'
    wanted_fields = 'serviceOrganization,thumbnail,itemType'
    search_params = F'?&x-api=test&method=search&hitsPerPage={num_of_pics}&recordSchema=xml'
    # All the "OR NOT" clauses exclude photos that merely depict objects,
    # something we dont want when item_type is photo.
    query = (
        F'thumbnailExists="j" AND itemType="{item_type}" AND serviceOrganization="{org}"'
        ' OR NOT itemSpecification="Dokumentationsbild"'
        ' OR NOT itemSpecification="ID-bild"'
        ' OR NOT itemSpecification="Placeringsbild"'
        ' OR NOT itemSpecification="Presentationsbild"'
        ' OR NOT itemName="föremålsbild"'
    )
    req_url = F'{base}{search_params}&query={query}&fields={wanted_fields}&startRecord='
    return req_url, query
def loop_throught_req(req_url, required_requests, total_results, kind_startrecord):
    """[summary]
    Generator over K-samsok records: one HTTP request per start record,
    yielding one dictionary per record with repeated fields merged to lists.
    Args:
        req_url ([STR]): [Full url of the query wanted]
        required_requests ([INT]): [Number of requests that need to run]
        total_results ([INT]): [The amount of hits for the full query]
        kind_startrecord ([STR]): ["random_pics" draws random start records;
            anything else pages sequentially in steps of 500]
    """
    count = 0
    # If collection is empty print and skip rest
    if total_results <= 1:
        print("Error: Collection is empty\n")
    else:
        # Makes an array of unique random numbers
        try:
            startrecord_unique = random.sample(range(0, total_results), required_requests)
        except ValueError:
            # random.sample raises ValueError when the requested sample
            # exceeds the population; fall back to taking everything.
            # (Previously a bare `except:`, which also hid unrelated errors.)
            print("Collection less than requested download what i can from collection")
            startrecord_unique = random.sample(range(0, total_results), total_results)
        while len(startrecord_unique) != count:
            # Gets our randomed startrecord
            if kind_startrecord == "random_pics":
                startrecord = startrecord_unique[count]
            else:
                startrecord = count * 500
            count += 1
            r = requests.get(req_url + str(startrecord), headers=headers)
            response_data = r.json()
            # Give sometimes the following error "TypeError: string indices must be integers"
            try:
                for record in response_data['result']['records']['record']:
                    # (This comment is from Abbe98) sometimes there are empty records and those has no fields :-(
                    if not len(record) == 2:
                        continue
                    item_to_yield = {}
                    # (This comment is from Abbe98) some fields can appear multiply times
                    # (This comment is from Abbe98) therefor we need to merge those to lists if needed
                    for field in record['field']:
                        # (This comment is from Abbe98) if the field is already a list
                        if isinstance(item_to_yield.get(field['name'], False), list):
                            item_to_yield[field['name']].append(field['content'])
                        # (This comment is from Abbe98) if it's not yet a list but we found the same field name/key again
                        elif item_to_yield.get(field['name'], False):
                            item_to_yield[field['name']] = list([item_to_yield[field['name']], field['content']])
                        # (This comment is from Abbe98) default to just a regular value
                        else:
                            item_to_yield[field['name']] = field['content']
                    yield item_to_yield
            except TypeError:
                print("Error: Incorrect type, download what I can from current query\n")
# reponame: roadnarrows-robotics/rnr-sdk
################################################################################
#
# GuiDlgKheCalIrLed.py
#
""" Graphical User Interface vKhepera IR LED Calibration Dialog Module
Graphical User Interface (GUI) Tkinter vKhepera calibration dialog
module for the built-in Khepera proximity/ambient IR LED sensors.
Author: <NAME>
Email: <EMAIL>
URL: http://www.roadnarrowsrobotics.com
Date: 2006.01.27
Copyright (C) 2006. RoadNarrows LLC.
"""
#
# All Rights Reserved
#
# Permission is hereby granted, without written agreement and without
# license or royalty fees, to use, copy, modify, and distribute this
# software and its documentation for any purpose, provided that
# (1) The above copyright notice and the following two paragraphs
# appear in all copies of the source code and (2) redistributions
# including binaries reproduces these notices in the supporting
# documentation. Substantial modifications to this software may be
# copyrighted by their authors and need not follow the licensing terms
# described here, provided that the new terms are clearly indicated in
# all files where they apply.
#
# IN NO EVENT SHALL THE AUTHOR, ROADNARROWS LLC, OR ANY MEMBERS/EMPLOYEES
# OF ROADNARROW LLC OR DISTRIBUTORS OF THIS SOFTWARE BE LIABLE TO ANY
# PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL
# DAMAGES ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION,
# EVEN IF THE AUTHORS OR ANY OF THE ABOVE PARTIES HAVE BEEN ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
#
# THE AUTHOR AND ROADNARROWS LLC SPECIFICALLY DISCLAIM ANY WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN
# "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE NO OBLIGATION TO
# PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
################################################################################
import tkinter as tk
import tkinter.simpledialog
import Fusion.Gui.GuiTypes as gt
import Fusion.Gui.GuiUtils as gut
import Fusion.Gui.GuiToolTip as GuiToolTip
import Fusion.Gui.GuiXYGraph as GuiXYGraph
#-------------------------------------------------------------------------------
# Global Data
#-------------------------------------------------------------------------------
def GetSettingNames(sensorName):
    """ Return list of per-sensor dictionary keys of selected settings
        and results.

        The sensorName argument is accepted for interface compatibility;
        the key set is the same for every sensor.
    """
    keys = ('enabled', 'k_brightness', 'noise_floor')
    return list(keys)
#-------------------------------------------------------------------------------
# CLASS: GuiDlgKheCalIrLed
#-------------------------------------------------------------------------------
class GuiDlgKheCalIrLed(tkinter.simpledialog.Dialog):
    """ vKhepera IR LED Sensors Calibration Dialog Class

        The result on dialog exit:
          On ok success, result dictionary:
            {<sensorId>:
               {'enabled':bool, 'k_brightness':float, 'noise_floor':int},
             <sensorId>: ...
             ...
            }
          On cancel, returns None
    """
    #--
    def __init__(self, guiParent, sensorMimeType, sensorName, cbCalData,
                 factSettings, lastSettings):
        """ Initialize the Debug Dialog.

            Parameters:
              guiParent       - this dialog's parent GUI object
              sensorMimeType  - MIME type of sensor
              sensorName      - sensor name string
              cbCalData       - callback to get calibration data
              factSettings    - factory limits and defaults
              lastSettings    - settings of last configurations.
                                See result on exit.
        """
        self.result = None
        # Cache arguments first: the Dialog base class calls body() from
        # within its own __init__, so these must already be set.
        self.mSensorMimeType = sensorMimeType
        self.mSensorName = sensorName
        self.mCbCalData = cbCalData
        self.mFactory = factSettings
        self.mSettings = lastSettings
        # Widget dimensions not yet calculated; see CalcDim()/resize().
        self.mHasDim = False
        tkinter.simpledialog.Dialog.__init__(self, guiParent)
    #--
    def body(self, master):
        """Create the dialog body."""
        # dialog title
        self.wm_title('vKhepera %s IR LED Sensors Calibration' % self.mSensorName)
        row = 0
        column = 0
        # calibration graph canvas
        self._bodyCalCanvas(master, row, column)
        # control panel frame
        row +=1
        self._bodyCtlPanel(master, row, column)
        # graph current calibration data
        self._graph(self.mSliderK.get(), self.mSliderNF.get())
        # bind resizer
        self.bind('<Configure>', self.resize)
    #--
    def _bodyCalCanvas(self, master, row, column):
        """Create calibration x-y graph canvas. """
        # calibration graph canvas
        self.mCalCanvas = tk.Canvas(master, relief=tk.SUNKEN, borderwidth=0,
                                    width=500, height=400)
        self.mCalCanvas.grid(row=row, column=column, padx=3, ipadx=3, ipady=3,
                             sticky=tk.N+tk.W+tk.E)
        # factory data
        factdata = self.mCbCalData()
        self.mUnitsY = factdata['unitsY']
        self.mUnitsX = factdata['unitsX']
        self.mFactCalData = factdata['calData']
        # graph title
        self.mGraphTitle = self.mSensorName + ' IR LED Calibration (' + \
                           self.mSensorMimeType + ')'
        # tailor y-axis step size
        if self.mFactory['MaxDist'] >= 300.0:
            self.mYStep = 100
        else:
            self.mYStep = 10
        # start with empty graph
        self.mXYGraph = GuiXYGraph.GuiXYGraph(self.mCalCanvas)
    #--
    def _bodyCtlPanel(self, master, row, column):
        """Create Control Panel subdialog frame.

        Builds two sub-frames: the per-sensor 'Calibration Set Inclusion'
        grid (one row per sensor id with enabled/k_brightness/noise_floor
        readouts) and the 'Calibration Control' panel with the sliders,
        enable checkbox, factory and update buttons.
        """
        # control panel frame
        cpframe = tk.Frame(master, relief=tk.FLAT, borderwidth=1)
        cpframe.grid(row=row, column=column, padx=3, ipadx=3, ipady=3,
                     sticky=tk.N+tk.W+tk.E)
        self.mCtlPanelFrame = cpframe
        row = 0
        column = 0
        # sensor sets subframe
        ssframe = tk.Frame(cpframe, relief=tk.RAISED, borderwidth=1)
        ssframe.grid(row=row, column=column, padx=1, pady=1, ipadx=1, ipady=1,
                     sticky=tk.N+tk.W+tk.S)
        row = 0
        column = 0
        # sensor sets subpanel title
        w = tk.Label(ssframe, text='Calibration Set Inclusion', fg=gt.ColorBlue)
        w.grid(row=row, column=column, columnspan=11, sticky=tk.N)
        row += 1
        ids = list(self.mSettings.keys())
        ids.sort()
        # One grid row per sensor id: inclusion checkbox plus readouts for
        # 'enabled', 'k_brightness' and 'noise_floor'.
        for id in ids:
            self.mSettings[id]['incvar'] = tk.IntVar()
            w = tk.Checkbutton(ssframe, variable=self.mSettings[id]['incvar'],
                               command=self.CbSensorInc, anchor=tk.W)
            w.grid(row=row, column=column, sticky=tk.W)
            w.select()
            column += 1
            w = tk.Label(ssframe, text=id)
            w.grid(row=row, column=column, sticky=tk.W)
            # 'widgets' collects this row's labels so CbSensorInc can
            # enable/disable them together.
            self.mSettings[id]['widgets'] = [w]
            column += 1
            w = tk.Label(ssframe, text=' ', width=2)
            w.grid(row=row, column=column, sticky=tk.E)
            column += 1
            w = tk.Label(ssframe, text='enabled:', anchor=tk.E)
            w.grid(row=row, column=column, sticky=tk.E)
            self.mSettings[id]['widgets'] += [w]
            column += 1
            self.mSettings[id]['envar'] = tk.StringVar()
            w = tk.Label(ssframe, width=5, textvariable=self.mSettings[id]['envar'],
                         anchor=tk.W, fg=gt.ColorGreen1, relief=tk.SUNKEN)
            w.grid(row=row, column=column, sticky=tk.W)
            self.mSettings[id]['widgets'] += [w]
            column += 1
            w = tk.Label(ssframe, text=' ', width=1)
            w.grid(row=row, column=column, sticky=tk.E)
            column += 1
            w = tk.Label(ssframe, text='k_brightness:', anchor=tk.E)
            w.grid(row=row, column=column, sticky=tk.E)
            self.mSettings[id]['widgets'] += [w]
            column += 1
            self.mSettings[id]['kbvar'] = tk.DoubleVar()
            w = tk.Label(ssframe, width=5, textvariable=self.mSettings[id]['kbvar'],
                         anchor=tk.W, fg=gt.ColorGreen1, relief=tk.SUNKEN)
            w.grid(row=row, column=column, sticky=tk.W)
            self.mSettings[id]['widgets'] += [w]
            column += 1
            w = tk.Label(ssframe, text=' ', width=1)
            w.grid(row=row, column=column, sticky=tk.E)
            column += 1
            w = tk.Label(ssframe, text='noise_floor:', anchor=tk.E)
            w.grid(row=row, column=column, sticky=tk.E)
            self.mSettings[id]['widgets'] += [w]
            column += 1
            self.mSettings[id]['nfvar'] = tk.IntVar()
            w = tk.Label(ssframe, width=5, textvariable=self.mSettings[id]['nfvar'],
                         anchor=tk.W, fg=gt.ColorGreen1, relief=tk.SUNKEN)
            w.grid(row=row, column=column, sticky=tk.W)
            self.mSettings[id]['widgets'] += [w]
            row += 1
            column = 0
        self.SetSet()
        row = 1
        column = 0
        # sensor control subframe
        scframe = tk.Frame(cpframe, relief=tk.RAISED, borderwidth=1)
        scframe.grid(row=row, column=column, padx=1, pady=1, ipadx=1, ipady=1,
                     sticky=tk.N+tk.W+tk.E)
        row = 0
        column = 0
        # sensor control subpanel title
        w = tk.Label(scframe, text='Calibration Control', fg=gt.ColorBlue)
        w.grid(row=row, column=column, columnspan=5, sticky=tk.N)
        row += 1
        # k-brightness slider
        w = tk.Scale(scframe, width=10, length=250,
                     from_=self.mFactory['KBrightnessMin'],
                     to=self.mFactory['KBrightnessMax'],
                     resolution=0.01, orient=tk.HORIZONTAL, command=self.CbK,
                     label='k_brightness Ratio')
        w.grid(row=row, column=column, sticky=tk.SW, pady=1)
        GuiToolTip.GuiToolTip(w,
            text="Set %s k_brightness ratio.\nWhite paper = 1.0" % self.mSensorName)
        self.mSliderK = w
        self.mSliderK.set(self.mFactory['KBrightnessDft'])
        row += 1
        # Noise Floor slider
        w = tk.Scale(scframe, width=10, length=250,
                     from_=self.mFactory['NoiseFloorMin'],
                     to=self.mFactory['NoiseFloorMax'],
                     resolution=1, orient=tk.HORIZONTAL, command=self.CbNF,
                     label='Sensor ADC Noise Floor')
        w.grid(row=row, column=column, sticky=tk.SW, pady=1)
        GuiToolTip.GuiToolTip(w,
            text="Set sensor noise floor. Any ADC values < noise floor are "
            "considered noise which will map to the sensor's 'infinite' distance.")
        self.mSliderNF = w
        self.mSliderNF.set(self.mFactory['NoiseFloorDft'])
        row = 1
        column += 1
        # column spacer
        tk.Label(scframe, text=' ', width=2).grid(row=row, column=column)
        column += 1
        # enable/disable
        self.mVarEnable = tk.IntVar()
        w = tk.Checkbutton(scframe, variable=self.mVarEnable,
            text='Enable Sensors', command=self.CbSensorEnable, anchor=tk.W)
        w.grid(row=row, column=column, sticky=tk.W+tk.S)
        GuiToolTip.GuiToolTip(w,
            'Enable/disable selected sensors from robot sensing operations.')
        w.select()
        row += 1
        # factory defaults button
        w = tk.Button(scframe, text='Factory', fg=gt.ColorBlack, width=8,
                      command=self.CbFact)
        w.grid(row=row, column=column, sticky=tk.W)
        GuiToolTip.GuiToolTip(w, text="Set calibration to factory defaults.")
        row = 1
        column += 1
        # column spacer
        tk.Label(scframe, text=' ', width=2).grid(row=row, column=column)
        column += 1
        # apply button
        w = tk.Button(scframe, text='Update\nSelected\nSensors',
            fg=gt.ColorBlack, width=8, height=4, command=self.CbUpdate)
        w.grid(row=row, column=column, rowspan=2)
        GuiToolTip.GuiToolTip(w,
            text="Update calibration to selected sensors data.")
    #--
    def SetSet(self):
        """ Initial sensor set values.

            Synchronizes each sensor row's widget states and readout
            variables from the last-known settings.
        """
        for sensor in self.mSettings.values():
            if sensor['incvar'].get() == 0:
                state = tk.DISABLED
            else:
                state = tk.NORMAL
            for label in sensor['widgets']:
                label['state'] = state
            if sensor['enabled']:
                sensor['envar'].set('True')
            else:
                sensor['envar'].set('False')
            sensor['kbvar'].set(sensor['k_brightness'])
            sensor['nfvar'].set(sensor['noise_floor'])
    #--
    def CalcDim(self):
        """ Caculate widget dimensions needed for resizing effort.

            Note: Cannot do the calculations within the body() or incorrect
                  dialog sizes will result. Must wait until parent dialog
                  finishes drawing.
        """
        # force idletask to determine size
        self.update_idletasks()
        # current window dimensions
        self.mDlgGeo = gut.geometry(self)
        # current control panel dimensions
        cpgeo = gut.geometry(self.mCtlPanelFrame)
        # calibration canvas dimensions
        ccw = int(self.mCalCanvas['width'])
        cch = int(self.mCalCanvas['height'])
        # control panel frame height is fixed (includes 'Ok' and 'Cancel'
        self.mDlgFixH = self.mDlgGeo[gut.H] - cch
        # window border width and height
        self.mWinBorder = self.mDlgGeo[gut.W] - ccw
        # set window's minimum size
        self.wm_minsize(
            width=500+self.mWinBorder,
            height=200+self.mWinBorder+self.mDlgFixH
        )
    #--
    def resize(self, event):
        """ Resize callback event. """
        # first time through: calculate important window and widget dimensions
        # used for resizing
        if not self.mHasDim:
            self.CalcDim()
            self.mHasDim = True
            return
        # real resize event
        geo = gut.geometry(self)
        if geo[gut.W] != self.mDlgGeo[gut.W] or geo[gut.H] != self.mDlgGeo[gut.H]:
            # Grow/shrink the graph to fill the new window, keeping the
            # fixed-height control panel intact.
            width = geo[gut.W] - self.mWinBorder
            height = geo[gut.H] - self.mWinBorder - self.mDlgFixH
            self.mXYGraph.configure(width=width, height=height)
            self.mDlgGeo = geo
    #--
    def ok(self, event=None):
        """Dialog OK button callback."""
        if not self.validate():
            return
        self.withdraw()
        self.update_idletasks()
        self.apply()
        self.cancel() # exit
        return
    #--
    def validate(self):
        """Validate dialog settings.

        Builds self.result from the per-sensor readout variables; always
        succeeds (returns True).
        """
        self.result = {}
        for id,sensor in self.mSettings.items():
            self.result[id] = {}
            if sensor['envar'].get() == 'True':
                self.result[id]['enabled'] = True
            else:
                self.result[id]['enabled'] = False
            self.result[id]['k_brightness'] = sensor['kbvar'].get()
            self.result[id]['noise_floor'] = sensor['nfvar'].get()
        return True
    #--
    def apply(self):
        """Apply dialog data and settings.

        Nothing to do: the caller reads self.result after the dialog exits.
        """
        pass
    #--
    def CbK(self, val):
        """ K-Brightness Slider callback. """
        self._graph(self.mSliderK.get(), self.mSliderNF.get())
    #--
    def CbNF(self, val):
        """ Noise Floor Slider callback. """
        self._graph(self.mSliderK.get(), self.mSliderNF.get())
    #--
    def CbSensorInc(self):
        """ Sensor Set Inclusion Checkbox callback.

            Enables/disables each sensor row's labels to mirror its
            inclusion checkbox.
        """
        for sensor in self.mSettings.values():
            if sensor['incvar'].get() == 0:
                state = tk.DISABLED
            else:
                state = tk.NORMAL
            for label in sensor['widgets']:
                label['state'] = state
    #--
    def CbSensorEnable(self):
        """ Sensors Control Enable Checkbox callback.

            No immediate action; the value is read in CbUpdate().
        """
        pass
    #--
    def CbFact(self):
        """ Factory Defaults Button callback. """
        self.mSliderK.set(self.mFactory['KBrightnessDft'])
        self.mSliderNF.set(self.mFactory['NoiseFloorDft'])
        self.mVarEnable.set(1)
        self._graph(self.mFactory['KBrightnessDft'],
                    self.mFactory['NoiseFloorDft'])
    #--
    def CbUpdate(self):
        """ Update Parameters to Inclusion Set Button callback.

            Copies the current slider/checkbox values into every sensor
            row whose inclusion checkbox is set.
        """
        enable = self.mVarEnable.get()
        k = self.mSliderK.get()
        nf = self.mSliderNF.get()
        for sensor in self.mSettings.values():
            if sensor['incvar'].get():
                if enable:
                    sensor['envar'].set('True')
                else:
                    sensor['envar'].set('False')
                sensor['kbvar'].set(k)
                sensor['nfvar'].set(nf)
    #--
    def _graph(self, k, nf):
        """ Graph the calibration data.

            Parameters:
              k  - k_brightness ratio
              nf - noise floor; raw readings below it are dropped
        """
        xData = []
        yData = []
        for x, y in self.mFactCalData:
            if x >= nf:
                # Scale the factory distance by the brightness ratio.
                d = y * k
                xData.append(x)
                yData.append(d)
        self.mXYGraph.graph(title=self.mGraphTitle,
                            xdata=xData, ydata=yData,
                            xstep=100, ystep=self.mYStep,
                            xlabel='Raw ~ %s' % self.mUnitsX,
                            ylabel='Distance ~ %s' % self.mUnitsY)
#-------------------------------------------------------------------------------
# Test Code
#-------------------------------------------------------------------------------
if __name__ == '__main__':
    import Fusion.Khepera.Cmd.KheCmdBase as KheCmdBase
    def main():
        """ GuiDlgKheCalIrLed Test Main.

            Builds factory settings from the KheCmdBase constants, opens
            the calibration dialog against a live command object, and
            prints the dialog result.
        """
        factSettings = {
            'KBrightnessMin': KheCmdBase.KheIrProxMinKBrightness,
            'KBrightnessMax': KheCmdBase.KheIrProxMaxKBrightness,
            'KBrightnessDft': KheCmdBase.KheIrProxDftKBrightness,
            'NoiseFloorMin':  KheCmdBase.KheIrProxMinNoiseFloor,
            'NoiseFloorMax':  KheCmdBase.KheIrProxMaxNoiseFloor,
            'NoiseFloorDft':  KheCmdBase.KheIrProxDftNoiseFloor,
            'MaxDist':        KheCmdBase.KheIrProxMaxDist * 2 # for graphing
        }
        cmd = KheCmdBase.KheCmdBase()
        root = tk.Tk()
        dlg = GuiDlgKheCalIrLed(root,
                        'sensor/reflective-irled',
                        'Proximity',
                        cmd.ProximitySensorGetDftCalibration,
                        factSettings,
                        cmd.ProximitySensorsGetCalParams())
        if dlg.result:
            print('ok:', dlg.result)
        else:
            print('cancel')
    # run test
    main()
|
<reponame>bpatel28/practice_python
from concepts.linked_list.linked_list_node import LinkedListNode
class LinkedList:
    """Doubly linked list of LinkedListNode elements.

    Tracks both head and tail so pushes/pops at either end are O(1).
    Node comparison (``remove``, ``__contains__``) relies on
    LinkedListNode.__eq__, whose semantics are defined elsewhere.
    NOTE(review): iteration state is stored on the list itself, so
    nested/concurrent iteration of one list is not supported.
    """

    def __init__(self, head=None, tail=None):
        """Create a list from optional head/tail nodes.

        Raises TypeError if either endpoint is not a LinkedListNode.
        A head given without a tail serves as both ends.
        """
        if head is not None and type(head) is not LinkedListNode:
            raise TypeError("head must be type of LinkedListNode.")
        if tail is not None and type(tail) is not LinkedListNode:
            raise TypeError("tail must be type of LinkedListNode.")
        if tail is None:
            tail = head
        self._head = head
        self._tail = tail

    @property
    def head(self):
        """First node of the list (or None when empty)."""
        return self._head

    def set_head(self, val):
        """Set the head node; also becomes the tail if the list was empty.

        NOTE(review): does not re-link prev/next pointers — presumably the
        caller is responsible for node wiring. Confirm intended usage.
        """
        self._head = val
        if self._tail is None:
            self._tail = val

    @property
    def tail(self):
        """Last node of the list (or None when empty)."""
        return self._tail

    def set_tail(self, val):
        """Set the tail node (no re-linking performed)."""
        self._tail = val

    def append(self, item):
        """Append an item at the tail; bare values are wrapped in a node."""
        if type(item) is not LinkedListNode:
            item = LinkedListNode(item)
        if self._tail is None:
            # Empty list: the new node is both head and tail.
            self._head = item
            self._tail = item
        else:
            item.set_prev_node(self._tail)
            self._tail.set_next_node(item)
            self._tail = item

    def append_left(self, item):
        """Prepend an item at the head; bare values are wrapped in a node."""
        if type(item) is not LinkedListNode:
            item = LinkedListNode(item)
        if self._head is None:
            self._head = item
            self._tail = item
        else:
            item.set_next_node(self._head)
            self._head.set_prev_node(item)
            self._head = item

    def remove(self, item):
        """Remove the first node equal to item (per LinkedListNode.__eq__).

        Silently does nothing for None or when no match is found.
        """
        if item is None:
            return
        if type(item) is not LinkedListNode:
            item = LinkedListNode(item)
        curr = self._head
        while curr is not None:
            if curr == item:
                # Splice the node out, patching whichever neighbours exist
                # and updating head/tail when removing at an end.
                prev_node = curr.prev_node
                next_node = curr.next_node
                if prev_node is not None:
                    prev_node.set_next_node(next_node)
                else:
                    self._head = next_node
                if next_node is not None:
                    next_node.set_prev_node(prev_node)
                else:
                    self._tail = prev_node
                # del only unbinds the local name; the node is garbage
                # collected once no other references remain.
                del curr
                break
            curr = curr.next_node

    def remove_first(self):
        """Pop and return the head's data; returns None on an empty list."""
        if self._head is None:
            return
        data = self._head.data
        if self._head is self._tail:
            # Single element: drop the attribute, then reset both ends.
            del self._head
            self._head = None
            self._tail = None
            return data
        self._head = self._head.next_node
        self._head.set_prev_node(None)
        return data

    def pop(self):
        """Pop and return the tail's data; returns None on an empty list."""
        if self._tail is None:
            return
        last_data = self._tail.data
        if self._head is self._tail:
            del self._head
            self._head = None
            self._tail = None
            return last_data
        self._tail = self._tail.prev_node
        self._tail.set_next_node(None)
        return last_data

    def __iter__(self):
        # Iterator cursor lives on the list itself (not reentrant).
        self._curr = self._head
        return self

    def __next__(self):
        # Yields LinkedListNode objects, not their data.
        val = self._curr
        if val is None:
            raise StopIteration()
        self._curr = self._curr.next_node
        return val

    def __len__(self):
        """Return the number of nodes (O(n) walk)."""
        length = 0
        curr = self._head
        while curr is not None:
            length += 1
            curr = curr.next_node
        return length

    def __contains__(self, item):
        """Membership test via LinkedListNode equality (O(n))."""
        curr = self._head
        if type(item) is not LinkedListNode:
            item = LinkedListNode(item)
        while curr is not None:
            if curr == item:
                return True
            curr = curr.next_node
        return False

    def __str__(self):
        return f'[{", ".join(map(lambda item: str(item),self))}]'

    def __getitem__(self, index):
        """Index access; negative indices walk backwards from the tail.

        Raises TypeError for non-int indices and IndexError when out of range.
        """
        if not isinstance(index, int):
            raise TypeError("Index must be integer.")
        if index >= 0:
            curr = self._head
            counter = 0
        else:
            curr = self._tail
            counter = -1
        while curr is not None:
            if counter == index:
                return curr.data
            if index >= 0:
                counter += 1
                curr = curr.next_node
            else:
                counter -= 1
                curr = curr.prev_node
        raise IndexError("Item not found at given index.")
|
<reponame>fanfank/aap
# -*- coding: utf8 -*-
"""
@author xuruiqi
@date 20160608
@desc 初始化form记录
"""
from common import *
# Seed records for the built-in admin forms. Each entry describes one form:
# its display name, the API it POSTs to, its URL mark, the POST content type,
# and (as a JSON string) the ordered list of form-input component keys.
form_list = [
    # "Add page" form
    {
        "name": "添加页面",
        "post_api": "/api/page/add_page",
        "urlmark": "add_page",
        "content_type": "application/x-www-form-urlencoded;charset=utf-8",
        "components": json.dumps({
            "form_input": ["n-n", "p-name", "p-title", "fi-urlmark", "p-comp", "p-ct", "n-opu", "n-ext"],
        }),
        "op_user": "xuruiqi",
    },
    # "Add header" form
    {
        "name": "添加Header",
        "post_api": "/api/header/add_header",
        "urlmark": "add_header",
        "content_type": "application/x-www-form-urlencoded;charset=utf-8",
        "components": json.dumps({
            "form_input": ["n-n", "h-name", "fi-uniqkey", "h-comp", "n-opu", "n-ext"],
        }),
        "op_user": "xuruiqi",
    },
    # "Add lefter" (left navigation) form
    {
        "name": "添加Lefter",
        "post_api": "/api/lefter/add_lefter",
        "urlmark": "add_lefter",
        "content_type": "application/x-www-form-urlencoded;charset=utf-8",
        "components": json.dumps({
            "form_input": ["n-n", "l-name", "fi-uniqkey", "l-comp", "n-opu", "n-ext"],
        }),
        "op_user": "xuruiqi",
    },
    # "Add item" form
    {
        "name": "添加Item",
        "post_api": "/api/item/add_item",
        "urlmark": "add_item",
        "content_type": "application/x-www-form-urlencoded;charset=utf-8",
        "components": json.dumps({
            "form_input": ["n-n", "i-name", "fi-uniqkey", "fi-disp", "i-itype", "i-jtype", "n-opu", "n-ext"],
        }),
        "op_user": "xuruiqi",
    },
    # "Add form" form
    {
        "name": "添加表单",
        "post_api": "/api/form/add_form",
        "urlmark": "add_form",
        "content_type": "application/x-www-form-urlencoded;charset=utf-8",
        "components": json.dumps({
            "form_input": ["n-n", "f-name", "f-papi", "fi-urlmark", "f-submittype", "f-comp", "n-opu", "n-ext"],
        }),
        "op_user": "xuruiqi",
    },
    # "Add form input" form
    {
        "name": "添加表单输入",
        "post_api": "/api/form_input/add_form_input",
        "urlmark": "add_form_input",
        "content_type": "application/x-www-form-urlencoded;charset=utf-8",
        "components": json.dumps({
            "form_input": ["n-n", "fi-name", "fi-uniqkey", "fi-disp", "fi-help", "fi-default", "fi-pkey", "fi-itype", "fi-rhattrs", "n-opu", "n-ext"],
        }),
        "op_user": "xuruiqi",
    },
]
if __name__ == "__main__":
for form in form_list:
req = {
"url": "http://%s/api/form/add_form" % HOST_N_PORT,
"method": "POST",
"data": form,
"response_format": "json",
}
resp = http_request(req)
print "%s:%s:%r" % (resp["errno"], resp["errmsg"], resp["data"])
|
from hitchstory import HitchStoryException, StoryCollection
from hitchrun import expected
from commandlib import CommandError
from strictyaml import Str, Map, Bool, load
from pathquery import pathquery
from hitchrun import DIR
import dirtemplate
import hitchpylibrarytoolkit
from engine import Engine
"""
----------------------------
Non-runnable utility methods
----------------------------
"""
def _storybook(settings):
    """Build the story collection from every *.story file under the key dir."""
    story_files = pathquery(DIR.key).ext("story")
    return StoryCollection(story_files, Engine(DIR, settings))
def _current_version():
    """Read the project version string from the VERSION file."""
    raw_bytes = DIR.project.joinpath("VERSION").bytes()
    return raw_bytes.decode("utf8").rstrip()
def _personal_settings():
    """Load personalsettings.yml, writing a default file first if absent."""
    settings_path = DIR.key.joinpath("personalsettings.yml")
    if not settings_path.exists():
        default_text = (
            "engine:\n"
            "  rewrite: no\n"
            "  cprofile: no\n"
            "params:\n"
            "  python version: 3.7.0\n"
        )
        settings_path.write_text(default_text)
    schema = Map(
        {
            "engine": Map({"rewrite": Bool(), "cprofile": Bool()}),
            "params": Map({"python version": Str()}),
        }
    )
    return load(settings_path.bytes().decode("utf8"), schema)
"""
-----------------
RUNNABLE COMMANDS
-----------------
"""
@expected(HitchStoryException)
def bdd(*keywords):
    """
    Run story matching keywords.
    """
    settings = _personal_settings().data
    python_version = settings["params"]["python version"]
    stories = _storybook(settings["engine"]).with_params(
        **{"python version": python_version}
    )
    stories.only_uninherited().shortcut(*keywords).play()
@expected(HitchStoryException)
def rbdd(*keywords):
    """
    Run story matching keywords and rewrite story if code changed.
    """
    settings = _personal_settings().data
    # Force rewrite mode on regardless of the personal settings file.
    settings["engine"]["rewrite"] = True
    python_version = settings["params"]["python version"]
    stories = _storybook(settings["engine"]).with_params(
        **{"python version": python_version}
    )
    stories.only_uninherited().shortcut(*keywords).play()
@expected(HitchStoryException)
def regressfile(filename):
    """
    Run all stories in filename 'filename' in python 2 and 3.
    """
    # Python 2 pass, skipping stories flagged as failing on python 2.
    py2_stories = _storybook({"rewrite": False}).in_filename(filename).with_params(
        **{"python version": "2.7.14"}
    )
    py2_stories.filter(
        lambda story: not story.info.get("fails_on_python_2")
    ).ordered_by_name().play()
    # Python 3 pass over every story in the file.
    py3_stories = _storybook({"rewrite": False}).with_params(
        **{"python version": "3.7.0"}
    ).in_filename(filename)
    py3_stories.ordered_by_name().play()
@expected(HitchStoryException)
def regression():
    """
    Run regression testing - lint and then run all tests.
    """
    lint()
    doctests()
    storybook = _storybook({}).only_uninherited()
    # Python 2 pass excludes stories known to fail on python 2.
    py2_run = storybook.with_params(**{"python version": "2.7.14"})
    py2_run.filter(
        lambda story: not story.info.get("fails_on_python_2")
    ).ordered_by_name().play()
    storybook.with_params(**{"python version": "3.7.0"}).ordered_by_name().play()
def reformat():
    """
    Reformat using black and then relint.
    """
    project_name = "strictyamljsonschema"
    hitchpylibrarytoolkit.reformat(DIR.project, project_name)
def lint():
    """
    Lint project code and hitch code.
    """
    project_name = "strictyamljsonschema"
    hitchpylibrarytoolkit.lint(DIR.project, project_name)
def deploy(version):
    """
    Deploy to pypi as specified version.
    """
    project_name = "strictyamljsonschema"
    hitchpylibrarytoolkit.deploy(DIR.project, project_name, version)
@expected(dirtemplate.exceptions.DirTemplateException)
def docgen():
    """
    Build documentation.
    """
    stories = _storybook({})
    hitchpylibrarytoolkit.docgen(stories, DIR.project, DIR.key, DIR.gen)
@expected(dirtemplate.exceptions.DirTemplateException)
def readmegen():
    """
    Build the README from the story collection.
    """
    stories = _storybook({})
    hitchpylibrarytoolkit.readmegen(
        stories, DIR.project, DIR.key, DIR.gen, "strictyamljsonschema"
    )
@expected(CommandError)
def doctests():
    """
    Run doctests in utils.py in python 2 and 3.
    """
    utils_path = DIR.project.joinpath("strictyamljsonschema", "utils.py")
    run_dir = DIR.project.joinpath("strictyamljsonschema")
    for python_version in ("2.7.14", "3.7.0"):
        pylibrary = hitchpylibrarytoolkit.project_build(
            "strictyamljsonschema", DIR, python_version
        )
        pylibrary.bin.python("-m", "doctest", "-v", utils_path).in_dir(run_dir).run()
@expected(CommandError)
def rerun(version="3.7.0"):
    """
    Rerun last example code block with specified version of python.
    """
    from commandlib import Command

    python_bin = DIR.gen.joinpath("py{0}".format(version), "bin", "python")
    state_dir = DIR.gen.joinpath("state")
    Command(python_bin)(state_dir.joinpath("examplepythoncode.py")).in_dir(state_dir).run()
|
<reponame>sankhesh/vtk-examples<filename>src/Python/Meshes/ClipDataSetWithPolyData1.py
import numpy as np
import vtk
def _make_axis_coords(num_points):
    """Return a vtkFloatArray of num_points values evenly spaced in [-1, 1]."""
    coords = vtk.vtkFloatArray()
    for value in np.linspace(-1.0, 1.0, num_points):
        coords.InsertNextValue(value)
    return coords


def main():
    """Clip a rectilinear grid with a cone's implicit distance function and render it."""
    colors = vtk.vtkNamedColors()

    # Create polydata to slice the grid with. In this case, use a cone. This could
    # be any polydata including a stl file.
    cone = vtk.vtkConeSource()
    cone.SetResolution(20)
    cone.Update()

    # implicit function that will be used to slice the mesh
    implicitPolyDataDistance = vtk.vtkImplicitPolyDataDistance()
    implicitPolyDataDistance.SetInput(cone.GetOutput())

    # Create a grid. 'dim' replaces the previous hard-coded 15 and the
    # reliance on loop variables (x, y, z) leaking out of enumerate() loops.
    dim = 15
    xCoords = _make_axis_coords(dim)
    yCoords = _make_axis_coords(dim)
    zCoords = _make_axis_coords(dim)

    # The coordinates are assigned to the rectilinear grid. Make sure that
    # the number of values in each of the XCoordinates, YCoordinates,
    # and ZCoordinates is equal to what is defined in SetDimensions().
    rgrid = vtk.vtkRectilinearGrid()
    rgrid.SetDimensions(dim, dim, dim)
    rgrid.SetXCoordinates(xCoords)
    rgrid.SetYCoordinates(yCoords)
    rgrid.SetZCoordinates(zCoords)

    # Create an array to hold distance information
    signedDistances = vtk.vtkFloatArray()
    signedDistances.SetNumberOfComponents(1)
    signedDistances.SetName('SignedDistances')

    # Evaluate the signed distance function at all of the grid points
    for pointId in range(rgrid.GetNumberOfPoints()):
        p = rgrid.GetPoint(pointId)
        signedDistance = implicitPolyDataDistance.EvaluateFunction(p)
        signedDistances.InsertNextValue(signedDistance)

    # add the SignedDistances to the grid
    rgrid.GetPointData().SetScalars(signedDistances)

    # use vtkClipDataSet to slice the grid with the polydata
    clipper = vtk.vtkClipDataSet()
    clipper.SetInputData(rgrid)
    clipper.InsideOutOn()
    clipper.SetValue(0.0)
    clipper.Update()

    # --- mappers, actors, render, etc. ---
    # mapper and actor to view the cone
    coneMapper = vtk.vtkPolyDataMapper()
    coneMapper.SetInputConnection(cone.GetOutputPort())
    coneActor = vtk.vtkActor()
    coneActor.SetMapper(coneMapper)

    # geometry filter to view the background grid
    # NOTE(review): extent values preserve the original behavior exactly
    # (max index 'dim' with 'dim' points looks one past the end — confirm
    # against the upstream example before tightening to dim - 1).
    geometryFilter = vtk.vtkRectilinearGridGeometryFilter()
    geometryFilter.SetInputData(rgrid)
    geometryFilter.SetExtent(0, dim, 0, dim, dim // 2, dim // 2)
    geometryFilter.Update()
    rgridMapper = vtk.vtkPolyDataMapper()
    rgridMapper.SetInputConnection(geometryFilter.GetOutputPort())
    wireActor = vtk.vtkActor()
    wireActor.SetMapper(rgridMapper)
    wireActor.GetProperty().SetRepresentationToWireframe()
    wireActor.GetProperty().SetColor(colors.GetColor3d('Black'))

    # mapper and actor to view the clipped mesh
    clipperMapper = vtk.vtkDataSetMapper()
    clipperMapper.SetInputConnection(clipper.GetOutputPort())
    clipperActor = vtk.vtkActor()
    clipperActor.SetMapper(clipperMapper)
    clipperActor.GetProperty().SetRepresentationToWireframe()
    clipperActor.GetProperty().SetColor(colors.GetColor3d('Black'))

    # A renderer and render window
    renderer = vtk.vtkRenderer()
    renderer.SetBackground(colors.GetColor3d('Snow'))

    # add the actors
    # renderer.AddActor(coneActor)
    renderer.AddActor(wireActor)
    renderer.AddActor(clipperActor)
    renwin = vtk.vtkRenderWindow()
    renwin.AddRenderer(renderer)
    renwin.SetWindowName('ClipDataSetWithPolyData')

    # An interactor
    interactor = vtk.vtkRenderWindowInteractor()
    interactor.SetRenderWindow(renwin)

    # Start
    interactor.Initialize()
    renwin.Render()
    renderer.GetActiveCamera().SetPosition(0, -1, 0)
    renderer.GetActiveCamera().SetFocalPoint(0, 0, 0)
    renderer.GetActiveCamera().SetViewUp(0, 0, 1)
    renderer.GetActiveCamera().Azimuth(30)
    renderer.GetActiveCamera().Elevation(30)
    renderer.ResetCamera()
    renwin.Render()
    interactor.Start()
# Script entry point: build and show the clipped-grid scene.
if __name__ == '__main__':
    main()
|
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""googledatastore helper test suite."""
__author__ = '<EMAIL> (<NAME>)'
import collections
import copy
import datetime
import unittest
import googledatastore as datastore
from googledatastore.helper import *
class DatastoreHelperTest(unittest.TestCase):
    """Unit tests for the googledatastore helper functions.

    Uses assertEqual throughout: assertEquals is a deprecated alias
    (removed in Python 3.12).
    """

    def testSetKeyPath(self):
        """add_key_path builds ancestor paths from (kind, id-or-name) pairs."""
        key = datastore.Key()
        add_key_path(key, 'Foo', 1, 'Bar', 'bar')
        self.assertEqual(2, len(key.path_element))
        self.assertEqual('Foo', key.path_element[0].kind)
        self.assertEqual(1, key.path_element[0].id)
        self.assertEqual('Bar', key.path_element[1].kind)
        self.assertEqual('bar', key.path_element[1].name)

    def testIncompleteKey(self):
        """A kind with no id/name yields an incomplete path element."""
        key = datastore.Key()
        add_key_path(key, 'Foo')
        self.assertEqual(1, len(key.path_element))
        self.assertEqual('Foo', key.path_element[0].kind)
        self.assertEqual(0, key.path_element[0].id)
        self.assertEqual('', key.path_element[0].name)

    def testInvalidKey(self):
        """Non-int, non-str path ids are rejected."""
        key = datastore.Key()
        self.assertRaises(TypeError, add_key_path, key, 'Foo', 1.0)

    def testPropertyValues(self):
        """add_properties/get_value round-trip every supported value type."""
        blob_key = datastore.Value()
        blob_key.blob_key_value = 'blob-key'
        property_dict = collections.OrderedDict(
            a_string=u'a',
            a_blob='b',
            a_boolean=True,
            a_integer=1,
            a_double=1.0,
            a_timestamp_microseconds=datetime.datetime.now(),
            a_key=datastore.Key(),
            a_entity=datastore.Entity(),
            a_blob_key=blob_key,
            many_integer=[1, 2, 3])
        entity = datastore.Entity()
        add_properties(entity, property_dict)
        d = dict((prop.name, get_value(prop.value))
                 for prop in entity.property)
        self.assertDictEqual(d, property_dict)

    def testAddPropertyValuesBlindlyAdd(self):
        """add_properties appends; it does not overwrite same-named props."""
        entity = datastore.Entity()
        add_properties(entity, {'a': 1})
        add_properties(entity, {'a': 2})
        self.assertEqual(2, len(entity.property))

    def testEmptyValues(self):
        """A Value with no field set reads back as None."""
        v = datastore.Value()
        self.assertEqual(None, get_value(v))

    def testSetPropertyOverwrite(self):
        """set_property replaces both the value and the indexed flag."""
        property = datastore.Property()
        set_property(property, 'a', 1, indexed=False)
        set_property(property, 'a', 'a')
        self.assertEqual('a', get_value(property.value))
        self.assertEqual(True, property.value.indexed)

    def testIndexedPropagation_Literal(self):
        """indexed is only serialized when it differs from the default (True)."""
        value = datastore.Value()
        set_value(value, 'a')
        self.assertEqual(False, value.HasField('indexed'))
        set_value(value, 'a', False)
        self.assertEqual(True, value.HasField('indexed'))
        self.assertEqual(False, value.indexed)
        set_value(value, 'a', True)
        self.assertEqual(False, value.HasField('indexed'))
        self.assertEqual(True, value.indexed)

    def testIndexedPropagation_Value(self):
        """Copying a Value carries its explicit indexed field along."""
        value = datastore.Value()
        set_value(value, datastore.Value())
        self.assertEqual(False, value.HasField('indexed'))
        set_value(value, datastore.Value(), False)
        self.assertEqual(True, value.HasField('indexed'))
        self.assertEqual(False, value.indexed)
        set_value(value, copy.deepcopy(value))
        self.assertEqual(True, value.HasField('indexed'))
        self.assertEqual(False, value.indexed)
        set_value(value, datastore.Value(), True)
        self.assertEqual(False, value.HasField('indexed'))
        self.assertEqual(True, value.indexed)
        value.indexed = True
        set_value(value, copy.deepcopy(value))
        self.assertEqual(True, value.HasField('indexed'))
        self.assertEqual(True, value.indexed)

    def testIndexedPropagation_List(self):
        """indexed propagates to list elements, never to the list value itself."""
        value = datastore.Value()
        set_value(value, ['a'])
        self.assertEqual(False, value.HasField('indexed'))
        self.assertEqual(False, value.list_value[0].HasField('indexed'))
        set_value(value, ['a'], True)
        self.assertEqual(False, value.HasField('indexed'))
        self.assertEqual(False, value.list_value[0].HasField('indexed'))
        self.assertEqual(True, value.list_value[0].indexed)
        set_value(value, ['a'], False)
        self.assertEqual(False, value.HasField('indexed'))
        self.assertEqual(True, value.list_value[0].HasField('indexed'))
        self.assertEqual(False, value.list_value[0].indexed)

    def testSetValueBadType(self):
        """Unsupported python types raise TypeError."""
        value = datastore.Value()
        self.assertRaises(TypeError, set_value, value, 'a', object())
        self.assertRaises(TypeError, set_value, value, object(), None)

    def testSetPropertyIndexed(self):
        """set_property forwards the indexed flag to its value."""
        property = datastore.Property()
        set_property(property, 'a', 1)
        self.assertEqual(False, property.value.HasField('indexed'))
        set_property(property, 'a', 1, indexed=True)
        self.assertEqual(False, property.value.HasField('indexed'))
        self.assertEqual(True, property.value.indexed)
        set_property(property, 'a', 1, indexed=False)
        self.assertEqual(True, property.value.HasField('indexed'))
        self.assertEqual(False, property.value.indexed)

    def testQuery(self):
        """set_kind/add_property_orders/add_projection populate a Query."""
        q = datastore.Query()
        set_kind(q, 'Foo')
        self.assertEqual('Foo', q.kind[0].name)
        # '-' prefix means descending order.
        add_property_orders(q, '-bar', 'foo')
        self.assertEqual(datastore.PropertyOrder.DESCENDING,
                         q.order[0].direction)
        self.assertEqual('bar', q.order[0].property.name)
        self.assertEqual(datastore.PropertyOrder.ASCENDING,
                         q.order[1].direction)
        self.assertEqual('foo', q.order[1].property.name)
        add_projection(q, '__key__', 'bar')
        self.assertEqual('__key__', q.projection[0].property.name)
        self.assertEqual('bar', q.projection[1].property.name)

    def testFilter(self):
        """set_composite_filter ANDs together property filters."""
        f = datastore.Filter()
        set_composite_filter(
            f,
            datastore.CompositeFilter.AND,
            set_property_filter(datastore.Filter(),
                                'foo', datastore.PropertyFilter.EQUAL, u'bar'),
            set_property_filter(datastore.Filter(),
                                'hop', datastore.PropertyFilter.GREATER_THAN, 2.0))
        cf = f.composite_filter
        pf = cf.filter[0].property_filter
        self.assertEqual('foo', pf.property.name)
        self.assertEqual('bar', pf.value.string_value)
        self.assertEqual(datastore.PropertyFilter.EQUAL, pf.operator)
        pf = cf.filter[1].property_filter
        self.assertEqual('hop', pf.property.name)
        self.assertEqual(2.0, pf.value.double_value)
        self.assertEqual(datastore.PropertyFilter.GREATER_THAN, pf.operator)
        self.assertEqual(datastore.CompositeFilter.AND, cf.operator)
# Run the helper test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
|
<gh_stars>0
#!/usr/bin/env python
#The MIT License (MIT)
#
#Copyright (C) 2014 OpenBet Limited
#
#Permission is hereby granted, free of charge, to any person obtaining a copy of
#this software and associated documentation files (the "Software"), to deal in
#the Software without restriction, including without limitation the rights to
#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
#of the Software, and to permit persons to whom the Software is furnished to do
#so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
#THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
"""ShutIt is a means of building stateless target hosts in a flexible and predictable way.
"""
from shutit_module import ShutItModule, ShutItException, ShutItFailException
import ConfigParser
import util
import urllib
import shutit_global
import sys
import os
import json
import re
import signal
from distutils import spawn
def module_ids(shutit, rev=False):
    """Return all module ids sorted by run_order (reversed when rev is True).

    Conn modules (run order < 0) simply sort first/last; nothing is filtered.
    """
    ordering = lambda module_id: shutit.shutit_map[module_id].run_order
    ordered_ids = sorted(shutit.shutit_map.keys(), key=ordering)
    if rev:
        ordered_ids.reverse()
    return ordered_ids
def allowed_module_ids(shutit, rev=False):
    """Gets a list of module ids that are allowed to be run,
    guaranteed to be sorted by run_order, ignoring conn modules
    (run order < 0).
    """
    # Local result renamed (was 'allowed_module_ids', shadowing this
    # function's own name) and the append loop folded into a comprehension.
    return [module_id for module_id in module_ids(shutit, rev)
            if allowed_image(shutit, module_id)]
def disallowed_module_ids(shutit, rev=False):
    """Gets a list of disallowed module ids that are not allowed to be run,
    guaranteed to be sorted by run_order, ignoring conn modules
    (run order < 0).
    """
    # Local result renamed (was 'disallowed_module_ids', shadowing this
    # function's own name) and the append loop folded into a comprehension.
    return [module_id for module_id in module_ids(shutit, rev)
            if not allowed_image(shutit, module_id)]
def print_modules(shutit):
    """Return a string table listing run order, build/remove flags and id
    for every module in the ShutIt module map.
    """
    cfg = shutit.cfg
    lines = []
    lines.append('Modules: \n')
    lines.append(' Run order Build Remove Module ID\n')
    for module_id in module_ids(shutit):
        lines.append(' ' + str(shutit.shutit_map[module_id].run_order) +
                     ' ' +
                     str(cfg[module_id]['shutit.core.module.build']) + ' ' +
                     str(cfg[module_id]['shutit.core.module.remove']) + ' ' +
                     module_id + '\n')
    return ''.join(lines)
# run_order of -1 means 'stop everything'
def stop_all(shutit, run_order=-1):
    """Runs stop method on all modules less than the passed-in run_order.
    Used when target is exporting itself mid-build, so we clean up state
    before committing run files etc.
    """
    cfg = shutit.cfg
    if cfg['build']['interactive'] >= 3:
        print('\nRunning stop on all modules' + \
            util.colour('31', '\n\n[Hit return to continue]'))
        util.util_raw_input(shutit=shutit)
    # Walk modules in reverse run order so dependents stop before their
    # dependencies; guard clauses skip out-of-range or uninstalled modules.
    for module_id in module_ids(shutit, rev=True):
        module_obj = shutit.shutit_map[module_id]
        if run_order != -1 and module_obj.run_order > run_order:
            continue
        if not is_installed(shutit, module_obj):
            continue
        if not module_obj.stop(shutit):
            shutit.fail('failed to stop: ' + module_id, child=shutit.pexpect_children['target_child'])
# Start all apps less than the supplied run_order
def start_all(shutit, run_order=-1):
    """Runs start method on all modules less than the passed-in run_order.
    Used when target is exporting itself mid-build, so we can export a clean
    target and still depended-on modules running if necessary.
    """
    cfg = shutit.cfg
    if cfg['build']['interactive'] >= 3:
        print('\nRunning start on all modules' +
            util.colour('31', '\n\n[Hit return to continue]\n'))
        util.util_raw_input(shutit=shutit)
    # Walk modules in run order so dependencies start before dependents;
    # guard clauses skip out-of-range or uninstalled modules.
    for module_id in module_ids(shutit):
        module_obj = shutit.shutit_map[module_id]
        if run_order != -1 and module_obj.run_order > run_order:
            continue
        if not is_installed(shutit, module_obj):
            continue
        if not module_obj.start(shutit):
            shutit.fail('failed to start: ' + module_id, \
                child=shutit.pexpect_children['target_child'])
def is_installed(shutit, shutit_module_obj):
    """Return True if this module is installed, consulting (and updating)
    the installed/not-installed caches on the target config first.
    """
    cached = shutit.cfg['target']
    module_id = shutit_module_obj.module_id
    # Cache hits avoid re-querying the module.
    if module_id in cached['modules_installed']:
        return True
    if module_id in cached['modules_not_installed']:
        return False
    # Cache miss: ask the module, then record the answer.
    if shutit_module_obj.is_installed(shutit):
        cached['modules_installed'].append(module_id)
        return True
    if module_id not in cached['modules_not_installed']:
        cached['modules_not_installed'].append(module_id)
    return False
def is_to_be_built_or_is_installed(shutit, shutit_module_obj):
    """Return True if this module is configured to be built, or if it is
    already installed on the target.
    """
    build_flag = shutit.cfg[shutit_module_obj.module_id]['shutit.core.module.build']
    if build_flag:
        return True
    return is_installed(shutit, shutit_module_obj)
def is_ready(shutit, shutit_module_obj):
    """Return True if this module is ready to be built.

    Positive answers are cached on the target config, since readiness is
    assumed not to change during the build; negative answers are not cached.
    """
    ready_cache = shutit.cfg['target']['modules_ready']
    if shutit_module_obj.module_id in ready_cache:
        shutit.log('is_ready: returning True from cache')
        return True
    if not shutit_module_obj.check_ready(shutit):
        return False
    ready_cache.append(shutit_module_obj.module_id)
    return True
def init_shutit_map(shutit):
    """Initializes the module map of shutit based on the modules
    we have gathered.
    Checks we have core modules
    Checks for duplicate module details.
    Sets up common config.
    Sets up map of modules.

    Fails the build (via shutit.fail) on: no non-core modules found,
    duplicate module ids, duplicate run orders, or a missing core
    (run_order == 0) module.
    """
    cfg = shutit.cfg
    modules = shutit.shutit_modules
    # Have we got anything to process outside of special modules?
    if len([mod for mod in modules if mod.run_order > 0]) < 1:
        shutit.log(modules)
        path = ':'.join(cfg['host']['shutit_module_path'])
        # Tailor the failure message to how the module path was (mis)set.
        if path == '':
            shutit.fail('No modules aside from core ones found and no ShutIt' +
                ' module path given. ' +
                '\nDid you set --shutit_module_path/-m' +
                ' wrongly?')
        elif path == '.':
            # NOTE(review): the message below reads 'set--shutit_module_path'
            # (missing space) in the original string; left untouched here.
            shutit.fail('No modules aside from core ones found and no ShutIt' +
                ' module path given apart from default (.).\nDid you' +
                ' set--shutit_module_path/-m? Is there a STOP* file' +
                ' in your . dir?')
        else:
            shutit.fail('No modules aside from core ones found and no ShutIt ' +
                'modules in path:\n\n' + path +
                '\n\nor their subfolders. Check your ' +
                '--shutit_module_path/-m setting and check that there are ' +
                'ShutItmodules below without STOP* files in any relevant ' +
                'directories.')
    shutit.log('PHASE: base setup', code='31')
    if cfg['build']['interactive'] >= 3:
        shutit.log('\nChecking to see whether there are duplicate module ids ' +
            'or run orders in the visible modules.', force_stdout=True)
        shutit.log('\nModules I see are:\n', force_stdout=True)
        for module in modules:
            shutit.log(module.module_id, force_stdout=True, code='31')
        shutit.log('\n', force_stdout=True)
    # run_orders maps each run_order to its module so duplicates can be
    # reported with both offending module ids.
    run_orders = {}
    has_core_module = False
    for module in modules:
        assert isinstance(module, ShutItModule)
        if module.module_id in shutit.shutit_map:
            shutit.fail('Duplicated module id: ' + module.module_id +
                '\n\nYou may want to check your --shutit_module_path setting')
        if module.run_order in run_orders:
            shutit.fail('Duplicate run order: ' + str(module.run_order) +
                ' for ' + module.module_id + ' and ' +
                run_orders[module.run_order].module_id +
                '\n\nYou may want to check your --shutit_module_path setting')
        if module.run_order == 0:
            has_core_module = True
        # Register the module in both the id map and the run-order map.
        shutit.shutit_map[module.module_id] = run_orders[module.run_order] = module
    if not has_core_module:
        shutit.fail('No module with run_order=0 specified! This is required.')
    if cfg['build']['interactive'] >= 3:
        print(util.colour('31', 'Module id and run order checks OK' +
            '\n\n[Hit return to continue]\n'))
        util.util_raw_input(shutit=shutit)
def config_collection(shutit):
    """Collect core config from config files for all seen modules.

    For every module id: seeds the core build/remove/tag/allowed_images
    settings, then overlays the module's own configs/build.cnf section
    (unless a command-line config override exists for that option), and
    finally resolves the 'build if needed' default.
    """
    shutit.log('In config_collection')
    cfg = shutit.cfg
    for module_id in module_ids(shutit):
        # Default to None so we can interpret as ifneeded
        shutit.get_config(module_id, 'shutit.core.module.build', None, boolean=True, forcenone=True)
        shutit.get_config(module_id, 'shutit.core.module.remove', False, boolean=True)
        shutit.get_config(module_id, 'shutit.core.module.tag', False, boolean=True)
        # Default to allow any image
        # NOTE(review): [".*"] is a shared mutable default passed to
        # get_config on every iteration — confirm get_config copies it.
        shutit.get_config(module_id, 'shutit.core.module.allowed_images', [".*"])
        module = shutit.shutit_map[module_id]
        # NOTE(review): 'module.__module_file' here is NOT name-mangled
        # (we are not inside a class body), so this reads an attribute
        # literally named '__module_file'. If the attribute was assigned
        # as self.__module_file inside ShutItModule it would actually be
        # '_ShutItModule__module_file' — confirm this lookup succeeds.
        cfg_file = os.path.dirname(module.__module_file) + '/configs/build.cnf'
        if os.path.isfile(cfg_file):
            # use shutit.get_config, forcing the passed-in default
            config_parser = ConfigParser.ConfigParser()
            config_parser.read(cfg_file)
            for section in config_parser.sections():
                if section == module_id:
                    for option in config_parser.options(section):
                        if option == 'shutit.core.module.allowed_images':
                            override = False
                            for mod, opt, val in shutit.cfg['build']['config_overrides']:
                                # skip overrides
                                if mod == module_id and opt == option:
                                    override = True
                            if override:
                                continue
                        value = config_parser.get(section,option)
                        if option == 'shutit.core.module.allowed_images':
                            value = json.loads(value)
                        shutit.get_config(module_id, option,
                            value, forcedefault=True)
    # ifneeded will (by default) only take effect if 'build' is not
    # specified. It can, however, be forced to a value, but this
    # should be unusual.
        if cfg[module_id]['shutit.core.module.build'] is None:
            shutit.get_config(module_id, 'shutit.core.module.build_ifneeded', True, boolean=True)
            cfg[module_id]['shutit.core.module.build'] = False
        else:
            shutit.get_config(module_id, 'shutit.core.module.build_ifneeded', False, boolean=True)
def config_collection_for_built(shutit):
    """Collect configuration for modules that are being built.
    When this is called we should know what's being built (ie after
    dependency resolution).

    Also re-applies each building module's configs/build.cnf and then
    validates the configured docker image against every building module's
    allowed_images list, exiting or raising on a mismatch.
    """
    shutit.log('In config_collection_for_built')
    for module_id in module_ids(shutit):
        # Get the config even if installed or building (may be needed in other
        # hooks, eg test).
        if (is_to_be_built_or_is_installed(shutit, shutit.shutit_map[module_id]) and
            not shutit.shutit_map[module_id].get_config(shutit)):
            shutit.fail(module_id + ' failed on get_config')
        # Collect the build.cfg if we are building here.
        # If this file exists, process it.
        if shutit.cfg[module_id]['shutit.core.module.build']:
            module = shutit.shutit_map[module_id]
            # NOTE(review): same '__module_file' attribute-name concern as in
            # config_collection — confirm this lookup succeeds outside the class.
            cfg_file = os.path.dirname(module.__module_file) + '/configs/build.cnf'
            if os.path.isfile(cfg_file):
                # use shutit.get_config, forcing the passed-in default
                config_parser = ConfigParser.ConfigParser()
                config_parser.read(cfg_file)
                for section in config_parser.sections():
                    if section == module_id:
                        for option in config_parser.options(section):
                            # Command-line overrides win over build.cnf values.
                            override = False
                            for mod, opt, val in shutit.cfg['build']['config_overrides']:
                                # skip overrides
                                if mod == module_id and opt == option:
                                    override = True
                            if override:
                                continue
                            # Preserve the boolean-ness of the existing value.
                            is_bool = (type(shutit.cfg[module_id][option]) == bool)
                            if is_bool:
                                value = config_parser.getboolean(section,option)
                            else:
                                value = config_parser.get(section,option)
                            if option == 'shutit.core.module.allowed_images':
                                value = json.loads(value)
                            shutit.get_config(module_id, option,
                                value, forcedefault=True)
    # Check the allowed_images against the base_image
    passed = True
    for module_id in module_ids(shutit):
        # The outer condition pre-filters; allowed_image() makes the real
        # (regexp-based) decision, so a plain-list miss may still pass.
        if (shutit.cfg[module_id]['shutit.core.module.build'] and
            (shutit.cfg[module_id]['shutit.core.module.allowed_images'] and
            shutit.cfg['target']['docker_image'] not in shutit.cfg[module_id]['shutit.core.module.allowed_images'])):
            if not allowed_image(shutit,module_id):
                passed = False
                print('\n\nWARNING!\n\nAllowed images for ' + module_id + ' are: ' +
                    str(shutit.cfg[module_id]['shutit.core.module.allowed_images']) +
                    ' but the configured image is: ' +
                    shutit.cfg['target']['docker_image'] +
                    '\n\nIs your shutit_module_path set correctly?' +
                    '\n\nIf you want to ignore this, ' +
                    'pass in the --ignoreimage flag to shutit.\n\n')
    if not passed:
        if shutit.cfg['build']['imageerrorok']:
            # useful for test scripts
            print('Exiting on allowed images error, with return status 0')
            sys.exit(0)
        else:
            raise ShutItFailException('Allowed images checking failed')
def allowed_image(shutit,module_id):
    """Given a module id and a shutit object, determine whether the image is allowed to be built.

    The check is bypassed entirely (always True) when the build is run with
    ignoreimage. Otherwise each entry of the module's
    'shutit.core.module.allowed_images' list is treated as a regular
    expression that must match the entire configured docker image name.

    Note: when the allowed_images list is empty/falsy, control falls off the
    end and None (falsy) is returned, as in the original implementation.
    """
    shutit.log("In allowed_image: " + module_id)
    cfg = shutit.cfg
    if cfg['build']['ignoreimage']:
        shutit.log("ignoreimage == true, returning true" + module_id,force_stdout=True)
        return True
    allowed = cfg[module_id]['shutit.core.module.allowed_images']
    shutit.log(str(allowed))
    if allowed:
        # Try allowed images as regexps, anchored at both ends.
        target_image = cfg['target']['docker_image']
        for pattern in allowed:
            if re.match('^' + pattern + '$', target_image):
                return True
        return False
def conn_target(shutit):
    """Connect to the target.

    Looks up the conn module named by cfg['build']['conn_module'] among
    shutit.conn_modules, collects its config, and builds it (which performs
    the actual pexpect connection setup). Fails hard if no such module.
    """
    conn_module = None
    for mod in shutit.conn_modules:
        if mod.module_id == shutit.cfg['build']['conn_module']:
            conn_module = mod
            break
    if conn_module is None:
        shutit.fail('Couldn\'t find conn_module ' + shutit.cfg['build']['conn_module'])
    # Set up the target in pexpect.
    if shutit.cfg['build']['interactive'] >= 3:
        print('\nRunning the conn module (' +
            shutit.shutit_main_dir + '/setup.py)' + util.colour('31',
            '\n\n[Hit return to continue]\n'))
        util.util_raw_input(shutit=shutit)
    # Conn modules skip the normal config-collection phase, so collect
    # their config explicitly before building.
    conn_module.get_config(shutit)
    conn_module.build(shutit)
def finalize_target(shutit):
    """Finalize the target using the core finalize method.

    Looks up the configured conn module (exactly as conn_target does) and
    calls its finalize method. Previously, if no conn module matched,
    conn_module was left unbound and the final call raised a NameError;
    now we fail with an explicit message, consistent with conn_target.
    """
    shutit.pause_point('\nFinalizing the target module (' +
        shutit.shutit_main_dir + '/setup.py)', print_input=False, level=3)
    # Can assume conn_module exists at this point - but verify anyway and
    # fail cleanly rather than with a NameError if it does not.
    conn_module = None
    for mod in shutit.conn_modules:
        if mod.module_id == shutit.cfg['build']['conn_module']:
            conn_module = mod
            break
    if conn_module is None:
        shutit.fail('Couldn\'t find conn_module ' + shutit.cfg['build']['conn_module'])
    conn_module.finalize(shutit)
# Once we have all the modules, then we can look at dependencies.
# Dependency validation begins.
def resolve_dependencies(shutit, to_build, depender):
    """Add any required dependencies.

    For each module the depender depends on, if that module exists, is not
    already scheduled, and is marked build_ifneeded, append it to to_build
    and flag it for building in the config. Always returns True.
    """
    shutit.log('In resolve_dependencies')
    cfg = shutit.cfg
    for needed_id in depender.depends_on:
        needed = shutit.shutit_map.get(needed_id)
        # A missing module is reported by a later validation phase; skip here.
        if not needed:
            continue
        if needed in to_build:
            continue
        if not cfg[needed_id]['shutit.core.module.build_ifneeded']:
            continue
        to_build.append(needed)
        cfg[needed_id]['shutit.core.module.build'] = True
    return True
def check_dependee_exists(shutit, depender, dependee, dependee_id):
    """Checks whether a depended-on module is available.

    Returns an error string when the dependee cannot be found in the module
    map, or None (implicitly) when it exists.
    """
    # If the module id isn't there, there's a problem.
    # Idiom fix: compare to None with 'is', not '=='.
    if dependee is None:
        return ('module: \n\n' + dependee_id + '\n\nnot found in paths: ' +
            str(shutit.cfg['host']['shutit_module_path']) +
            ' but needed for ' + depender.module_id +
            '\nCheck your --shutit_module_path setting and ensure that ' +
            'all modules configured to be built are in that path setting, ' +
            'eg "--shutit_module_path /path/to/other/module/:."\n\n' +
            'Also check that the module is configured to be built with ' +
            'the correct module id in that module\'s configs/build.cnf file.' +
            '\n\nSee also help.')
def check_dependee_build(shutit, depender, dependee, dependee_id):
    """Checks whether a depended on module is configured to be built.

    Returns an error string on failure, or None (implicitly) when the
    dependee is configured to build or is already built/installed.
    """
    # If depender is installed or will be installed, so must the dependee
    if not (shutit.cfg[dependee.module_id]['shutit.core.module.build'] or
            is_to_be_built_or_is_installed(shutit,dependee)):
        return ('depender module id:\n\n[' + depender.module_id + ']\n\n' +
            'is configured: "build:yes" or is already built ' +
            'but dependee module_id:\n\n[' + dependee_id + ']\n\n' +
            'is not configured: "build:yes"')
def check_dependee_order(_shutit, depender, dependee, dependee_id):
    """Checks whether run orders are in the appropriate order.

    A module must run after everything it depends on, i.e. the dependee's
    run order must not exceed the depender's. Returns an error string on
    violation, None otherwise.
    """
    # Guard clause: ordering is fine, nothing to report.
    if dependee.run_order <= depender.run_order:
        return None
    return ('depender module id:\n\n' + depender.module_id +
        '\n\n(run order: ' + str(depender.run_order) + ') ' +
        'depends on dependee module_id:\n\n' + dependee_id +
        '\n\n(run order: ' + str(dependee.run_order) + ') ' +
        'but the latter is configured to run after the former')
def make_dep_graph(depender):
    """Returns a digraph string fragment based on the passed-in module.

    Emits one graphviz edge per dependency, of the form
    "<depender_id>"->"<dependee_id>";\n
    """
    edges = ['"%s"->"%s";\n' % (depender.module_id, dependee_id)
             for dependee_id in depender.depends_on]
    return ''.join(edges)
def check_deps(shutit):
    """Dependency checking phase is performed in this method.

    Returns a list of single-element error tuples, or [] when all
    dependency checks pass.
    """
    cfg = shutit.cfg
    shutit.log('PHASE: dependencies', code='31')
    shutit.pause_point('\nNow checking for dependencies between modules',
        print_input=False, level=3)
    # Get modules we're going to build
    to_build = [
        shutit.shutit_map[module_id] for module_id in shutit.shutit_map
        if module_id in cfg and cfg[module_id]['shutit.core.module.build']
    ]
    # Add any deps we may need by extending to_build and altering cfg.
    # NOTE(review): resolve_dependencies appends to to_build while it is
    # being iterated here, so transitively-added deps are also processed -
    # presumably intentional; confirm before restructuring.
    for module in to_build:
        resolve_dependencies(shutit, to_build, module)
    # Dep checking
    def err_checker(errs, triples):
        """Collate error information.

        Keeps error-free triples for the next validation pass and appends
        error strings to the enclosing found_errs list.
        """
        new_triples = []
        for err, triple in zip(errs, triples):
            if not err:
                new_triples.append(triple)
                continue
            found_errs.append(err)
        return new_triples
    found_errs = []
    triples = []
    for depender in to_build:
        for dependee_id in depender.depends_on:
            triples.append((depender, shutit.shutit_map.get(dependee_id),
                dependee_id))
    # Run the three validations in order; each pass filters out triples that
    # already failed so later checks don't crash on e.g. a None dependee.
    triples = err_checker([
        check_dependee_exists(shutit, depender, dependee, dependee_id)
        for depender, dependee, dependee_id in triples
    ], triples)
    triples = err_checker([
        check_dependee_build(shutit, depender, dependee, dependee_id)
        for depender, dependee, dependee_id in triples
    ], triples)
    triples = err_checker([
        check_dependee_order(shutit, depender, dependee, dependee_id)
        for depender, dependee, dependee_id in triples
    ], triples)
    if found_errs:
        return [(err,) for err in found_errs]
    if cfg['build']['debug']:
        shutit.log('Modules configured to be built (in order) are: ', code='31')
        for module_id in module_ids(shutit):
            module = shutit.shutit_map[module_id]
            if cfg[module_id]['shutit.core.module.build']:
                shutit.log(module_id + ' ' + str(module.run_order), code='31')
        shutit.log('\n', code='31')
    return []
def check_conflicts(shutit):
    """Checks for any conflicts between modules configured to be built.

    Returns a list of single-element error tuples (empty if no conflicts).
    """
    cfg = shutit.cfg
    # Now consider conflicts
    shutit.log('PHASE: conflicts', code='31')
    errs = []
    shutit.pause_point('\nNow checking for conflicts between modules',
        print_input=False, level=3)
    for module_id in module_ids(shutit):
        if not cfg[module_id]['shutit.core.module.build']:
            continue
        conflicter = shutit.shutit_map[module_id]
        for conflictee in conflicter.conflicts_with:
            # If the module id isn't there, there's no problem.
            conflictee_obj = shutit.shutit_map.get(conflictee)
            if conflictee_obj == None:
                continue
            # A conflict only matters when both sides are built/installed
            # (or about to be).
            if ((cfg[conflicter.module_id]['shutit.core.module.build'] or
                    is_to_be_built_or_is_installed(shutit,conflicter)) and
                    (cfg[conflictee_obj.module_id]['shutit.core.module.build'] or
                    is_to_be_built_or_is_installed(shutit,conflictee_obj))):
                errs.append(('conflicter module id: ' + conflicter.module_id +
                    ' is configured to be built or is already built but ' +
                    'conflicts with module_id: ' + conflictee_obj.module_id,))
    return errs
def check_ready(shutit, throw_error=True):
    """Check that all modules are ready to be built, calling check_ready on
    each of those configured to be built and not already installed
    (see is_installed).

    Returns a list of (error_string, pexpect_child) tuples; empty when all
    modules are ready (or throw_error is False).
    """
    cfg = shutit.cfg
    shutit.log('PHASE: check_ready', code='31')
    errs = []
    shutit.pause_point('\nNow checking whether we are ready to build modules' +
        ' configured to be built',
        print_input=False, level=3)
    # Find out who we are to see whether we need to log in and out or not.
    whowasi = shutit.whoami()
    # Login at least once to get any exports.
    if whowasi == 'root':
        shutit.login()
    for module_id in module_ids(shutit):
        module = shutit.shutit_map[module_id]
        shutit.log('considering check_ready (is it ready to be built?): ' +
            module_id, code='31')
        # Only check modules that will be built, have not been recorded as
        # ready already, and are not installed.
        if cfg[module_id]['shutit.core.module.build'] and module.module_id not in shutit.cfg['target']['modules_ready'] and not is_installed(shutit,module):
            shutit.log('checking whether module is ready to build: ' + module_id,
                code='31')
            if whowasi != 'root':
                shutit.login(prompt_prefix=module_id)
            # Move to the directory so context is correct (eg for checking for
            # the existence of files needed for build)
            revert_dir = os.getcwd()
            # NOTE(review): accessing module.__module_file from outside the
            # class relies on the attribute literally having this name (no
            # mangling applies here) - confirm against the module class.
            cfg['target']['module_root_dir'] = os.path.dirname(module.__module_file)
            shutit.chdir(cfg['target']['module_root_dir'])
            if not is_ready(shutit, module) and throw_error:
                errs.append((module_id + ' not ready to install.\nRead the ' +
                    'check_ready function in the module,\nor log ' +
                    'messages above to determine the issue.\n\n',
                    shutit.pexpect_children['target_child']))
            if whowasi != 'root':
                shutit.logout()
            shutit.chdir(revert_dir)
    if whowasi == 'root':
        shutit.logout()
    return errs
def do_remove(shutit):
    """Remove modules by calling remove method on those configured for removal.
    """
    cfg = shutit.cfg
    # Now get the run_order keys in order and go.
    shutit.log('PHASE: remove', code='31')
    shutit.pause_point('\nNow removing any modules that need removing',
        print_input=False, level=3)
    whowasi = shutit.whoami()
    # Login at least once to get the exports.
    if whowasi == 'root':
        shutit.login()
    for module_id in module_ids(shutit):
        module = shutit.shutit_map[module_id]
        shutit.log('considering whether to remove: ' + module_id, code='31')
        if cfg[module_id]['shutit.core.module.remove']:
            shutit.log('removing: ' + module_id, code='31')
            if whowasi != 'root':
                shutit.login(prompt_prefix=module_id)
            if not module.remove(shutit):
                shutit.log(print_modules(shutit), code='31')
                shutit.fail(module_id + ' failed on remove',
                    child=shutit.pexpect_children['target_child'])
            else:
                # Create a directory and files to indicate this has been removed.
                shutit.send('mkdir -p /root/shutit_build/module_record/' + module.module_id + ' && rm -f /root/shutit_build/module_record/' + module.module_id + '/built && touch /root/shutit_build/module_record/' + module.module_id + '/removed')
                # Remove from "installed" cache
                shutit.cfg['target']['modules_installed'].remove(module.module_id)
                # Add to "not installed" cache
                shutit.cfg['target']['modules_not_installed'].append(module.module_id)
            if whowasi != 'root':
                shutit.logout()
    if whowasi == 'root':
        shutit.logout()
def build_module(shutit, module):
    """Build passed-in module.

    Calls the module's build method, records success in the in-target
    module_record, optionally tags (commits) the image, and honours the
    interactive settings for pausing/prompting along the way.
    """
    cfg = shutit.cfg
    shutit.log('building: ' + module.module_id + ' with run order: ' +
        str(module.run_order), code='31')
    cfg['build']['report'] = (cfg['build']['report'] + '\nBuilding: ' +
        module.module_id + ' with run order: ' +
        str(module.run_order))
    if not module.build(shutit):
        shutit.fail(module.module_id + ' failed on build',
            child=shutit.pexpect_children['target_child'])
    else:
        # Create a directory and files to indicate this has been built.
        shutit.send('mkdir -p /root/shutit_build/module_record/' + module.module_id + ' && touch /root/shutit_build/module_record/' + module.module_id + '/built && rm -f /root/shutit_build/module_record/' + module.module_id + '/removed')
        # Put it into "installed" cache
        shutit.cfg['target']['modules_installed'].append(module.module_id)
        # Remove from "not installed" cache
        shutit.cfg['target']['modules_not_installed'].remove(module.module_id)
    shutit.pause_point('\nPausing to allow inspect of build for: ' +
        module.module_id, print_input=True, level=2)
    cfg['build']['report'] = (cfg['build']['report'] + '\nCompleted module: ' +
        module.module_id)
    # Report the build if tagging is requested or we are highly interactive.
    if cfg[module.module_id]['shutit.core.module.tag'] or cfg['build']['interactive'] >= 3:
        shutit.log(util.build_report(shutit, '#Module:' + module.module_id),
            code='31')
    # In interactive mode, offer to tag (save state) even if not configured.
    if (not cfg[module.module_id]['shutit.core.module.tag'] and
            cfg['build']['interactive'] >= 2):
        shutit.log("\n\nDo you want to save state now we\'re at the " +
            "end of this module? (" + module.module_id +
            ") (input y/n)", force_stdout=True, code='31')
        cfg[module.module_id]['shutit.core.module.tag'] = (util.util_raw_input(shutit=shutit,default='y') == 'y')
    if cfg[module.module_id]['shutit.core.module.tag'] or cfg['build']['tag_modules']:
        shutit.log(module.module_id +
            ' configured to be tagged, doing repository work',
            force_stdout=True)
        # Stop all before we tag to avoid file changing errors,
        # and clean up pid files etc..
        stop_all(shutit, module.run_order)
        shutit.do_repository_work(str(module.module_id) + '_' +
            str(module.run_order),
            password=cfg['host']['password'],
            docker_executable=cfg['host']['docker_executable'],
            force=True)
        # Start all after we tag to ensure services are up as expected.
        start_all(shutit, module.run_order)
    if cfg['build']['interactive'] >= 2:
        shutit.log("\n\nDo you want to stop interactive mode? (input y/n)\n",
            force_stdout=True,code='31')
        if util.util_raw_input(shutit=shutit,default='y') == 'y':
            cfg['build']['interactive'] = 0
def do_build(shutit):
    """Runs build phase, building any modules that we've determined
    need building.
    """
    cfg = shutit.cfg
    shutit.log('PHASE: build, repository work', code='31')
    shutit.log(util.print_config(shutit.cfg))
    if cfg['build']['interactive'] >= 3:
        print ('\nNow building any modules that need building' +
            util.colour('31', '\n\n[Hit return to continue]\n'))
        util.util_raw_input(shutit=shutit)
    module_id_list = module_ids(shutit)
    if cfg['build']['deps_only']:
        # NOTE(review): relies on Python 2 filter() returning a list (it is
        # indexed with [-1] below); this file is Python 2 throughout.
        module_id_list_build_only = filter(lambda x: cfg[x]['shutit.core.module.build'], module_id_list)
    for module_id in module_id_list:
        module = shutit.shutit_map[module_id]
        shutit.log('considering whether to build: ' + module.module_id,
            code='31')
        if cfg[module.module_id]['shutit.core.module.build']:
            if is_installed(shutit,module):
                cfg['build']['report'] = (cfg['build']['report'] +
                    '\nBuilt already: ' + module.module_id +
                    ' with run order: ' + str(module.run_order))
            else:
                # We move to the module directory to perform the build, returning immediately afterwards.
                if cfg['build']['deps_only'] and module_id == module_id_list_build_only[-1]:
                    # If this is the last module, and we are only building deps, stop here.
                    cfg['build']['report'] = (cfg['build']['report'] + '\nSkipping: ' +
                        module.module_id + ' with run order: ' + str(module.run_order) +
                        '\n\tas this is the final module and we are building dependencies only')
                else:
                    revert_dir = os.getcwd()
                    cfg['target']['module_root_dir'] = os.path.dirname(module.__module_file)
                    shutit.chdir(cfg['target']['module_root_dir'])
                    shutit.login(prompt_prefix=module_id)
                    build_module(shutit, module)
                    shutit.logout()
                    shutit.chdir(revert_dir)
        # Start the module's services once it is known to be installed.
        if is_installed(shutit, module):
            shutit.log('Starting module')
            if not module.start(shutit):
                shutit.fail(module.module_id + ' failed on start',
                    child=shutit.pexpect_children['target_child'])
def do_test(shutit):
    """Runs test phase, erroring if any return false.
    """
    cfg = shutit.cfg
    if not cfg['build']['dotest']:
        shutit.log('Tests configured off, not running')
        return
    # Test in reverse order
    shutit.log('PHASE: test', code='31')
    if cfg['build']['interactive'] >= 3:
        # Python 2 print statement (this module is Python 2).
        print '\nNow doing test phase' + util.colour('31',
            '\n\n[Hit return to continue]\n')
        util.util_raw_input(shutit=shutit)
    # Restart everything so services are in a known state before testing.
    stop_all(shutit)
    start_all(shutit)
    whowasi = shutit.whoami()
    # Login at least once to get the exports.
    if whowasi == 'root':
        shutit.login()
    for module_id in module_ids(shutit, rev=True):
        module = shutit.shutit_map[module_id]
        # Only test if it's installed.
        if is_installed(shutit, shutit.shutit_map[module_id]):
            shutit.log('RUNNING TEST ON: ' + module_id, code='31')
            if whowasi != 'root':
                shutit.login(prompt_prefix=module_id)
            if not shutit.shutit_map[module_id].test(shutit):
                shutit.fail(module_id + ' failed on test',
                    child=shutit.pexpect_children['target_child'])
            if whowasi != 'root':
                shutit.logout()
    if whowasi == 'root':
        shutit.logout()
def do_finalize(shutit):
    """Runs finalize phase; run after all builds are complete and all modules
    have been stopped.
    """
    cfg = shutit.cfg
    # Stop all the modules
    if cfg['build']['interactive'] >= 3:
        print('\nStopping all modules before finalize phase' + util.colour('31',
            '\n\n[Hit return to continue]\n'))
        util.util_raw_input(shutit=shutit)
    stop_all(shutit)
    # Finalize in reverse order
    shutit.log('PHASE: finalize', code='31')
    if cfg['build']['interactive'] >= 3:
        print('\nNow doing finalize phase, which we do when all builds are ' +
            'complete and modules are stopped' +
            util.colour('31', '\n\n[Hit return to continue]\n'))
        util.util_raw_input(shutit=shutit)
    whowasi = shutit.whoami()
    # Login at least once to get the exports.
    if whowasi == 'root':
        shutit.login()
    for module_id in module_ids(shutit, rev=True):
        # Only finalize if it's thought to be installed.
        if is_installed(shutit, shutit.shutit_map[module_id]):
            if whowasi != 'root':
                shutit.login(prompt_prefix=module_id)
            if not shutit.shutit_map[module_id].finalize(shutit):
                shutit.fail(module_id + ' failed on finalize',
                    child=shutit.pexpect_children['target_child'])
            if whowasi != 'root':
                shutit.logout()
    if whowasi == 'root':
        shutit.logout()
def setup_shutit_path(cfg):
    """Offer to add the shutit executable to the user's PATH via ~/.bashrc.

    Searches a few likely directories; if none contains a 'shutit' file,
    prompts the user (repeatedly) for the path. Exits the process after
    writing .bashrc so the user re-runs in a fresh shell.
    """
    # try the current directory, the .. directory, or the ../shutit directory, the ~/shutit
    if not cfg['host']['add_shutit_to_path']:
        return
    res = util.util_raw_input(prompt='shutit appears not to be on your path - should try and we find it and add it to your ~/.bashrc (Y/n)?')
    if res in ['n','N']:
        # Remember the opt-out so we don't ask again.
        with open(os.path.join(cfg['shutit_home'], 'config'), 'a') as f:
            f.write('\n[host]\nadd_shutit_to_path: no\n')
        return
    path_to_shutit = ''
    # NOTE(review): os.path.abspath does not expand '~'; the '~' entries
    # presumably only work from the home directory - confirm.
    for d in ['.','..','~','~/shutit']:
        path = os.path.abspath(d + '/shutit')
        if not os.path.isfile(path):
            continue
        path_to_shutit = path
    while path_to_shutit == '':
        d = util.util_raw_input(prompt='cannot auto-find shutit - please input the path to your shutit dir\n')
        path = os.path.abspath(d + '/shutit')
        if not os.path.isfile(path):
            continue
        path_to_shutit = path
    if path_to_shutit != '':
        bashrc = os.path.expanduser('~/.bashrc')
        with open(bashrc, "a") as myfile:
            #http://unix.stackexchange.com/questions/26676/how-to-check-if-a-shell-is-login-interactive-batch
            myfile.write('\nexport PATH="$PATH:' + os.path.dirname(path_to_shutit) + '"\n')
        util.util_raw_input(prompt='\nPath set up - please open new terminal and re-run command\n')
        sys.exit()
def shutit_main():
    """Main ShutIt function.
    Handles the configured actions:
    - skeleton - create skeleton module
    - serve - run as a server
    - list_configs - output computed configuration
    - depgraph - output digraph of module dependencies
    """
    # NOTE(review): this module targets Python 2 (print statements and the
    # file() builtin are used below); only a minimum of 2.7 is enforced.
    if sys.version_info.major == 2:
        if sys.version_info.minor < 7:
            shutit_global.shutit.fail('Python version must be 2.7+')
    shutit = shutit_global.shutit
    cfg = shutit.cfg
    util.parse_args(shutit)
    # Action: create a skeleton module, then we are done.
    if cfg['action']['skeleton']:
        util.create_skeleton(shutit)
        cfg['build']['completed'] = True
        return
    # Action: run as a server.
    if cfg['action']['serve']:
        import shutit_srv
        cfg['build']['interactive'] = 0
        revert_dir = os.getcwd()
        os.chdir(sys.path[0])
        shutit_srv.start()
        os.chdir(revert_dir)
        return
    util.load_configs(shutit)
    # Try and ensure shutit is on the path - makes onboarding easier
    # Only do this if we're in a terminal
    if util.determine_interactive() and spawn.find_executable('shutit') is None:
        setup_shutit_path(cfg)
    util.load_mod_from_file(shutit, os.path.join(shutit.shutit_main_dir, 'setup.py'))
    util.load_shutit_modules(shutit)
    if cfg['action']['list_modules']:
        util.list_modules(shutit)
        sys.exit(0)
    init_shutit_map(shutit)
    config_collection(shutit)
    conn_target(shutit)
    errs = []
    errs.extend(check_deps(shutit))
    if cfg['action']['list_deps']:
        # Show dependency graph: one digraph restricted to modules configured
        # to be built, and one covering every visible module.
        digraph = 'digraph depgraph {\n'
        digraph = digraph + '\n'.join([
            make_dep_graph(module) for module_id, module in shutit.shutit_map.items()
            if module_id in shutit.cfg and shutit.cfg[module_id]['shutit.core.module.build']
        ])
        digraph = digraph + '\n}'
        # Python 2 file() builtin.
        f = file(cfg['build']['log_config_path'] + '/digraph.txt','w')
        f.write(digraph)
        f.close()
        digraph_all = 'digraph depgraph {\n'
        digraph_all = digraph_all + '\n'.join([
            make_dep_graph(module) for module_id, module in shutit.shutit_map.items()
        ])
        digraph_all = digraph_all + '\n}'
        f = file(cfg['build']['log_config_path'] + '/digraph_all.txt','w')
        f.write(digraph_all)
        f.close()
        shutit.log('\n================================================================================\n' + digraph_all, force_stdout=True)
        shutit.log('\nAbove is the digraph for all modules seen in this shutit invocation. Use graphviz to render into an image, eg\n\n\tshutit depgraph -m library | dot -Tpng -o depgraph.png\n', force_stdout=True)
        shutit.log('\n================================================================================\n', force_stdout=True)
        shutit.log('\n\n' + digraph, force_stdout=True)
        shutit.log('\n================================================================================\n' + digraph, force_stdout=True)
        shutit.log('\nAbove is the digraph for all modules configured to be built in this shutit invocation. Use graphviz to render into an image, eg\n\n\tshutit depgraph -m library | dot -Tpng -o depgraph.png\n', force_stdout=True)
        shutit.log('\n================================================================================\n', force_stdout=True)
        # Exit now
        sys.exit(0)
    # Dependency validation done, now collect configs of those marked for build.
    config_collection_for_built(shutit)
    if cfg['action']['list_configs'] or cfg['build']['debug']:
        shutit.log(util.print_config(cfg, history=cfg['list_configs']['cfghistory']),
            force_stdout=True)
        # Set build completed
        cfg['build']['completed'] = True
        f = file(cfg['build']['log_config_path'] + '/cfg.txt','w')
        f.write(util.print_config(cfg, history=cfg['list_configs']['cfghistory']))
        f.close()
        shutit.log('================================================================================', force_stdout=True)
        shutit.log('Config details placed in: ' + cfg['build']['log_config_path'], force_stdout=True)
        shutit.log('================================================================================', force_stdout=True)
        shutit.log('To render the digraph of this build into an image run eg:\n\ndot -Tgv -o ' + cfg['build']['log_config_path'] + '/digraph.gv ' + cfg['build']['log_config_path'] + '/digraph.txt && dot -Tpdf -o digraph.pdf ' + cfg['build']['log_config_path'] + '/digraph.gv\n\n', force_stdout=True)
        shutit.log('================================================================================', force_stdout=True)
        shutit.log('To render the digraph of all visible modules into an image, run eg:\n\ndot -Tgv -o ' + cfg['build']['log_config_path'] + '/digraph_all.gv ' + cfg['build']['log_config_path'] + '/digraph_all.txt && dot -Tpdf -o digraph_all.pdf ' + cfg['build']['log_config_path'] + '/digraph_all.gv\n\n', force_stdout=True)
        shutit.log('================================================================================', force_stdout=True)
        shutit.log('\nConfiguration details have been written to the folder: ' + cfg['build']['log_config_path'] + '\n', force_stdout=True)
        shutit.log('================================================================================', force_stdout=True)
    if cfg['action']['list_configs']:
        return
    # Check for conflicts now.
    errs.extend(check_conflicts(shutit))
    # Cache the results of check_ready at the start.
    errs.extend(check_ready(shutit, throw_error=False))
    if errs:
        shutit.log(print_modules(shutit), code='31')
        child = None
        # Report every error; remember the first associated pexpect child so
        # the failure is attributed to it.
        for err in errs:
            shutit.log(err[0], force_stdout=True, code='31')
            if not child and len(err) > 1:
                child = err[1]
        shutit.fail("Encountered some errors, quitting", child=child)
    shutit.record_config()
    # The main phases, in order.
    do_remove(shutit)
    do_build(shutit)
    do_test(shutit)
    do_finalize(shutit)
    finalize_target(shutit)
    shutit.log(util.build_report(shutit, '#Module: N/A (END)'), prefix=False,
        force_stdout=True, code='31')
    if cfg['build']['build_log']:
        shutit.cfg['build']['report_final_messages'] += "Build log file: " + cfg['host']['logfile']
    # Show final report messages (ie messages to show after standard report).
    if shutit.cfg['build']['report_final_messages'] != '':
        shutit.log(shutit.cfg['build']['report_final_messages'], prefix=False,
            force_stdout=True, code='31')
    if shutit.cfg['build']['interactive'] >= 3:
        shutit.log('\n' +
            'The build is complete. You should now have a target ' +
            'called ' + shutit.cfg['target']['name'] +
            ' and a new image if you chose to commit it.\n\n' +
            'Look and play with the following files from the newly-created ' +
            'module directory to dig deeper:\n\n configs/build.cnf\n ' +
            '*.py\n\nYou can rebuild at any time by running the supplied ' +
            './build.sh and run with the supplied ./run.sh. These may need ' +
            'tweaking for your particular environment, eg sudo\n\n' +
            'You can inspect the details of the build in the target image\'s ' +
            '/root/shutit_build directory.', force_stdout=True, code='31')
    # Mark the build as completed
    shutit.cfg['build']['completed'] = True
def do_phone_home(msg=None,question='Error seen - would you like to inform the maintainers?'):
    """Report message home.
    msg - message to send home
    question - question to ask - assumes Y/y for send message, else no

    Never phones home in fully non-interactive builds, and only with the
    user's consent when a question is configured.
    """
    if msg is None:
        msg = {}
    if shutit_global.shutit.cfg['build']['interactive'] == 0:
        return
    # 'pwd' carries the current working directory. The original line had
    # been mangled to an unresolvable '<PASSWORD>()' placeholder; os.getcwd()
    # restores a working call matching the key's meaning.
    msg.update({'shutitrunstatus':'fail','pwd':os.getcwd(),'user':os.environ.get('LOGNAME', '')})
    if question != '' and util.util_raw_input(prompt=question + ' (Y/n)\n') not in ('y','Y',''):
        return
    try:
        # Python 2 urllib API (urlopen/urlencode live directly on urllib).
        urllib.urlopen("http://shutit.tk?" + urllib.urlencode(msg))
    except Exception as e:
        shutit_global.shutit.log('failed to send message: ' + str(e.message))
# Install the shutit Ctrl-C handler so interrupts are handled gracefully.
signal.signal(signal.SIGINT, util.ctrl_c_signal_handler)
if __name__ == '__main__':
    shutit_main()
|
"""Bridge to sublime functionality."""
import sublime
import logging
import re
_log = logging.getLogger("ECC")
class SublBridge:
    """A small help class that bridges with sublime (maybe will grow)."""

    # Flag combination that suppresses sublime's default word/explicit
    # completions when we supply our own.
    NO_DEFAULT_COMPLETIONS = sublime.INHIBIT_WORD_COMPLETIONS \
        | sublime.INHIBIT_EXPLICIT_COMPLETIONS
    # Returning None from on_query_completions lets sublime show defaults.
    SHOW_DEFAULT_COMPLETIONS = None
    # Empty completion list plus inhibit flags: show nothing at all.
    HIDE_DEFAULT_COMPLETIONS = ([], sublime.INHIBIT_WORD_COMPLETIONS |
                                sublime.INHIBIT_EXPLICIT_COMPLETIONS)

    @staticmethod
    def set_status(message):
        """Set status message for the current view.

        Args:
            message (str): message shown in the status bar under key 000_ECC
        """
        view = SublBridge.active_view()
        # NOTE(review): unlike erase_status there is no None-check here;
        # presumably callers guarantee an active view exists - confirm.
        view.set_status("000_ECC", message)

    @staticmethod
    def erase_status():
        """Erase status message for the current view."""
        view = SublBridge.active_view()
        if not view:
            # do nothing if there is no view
            return
        view.erase_status("000_ECC")

    @staticmethod
    def erase_phantoms(tag):
        """Erase phantoms for the current view.

        Args:
            tag (str): tag of the phantom set to erase
        """
        SublBridge.erase_phantoms.__doc__
        SublBridge.active_view().erase_phantoms(tag)

    @staticmethod
    def active_view():
        """Get the active view.
        Returns:
            View: Active view
        """
        return sublime.active_window().active_view()

    @staticmethod
    def active_view_id():
        """Get the id of the active view.
        Returns:
            int: buffer id of the active view
        """
        return SublBridge.active_view().buffer_id()

    @staticmethod
    def cursor_pos(view, pos=None):
        """Get current cursor position.
        Args:
            view (sublime.View): current view
            pos (int, optional): given position. First selection by default.
        Returns:
            CursorPosition: 1-based row/col position, or None if there is
                no selection to read from
        """
        if not pos:
            # NOTE(review): this falsy check means an explicit pos of 0
            # (start of file) is treated as "no position" and the selection
            # is used instead - confirm whether that is intended.
            pos = view.sel()
            if len(pos) < 1:
                # something is wrong
                return None
            # we care about the first position
            pos = pos[0].a
        (row, col) = view.rowcol(pos)
        return CursorPosition(row + 1, col + 1)

    @staticmethod
    def get_line(view, pos=None):
        """Get the line at the given position (or cursor) as text.

        Args:
            view (sublime.View): current view
            pos (int, optional): text point; defaults to current selection
        Returns:
            str: text of that line
        """
        pos = SublBridge.cursor_pos(view, pos)
        point_on_line = view.text_point(pos.row, 0)
        line = view.line(point_on_line)
        return view.substr(line)

    @staticmethod
    def next_line(view):
        """Get next line as text.
        Args:
            view (sublime.View): current view
        Returns:
            str: text that the next line contains
        """
        pos = SublBridge.cursor_pos(view)
        point_on_next_line = view.text_point(pos.row + 1, 0)
        line = view.line(point_on_next_line)
        return view.substr(line)

    @staticmethod
    def format_completions(completions, hide_default_completions):
        """Get completions. Manage hiding default ones.
        Args:
            completions (list): completions to show
            hide_default_completions (bool): True if we hide default ones
        Returns:
            tuple or list: (completions, flags) when defaults are hidden,
                otherwise the bare completions list
        """
        if completions and hide_default_completions:
            _log.debug("Hiding default completions")
            return (completions, SublBridge.NO_DEFAULT_COMPLETIONS)
        else:
            _log.debug("Adding clang completions to default ones")
            return completions

    @staticmethod
    def show_auto_complete(view):
        """Reopen completion popup.
        It therefore subsequently calls
        EasyClangComplete.on_query_completions(...)
        Args:
            view (sublime.View): view to open completion window in
        """
        _log.debug("reload completion tooltip")
        view.run_command('hide_auto_complete')
        # NOTE(review): 'next_competion_if_showing' is misspelled (sublime's
        # documented key is 'next_completion_if_showing'), so this option is
        # likely ignored by sublime - confirm before renaming the key.
        view.run_command('auto_complete', {
            'disable_auto_insert': True,
            'api_completions_only': False,
            'next_competion_if_showing': False})

    @staticmethod
    def show_error_dialog(message):
        """Show an error message dialog."""
        sublime.error_message("EasyClangComplete:\n\n" + message)

    # Captures the syntax name from a view's syntax file path,
    # e.g. ".../C++.sublime-syntax" -> "C++".
    SYNTAX_REGEX = re.compile(r"\/([^\/]+)\.(?:tmLanguage|sublime-syntax)")

    LANG_TAG = "lang"
    SYNTAXES_TAG = "syntaxes"
    LANG_C_TAG = "C"
    LANG_CPP_TAG = "CPP"
    LANG_OBJECTIVE_C_TAG = "OBJECTIVE_C"
    LANG_OBJECTIVE_CPP_TAG = "OBJECTIVE_CPP"
    LANG_TAGS = [LANG_C_TAG, LANG_CPP_TAG,
                 LANG_OBJECTIVE_C_TAG, LANG_OBJECTIVE_CPP_TAG]
    # Maps internal language tags to lower-case language names.
    LANG_NAMES = {
        LANG_C_TAG: 'c',
        LANG_CPP_TAG: 'c++',
        LANG_OBJECTIVE_CPP_TAG: 'objective-c++',
        LANG_OBJECTIVE_C_TAG: 'objective-c'
    }

    @staticmethod
    def get_view_lang(view, settings_storage):
        """Get language from view description.
        Args:
            view (sublime.View): Current view
            settings_storage (SettingsStorage): ECC settings for the view
        Returns:
            tuple: (lang_tag, lang_name); (None, None) if nothing matched
        """
        syntax = SublBridge.get_view_syntax(view)
        for lang_tag, syntaxes in settings_storage.valid_lang_syntaxes.items():
            if syntax in syntaxes and lang_tag in SublBridge.LANG_NAMES:
                return lang_tag, SublBridge.LANG_NAMES[lang_tag]
        _log.debug("ECC does nothing for language syntax: '%s'", syntax)
        return None, None

    @staticmethod
    def get_view_syntax(view):
        """Get syntax from view description.
        Args:
            view (sublime.View): Current view
        Returns:
            str: syntax, e.g. "C", "C++"; None if it cannot be determined
        """
        try:
            syntax = re.findall(SublBridge.SYNTAX_REGEX,
                                view.settings().get('syntax'))
            if len(syntax) > 0:
                return syntax[0]
        except TypeError as e:
            # if the view is killed while this is being run, an exception is
            # thrown. Let's deal with it gracefully.
            _log.error("error while getting current language: '%s'", e)
        return None

    @staticmethod
    def has_valid_syntax(view, settings_storage):
        """Check if syntax is valid for this plugin.
        Args:
            view (sublime.View): current view
            settings_storage (SettingsStorage): ECC settings for this view
        Returns:
            bool: True if valid, False otherwise
        """
        lang_tag, lang = SublBridge.get_view_lang(view, settings_storage)
        if not lang:
            # We could not determine the language from syntax. Means the syntax
            # is not valid for us.
            return False
        return True

    @staticmethod
    def is_valid_view(view):
        """Check whether the given view is one we can and want to handle.
        Args:
            view (sublime.View): view to check
        Returns:
            bool: True if we want to handle this view, False otherwise
        """
        from os import path
        if not view:
            _log.debug("view is None")
            return False
        if not view.file_name():
            _log.debug("view file_name is None")
            return False
        if view.is_scratch():
            _log.debug("view is scratch view")
            return False
        if view.buffer_id() == 0:
            _log.debug("view buffer id is 0")
            return False
        if not path.exists(view.file_name()):
            _log.debug("view file_name does not exist in system")
            return False
        return True

    @staticmethod
    def get_pos_status(point, view, settings):
        """Check if the cursor focuses a valid trigger.
        Args:
            point (int): position of the cursor in the file as defined by subl
            view (sublime.View): current view
            settings (TYPE): Description
        Returns:
            PosStatus: status for this position
        """
        trigger_length = 1
        word_on_the_left = view.substr(view.word(point - trigger_length))
        if word_on_the_left.isdigit():
            # don't autocomplete digits
            _log.debug("trying to autocomplete digit, are we? Not allowed.")
            return PosStatus.WRONG_TRIGGER
        # slightly counterintuitive `view.substr` returns ONE character
        # to the right of given point.
        curr_char = view.substr(point - trigger_length)
        wrong_trigger_found = False
        for trigger in settings.triggers:
            # compare to the last char of a trigger
            if curr_char == trigger[-1]:
                trigger_length = len(trigger)
                prev_char = view.substr(point - trigger_length)
                if prev_char == trigger[0]:
                    _log.debug("matched trigger '%s'.", trigger)
                    return PosStatus.COMPLETION_NEEDED
                else:
                    _log.debug("wrong trigger '%s%s'.", prev_char, curr_char)
                    wrong_trigger_found = True
        if wrong_trigger_found:
            # no correct trigger found, but a wrong one fired instead
            _log.debug("wrong trigger fired")
            return PosStatus.WRONG_TRIGGER
        if settings.autocomplete_all:
            return PosStatus.COMPLETION_NEEDED
        this_line = SublBridge.get_line(view, point)
        if this_line.startswith('#include'):
            _log.debug("completing an include")
            return PosStatus.COMPLETE_INCLUDES
        # if nothing fired we don't need to do anything
        _log.debug("no completions needed")
        return PosStatus.COMPLETION_NOT_NEEDED
class PosStatus:
    """Enum-like container of completion-status codes."""

    # completion should be triggered at this position
    COMPLETION_NEEDED = 0
    # nothing to do at this position
    COMPLETION_NOT_NEEDED = 1
    # a trigger-like character was typed, but no configured trigger matched
    WRONG_TRIGGER = 2
    # the cursor is on an '#include' line
    COMPLETE_INCLUDES = 3
class CursorPosition:
    """Stores a cursor position, kept internally as 0-based row/col."""

    def __init__(self, row, col):
        """Initialize from 1-based row and column as seen in the file."""
        self.row = row - 1
        self.col = col - 1

    def file_row(self):
        """Get 1-based row index."""
        return self.row + 1

    def file_col(self):
        """Get 1-based column index."""
        return self.col + 1

    def location(self, view):
        """Return the cursor position as a sublime text location."""
        return view.text_point(self.row, self.col)
|
<filename>versioneer.py
"""versioneer.py
(like a rocketeer, but for versions)
* https://github.com/warner/python-versioneer
* <NAME>
* License: Public Domain
* Version: 0.7+
This file helps distutils-based projects manage their version number by just
creating version-control tags.
For developers who work from a VCS-generated tree (e.g. 'git clone' etc),
each 'setup.py version', 'setup.py build', 'setup.py sdist' will compute a
version number by asking your version-control tool about the current
checkout. The version number will be written into a generated _version.py
file of your choosing, where it can be included by your __init__.py
For users who work from a VCS-generated tarball (e.g. 'git archive'), it will
compute a version number by looking at the name of the directory created when
the tarball is unpacked. This conventionally includes both the name of the
project and a version number.
For users who work from a tarball built by 'setup.py sdist', it will get a
version number from a previously-generated _version.py file.
As a result, loading code directly from the source tree will not result in a
real version. If you want real versions from VCS trees (where you frequently
update from the upstream repository, or do new development), you will need to
do a 'setup.py version' after each update, and load code from the build/
directory.
You need to provide this code with a few configuration values:
versionfile_source:
A project-relative pathname into which the generated version strings
should be written. This is usually a _version.py next to your project's
main __init__.py file. If your project uses src/myproject/__init__.py,
this should be 'src/myproject/_version.py'. This file should be checked
in to your VCS as usual: the copy created below by 'setup.py
update_files' will include code that parses expanded VCS keywords in
generated tarballs. The 'build' and 'sdist' commands will replace it with
a copy that has just the calculated version string.
versionfile_build:
Like versionfile_source, but relative to the build directory instead of
the source directory. These will differ when your setup.py uses
'package_dir='. If you have package_dir={'myproject': 'src/myproject'},
then you will probably have versionfile_build='myproject/_version.py' and
versionfile_source='src/myproject/_version.py'.
tag_prefix: a string, like 'PROJECTNAME-', which appears at the start of all
VCS tags. If your tags look like 'myproject-1.2.0', then you
should use tag_prefix='myproject-'. If you use unprefixed tags
like '1.2.0', this should be an empty string.
parentdir_prefix: a string, frequently the same as tag_prefix, which
appears at the start of all unpacked tarball filenames. If
your tarball unpacks into 'myproject-1.2.0', this should
be 'myproject-'.
To use it:
1: include this file in the top level of your project
2: make the following changes to the top of your setup.py:
import versioneer
versioneer.versionfile_source = 'src/myproject/_version.py'
versioneer.versionfile_build = 'myproject/_version.py'
versioneer.tag_prefix = '' # tags are like 1.2.0
versioneer.parentdir_prefix = 'myproject-' # dirname like 'myproject-1.2.0'
3: add the following arguments to the setup() call in your setup.py:
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
4: run 'setup.py update_files', which will create _version.py, and will
append the following to your __init__.py:
from _version import __version__
5: modify your MANIFEST.in to include versioneer.py
6: add both versioneer.py and the generated _version.py to your VCS
"""
import os, sys, re
from distutils.core import Command
from distutils.command.sdist import sdist as _sdist
from distutils.command.build import build as _build
# Configuration values: the project's setup.py must assign these before
# calling get_version()/get_versions() (see the module docstring above).
versionfile_source = None  # project-relative path of the generated _version.py
versionfile_build = None  # same file, but relative to the build directory
tag_prefix = None  # prefix of VCS tags, e.g. "myproject-" ("" for bare tags)
parentdir_prefix = None  # prefix of unpacked-tarball directory names
VCS = "git"  # the only VCS this file supports
IN_LONG_VERSION_PY = False  # False here; True in the generated _version.py
LONG_VERSION_PY = '''
IN_LONG_VERSION_PY = True
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (build by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.7+ (https://github.com/warner/python-versioneer)
# these strings will be replaced by git during git-archive
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
import subprocess
import sys
def run_command(args, cwd=None, verbose=False):
try:
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd)
except EnvironmentError:
e = sys.exc_info()[1]
if verbose:
print("unable to run %%s" %% args[0])
print(e)
return None
stdout = p.communicate()[0].strip()
if sys.version >= '3':
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% args[0])
return None
return stdout
import sys
import re
import os.path
def get_expanded_variables(versionfile_source):
# the code embedded in _version.py can just fetch the value of these
# variables. When used from setup.py, we don't want to import
# _version.py, so we do it with a regexp instead. This function is not
# used from _version.py.
variables = {}
try:
for line in open(versionfile_source,"r").readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
variables["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
variables["full"] = mo.group(1)
except EnvironmentError:
pass
return variables
def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
refnames = variables["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("variables are unexpanded, not using")
return {} # unexpanded, so not in an unpacked git-archive tarball
refs = set([r.strip() for r in refnames.strip("()").split(",")])
for ref in list(refs):
if not re.search(r'\d', ref):
if verbose:
print("discarding '%%s', no digits" %% ref)
refs.discard(ref)
# Assume all version tags have a digit. git's %%d expansion
# behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us
# distinguish between branches and tags. By ignoring refnames
# without digits, we filter out many common branch names like
# "release" and "stabilization", as well as "HEAD" and "master".
if verbose:
print("remaining refs: %%s" %% ",".join(sorted(refs)))
for ref in sorted(refs):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return { "version": r,
"full": variables["full"].strip() }
# no suitable tags, so we use the full revision id
if verbose:
print("no suitable tags, using full revision id")
return { "version": variables["full"].strip(),
"full": variables["full"].strip() }
def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
# this runs 'git' from the root of the source tree. That either means
# someone ran a setup.py command (and this code is in versioneer.py, so
# IN_LONG_VERSION_PY=False, thus the containing directory is the root of
# the source tree), or someone ran a project-specific entry point (and
# this code is in _version.py, so IN_LONG_VERSION_PY=True, thus the
# containing directory is somewhere deeper in the source tree). This only
# gets called if the git-archive 'subst' variables were *not* expanded,
# and _version.py hasn't already been rewritten with a short version
# string, meaning we're inside a checked out source tree.
try:
here = os.path.abspath(__file__)
except NameError:
# some py2exe/bbfreeze/non-CPython implementations don't do __file__
return {} # not always correct
# versionfile_source is the relative path from the top of the source tree
# (where the .git directory might live) to this file. Invert this to find
# the root from __file__.
root = here
if IN_LONG_VERSION_PY:
for i in range(len(versionfile_source.split("/"))):
root = os.path.dirname(root)
else:
root = os.path.dirname(here)
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %%s" %% root)
return {}
GIT = "git"
if sys.platform == "win32":
GIT = "git.cmd"
stdout = run_command([GIT, "describe", "--tags", "--dirty", "--always"],
cwd=root)
if stdout is None:
return {}
if not stdout.startswith(tag_prefix):
if verbose:
print("tag '%%s' doesn't start with prefix '%%s'" %% (stdout, tag_prefix))
return {}
tag = stdout[len(tag_prefix):]
stdout = run_command([GIT, "rev-parse", "HEAD"], cwd=root)
if stdout is None:
return {}
full = stdout.strip()
if tag.endswith("-dirty"):
full += "-dirty"
return {"version": tag, "full": full}
def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False):
if IN_LONG_VERSION_PY:
# We're running from _version.py. If it's from a source tree
# (execute-in-place), we can work upwards to find the root of the
# tree, and then check the parent directory for a version string. If
# it's in an installed application, there's no hope.
try:
here = os.path.abspath(__file__)
except NameError:
# py2exe/bbfreeze/non-CPython don't have __file__
return {} # without __file__, we have no hope
# versionfile_source is the relative path from the top of the source
# tree to _version.py. Invert this to find the root from __file__.
root = here
for i in range(len(versionfile_source.split("/"))):
root = os.path.dirname(root)
else:
# we're running from versioneer.py, which means we're running from
# the setup.py in a source tree. sys.argv[0] is setup.py in the root.
here = os.path.abspath(sys.argv[0])
root = os.path.dirname(here)
# Source tarballs conventionally unpack into a directory that includes
# both the project name and a version string.
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '%%s', but '%%s' doesn't start with prefix '%%s'" %%
(root, dirname, parentdir_prefix))
return None
return {"version": dirname[len(parentdir_prefix):], "full": ""}
tag_prefix = "%(TAG_PREFIX)s"
parentdir_prefix = "%(PARENTDIR_PREFIX)s"
versionfile_source = "%(VERSIONFILE_SOURCE)s"
def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
variables = { "refnames": git_refnames, "full": git_full }
ver = versions_from_expanded_variables(variables, tag_prefix, verbose)
if not ver:
ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
if not ver:
ver = versions_from_parentdir(parentdir_prefix, versionfile_source,
verbose)
if not ver:
ver = default
return ver
'''
import subprocess
import sys
def run_command(args, cwd=None, verbose=False):
    """Run an external command and return its stripped stdout.

    Arguments:
        args (list): argv list; args[0] is the executable.  Remember
            shell=False, so use git.cmd on windows, not just git.
        cwd (str, optional): working directory for the child process.
        verbose (bool, optional): print diagnostics on failure.

    Returns:
        str or None: decoded, stripped stdout, or None when the command
        could not be started or exited with a non-zero status.
    """
    try:
        p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd)
    except EnvironmentError:
        # command missing or not executable
        e = sys.exc_info()[1]
        if verbose:
            print("unable to run %s" % args[0])
            print(e)
        return None
    stdout = p.communicate()[0].strip()
    # BUGFIX: compare version_info instead of the string sys.version --
    # lexicographic string comparison is fragile (e.g. a future "10.x").
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % args[0])
        return None
    return stdout
import sys
import re
import os.path
def get_expanded_variables(versionfile_source):
    """Extract git_refnames/git_full values from _version.py textually.

    The code embedded in _version.py can just fetch the value of these
    variables.  When used from setup.py we don't want to import _version.py,
    so we scan the file with a regexp instead.  This function is not used
    from _version.py.

    Returns:
        dict: possibly-empty mapping with "refnames" and/or "full" keys.
    """
    variables = {}
    try:
        # BUGFIX: use a context manager so the file handle is always closed
        # (the original leaked the open handle).
        with open(versionfile_source, "r") as f:
            for line in f:
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        variables["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        variables["full"] = mo.group(1)
    except EnvironmentError:
        # a missing/unreadable file simply yields no variables
        pass
    return variables
def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
    """Derive a version dict from expanded git-archive keyword variables."""
    refnames = variables["refnames"].strip()
    if refnames.startswith("$Format"):
        # keywords were never expanded: not an unpacked git-archive tarball
        if verbose:
            print("variables are unexpanded, not using")
        return {}
    refs = set(r.strip() for r in refnames.strip("()").split(","))
    # Assume all version tags have a digit. git's %d expansion behaves like
    # git log --decorate=short and strips out the refs/heads/ and refs/tags/
    # prefixes that would let us distinguish between branches and tags.  By
    # ignoring refnames without digits, we filter out many common branch
    # names like "release" and "stabilization", plus "HEAD" and "master".
    for ref in list(refs):
        if not re.search(r'\d', ref):
            if verbose:
                print("discarding '%s', no digits" % ref)
            refs.discard(ref)
    if verbose:
        print("remaining refs: %s" % ",".join(sorted(refs)))
    full = variables["full"].strip()
    for ref in sorted(refs):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            version = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % version)
            return {"version": version, "full": full}
    # no suitable tags, so fall back to the full revision id
    if verbose:
        print("no suitable tags, using full revision id")
    return {"version": full, "full": full}
def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
    """Compute a version dict by running 'git describe' in the source tree.

    Returns {"version": ..., "full": ...} on success, or {} when git or the
    .git directory is unavailable, or the described tag lacks tag_prefix.
    """
    # this runs 'git' from the root of the source tree. That either means
    # someone ran a setup.py command (and this code is in versioneer.py, so
    # IN_LONG_VERSION_PY=False, thus the containing directory is the root of
    # the source tree), or someone ran a project-specific entry point (and
    # this code is in _version.py, so IN_LONG_VERSION_PY=True, thus the
    # containing directory is somewhere deeper in the source tree). This only
    # gets called if the git-archive 'subst' variables were *not* expanded,
    # and _version.py hasn't already been rewritten with a short version
    # string, meaning we're inside a checked out source tree.
    try:
        here = os.path.abspath(__file__)
    except NameError:
        # some py2exe/bbfreeze/non-CPython implementations don't do __file__
        return {}  # not always correct
    # versionfile_source is the relative path from the top of the source tree
    # (where the .git directory might live) to this file. Invert this to find
    # the root from __file__.
    root = here
    if IN_LONG_VERSION_PY:
        for i in range(len(versionfile_source.split("/"))):
            root = os.path.dirname(root)
    else:
        root = os.path.dirname(here)
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        return {}
    GIT = "git"
    if sys.platform == "win32":
        # shell=False, so the .cmd wrapper must be named explicitly
        GIT = "git.cmd"
    stdout = run_command([GIT, "describe", "--tags", "--dirty", "--always"],
                         cwd=root)
    if stdout is None:
        return {}
    if not stdout.startswith(tag_prefix):
        if verbose:
            print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
        return {}
    tag = stdout[len(tag_prefix):]
    stdout = run_command([GIT, "rev-parse", "HEAD"], cwd=root)
    if stdout is None:
        return {}
    full = stdout.strip()
    if tag.endswith("-dirty"):
        # propagate the dirty marker onto the full revision id as well
        full += "-dirty"
    return {"version": tag, "full": full}
def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False):
    """Derive a version from the name of the directory containing the source.

    Source tarballs conventionally unpack into 'NAME-VERSION/'; when the
    directory name starts with parentdir_prefix, the remainder is the
    version.  Returns {} when no version can be determined.
    """
    if IN_LONG_VERSION_PY:
        # We're running from _version.py. If it's from a source tree
        # (execute-in-place), we can work upwards to find the root of the
        # tree, and then check the parent directory for a version string. If
        # it's in an installed application, there's no hope.
        try:
            here = os.path.abspath(__file__)
        except NameError:
            # py2exe/bbfreeze/non-CPython don't have __file__
            return {}  # without __file__, we have no hope
        # versionfile_source is the relative path from the top of the source
        # tree to _version.py. Invert this to find the root from __file__.
        root = here
        for i in range(len(versionfile_source.split("/"))):
            root = os.path.dirname(root)
    else:
        # we're running from versioneer.py, which means we're running from
        # the setup.py in a source tree. sys.argv[0] is setup.py in the root.
        here = os.path.abspath(sys.argv[0])
        root = os.path.dirname(here)
    # Source tarballs conventionally unpack into a directory that includes
    # both the project name and a version string.
    dirname = os.path.basename(root)
    if not dirname.startswith(parentdir_prefix):
        if verbose:
            print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
                  (root, dirname, parentdir_prefix))
        # BUGFIX: return {} instead of None so every versions_from_* helper
        # shares the same "falsy dict on failure" contract (callers only
        # truth-test the result, so this is backward compatible).
        return {}
    return {"version": dirname[len(parentdir_prefix):], "full": ""}
import sys
def do_vcs_install(versionfile_source, ipy):
    """git-add the versioneer files and ensure 'export-subst' is configured.

    Stages versioneer.py, the generated _version.py and the modified
    __init__.py, then appends an 'export-subst' attribute for
    versionfile_source to .gitattributes when not already present (this
    makes 'git archive' expand the $Format$ keywords).
    """
    GIT = "git"
    if sys.platform == "win32":
        GIT = "git.cmd"
    run_command([GIT, "add", "versioneer.py"])
    run_command([GIT, "add", versionfile_source])
    run_command([GIT, "add", ipy])
    present = False
    try:
        # BUGFIX: context managers so the handles are closed even when a
        # read/write raises (the original leaked them on error paths).
        with open(".gitattributes", "r") as f:
            for line in f:
                if line.strip().startswith(versionfile_source):
                    if "export-subst" in line.strip().split()[1:]:
                        present = True
    except EnvironmentError:
        # no .gitattributes yet -- we will create it below
        pass
    if not present:
        with open(".gitattributes", "a+") as f:
            f.write("%s export-subst\n" % versionfile_source)
        run_command([GIT, "add", ".gitattributes"])
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.7+) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
version_version = '%(version)s'
version_full = '%(full)s'
def get_versions(default={}, verbose=False):
return {'version': version_version, 'full': version_full}
"""
DEFAULT = {"version": "unknown", "full": "unknown"}
def versions_from_file(filename):
    """Read version_version/version_full out of a generated _version.py.

    Returns:
        dict: {"version": ..., "full": ...} (possibly partial or empty when
        the file is missing or contains no version lines).
    """
    versions = {}
    try:
        # BUGFIX: context manager -- the original never closed the file.
        with open(filename) as f:
            for line in f:
                mo = re.match("version_version = '([^']+)'", line)
                if mo:
                    versions["version"] = mo.group(1)
                mo = re.match("version_full = '([^']+)'", line)
                if mo:
                    versions["full"] = mo.group(1)
    except EnvironmentError:
        return versions
    return versions
def write_to_version_file(filename, versions):
    """Overwrite *filename* with SHORT_VERSION_PY rendered from *versions*."""
    with open(filename, "w") as f:
        f.write(SHORT_VERSION_PY % versions)
    print("set %s to '%s'" % (filename, versions["version"]))
def get_best_versions(versionfile, tag_prefix, parentdir_prefix,
                      default=DEFAULT, verbose=False):
    """Return a {'version': ..., 'full': ...} dict from the best source.

    Extract the version from the first of: expanded keyword variables in
    _version.py, a previously-generated _version.py, 'git describe', or the
    parent directory name.  This is meant to work for developers using a
    source checkout, for users of a tarball created by 'setup.py sdist', and
    for users of a tarball/zipball created by 'git archive' or github's
    download-from-tag feature.
    """
    # NOTE(review): the module-level `versionfile_source` global is consulted
    # here rather than the `versionfile` parameter; callers pass the same
    # value for both, so this behavior is preserved as-is.
    variables = get_expanded_variables(versionfile_source)
    if variables:
        ver = versions_from_expanded_variables(variables, tag_prefix)
        if ver:
            if verbose: print("got version from expanded variable %s" % ver)
            return ver
    ver = versions_from_file(versionfile)
    if ver:
        if verbose: print("got version from file %s %s" % (versionfile, ver))
        return ver
    ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
    if ver:
        if verbose: print("got version from git %s" % ver)
        return ver
    ver = versions_from_parentdir(parentdir_prefix, versionfile_source, verbose)
    if ver:
        if verbose: print("got version from parentdir %s" % ver)
        return ver
    # BUGFIX: report the value actually returned -- `ver` is falsy here, so
    # the old message printed an empty dict/None instead of the default.
    if verbose: print("got version from default %s" % default)
    return default
def get_versions(default=DEFAULT, verbose=False):
    """Validate the module configuration, then delegate to get_best_versions()."""
    configured = {
        "versionfile_source": versionfile_source,
        "tag_prefix": tag_prefix,
        "parentdir_prefix": parentdir_prefix,
    }
    for key in ("versionfile_source", "tag_prefix", "parentdir_prefix"):
        assert configured[key] is not None, "please set versioneer.%s" % key
    return get_best_versions(versionfile_source, tag_prefix, parentdir_prefix,
                             default=default, verbose=verbose)
def get_version(verbose=False):
    """Shorthand returning just the version string."""
    versions = get_versions(verbose=verbose)
    return versions["version"]
class cmd_version(Command):
    """distutils command that prints the computed version string."""

    description = "report generated version string"
    user_options = []
    boolean_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        print("Version is currently: %s" % get_version(verbose=True))
class cmd_build(_build):
    """'build' command that rewrites _version.py in the build directory."""

    # BUGFIX: the method was named 'run2', so distutils never invoked it and
    # built trees kept the raw template _version.py.  It must be named 'run'
    # to actually override _build.run.
    def run(self):
        versions = get_versions(verbose=True)
        _build.run(self)
        # now locate _version.py in the new build/ directory and replace it
        # with an updated value
        target_versionfile = os.path.join(self.build_lib, versionfile_build)
        print("UPDATING %s" % target_versionfile)
        os.unlink(target_versionfile)
        with open(target_versionfile, "w") as f:
            f.write(SHORT_VERSION_PY % versions)
class cmd_sdist(_sdist):
    """'sdist' command that freezes the computed version into the tarball."""

    def run(self):
        # compute the version before building so the metadata is correct
        versions = get_versions(verbose=True)
        self._versioneer_generated_versions = versions
        # unless we update this, the command will keep using the old version
        self.distribution.metadata.version = versions["version"]
        return _sdist.run(self)

    def make_release_tree(self, base_dir, files):
        _sdist.make_release_tree(self, base_dir, files)
        # now locate _version.py in the new base_dir directory (remembering
        # that it may be a hardlink) and replace it with an updated value
        target_versionfile = os.path.join(base_dir, versionfile_source)
        print("UPDATING %s" % target_versionfile)
        # unlink first so a hardlinked source-tree copy is not clobbered
        os.unlink(target_versionfile)
        f = open(target_versionfile, "w")
        f.write(SHORT_VERSION_PY % self._versioneer_generated_versions)
        f.close()
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
class cmd_update_files(Command):
    """Create _version.py and append the version snippet to __init__.py."""

    description = "modify __init__.py and create _version.py"
    user_options = []
    boolean_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        ipy = os.path.join(os.path.dirname(versionfile_source), "__init__.py")
        print(" creating %s" % versionfile_source)
        # BUGFIX: use context managers throughout -- the original left file
        # handles open (and unclosed on exceptions).
        with open(versionfile_source, "w") as f:
            # render the long template with this project's configuration
            f.write(LONG_VERSION_PY % {"DOLLAR": "$",
                                       "TAG_PREFIX": tag_prefix,
                                       "PARENTDIR_PREFIX": parentdir_prefix,
                                       "VERSIONFILE_SOURCE": versionfile_source,
                                       })
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
        do_vcs_install(versionfile_source, ipy)
def get_cmdclass():
    """Return the cmdclass mapping to pass to setup()."""
    return {
        'version': cmd_version,
        'update_files': cmd_update_files,
        'build': cmd_build,
        'sdist': cmd_sdist,
    }
|
#!/usr/bin/env python
import sys
import logging
from pkg_resources import Requirement
from setuptools.package_index import PackageIndex
_logger = logging.getLogger()
_pypi = 'http://pypi.python.org/simple'
def _enable_console(level):
    """Configure logging to receive log messages at the console.

    Arguments:
        level: logging level (e.g. logging.INFO) for the console handler.
    """
    # NOTE: the original declared 'global _logger', but the global is only
    # read here, never rebound, so the declaration was dead and is dropped.
    # define a Handler which writes messages to sys.stderr
    console = logging.StreamHandler()
    console.setLevel(level)
    console.setFormatter(logging.Formatter('%(message)s'))
    _logger.addHandler(console)
def grab_distrib(req, index=None, dest='.', search_pypi=True):
    """\
Downloads a distribution from the given package index(s) based on the
given requirement string(s). Downloaded distributions are placed in the
specified destination or the current directory if no destination is
specified. If a distribution cannot be found in the given index(s), the
Python Package Index will be searched as a last resort unless
search_pypi is False. This does NOT install the distribution.
    """
    # BUGFIX: 'basestring' is Python 2 only -- use 'str' so this also runs
    # on Python 3.
    # allow multiple package indexes to be specified
    if index is None:
        index = []
    elif isinstance(index, str):
        index = [index]
    # else just assume it's some iterator of indexes
    # add PyPI as last place to search if it wasn't already specified
    if search_pypi and _pypi not in index and (_pypi+'/') not in index:
        index.append(_pypi)
    # allow specification of single or multiple requirements
    if isinstance(req, str):
        reqs = [Requirement.parse(req)]
    elif isinstance(req, Requirement):
        reqs = [req]
    else:
        reqs = []
        for rr in req:
            if isinstance(rr, str):
                reqs.append(Requirement.parse(rr))
            elif isinstance(rr, Requirement):
                reqs.append(rr)
            else:
                raise TypeError("supplied requirement arg must be a string"+
                                " or a Requirement, but given type is %s" %
                                type(rr))
    index_list = [PackageIndex(idx, search_path=[]) for idx in index]
    fetched = None
    for requirement in reqs:
        fetched = None
        for idx in index_list:
            _logger.info('Looking for %s at package index %s' % (requirement, idx.index_url))
            fetched = idx.download(requirement, dest)
            if fetched:
                _logger.info('    %s successfully downloaded' % fetched)
                break
        else:
            _logger.error("couldn't find distrib for %s" % requirement)
    # NOTE(review): only the last requirement's download result is returned,
    # as in the original -- callers relying on this behavior are unaffected.
    return fetched
if __name__ == '__main__':
    # Command-line entry point: parse options, configure logging, then
    # download the requested distributions with grab_distrib().
    from optparse import OptionParser
    usage = "%prog [options] req(s)"
    parser = OptionParser(usage=usage, description=grab_distrib.__doc__)
    parser.add_option("-i", "--index", action="append", type="string", dest="index",
                      help="package index url (separate -i for each one)", default=[])
    parser.add_option("-d", "--dest", action="store", type="string", dest="dest",
                      help="destination directory", default='.')
    parser.add_option("-q", "--quiet", action="store_true", dest="quiet",
                      help="no output")
    parser.add_option("--nopypi", action="store_true", dest="nopypi",
                      help="do not search PyPI")
    (options, args) = parser.parse_args(sys.argv[1:])
    if len(args) < 1:
        # no requirements supplied: show usage and exit with an error code
        parser.print_help()
        sys.exit(1)
    if options.quiet:
        loglevel = logging.CRITICAL
    else:
        loglevel = logging.INFO
    _logger.setLevel(loglevel)
    _enable_console(loglevel)
    # the positional args are requirement strings, accepted as a list
    grab_distrib(req=args, index=options.index, dest=options.dest,
                 search_pypi=not options.nopypi)
grab_distrib.__doc__ += """
Requirements may be supplied as strings or as Requirement objects.
"""
|
import os
from typing import Optional, Union, List
import numpy as np
from persia.prelude import _PersiaBatch
from persia.logger import get_default_logger
_logger = get_default_logger()
# Maximum batch_size supported.
MAX_BATCH_SIZE = 65535
# Skip all PERSIA data checks except batch size.
# Raise RuntimeError when data does not meet requirement, such as
# type, dtype or shape mismatch.
SKIP_CHECK_PERSIA_DATA = bool(int(os.environ.get("SKIP_CHECK_PERSIA_DATA", "0")))
_ND_ARRAY_SUPPORT_TYPE = set(
[np.bool, np.int8, np.int16, np.int32, np.int64, np.float32, np.float64, np.uint8]
)
def _id_type_data_check(id_type_feature: np.ndarray, feature_name: str):
"""Check the type, dimension and dtype for id_type_feature.
Arguments:
id_type_feature (np.ndarray): id_type_feature array.
feature_name (str): Name of id_type_feature
"""
assert isinstance(
id_type_feature, np.ndarray
), f"expected id_type_feature: {feature_name} type to be np.ndarray but got tpye: {type(id_type_feature)}"
assert (
id_type_feature.ndim == 1
), f"expected id_type_feature: {feature_name} ndim equal to one but got ndim: {id_type_feature.ndim}"
assert (
id_type_feature.dtype == np.uint64
), f"expected id_type_feature: {feature_name} dtype as np.uint64 but got dtype: {id_type_feature.dtype}"
def _ndarray_check(data: np.ndarray, data_name) -> None:
    r"""Validate type, dtype and rank of a dense ndarray (asserts on failure).

    Arguments:
        data (np.ndarray): Data that needs to be checked.
        data_name (str): Name of data.

    Raises:
        AssertionError: when data is not an ndarray, has an unsupported
            dtype, or is zero-dimensional.
    """
    # FIX: the original annotated '-> bool' but nothing is ever returned;
    # failures are communicated purely through AssertionError.
    assert isinstance(
        data, np.ndarray
    ), f"input data {data_name}, type: {type(data)} no match numpy ndarray "
    assert (
        data.dtype.type in _ND_ARRAY_SUPPORT_TYPE
    ), f"np.array only support dtype {_ND_ARRAY_SUPPORT_TYPE} but got {data_name} dtype {data.dtype}"
    # BUGFIX(message): the condition requires ndim > 0, but the old message
    # claimed "greater than one".
    assert (
        data.ndim > 0
    ), f"{data_name} ndarray got ndim: {data.ndim} expect ndim greater than zero"
def _batch_size_check(
batch_size: int, target_batch_size: int, data_type: str, name: str
):
"""Check if batch size is equal to target_batch_size and no larger than to MAX_BATCH_SIZE"""
assert (
batch_size == target_batch_size
), f"expected {data_type}: {name} batch_size equal to {target_batch_size} but got {batch_size}"
assert (
batch_size <= MAX_BATCH_SIZE
), f"expected {data_type}:{name} batch_size <= MAX_BATCH_SIZE: {MAX_BATCH_SIZE} but got {batch_size}"
class IDTypeFeature:
    """IDTypeFeature is a lil sparse matrix: one uint64 id array per sample."""

    def __init__(self, name: str, data: List[np.ndarray]):
        """
        Arguments:
            name (str): Name of IDTypeFeature.
            data (List[np.ndarray]): IDTypeFeature data. A lil sparse matrix.
                Requires np.uint64 as type for its elements.
        """
        if not SKIP_CHECK_PERSIA_DATA:
            # BUGFIX: the original used a bare generator expression
            # `(_id_type_data_check(x, name) for x in data)` that was never
            # consumed, so no sample was ever actually validated.
            for sample in data:
                _id_type_data_check(sample, name)
        self.name = name
        self.data = data

    @property
    def batch_size(self) -> int:
        # one ndarray per sample, so the list length is the batch size
        return len(self.data)
class IDTypeFeatureWithSingleID:
    """Special form of IDTypeFeature holding exactly one id per sample in the batch."""

    def __init__(self, name: str, data: np.ndarray):
        """
        Arguments:
            name (str): Name of IDTypeFeatureWithSingleID.
            data (np.ndarray): IDTypeFeatureWithSingleID data.  Requires
                np.uint64 as type for its elements.
        """
        if not SKIP_CHECK_PERSIA_DATA:
            _id_type_data_check(data, name)
        self.name = name
        self.data = data

    @property
    def batch_size(self) -> int:
        # one id per sample, so the array length is the batch size
        return len(self.data)
class _NdarrayDataBase:
DEFAULT_NAME = "ndarray_base"
def __init__(self, data: np.ndarray, name: str = None):
"""
Arguments:
data (np.ndarray): Numpy array.
name (str, optional): Name of data.
"""
self.data = data
self._name = name
if not SKIP_CHECK_PERSIA_DATA:
_ndarray_check(self.data, name)
@property
def batch_size(self) -> int:
return self.data.shape[0]
@property
def name(self):
return self._name or self.DEFAULT_NAME
def __len__(self):
return len(self.data)
class Label(_NdarrayDataBase):
    """Dense label data; unnamed instances report the name "label_anonymous"."""
    DEFAULT_NAME = "label_anonymous"
class NonIDTypeFeature(_NdarrayDataBase):
    """Dense feature data; unnamed instances report "non_id_type_feature_anonymous"."""
    DEFAULT_NAME = "non_id_type_feature_anonymous"
class PersiaBatch:
r"""`PersiaBatch` is the type of dataset used internally in Persia.
It wraps the id_type_features, non_id_type_features, labels and meta bytes data.
Example:
>>> import time
>>> import json
>>> ...
>>> import numpy as np
>>> ...
>>> from persia.embedding.data import PersiaBatch, NonIDTypeFeature, IDTypeFeature, Label
>>> ...
>>> batch_size = 1024
>>> non_id_type_feature = NonIDTypeFeature(np.zeros((batch_size, 2), dtype=np.float32))
>>> label = Label(np.ones((batch_size, 2), dtype=np.float32))
>>> id_type_feature_num = 3
>>> id_type_feature_max_sample_size = 100
>>> id_type_features = [
...     IDTypeFeature(f"feature_{idx}", [np.ones((np.random.randint(id_type_feature_max_sample_size)), dtype=np.uint64)
...         for _ in range(batch_size)
...     ]) for idx in range(id_type_feature_num)
... ]
>>> meta_info = {
...     "timestamp": time.time(),
...     "weight": 0.9,
... }
>>> meta_bytes = json.dumps(meta_info)
>>> requires_grad = True
>>> persia_batch = PersiaBatch(id_type_features,
... non_id_type_features=[non_id_type_feature],
...     labels=[label], requires_grad=requires_grad,
... meta=meta_bytes
... )
"""
def __init__(
self,
id_type_features: List[Union[IDTypeFeature, IDTypeFeatureWithSingleID]],
non_id_type_features: Optional[List[NonIDTypeFeature]] = None,
labels: Optional[List[Label]] = None,
batch_size: Optional[int] = None,
requires_grad: bool = True,
meta: Optional[bytes] = None,
):
"""
Arguments:
id_type_features (List[Union[IDTypeFeatureWithSingleID, IDTypeFeature]]): Categorical data whose datatype should be uint64.
non_id_type_features (List[NonIdTypeFeature], optional): Dense data.
labels: (List[Label], optional): Labels data.
batch_size (int, optional): Number of samples in each batch. IDTypeFeatures, NonIDTypeFeatures and Labels should have the same batch_size.
requires_grad (bool, optional): Set requires_grad for id_type_features.
meta (bytes, optional): Binary data.
"""
assert len(id_type_features) > 0, "id_type_features should not be empty"
batch_size = batch_size or id_type_features[0].batch_size
self.batch = _PersiaBatch()
for id_type_feature in id_type_features:
_batch_size_check(
id_type_feature.batch_size,
batch_size,
"id_type_feature",
id_type_feature.name,
)
if isinstance(id_type_feature, IDTypeFeatureWithSingleID):
self.batch.add_id_type_feature_with_single_id(
id_type_feature.data, id_type_feature.name
)
elif isinstance(id_type_feature, IDTypeFeature):
self.batch.add_id_type_feature(
id_type_feature.data, id_type_feature.name
)
else:
raise TypeError(
f"expected type of id_type_feature to be Union[IDTypeFeatureWithSingleID, IDTypeFeature] but got {type(id_type_feature)}"
)
if non_id_type_features is not None:
for non_id_type_feature in non_id_type_features:
_batch_size_check(
non_id_type_feature.batch_size,
batch_size,
"non_id_type_feature",
non_id_type_feature.name,
)
self.batch.add_non_id_type_feature(
non_id_type_feature.data,
non_id_type_feature.data.dtype,
non_id_type_feature.name,
)
if labels is not None:
for label in labels:
_batch_size_check(label.batch_size, batch_size, "label", label.name)
self.batch.add_label(label.data, label.data.dtype, label.name)
if meta is not None:
if isinstance(meta, bytes):
self.batch.add_meta(meta)
else:
_logger.warn(
f"expect PersiaBatch.meta type is bytes but got {type(meta)}"
)
self.batch_size = batch_size
self.batch.converted_id_type_features2embedding_tensor(requires_grad)
@property
def data(self) -> _PersiaBatch:
return self.batch
def to_bytes(self) -> bytes:
"""Serialize persia_batch to bytes after checking."""
return self.data.to_bytes()
|
<gh_stars>0
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
  'variables': {
    # Build-configuration flag shared by the targets below.
    'chromium_code': 1,
  },
  'targets': [
    {
      # Static library for the iOS Chrome browser layer.
      'target_name': 'ios_chrome_browser',
      'type': 'static_library',
      'include_dirs': [
        '../..',
      ],
      'dependencies': [
        '../../base/base.gyp:base',
        '../../components/components.gyp:infobars_core',
        '../../components/components.gyp:keyed_service_core',
        '../../components/components.gyp:keyed_service_ios',
        '../../components/components.gyp:leveldb_proto',
        '../../components/components.gyp:suggestions',
        '../../components/components.gyp:translate_core_browser',
        '../../components/components.gyp:translate_ios_browser',
        '../../components/components.gyp:web_resource',
        '../../components/components.gyp:webp_transcode',
        '../../components/components_strings.gyp:components_strings',
        '../../net/net.gyp:net',
        '../../skia/skia.gyp:skia',
        '../../ui/base/ui_base.gyp:ui_base',
        '../../ui/gfx/gfx.gyp:gfx',
        '../../url/url.gyp:url_lib',
        '../provider/ios_provider_chrome.gyp:ios_provider_chrome_browser',
        '../web/ios_web.gyp:ios_web',
        'ios_chrome_resources.gyp:ios_theme_resources_gen',
      ],
      # System frameworks propagated to anything that links this library.
      'link_settings': {
        'libraries': [
          '$(SDKROOT)/System/Library/Frameworks/Accelerate.framework',
          '$(SDKROOT)/System/Library/Frameworks/CoreGraphics.framework',
          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
          '$(SDKROOT)/System/Library/Frameworks/QuartzCore.framework',
          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
        ],
      },
      'sources': [
        'browser/application_context.cc',
        'browser/application_context.h',
        'browser/application_context_impl.cc',
        'browser/application_context_impl.h',
        'browser/arch_util.cc',
        'browser/arch_util.h',
        'browser/browser_state/browser_state_otr_helper.cc',
        'browser/browser_state/browser_state_otr_helper.h',
        'browser/chrome_url_constants.cc',
        'browser/chrome_url_constants.h',
        'browser/infobars/confirm_infobar_controller.h',
        'browser/infobars/confirm_infobar_controller.mm',
        'browser/infobars/infobar.h',
        'browser/infobars/infobar.mm',
        'browser/infobars/infobar_container_ios.h',
        'browser/infobars/infobar_container_ios.mm',
        'browser/infobars/infobar_container_view.h',
        'browser/infobars/infobar_container_view.mm',
        'browser/infobars/infobar_controller.h',
        'browser/infobars/infobar_controller.mm',
        'browser/infobars/infobar_manager_impl.cc',
        'browser/infobars/infobar_manager_impl.h',
        'browser/infobars/infobar_utils.h',
        'browser/infobars/infobar_utils.mm',
        'browser/net/image_fetcher.h',
        'browser/net/image_fetcher.mm',
        'browser/pref_names.cc',
        'browser/pref_names.h',
        'browser/snapshots/snapshot_cache.h',
        'browser/snapshots/snapshot_cache.mm',
        'browser/snapshots/snapshot_manager.h',
        'browser/snapshots/snapshot_manager.mm',
        'browser/snapshots/snapshot_overlay.h',
        'browser/snapshots/snapshot_overlay.mm',
        'browser/snapshots/snapshots_util.h',
        'browser/snapshots/snapshots_util.mm',
        'browser/suggestions/image_fetcher_impl.h',
        'browser/suggestions/image_fetcher_impl.mm',
        'browser/suggestions/suggestions_service_factory.h',
        'browser/suggestions/suggestions_service_factory.mm',
        'browser/translate/after_translate_infobar_controller.h',
        'browser/translate/after_translate_infobar_controller.mm',
        'browser/translate/before_translate_infobar_controller.h',
        'browser/translate/before_translate_infobar_controller.mm',
        'browser/translate/chrome_ios_translate_client.h',
        'browser/translate/chrome_ios_translate_client.mm',
        'browser/translate/never_translate_infobar_controller.h',
        'browser/translate/never_translate_infobar_controller.mm',
        'browser/translate/translate_accept_languages_factory.cc',
        'browser/translate/translate_accept_languages_factory.h',
        'browser/translate/translate_infobar_tags.h',
        'browser/translate/translate_message_infobar_controller.h',
        'browser/translate/translate_message_infobar_controller.mm',
        'browser/translate/translate_service_ios.cc',
        'browser/translate/translate_service_ios.h',
        'browser/ui/animation_util.h',
        'browser/ui/animation_util.mm',
        'browser/ui/image_util.h',
        'browser/ui/image_util.mm',
        'browser/ui/reversed_animation.h',
        'browser/ui/reversed_animation.mm',
        'browser/ui/ui_util.h',
        'browser/ui/ui_util.mm',
        'browser/ui/uikit_ui_util.h',
        'browser/ui/uikit_ui_util.mm',
        'browser/web/dom_altering_lock.h',
        'browser/web/dom_altering_lock.mm',
        'browser/web_resource/ios_web_resource_service.cc',
        'browser/web_resource/ios_web_resource_service.h',
      ],
    },
  ],
}
|
import torch
import torch.nn as nn
import functools
from torch.nn import init
class Unet(nn.Module):
    """Condition-concatenating U-Net.

    The condition vector ``c`` is tiled over the spatial grid and stacked
    onto the input channels, then run through a 4-level stride-2
    encoder/decoder with skip connections.  The final sigmoid keeps the
    output in ``[0, 1]``.
    """

    def __init__(self, in_dim=1, c_dim=3, conv_dim=64, out_dim=1):
        super(Unet, self).__init__()

        def encoder_stage(cin, cout):
            # Conv (stride 2) -> BN -> ReLU: halves the spatial resolution.
            return nn.Sequential(
                nn.Conv2d(cin, cout, kernel_size=3, stride=2, padding=1),
                nn.BatchNorm2d(cout),
                nn.ReLU(inplace=True),
            )

        def decoder_stage(cin, cout):
            # Transposed conv (stride 2) -> BN -> ReLU: doubles the resolution.
            return nn.Sequential(
                nn.ConvTranspose2d(cin, cout, kernel_size=3, stride=2,
                                   padding=1, output_padding=1),
                nn.BatchNorm2d(cout),
                nn.ReLU(inplace=True),
            )

        self.conv1 = encoder_stage(in_dim + c_dim, conv_dim)    # H/2
        self.conv2 = encoder_stage(conv_dim, conv_dim * 2)      # H/4
        self.conv3 = encoder_stage(conv_dim * 2, conv_dim * 4)  # H/8
        self.conv4 = encoder_stage(conv_dim * 4, conv_dim * 8)  # H/16
        # Decoder input widths account for the concatenated skip features.
        self.deconv1 = decoder_stage(conv_dim * 8, conv_dim * 8)
        self.deconv2 = decoder_stage(conv_dim * (8 + 4), conv_dim * 4)
        self.deconv3 = decoder_stage(conv_dim * (4 + 2), conv_dim * 2)
        # Final stage maps to out_dim channels and squashes with a sigmoid.
        self.deconv4 = nn.Sequential(
            nn.ConvTranspose2d(conv_dim * (2 + 1), out_dim, kernel_size=3,
                               stride=2, padding=1, output_padding=1),
            nn.Sigmoid(),
        )

        # Kaiming init for plain convolutions, constant init for batch norms
        # (transposed convolutions keep their default initialization, as in
        # the original implementation).
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                nn.init.kaiming_normal_(module.weight, a=0)
                if getattr(module, 'bias', None) is not None:
                    nn.init.constant_(module.bias.data, 0.0)
            if isinstance(module, nn.BatchNorm2d):
                nn.init.constant_(module.weight, 1)
                nn.init.constant_(module.bias, 0)

    def forward(self, x, c):
        # Broadcast the condition across the spatial dims of x.
        cond = c.view(c.size(0), c.size(1), 1, 1)
        cond = cond.repeat(1, 1, x.size(2), x.size(3))
        e1 = self.conv1(torch.cat([x, cond], dim=1))
        e2 = self.conv2(e1)
        e3 = self.conv3(e2)
        e4 = self.conv4(e3)
        d = self.deconv1(e4)
        d = self.deconv2(torch.cat([e3, d], dim=1))
        d = self.deconv3(torch.cat([e2, d], dim=1))
        return self.deconv4(torch.cat([e1, d], dim=1))
class netD(nn.Module):
    """Discriminator with an auxiliary classification head.

    The pair ``(x, y)`` is concatenated on the channel axis (hence the
    hard-coded 2 input channels), run through ``repeat_num`` stride-2
    convolutions, then split into:
      * ``out_src`` -- a spatial map of real/fake scores, and
      * ``out_cls`` -- per-image class logits of size ``c_dim``.
    """

    def __init__(self, image_size=128, conv_dim=64, c_dim=3, repeat_num=5):
        super(netD, self).__init__()
        body = [
            nn.Conv2d(2, conv_dim, kernel_size=4, stride=2, padding=1),
            nn.LeakyReLU(inplace=True),
        ]
        width = conv_dim
        for _ in range(1, repeat_num):
            body += [
                nn.Conv2d(width, width * 2, kernel_size=4, stride=2, padding=1),
                nn.LeakyReLU(inplace=True),
            ]
            width *= 2
        self.main = nn.Sequential(*body)
        # Spatial extent remaining after repeat_num stride-2 convolutions;
        # conv2 consumes it entirely to produce one logit vector per image.
        final_size = int(image_size / 2 ** repeat_num)
        self.conv1 = nn.Conv2d(width, 1, kernel_size=3, stride=1, padding=1, bias=False)
        self.conv2 = nn.Conv2d(width, c_dim, kernel_size=final_size, bias=False)
        # Kaiming init for convolutions; the InstanceNorm branch is kept for
        # parity with the (currently disabled) normalization layers.
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                nn.init.kaiming_normal_(module.weight, a=0)
                if getattr(module, 'bias', None) is not None:
                    nn.init.constant_(module.bias.data, 0.0)
            if isinstance(module, nn.InstanceNorm2d) and module.bias is not None:
                nn.init.constant_(module.weight, 1)
                nn.init.constant_(module.bias, 0)

    def forward(self, x, y):
        joint = torch.cat([x, y], dim=1)
        h = self.main(joint)
        out_src = self.conv1(h)
        out_cls = self.conv2(h)
        return out_src, out_cls.view(out_cls.size(0), out_cls.size(1))
def get_norm_layer(norm_type='instance'):
    """Return a normalization layer factory.

    Parameters:
        norm_type (str) -- the name of the normalization layer:
                           batch | instance | none

    BatchNorm uses learnable affine parameters and tracks running statistics
    (mean/stddev); InstanceNorm uses neither.  'none' returns None.
    """
    factories = {
        'batch': functools.partial(nn.BatchNorm2d, affine=True,
                                   track_running_stats=True),
        'instance': functools.partial(nn.InstanceNorm2d, affine=False,
                                      track_running_stats=False),
        'none': None,
    }
    if norm_type not in factories:
        raise NotImplementedError('normalization layer [%s] is not found' % norm_type)
    return factories[norm_type]
class ResnetGenerator(nn.Module):
    """Resnet-based generator: stride-2 downsampling, ``n_blocks`` residual
    blocks, then mirrored upsampling back to the input resolution.

    Adapted from Justin Johnson's fast-neural-style architecture
    (https://github.com/jcjohnson/fast-neural-style).
    """

    def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False, n_blocks=6,
                 padding_type='reflect'):
        """Construct a Resnet-based generator.

        Parameters:
            input_nc (int)     -- channels in input images
            output_nc (int)    -- channels in output images
            ngf (int)          -- filters in the last conv layer
            norm_layer         -- normalization layer (class or partial)
            use_dropout (bool) -- whether ResNet blocks use dropout
            n_blocks (int)     -- number of ResNet blocks (>= 0)
            padding_type (str) -- padding in conv layers: reflect | replicate | zero
        """
        assert n_blocks >= 0
        super(ResnetGenerator, self).__init__()
        # InstanceNorm carries no affine shift by default, so convolutions
        # need their own bias in that case.
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == nn.InstanceNorm2d
        else:
            use_bias = norm_layer == nn.InstanceNorm2d
        layers = [
            nn.ReflectionPad2d(3),
            nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0, bias=use_bias),
            norm_layer(ngf),
            nn.ReLU(True),
        ]
        n_downsampling = 2
        channels = ngf
        for _ in range(n_downsampling):  # stride-2 downsampling stages
            layers += [
                nn.Conv2d(channels, channels * 2, kernel_size=3, stride=2, padding=1, bias=use_bias),
                norm_layer(channels * 2),
                nn.ReLU(True),
            ]
            channels *= 2
        for _ in range(n_blocks):  # residual core
            layers.append(ResnetBlock(channels, padding_type=padding_type, norm_layer=norm_layer,
                                      use_dropout=use_dropout, use_bias=use_bias))
        for _ in range(n_downsampling):  # mirrored upsampling stages
            layers += [
                nn.ConvTranspose2d(channels, channels // 2,
                                   kernel_size=3, stride=2,
                                   padding=1, output_padding=1,
                                   bias=use_bias),
                norm_layer(channels // 2),
                nn.ReLU(True),
            ]
            channels //= 2
        layers += [nn.ReflectionPad2d(3),
                   nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0),
                   nn.Tanh()]
        self.model = nn.Sequential(*layers)

    def forward(self, input):
        """Standard forward"""
        return self.model(input)
def define_G(input_nc, output_nc, ngf, netG, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02,
             gpu_ids=[]):
    """Create and initialize a generator.

    Parameters:
        input_nc (int)     -- channels in input images
        output_nc (int)    -- channels in output images
        ngf (int)          -- filters in the last conv layer
        netG (str)         -- architecture name; only 'unet_128' (7
                              downsamplings, for 128x128 inputs) is wired up
                              in this file
        norm (str)         -- normalization: batch | instance | none
        use_dropout (bool) -- whether to use dropout layers
        init_type (str)    -- weight initialization method
        init_gain (float)  -- scaling factor for normal/xavier/orthogonal
        gpu_ids (int list) -- kept for API compatibility (unused here)

    Returns the generator after weight initialization via init_net.
    """
    norm_layer = get_norm_layer(norm_type=norm)
    if netG != 'unet_128':
        raise NotImplementedError('Generator model name [%s] is not recognized' % netG)
    net = UnetGenerator(input_nc, output_nc, 7, ngf, norm_layer=norm_layer, use_dropout=use_dropout)
    return init_net(net, init_type, init_gain, gpu_ids)
class UnetGenerator(nn.Module):
    """Unet-based generator conditioned on a per-sample vector."""

    def __init__(self, input_nc, output_nc, num_downs, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False):
        """Construct a Unet generator.

        Parameters:
            input_nc (int)  -- channels in input images (incl. the condition)
            output_nc (int) -- channels in output images
            num_downs (int) -- number of downsamplings; with num_downs == 7 a
                               128x128 image becomes 1x1 at the bottleneck
            ngf (int)       -- filters in the last conv layer
            norm_layer      -- normalization layer

        The network is assembled recursively from the innermost block
        outwards.
        """
        super(UnetGenerator, self).__init__()
        # Bottleneck block.
        block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None,
                                        submodule=None, norm_layer=norm_layer,
                                        innermost=True)
        # Intermediate ngf*8 blocks (optionally with dropout).
        for _ in range(num_downs - 5):
            block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None,
                                            submodule=block,
                                            norm_layer=norm_layer,
                                            use_dropout=use_dropout)
        # Taper the width back down: ngf*8 -> ngf*4 -> ngf*2 -> ngf.
        for outer in (ngf * 4, ngf * 2, ngf):
            block = UnetSkipConnectionBlock(outer, outer * 2, input_nc=None,
                                            submodule=block,
                                            norm_layer=norm_layer)
        self.model = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc,
                                             submodule=block, outermost=True,
                                             norm_layer=norm_layer)

    def forward(self, x, c):
        """Tile the condition over x's spatial grid, concatenate, and run."""
        cond = c.view(c.size(0), c.size(1), 1, 1)
        cond = cond.repeat(1, 1, x.size(2), x.size(3))
        return self.model(torch.cat([x, cond], dim=1))
def init_weights(net, init_type='normal', init_gain=0.02):
    """Initialize network weights in place.

    Parameters:
        net (network)     -- network to be initialized
        init_type (str)   -- normal | xavier | kaiming | orthogonal
        init_gain (float) -- scaling factor for normal, xavier and orthogonal

    'normal' is what the original pix2pix/CycleGAN papers used; xavier or
    kaiming may work better for some applications.
    """
    def init_func(m):
        cls_name = m.__class__.__name__
        if hasattr(m, 'weight') and ('Conv' in cls_name or 'Linear' in cls_name):
            if init_type == 'normal':
                init.normal_(m.weight.data, 0.0, init_gain)
            elif init_type == 'xavier':
                init.xavier_normal_(m.weight.data, gain=init_gain)
            elif init_type == 'kaiming':
                init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
            elif init_type == 'orthogonal':
                init.orthogonal_(m.weight.data, gain=init_gain)
            else:
                raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
            if hasattr(m, 'bias') and m.bias is not None:
                init.constant_(m.bias.data, 0.0)
        elif 'BatchNorm2d' in cls_name:
            # BatchNorm weight is a per-channel scale, not a matrix: draw it
            # around 1 and zero the shift.
            init.normal_(m.weight.data, 1.0, init_gain)
            init.constant_(m.bias.data, 0.0)
    print('initialize network with %s' % init_type)
    net.apply(init_func)  # visits net and every submodule
def init_net(net, init_type='normal', init_gain=0.02, gpu_ids=[]):
    """Initialize a network's weights and return it.

    Parameters:
        net (network)      -- the network to be initialized
        init_type (str)    -- normal | xavier | kaiming | orthogonal
        init_gain (float)  -- scaling factor for normal, xavier and orthogonal
        gpu_ids (int list) -- accepted for API compatibility; GPU placement
                              (DataParallel/.cuda()) is deliberately disabled
                              in this project, so the argument is unused
    """
    init_weights(net, init_type, init_gain=init_gain)
    return net
class UnetSkipConnectionBlock(nn.Module):
    """Unet submodule with a skip connection.

        X -------------------identity----------------------
        |-- downsampling -- |submodule| -- upsampling --|

    The outermost block returns the upsampled result as-is; every other
    block concatenates its input onto the result (the skip connection).
    """

    def __init__(self, outer_nc, inner_nc, input_nc=None,
                 submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False):
        """Construct a Unet submodule with skip connections.

        Parameters:
            outer_nc (int)  -- filters in the outer conv layer
            inner_nc (int)  -- filters in the inner conv layer
            input_nc (int)  -- channels in input images/features
                               (defaults to outer_nc)
            submodule       -- previously built UnetSkipConnectionBlock
            outermost (bool)-- whether this is the outermost module
            innermost (bool)-- whether this is the innermost module
            norm_layer      -- normalization layer (class or partial)
            use_dropout     -- whether to append dropout (middle blocks only)
        """
        super(UnetSkipConnectionBlock, self).__init__()
        self.outermost = outermost
        # InstanceNorm has no affine shift by default, so convolutions need
        # their own bias in that case.
        norm_func = norm_layer.func if isinstance(norm_layer, functools.partial) else norm_layer
        use_bias = norm_func == nn.InstanceNorm2d
        if input_nc is None:
            input_nc = outer_nc
        down_conv = nn.Conv2d(input_nc, inner_nc, kernel_size=4,
                              stride=2, padding=1, bias=use_bias)
        down_act = nn.LeakyReLU(0.2, True)
        up_act = nn.ReLU(True)
        if outermost:
            # No norm; submodule output is inner_nc*2 wide (its skip concat).
            up_conv = nn.ConvTranspose2d(inner_nc * 2, outer_nc,
                                         kernel_size=4, stride=2,
                                         padding=1)
            layers = [down_conv, submodule, up_act, up_conv, nn.Tanh()]
        elif innermost:
            # No submodule, so the upconv consumes inner_nc directly.
            up_conv = nn.ConvTranspose2d(inner_nc, outer_nc,
                                         kernel_size=4, stride=2,
                                         padding=1, bias=use_bias)
            layers = [down_act, down_conv, up_act, up_conv, norm_layer(outer_nc)]
        else:
            up_conv = nn.ConvTranspose2d(inner_nc * 2, outer_nc,
                                         kernel_size=4, stride=2,
                                         padding=1, bias=use_bias)
            layers = [down_act, down_conv, norm_layer(inner_nc),
                      submodule, up_act, up_conv, norm_layer(outer_nc)]
            if use_dropout:
                layers.append(nn.Dropout(0.5))
        self.model = nn.Sequential(*layers)

    def forward(self, x):
        out = self.model(x)
        if self.outermost:
            return out
        # Skip connection: stack the block input onto its output.
        return torch.cat([x, out], 1)
if __name__ == '__main__':
    # Smoke test: wire generator, discriminator and segmentor together on
    # random tensors and report output shapes and parameter sizes.
    torch.manual_seed(1)
    x = torch.rand((4, 1, 128, 128))
    c = torch.rand((4, 3, 1, 1))
    g = define_G(4, 1, 64, 'unet_128', norm='instance', )
    d = netD()
    s = Unet()
    y = g(x, c)
    f, cls = d(x, y)
    seg = s(y, c)
    print('generator output : ', y.shape)
    print('discriminator output : ', f.shape, cls.shape)
    # Fixed copy-paste labels: the third output line previously said
    # "generator output" and the first params line said "segmentor params".
    print('segmentor output : ', seg.shape)
    print(' generator params: %.2fMB' % (sum(p.numel() for p in g.parameters()) / (1024.0 * 1024) * 4))
    print(' discriminator params: %.2fMB' % (sum(p.numel() for p in d.parameters()) / (1024.0 * 1024) * 4))
    print(' segmentor params: %.2fMB' % (sum(p.numel() for p in s.parameters()) / (1024.0 * 1024) * 4))
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2016-2018 by I3py Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Implements the Action class used to wrap public driver methods.
"""
from functools import partial, update_wrapper
from inspect import Signature, currentframe, signature
from typing import (Any, Callable, ClassVar, Dict, List, Optional, Tuple, Type,
Union)
from ..abstracts import AbstractAction, AbstractHasFeatures
from ..composition import SupportMethodCustomization, normalize_signature
from ..errors import I3pyFailedCall
from ..limits import FloatLimitsValidator, IntLimitsValidator
from ..unit import UNIT_RETURN, UNIT_SUPPORT, get_unit_registry
from ..utils import (build_checker, check_options, get_limits_and_validate,
update_function_lineno, validate_in, validate_limits)
LINENO = currentframe().f_lineno
CALL_TEMPLATE = ("""
def __call__(self{sig}):
should_lock = self.action.should_lock
if should_lock:
self.driver.lock.acquire()
try:
params = self.action.sig.bind(self.driver{sig})
args = params.args[1:]
kwargs = params.kwargs
args, kwargs = self.action.pre_call(self.driver, *args, **kwargs)
res = self.action.call(self.driver, *args, **kwargs)
return self.action.post_call(self.driver, res, *args, **kwargs)
except Exception as e:
msg = ('An exception occurred while calling {msg} with the '
'following arguments {msg} and keywords arguments {msg}.')
fmt_msg = msg.format(self.action.name, (self.driver,) + args,
kwargs)
raise I3pyFailedCall(fmt_msg) from e
finally:
if should_lock:
self.driver.lock.release()
""")
def add_retries(func, action):
    """Re-call if the call fail due to a communication issue.

    The call is attempted up to ``action._retries`` extra times when it
    raises one of ``driver.retries_exceptions``; the connection is re-opened
    between attempts and the final failure is re-raised.
    """
    def wrapper(driver, *args, **kwargs):
        # i counts attempts made so far; it starts at -1 so the first pass
        # through the loop performs the initial (non-retry) call.
        i = -1
        while i < action._retries:
            try:
                i += 1
                return action.func(driver, *args, **kwargs)
            except driver.retries_exceptions:
                if i != action._retries:
                    # Retries remain: reset the connection and go again.
                    driver.reopen_connection()
                    continue
                else:
                    # Retry budget exhausted: surface the last failure.
                    raise
    update_wrapper(wrapper, func)
    return wrapper
class ActionCall(object):
    """Object returned when an Action is used as descriptor.

    Actually when an Action is used to decorate a function a custom subclass
    of this class is created with a __call__ method whose signature match the
    decorated function signature.
    """
    __slots__ = ('action', 'driver')

    #: Dict storing custom class for each signature
    sigs: Dict[Tuple[str, ...], Type['ActionCall']] = {}

    def __new__(cls, action: AbstractAction, driver: AbstractHasFeatures
                ) -> 'ActionCall':
        """Create a custom subclass for each signature action.

        Parameters
        ----------
        action : Action
            Action for which to return a callable.
        driver :
            Instance of the owner class of the action.
        """
        # Generated subclasses are cached per normalized signature so the
        # (expensive) dynamic class creation runs once per distinct signature.
        sig = normalize_signature(action.sig, alias='driver')
        if sig not in cls.sigs:
            cls.sigs[sig] = cls.create_callable(action, sig)
        custom_type = cls.sigs[sig]
        return object.__new__(custom_type)

    def __call__(self, *args, **kwargs):
        """Updated in subclasses to preserve the decorated function signature.
        """
        raise NotImplementedError

    @classmethod
    def create_callable(cls, action: AbstractAction, sig: Tuple[str, ...]
                        ) -> Type['ActionCall']:
        """Dynamically create a subclass of ActionCall for a signature.
        """
        name = '{}ActionCall'.format(action.name)
        # Should store sig on class attribute
        # {msg} placeholders become literal {} here (filled at call time);
        # {sig} receives the parameter list minus 'driver' (sig[0]).
        decl = ('class {name}(ActionCall):\n' +
                CALL_TEMPLATE
                ).format(msg='{}', name=name,
                         sig=', ' + ', '.join(sig[1:]))
        glob = dict(ActionCall=ActionCall,
                    I3pyFailedCall=I3pyFailedCall)
        # Consider that this file is the source of the function
        code = compile(decl, __file__, 'exec')
        exec(code, glob)
        action_call_cls: Type[ActionCall] = glob[name]
        # Set the lineno to point to the string source.
        update_function_lineno(action_call_cls.__call__, LINENO + 3)
        return action_call_cls

    def __init__(self, action: AbstractAction, driver: AbstractHasFeatures
                 ) -> None:
        self.action = action
        self.driver = driver
class BaseAction(SupportMethodCustomization):
    """Wraps a method with pre and post processing operations.
    """
    #: Class variable used to store the kind of method wrapper generated by
    #: the descriptor.
    ACTION_CALL_CLS: ClassVar[type] = ActionCall

    def __init__(self, **kwargs):
        super().__init__()
        # Filled in by __call__ when the action decorates a function.
        self.name: str = ''
        self.func: Optional[Callable] = None
        self.sig: Optional[Signature] = None
        # Creation keywords are kept so clone() can recreate the action.
        self.creation_kwargs: dict = kwargs
        self.should_lock = kwargs.get('lock', False)
        # Cached ActionCall descriptor instance (built lazily in __get__).
        self._desc: Optional[ActionCall] = None
        self._retries: int = kwargs.get('retries', 0)
        self._use_options = bool(kwargs.get('options', False))

    def __call__(self, func: Callable) -> 'BaseAction':
        # Decorator entry point: capture the function, its signature and name.
        if self.func:
            msg = 'Attempt to decorate a second function using one Action.'
            raise RuntimeError(msg)
        self.__doc__ = func.__doc__
        self.sig = signature(func)
        self.func = func
        self.name = self.__name__ = func.__name__
        self.customize_call(func, self.creation_kwargs)
        return self

    def __get__(self,
                obj: AbstractHasFeatures,
                objtype: Optional[Type[AbstractHasFeatures]]=None
                ) -> Union[ActionCall, 'BaseAction']:
        if obj is None:
            return self
        if self._use_options is True:
            # Options are checked once per driver instance and the
            # (result, message) pair is cached in the driver settings.
            op, msg = obj._settings[self.name]['_options']
            if op is None:
                op, msg = check_options(obj, self.creation_kwargs['options'])
                # BUGFIX: cache the full (op, msg) tuple; storing only `op`
                # made the unpacking above fail on the next access.
                obj._settings[self.name]['_options'] = (op, msg)
            if not op:
                raise AttributeError('Invalid options: %s' % msg)
        if self._desc is None:
            # A specialized class matching the wrapped function signature is
            # created on the fly.
            self._desc = self.ACTION_CALL_CLS(self, obj)
        return self._desc

    def clone(self) -> 'BaseAction':
        """Create a clone of itself.
        """
        new = type(self)(**self.creation_kwargs)
        new(self.func)
        new.copy_custom_behaviors(self)
        new.__doc__ = self.__doc__
        new.name = self.name
        return new

    def create_default_settings(self) -> Dict[str, Any]:
        """Create the default settings for an action.
        """
        settings = {}
        if self._use_options:
            # Sentinel meaning "options not checked yet" (see __get__).
            settings['_options'] = (None, '')
        return settings

    def pre_call(self, driver: AbstractHasFeatures, *args, **kwargs
                 ) -> Tuple[tuple, Dict[str, Any]]:
        """Method called before calling the decorated function.

        This method can be used to validate or modify the arguments passed
        to the function.

        Parameters
        ----------
        driver :
            Reference to the instance of the owner class for this action
            calling it.
        *args :
            Positional arguments of the function.
        **kwargs :
            Keywords arguments of the function.

        Returns
        -------
        args : tuple
            Modified (or not) positional arguments
        kwargs : dict
            Modified or not keyword arguments.

        Notes
        -----
        When customizing through composition the method used can be given
        either the above signature or the signature of the function used in the
        Action.
        """
        return args, kwargs

    def post_call(self, driver: AbstractHasFeatures, result: Any,
                  *args, **kwargs) -> Any:
        """Method called after calling the decorated function.

        This method can be used to alter the returned function.

        Parameters
        ----------
        driver :
            Reference to the instance of the owner class for this action
            calling it.
        result :
            Object returned by the decorated function.
        *args :
            Positional arguments of the function.
        **kwargs :
            Keywords arguments of the function.

        Returns
        -------
        result : object
            Modified (or not) result from the decorated function.

        Notes
        -----
        When customizing through composition the method used can be given
        either the above signature or the signature of the function used in the
        Action with the result added after the reference to the driver and
        before the other function arguments.
        """
        return result

    def customize_call(self, func: Callable, kwargs: Dict[str, Any]):
        """Store the function in call attributes.
        """
        # Wrap with retry logic first so 'checks' run once per user call,
        # not once per retry attempt.
        if self._retries:
            func = add_retries(func, self)
        self.call = func
        if 'checks' in kwargs:
            sig = normalize_signature(self.sig, alias='driver')
            check_sig = ('(action' +
                         (', ' + ', '.join(sig) if sig else '') + ')')
            check_args = build_checker(kwargs['checks'], check_sig)

            def checker_wrapper(action, driver, *args, **kwargs):
                check_args(action, driver, *args, **kwargs)
                return args, kwargs

            self.modify_behavior('pre_call', checker_wrapper,
                                 ('append',), 'checks', internal=True)

    def analyse_function(self,
                         meth_name: str,
                         func: Callable,
                         specifiers: Tuple[str, ...]
                         ) -> Tuple[Tuple[str, ...],
                                    List[Tuple[str, ...]],
                                    str]:
        """Analyse the possibility to use a function for a method.

        Parameters
        ----------
        meth_name : str
            Name of the method that should be customized using the provided
            function.
        func : callable
            Function to use to customize the method.
        specifiers : tuple
            Tuple describing the attempted modification.

        Returns
        -------
        specifiers : tuple
            Tuple describing a possibly simplified customization that the one
            suggested by the user.
        signatures : list
            List of signatures that should be supported by a composer.
        chain_on : str
            Comma separated list of functions arguments that are also values
            returned by the function.

        Raises
        ------
        ValueError :
            Raised if the signature of the provided function does not match the
            one of the customized method.
        """
        act_sig = ('action',) + normalize_signature(self.sig,
                                                    alias=self.self_alias)
        if meth_name == 'call':
            if specifiers:
                msg = ('Can only replace call method of an action, not '
                       'customize it. Failed on action {} with customization '
                       'specifications {}')
                raise ValueError(msg.format(self.name, specifiers))
            sigs = [act_sig]
            chain_on = None
        elif meth_name == 'pre_call':
            sigs = [('action', 'driver', '*args', '**kwargs'), act_sig]
            chain_on = 'args, kwargs'
            # The base version of pre_call is no-op so we can directly replace
            # (Action is the concrete subclass defined later in this module).
            if self.pre_call.__func__ is Action.pre_call:
                specifiers = ()
        elif meth_name == 'post_call':
            sigs = [('action', 'driver', 'result', '*args', '**kwargs'),
                    ('action', 'driver', 'result') + act_sig[2:]]
            chain_on = 'result'
            # The base version of post_call is no-op so we can directly replace
            if self.post_call.__func__ is Action.post_call:
                specifiers = ()
        else:
            msg = ('Cannot customize method {}, only pre_call, call and '
                   'post_call can be.')
            raise ValueError(msg)
        func_sig = normalize_signature(signature(func), self.self_alias)
        if func_sig not in sigs:
            msg = ('Function {} used to attempt to customize method {} of '
                   'action {} does not have the right signature '
                   '(expected={}, provided={}).')
            raise ValueError(msg.format(func.__name__, meth_name, self.name,
                                        sigs, func_sig))
        return specifiers, sigs, chain_on

    @property
    def self_alias(self) -> str:
        """Name used instead of self in function signature.
        """
        return 'driver'


AbstractAction.register(BaseAction)
class Action(BaseAction):
    """Wraps a method with pre and post processing operations.

    All parameters must be passed as keyword arguments.

    All public driver methods should be decorated as an Action to make them
    easy to identify and hence make introspection easier.

    Parameters
    ----------
    options : str, optional
        Assertions in the form option_name['option_field'] == possible_values
        or any other valid boolean test. Multiple assertions can be separated
        by ;
    checks : str, optional
        Boolean tests to execute before calling the function. Multiple
        assertions can be separated with ';'. All the method arguments are
        available in the assertion execution namespace so one can access to the
        driver using self and to the arguments using their name (the signature
        of the wrapper is made to match the signature of the wrapped method).
    values : dict, optional
        Dictionary mapping the arguments names to their allowed values.
    limits : dict, optional
        Dictionary mapping the arguments names to their allowed limits. Limits
        can a be a tuple of length 2, or 3 (min, max, step) or the name of
        the limits to use to check the input.
    units : tuple, optional
        Tuple of length 2 containing the return unit and the unit of each
        passed argument. None can be used to mark that an argument should not
        be converted. The first argument (self) should always be marked this
        way.
    retries : int, optional
        Number of times to re-attempt to call the decorated function if an
        exception listed in the driver `retries_exception` occurs.

    Notes
    -----
    A single argument should be value checked or limit checked but not both,
    unit conversion is performed before anything else. When limit validating
    against a driver limits the parameter should ALWAYS be converted to the
    same unit as the one used by the limits.

    """

    def create_default_settings(self) -> Dict[str, Any]:
        """Create the default settings for an action.

        Extends the parent settings with the 'unit_return' flag controlling
        whether results are wrapped in unit-aware quantities.

        """
        settings = super().create_default_settings()
        settings['unit_return'] = UNIT_RETURN
        return settings

    def customize_call(self, func: Callable, kwargs: Dict[str, Any]):
        """Store the function in call attributes and customize pre/post based
        on the kwargs.

        """
        super().customize_call(func, kwargs)
        # Argument validation (allowed values and/or numeric limits) is
        # hooked into pre_call.
        if 'limits' in kwargs or 'values' in kwargs:
            self.add_values_limits_validation(kwargs.get('values', {}),
                                              kwargs.get('limits', {}))
        # Unit conversion is only enabled when Pint support is available.
        if UNIT_SUPPORT and 'units' in kwargs:
            self.add_unit_support(kwargs['units'])

    def add_unit_support(self, units: Tuple[str, Dict[str, Optional[str]]]):
        """Wrap a func using Pint to automatically convert Quantity to float.

        Parameters
        ----------
        units : tuple
            Tuple of (return unit(s), per-argument units); see the class
            docstring for the expected layout.

        Raises
        ------
        ValueError
            If the number of argument units does not match the number of
            parameters of the wrapped method.

        """
        ureg = get_unit_registry()
        # One unit entry is expected per parameter (including self/driver,
        # which should be marked with None).
        if len(units[1]) != len(self.sig.parameters):
            msg = ('The number of provided units does not match the number of '
                   'function arguments.')
            raise ValueError(msg)

        def convert_input(action, driver, *args, **kwargs):
            """Convert the arguments to the proper unit and return magnitudes.

            """
            bound = self.sig.bind(driver, *args, **kwargs)
            # NOTE(review): units[1] is indexed positionally, so it is
            # presumably ordered in parallel with the signature parameters
            # -- confirm against callers.
            for i, (k, v) in enumerate(list(bound.arguments.items())):
                if units[1][i] is not None and isinstance(v, ureg.Quantity):
                    bound.arguments[k] = v.to(units[1][i]).m
            # remove driver from the args
            return bound.args[1:], bound.kwargs

        # Prepend so conversion runs before any other pre_call behavior
        # (validators must see magnitudes in the expected units).
        self.modify_behavior('pre_call', convert_input, ('prepend',), 'units',
                             internal=True)

        def convert_output(action, driver, result, *args, **kwargs):
            """Convert the output to the proper units.

            """
            # Per-driver opt-out: return raw magnitudes when disabled.
            if not driver._settings[self.name]['unit_return']:
                return result
            re_units = units[0]
            is_container = isinstance(re_units, (tuple, list))
            # Normalize the single-value case to the container case, wrap
            # every value in a Quantity, then unwrap if needed.
            if not is_container:
                result = [result]
                re_units = [re_units]
            results = [ureg.Quantity(result[i], u)
                       for i, u in enumerate(re_units)]
            return results if is_container else results[0]

        self.modify_behavior('post_call', convert_output, ('append',), 'units',
                             internal=True)

    def add_values_limits_validation(self, values: Dict[str, tuple],
                                     limits: Dict[str, Union[str, list, tuple]]
                                     ):
        """Add arguments validation to pre_call.

        Parameters
        ----------
        values : dict
            Dictionary mapping the parameters name to the set of allowed
            values.
        limits : dict
            Dictionary mapping the parameters name to the limits they must
            abide by.

        Raises
        ------
        ValueError
            If an argument appears in both `values` and `limits`.
        TypeError
            If a limits specification is neither a sequence nor a str.

        """
        validators = {}
        for name, vals in values.items():
            validators[name] = partial(validate_in, name=name,
                                       values=set(vals))

        for name, lims in limits.items():
            # An argument may be values-validated or limits-validated,
            # never both (see class docstring Notes).
            if name in validators:
                msg = 'Arg %s can be limits or values validated not both'
                raise ValueError(msg % name)
            if isinstance(lims, (list, tuple)):
                # A single float anywhere in the bounds implies float
                # validation; otherwise integer validation is used.
                if any([isinstance(e, float) for e in lims]):
                    lim = FloatLimitsValidator(*lims)
                else:
                    lim = IntLimitsValidator(*lims)  # type: ignore
                validators[name] = partial(validate_limits, limits=lim,
                                           name=name)
            elif isinstance(lims, str):
                # A string names driver-declared limits resolved at call time.
                validators[name] = partial(get_limits_and_validate,
                                           limits=lims, name=name)
            else:
                msg = 'Invalid type for limits values (key {}) : {}'
                raise TypeError(msg.format(name, type(lims)))

        sig = self.sig

        def validate_args(action, driver, *args, **kwargs):
            # Bind to the real signature so validators can look arguments up
            # by name regardless of how they were passed.
            bound = sig.bind(driver, *args, **kwargs).arguments
            for n in validators:
                validators[n](driver, bound[n])
            return args, kwargs

        self.modify_behavior('pre_call', validate_args, ('append',),
                             'values_limits', internal=True)
|
<gh_stars>10-100
import types
import json
from collections import deque, OrderedDict
import macropy.core.macros
from macropy.core.macros import ast, expr
from macropy.core.hquotes import macros, u, hq, unhygienic
from macropy.core.quotes import ast as quotes_ast
# the four types below are ostensibly not used but are used by
# macro-expanded code so must be imported
from macropy.core.macros import Call, Attribute, Captured, Load
# Optional hook invoked after every annotation recorded by wrap(); left as
# None to disable.
callback = None
# Macro registry for this module (macropy convention: the module-level name
# `macros` — presumably required by macropy's lookup; confirm).
macros = macropy.core.macros.Macros()
# Serialization format used by the *_ast_repr helpers: "json" or "plain".
default_output_format = "json"
# Runtime types that get a k_function_fqn annotation in
# WrappedNodeTracker.wrap().
types_supported = (types.FunctionType, types.BuiltinFunctionType, types.BuiltinMethodType)
# USAGE NOTES
# -----------
# invoke using `python -m run your_target_code`
# * where your_target_code.py starts with
# `from trace import macros, kite_trace, get_all_traced_ast_reprs`,
# and then use `with kite_trace:` to open a block that will be traced
# * after the code in your block has executed, call `kite_trace.get_all_traced_ast_reprs()`
# * for maximum clarity, use get_all_traced_ast_reprs(indent=' ', include_field_names=True)
# * for minimum clarity, use get_all_traced_ast_reprs(indent=None, include_field_names=False) : )
#
# AST nodes have the following kwarg annotations:
#
# k_type will be '__kite_mixed' if the value of the expression changes across evaluations
# k_lineno and k_col_offset as defined in https://docs.python.org/2/library/ast.html
# k_num_evals number of times the expression was evaluated
# k_function_fqn (FUNCTIONS ONLY) fully qualified name of function
# k_module_fqn (MODULES ONLY) fully qualified name of module
# IMPLEMENTATION NOTES
# --------------------
# todo: annotate function defs with types of arguments for each call, num times reached, etc
# todo: annotate try/except/finally to track number of times reached,
# actual type of exceptions, etc
# note re python AST node api: the official docs aren't super descriptive about the role of each field
# of each ast node type. I found the following code to be very useful:
# http://svn.edgewall.org/repos/genshi/tags/0.6.0/genshi/template/astutil.py. Basically it prints
# out formatted code which parses to the ast tree provided as input. another potentially helpful url:
# https://greentreesnakes.readthedocs.org/en/latest/
# it's also useful to look at the source code of macropy. e.g. when working with walking the AST, see
# https://github.com/lihaoyi/macropy/blob/13993ccb08df21a0d63b091dbaae50b9dbb3fe3e/macropy/core/walkers.py
# in get_all_traced_ast_reprs()`: all we need is a reference to the top level WrappedNodeTracker. This
# is trivially available when we create the top level WrappedNodeTracker, but for some odd reason this
# doesn't work. If you try printing the `id()` of WrappedNodeTrackers during WrappedNodeTracker.__init__()
# vs when they're used in `wrap()`, there is NO overlap. Macropy must be doing something funky. So
# we're just sticking with the strategy of awkwardly gathering the top level WrappedNodeTrackers from
# `wrap()`, and keeping track of them through `top_level_node_trackers`.
top_level_node_trackers = set()
class Node(object):
    """Shadow-tree node mirroring one original AST node (or list of nodes).

    Keeps a reference to the original AST node plus an ordered mapping of
    ``k_*`` annotations gathered while the traced code runs.  (Python 2
    code: uses ``basestring`` and ``iteritems``.)
    """

    def __init__(self, parent, original_ast_node):
        # parent: the enclosing Node, or None for a tree root.
        self.parent = parent
        self.original_ast_node = original_ast_node
        # Ordered so annotations serialize in a stable order.
        self._annotations = OrderedDict()

    def annotate(self, key, value):
        # Record value under key; if the same key sees different values
        # across evaluations, collapse to the '__kite_mixed' sentinel.
        if key in self._annotations and self._annotations[key] != value:
            self._annotations[key] = '__kite_mixed'
        else:
            self._annotations[key] = value

    @staticmethod
    def _get_ast_repr(ast_node, depth=0, indent='',
                      include_field_names=False, output_format=default_output_format):
        """Recursively render *ast_node* (AST node, list, or primitive)
        without any annotations (used for subtrees that have no tracker)."""
        indent_text = '\n' + (indent * depth) if indent else ''
        if isinstance(ast_node, ast.AST):
            if ast_node._fields:
                field_names, field_ast_nodes = zip(*ast.iter_fields(ast_node))
                # NOTE: NotWrappedSubtree._get_ast_repr resolves to this very
                # (inherited) static method, so this is a plain recursion.
                field_values = map(lambda node: NotWrappedSubtree._get_ast_repr(node,
                    depth + 1, indent, include_field_names, output_format), field_ast_nodes)
            else:
                # `zip` requires at least one entry to "unzip" correctly
                field_names, field_values = [], []
            # Fresh OrderedDict: untracked nodes carry no annotations.
            return Node._encode_ast_node_name_and_fields(ast_node, field_names, field_values, OrderedDict(
            ), depth, indent, include_field_names, output_format)
        elif isinstance(ast_node, list):
            list_values = map(lambda node: Node._get_ast_repr(node,
                depth + 1, indent, include_field_names, output_format), ast_node)
            return '%s[%s%s]' % (
                indent_text, ', '.join(list_values), indent_text)
        # Primitive (str/int/bool/None/...): encode per requested format.
        if output_format == "json":
            return "%s%s" % (indent_text, Node._get_json_ast_repr_for_primitive(ast_node))
        return "%s%s" % (indent_text, Node._get_plain_ast_repr_for_primitive(ast_node))

    @staticmethod
    def _get_plain_ast_repr_for_primitive(ast_node):
        # Python 2: basestring covers both str and unicode.
        if isinstance(ast_node, basestring):
            return "'%s'" % (ast_node)
        return "%s" % (ast_node)

    @staticmethod
    def _get_json_ast_repr_for_primitive(ast_node):
        # add special clause for bool so it does not fall into the int case
        # below (a bool in python is also an int)
        if isinstance(ast_node, bool):
            return "\"%s\"" % (ast_node)
        elif isinstance(ast_node, int):
            return "%s" % (ast_node)
        elif isinstance(ast_node, str):
            return json.dumps(ast_node)
        elif ast_node is None:
            return "null"
        # everything else needs to be enclosed in quotes to make valid json
        return "\"%s\"" % (ast_node)

    @staticmethod
    def _encode_ast_node_name_and_fields(
            ast_node, field_names, field_values, annotations, depth, indent, include_field_names, output_format):
        """Serialize one AST node with its fields and ``k_*`` annotations.

        NOTE(review): mutates *annotations* in place (adds k_lineno and
        k_col_offset); for WrappedNodeTracker this is the tracker's own
        _annotations dict — presumably intentional, confirm.
        """
        annotations['k_lineno'] = ast_node.lineno
        annotations['k_col_offset'] = ast_node.col_offset
        if include_field_names:
            # start with adding fields, so that they are printed before our k_
            # annotations
            node_kw_args = OrderedDict()
            for field_name, field_value in zip(field_names, field_values):
                node_kw_args[field_name] = field_value
            node_kw_args.update(annotations)
            field_values = []
        else:
            node_kw_args = annotations
        indent_text_node = '\n' + (indent * depth) if indent else ''
        indent_text_args = '\n' + (indent * (depth + 1)) if indent else ''
        encoded = ""
        if output_format == "json":
            kw_fields_as_strings = []
            for (k, v) in node_kw_args.iteritems():
                val = str(v).strip()
                # FQN-like annotations must be quoted to stay valid JSON.
                if k == "k_type" or k == "k_function_fqn" or k == "k_module_fqn" or k == "k_instance_class_fqn":
                    val = "\"%s\"" % val
                kw_fields_as_strings.append(
                    '%s\"%s\":%s' %
                    (indent_text_args, str(k).strip(), val))
            encoded = '%s{\"%s\":{%s}}' % (indent_text_node,
                                           ast_node.__class__.__name__,
                                           ', '.join(
                                               field_values + kw_fields_as_strings))
        elif output_format == "plain":
            kw_fields_as_strings = [
                '%s%s=%s' %
                (indent_text_args,
                 str(k).strip(),
                 str(v).strip()) for (
                    k,
                    v) in node_kw_args.iteritems()]
            encoded = '%s%s(%s)' % (indent_text_node,
                                    ast_node.__class__.__name__,
                                    ', '.join(
                                        field_values + kw_fields_as_strings))
        return encoded
class NotWrappedSubtree(Node):
    """A subtree that was deliberately left unwrapped.

    Rendering delegates to the annotation-free static pretty-printer on
    ``Node``, since there is no per-node runtime data to merge in.
    """

    def get_ast_repr(self, depth=0, indent='',
                     include_field_names=False, output_format=default_output_format):
        """Render the stored (unwrapped) AST subtree as a string."""
        raw_node = self.original_ast_node
        return Node._get_ast_repr(raw_node, depth, indent,
                                  include_field_names, output_format)
class WrappedNodeTracker(Node):
    """Tracker for an AST node that *is* wrapped.

    Each evaluation of the wrapped expression flows through :meth:`wrap`,
    which records type/identity annotations on this tracker and passes the
    value through unchanged.
    """

    def __init__(self, parent, original_ast_node):
        super(WrappedNodeTracker, self).__init__(parent, original_ast_node)
        # Child trackers, one per AST/list field that was walked.
        self.children = []

    @property
    def is_top_level(self):
        # Roots are created with parent=None (see create_node_tracker).
        return self.parent is None

    @staticmethod
    def get_type_name(o):
        """Best-effort fully qualified type name of *o* ('' if unknown)."""
        if not hasattr(o, '__class__'):
            return ""
        cl = o.__class__
        if hasattr(cl, '__module__'):
            module = cl.__module__
            # Builtins get their bare name (no builtin-module prefix).
            if module is None or module == str.__class__.__module__:
                return cl.__name__
            return cl.__module__ + '.' + cl.__name__
        return cl.__name__

    def wrap(self, result):
        """Runtime hook called with the value of the wrapped expression.

        Annotates the tracker (type, FQNs, eval count), registers the root
        tracker, fires the optional module-level callback, and returns
        *result* unchanged.
        """
        self.annotate('k_type', WrappedNodeTracker.get_type_name(result))
        if isinstance(result, types_supported):
            if (hasattr(result, '__module__') and result.__module__ is not None and
                    hasattr(result, '__name__') and result.__name__ is not None):
                self.annotate('k_function_fqn', result.__module__ + '.' + result.__name__)
        if isinstance(result, types.ModuleType):
            if hasattr(result, '__name__') and result.__name__ is not None:
                self.annotate('k_module_fqn', result.__name__)
        # Python 2: types.TypeType is the type of new-style classes.
        if isinstance(result, types.TypeType):
            if (hasattr(result, '__module__') and result.__module__ is not None and
                    hasattr(result, '__name__') and result.__name__ is not None):
                self.annotate('k_instance_class_fqn', result.__module__ + '.' + result.__name__)
        # skip the annotate() call because this value is supposed to change
        # over time (annotate() would collapse it to '__kite_mixed')
        if 'k_num_evals' in self._annotations:
            self._annotations['k_num_evals'] = self._annotations[
                'k_num_evals'] + 1
        else:
            self._annotations['k_num_evals'] = 1
        # go to top of tree -> gather the root WrappedNodeTracker, in preparation for
        # `get_all_traced_ast_reprs()`
        # (see long note at top of file for detailed explanation)
        root = self
        while root.parent is not None:
            root = root.parent
        top_level_node_trackers.add(root)
        if callback is not None:
            callback()
        return result

    def get_field_values_as_strs(self, field_ast_nodes, depth=0, indent='',
                                 include_field_names=False, output_format=default_output_format):
        """Render each field value to a string.

        AST/list fields are rendered via the matching entry in
        ``self.children`` (consumed in order); primitives are rendered
        directly.  Raises ValueError if the child count does not line up.
        """
        y = []
        ix_next_child = 0
        for ast_node in field_ast_nodes:
            if isinstance(ast_node, ast.AST) or isinstance(ast_node, list):
                # expect to have an entry in self.children for this ast_node
                y.append(
                    self.children[ix_next_child].get_ast_repr(
                        depth,
                        indent,
                        include_field_names,
                        output_format))
                ix_next_child += 1
            else:
                y.append(
                    Node._get_ast_repr(
                        ast_node,
                        depth,
                        indent,
                        include_field_names,
                        output_format))
        # Sanity checks: every child consumed; everything rendered to str.
        if ix_next_child != len(self.children):
            raise ValueError()
        if not all(map(lambda elem: isinstance(elem, str), y)):
            raise ValueError()
        return y

    def get_ast_repr(self, depth=0, indent='',
                     include_field_names=False, output_format=default_output_format):
        """Render this subtree including its accumulated annotations."""
        # inspired by `real_repr()`
        indent_text = '\n' + (indent * depth) if indent else ''
        if isinstance(self.original_ast_node, ast.AST):
            if self.original_ast_node._fields:
                field_names, field_ast_nodes = zip(
                    *ast.iter_fields(self.original_ast_node))
            else:
                # `zip` requires at least one entry to "unzip" correctly
                field_names, field_ast_nodes = [], []
            field_values = self.get_field_values_as_strs(
                field_ast_nodes,
                depth + 1,
                indent,
                include_field_names,
                output_format)
            return Node._encode_ast_node_name_and_fields(self.original_ast_node, field_names, field_values,
                                                         self._annotations, depth, indent, include_field_names, output_format)
        elif isinstance(self.original_ast_node, list):
            # A bare statement list (e.g. a `with kite_trace:` block body).
            return '%s{"RootArray":[%s]}' % (indent_text, ', '.join(self.get_field_values_as_strs(self.original_ast_node, depth + 1, indent, include_field_names, output_format)))
        raise ValueError()
# macropy doesn't offer much state tracking while walking an AST tree, so
# we keep the "current parent" in a module-level variable as a hack.
last_parent = None


def create_node_tracker(original_ast_node, inner):
    """Create a WrappedNodeTracker for *original_ast_node*, attach it to the
    current parent, and invoke *inner* with it while it is the active parent.
    """
    global last_parent
    parent = last_parent
    tracker = WrappedNodeTracker(parent, original_ast_node)
    if parent:
        parent.children.append(tracker)
    # Any trackers created inside inner() become children of this one.
    last_parent = tracker
    try:
        return inner(tracker)
    finally:
        # Restore the previous parent even if inner() raises.
        last_parent = parent
# macropy won't expose `list`s when walking along the AST (it just iterates over their elements
# (line 74 of walkers.py)), but we'd like to create WrappedNodeTrackers for them and generally
# approach them like AST nodes
# we achieve this by overriding the `walk_children` function of `Walker`
class KiteWalker(macropy.core.macros.Walker):
    """Walker that also creates node trackers for bare ``list`` fields."""

    def walk_children(self, tree, *args, **kw):
        # Non-list nodes take the stock macropy path.
        if not isinstance(tree, list):
            return super(KiteWalker, self).walk_children(tree, *args, **kw)

        def walk_list(node_tracker):
            return super(KiteWalker, self).walk_children(tree, *args, **kw)

        # Wrap the list walk in a tracker, exactly like an AST node.
        return create_node_tracker(tree, walk_list)
def trace_walk_func(tree, exact_src):
    """Transform *tree* so every wrappable expression becomes
    ``node_tracker.wrap(<expr>)``, building a parallel tracker tree.

    Parameters
    ----------
    tree : ast.AST or list
        AST (or statement list) captured by the macro.
    exact_src : callable
        Source-lookup helper supplied by macropy (currently unused here).

    Returns
    -------
    The transformed AST with wrap() calls spliced in.
    """

    @KiteWalker
    def trace_walk(tree, stop, **kw):

        def inner(node_tracker):
            # NODE TYPES WE SHOULD NOT WRAP, AND WHERE WE SHOULD IGNORE SOME FIELD(S)
            # -----------------------------------------------------------------------
            # not in the dictionary: wrap it, recurse on all fields
            # in the dictionary, empty fields list: don't wrap it, recurse on all fields
            # in the dictionary, non-empty fields list: don't wrap it, recurse on all fields other than ones listed
            #
            # the first rule to match wins. e.g. a `For` node will match the `For` entry rather than
            # the `stmt` entry. In general the ordering is more specific -> less specific, i.e. `stmt`
            # is at end.
            types_to_not_wrap_and_fields_to_ignore = OrderedDict([
                # don't try to wrap left hand side (`targets`)
                (macropy.core.macros.Assign, ['targets']),
                (macropy.core.macros.AugAssign, ['target']),
                # don't try to wrap the `i` in `for i...`
                (macropy.core.macros.For, ['target']),
                # can't wrap in a function call
                (macropy.core.macros.arguments, []),
                (macropy.core.macros.excepthandler, ['name']),
                (macropy.core.macros.ClassDef, ['name', 'bases']),
                (macropy.core.macros.FunctionDef, ['name', 'args']),
                (macropy.core.macros.Delete, ['targets']),
                (macropy.core.macros.With, ['optional_vars']),
                # import names can't be wrapped (all fields)
                (macropy.core.macros.Import, ['names']),
                # (all fields)
                (macropy.core.macros.ImportFrom, ['module', 'names', 'level']),
                (macropy.core.macros.Global, []),
                (macropy.core.macros.Lambda, ['args']),
                (macropy.core.macros.comprehension, ['target']),
                (macropy.core.macros.DictComp, ['key']),
                # load / store / del ... not wrappable
                (macropy.core.macros.expr_context, []),
                # you can't wrap a slice in a function call (in python you
                # can't write `(1,2)wrap([1])`)
                (macropy.core.macros.slice, []),
                (macropy.core.macros.boolop, []),  # and, or
                # add, sub, mult, div; e.g. you can't write `1 wrap(+) 2`
                (macropy.core.macros.operator, []),
                # invert, not, uadd, usub
                (macropy.core.macros.unaryop, []),
                # Eq, NotEq, Lt, LtE, ...
                (macropy.core.macros.cmpop, []),
                # `arg` and `value` provided as a kwarg to a function call
                (macropy.core.macros.keyword, ['arg']),
                # can't replace a statement (e.g. try/except) with a function
                # call
                (macropy.core.macros.stmt, []),
            ])
            for type_to_not_wrap, fields_to_ignore in types_to_not_wrap_and_fields_to_ignore.iteritems(
            ):
                if isinstance(tree, type_to_not_wrap):
                    # there are three kinds of fields:
                    # 1) fields we are ignoring because we don't want to wrap on them -- there will be
                    #    some acrobatics to make walk_children() ignore them.
                    # 2) fields which are not ast.AST or list nodes, e.g. literals -- walk_children()
                    #    ignores these anyway but they're included in tree._fields so we have to be
                    #    aware of them.
                    # 3) fields which we are recursing on
                    all_fields_is_ast_or_list = [isinstance(value, ast.AST) or isinstance(value, list)
                                                 for field, value in ast.iter_fields(node_tracker.original_ast_node)]
                    if len(all_fields_is_ast_or_list) != len(tree._fields):
                        raise ValueError()
                    # `walk_children` (below) walks based on `tree._fields` so we're going to hack
                    # `walk_children` to make it only walk the subset of nodes we'd like it to
                    all_fields = tree._fields
                    tree._fields = tuple(
                        filter(
                            lambda field: field not in fields_to_ignore,
                            tree._fields))  # fields to walk
                    trace_walk.walk_children(tree)  # now recurse on children
                    stop()
                    # restore _fields to all_fields / undo our hack
                    tree._fields = all_fields
                    # now we have the problem that WrappedNodeTracker expects to have children for each
                    # ast-or-list-field, not just the subset that we walked.
                    # give it NotWrappedSubtree children for each skipped node
                    children_gathered = deque(
                        node_tracker.children)  # limited to the ones we recursed on / didn't ignore
                    node_tracker.children = []
                    for i in range(len(all_fields)):
                        if not all_fields_is_ast_or_list[i]:
                            # a literal value / similar -> no entry in
                            # `children`
                            continue
                        if all_fields[i] in fields_to_ignore:
                            node_tracker.children.append(NotWrappedSubtree(node_tracker,
                                                                           getattr(tree, all_fields[i], None)))
                        else:
                            node_tracker.children.append(
                                children_gathered.popleft())
                    return tree
            # Anything that reaches this point must be a wrappable expression.
            if not isinstance(tree, expr):
                raise ValueError(
                    'cannot be wrapped -> should be in types_to_not_wrap: ' + str(type(tree)))
            trace_walk.walk_children(tree)
            wrapped = hq[
                node_tracker.wrap(
                    ast[tree])]  # <- this line is where the magic happens
            stop()
            return wrapped

        return create_node_tracker(tree, inner)

    new_tree = trace_walk.recurse(tree)
    return new_tree
def get_all_traced_ast_reprs(
        indent=None, include_field_names=False, out=default_output_format):
    """Return one rendered string per block or expression traced thus far
    in the process's lifetime."""
    # Sanity check: everything gathered in the module-level set must be a
    # root tracker.
    if not all(t.is_top_level for t in top_level_node_trackers):
        raise ValueError()
    reprs = []
    for tracker in top_level_node_trackers:
        text = tracker.get_ast_repr(indent=indent,
                                    include_field_names=include_field_names,
                                    output_format=out)
        # strip() removes the leading newline emitted when indent is non-empty
        reprs.append(text.strip())
    return reprs
@macros.expr
def kite_trace(tree, exact_src, **kw):
    """Expression-form macro: return the traced version of *tree*."""
    return trace_walk_func(tree, exact_src)
@macros.block
def kite_trace(tree, exact_src, **kw):
    """Block-form macro (`with kite_trace:`): trace the whole block body."""
    return trace_walk_func(tree, exact_src)
|
<filename>tests/cupy_tests/math_tests/test_rounding.py
import unittest
import numpy
import pytest
import cupy
from cupy import testing
@testing.gpu
class TestRounding(unittest.TestCase):
    """Compare cupy and numpy rounding ufuncs on shared inputs."""

    @testing.for_all_dtypes(no_complex=True)
    @testing.numpy_cupy_allclose(atol=1e-5)
    def check_unary(self, name, xp, dtype):
        a = testing.shaped_arange((2, 3), xp, dtype)
        return getattr(xp, name)(a)

    @testing.for_complex_dtypes()
    @testing.numpy_cupy_allclose(atol=1e-5)
    def check_unary_complex(self, name, xp, dtype):
        a = testing.shaped_arange((2, 3), xp, dtype)
        return getattr(xp, name)(a)

    @testing.for_complex_dtypes()
    def check_unary_complex_unsupported(self, name, dtype):
        # Both backends must reject complex input with the same error type.
        for xp in (numpy, cupy):
            a = testing.shaped_arange((2, 3), xp, dtype)
            with pytest.raises(TypeError):
                getattr(xp, name)(a)

    @testing.for_dtypes(['?', 'b', 'h', 'i', 'q', 'e', 'f', 'd'])
    @testing.numpy_cupy_allclose(atol=1e-5)
    def check_unary_negative(self, name, xp, dtype):
        a = xp.array([-3, -2, -1, 1, 2, 3], dtype=dtype)
        return getattr(xp, name)(a)

    @testing.for_complex_dtypes()
    @testing.numpy_cupy_allclose(atol=1e-5)
    def check_unary_negative_complex(self, name, xp, dtype):
        a = xp.array([-3-3j, -2-2j, -1-1j, 1+1j, 2+2j, 3+3j], dtype=dtype)
        return getattr(xp, name)(a)

    def test_rint(self):
        self.check_unary('rint')
        self.check_unary_complex('rint')

    def test_rint_negative(self):
        self.check_unary_negative('rint')
        self.check_unary_negative_complex('rint')

    def test_floor(self):
        self.check_unary('floor')
        self.check_unary_complex_unsupported('floor')

    def test_ceil(self):
        self.check_unary('ceil')
        self.check_unary_complex_unsupported('ceil')

    def test_trunc(self):
        self.check_unary('trunc')
        self.check_unary_complex_unsupported('trunc')

    def test_fix(self):
        self.check_unary('fix')
        self.check_unary_complex_unsupported('fix')

    def test_around(self):
        self.check_unary('around')
        self.check_unary_complex('around')

    def test_round_(self):
        self.check_unary('round_')
        # BUG FIX: the complex path previously re-tested 'around' (copy-paste
        # from test_around), leaving complex round_ uncovered.
        self.check_unary_complex('round_')
@testing.parameterize(*testing.product({
    'decimals': [-2, -1, 0, 1, 2],
}))
class TestRound(unittest.TestCase):
    """Round random vectors at several `decimals` values."""

    # Length of the random test vector.
    shape = (20,)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose(atol=1e-5)
    def test_round(self, xp, dtype):
        # bool (cast problem) and float16 (accuracy problem) both fall back
        # to a smaller scale and zero decimals.
        if dtype in (numpy.bool_, numpy.float16):
            special = testing.shaped_random(self.shape, xp, scale=10, dtype=dtype)
            return xp.around(special, 0)
        data = testing.shaped_random(self.shape, xp, scale=100, dtype=dtype)
        return xp.around(data, self.decimals)

    @testing.numpy_cupy_array_equal()
    def test_round_out(self, xp):
        data = testing.shaped_random(self.shape, xp, scale=100, dtype='d')
        out = xp.empty_like(data)
        xp.around(data, self.decimals, out)
        return out
@testing.parameterize(*testing.product({
    'decimals': [-100, -99, -90, 0, 90, 99, 100],
}))
class TestRoundExtreme(unittest.TestCase):
    """Round very large / very small magnitudes with extreme `decimals`."""

    shape = (20,)

    @testing.for_dtypes([numpy.float64, numpy.complex128])
    @testing.numpy_cupy_allclose()
    def test_round_large(self, xp, dtype):
        data = testing.shaped_random(self.shape, xp, scale=1e100, dtype=dtype)
        return xp.around(data, self.decimals)

    @testing.for_dtypes([numpy.float64, numpy.complex128])
    @testing.numpy_cupy_allclose()
    def test_round_small(self, xp, dtype):
        data = testing.shaped_random(self.shape, xp, scale=1e-100, dtype=dtype)
        return xp.around(data, self.decimals)
@testing.parameterize(*testing.product({
    'value': [
        (14, -1),
        (15, -1),
        (16, -1),
        (14.0, -1),
        (15.0, -1),
        (16.0, -1),
        (1.4, 0),
        (1.5, 0),
        (1.6, 0),
    ]
}))
class TestRoundBorder(unittest.TestCase):
    """Rounding behavior at half-way border cases, scalar and array."""

    @testing.numpy_cupy_allclose(atol=1e-5)
    def test_around_positive1(self, xp):
        scalar, decimals = self.value
        return xp.around(scalar, decimals)

    @testing.numpy_cupy_allclose(atol=1e-5)
    def test_around_positive2(self, xp):
        scalar, decimals = self.value
        return xp.around(xp.asarray(scalar), decimals)

    @testing.numpy_cupy_allclose(atol=1e-5)
    def test_around_negative1(self, xp):
        scalar, decimals = self.value
        return xp.around(-scalar, decimals)

    @testing.numpy_cupy_allclose(atol=1e-5)
    def test_around_negative2(self, xp):
        scalar, decimals = self.value
        return xp.around(-xp.asarray(scalar), decimals)
|
<filename>scripts/plot_uph.py
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import getopt
import logging
import os
import sys
import matplotlib.pyplot as plt
import numpy as np
# Module-level logger; INFO so the "Load ..." messages are visible by default.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

# Read whichever data files were provided and plot all results on one axis.
def read_pystella_swd_uph(fname, path=None):
    """Load pystella swd photospheric-velocity (uph) data from a text file.

    Parameters
    ----------
    fname : str
        File name (joined with *path* when given).
    path : str, optional
        Directory containing *fname*.

    Returns
    -------
    numpy.ndarray
        Structured array with float64 fields: time, zone, R, V.

    Raises
    ------
    ValueError
        If the file does not exist.
    """
    if path is not None:
        fname = os.path.join(path, fname)
    if not os.path.isfile(fname):
        logger.error(' No uph-data for %s' % fname)
        raise ValueError(' No uph-data for %s' % fname)
    logger.info(' Load uph-data from %s' % fname)
    names = "time zone R V".split()
    # BUG FIX: one format per column. The original used
    # np.repeat('f8', len(col_names)) where len() was the length of the
    # STRING (14), not the number of columns (4); np.dtype requires 'names'
    # and 'formats' of equal length and raises ValueError otherwise.
    dt = np.dtype({'names': names, 'formats': ['f8'] * len(names)})
    data = np.loadtxt(fname, comments='#', dtype=dt)
    return data
def read_uph(fname, path=None):
    """Load photospheric-velocity (uph) data from a text file.

    Parameters
    ----------
    fname : str
        File name (joined with *path* when given).
    path : str, optional
        Directory containing *fname*.

    Returns
    -------
    numpy.ndarray
        Structured array with float64 fields: time, R, V.

    Raises
    ------
    ValueError
        If the file does not exist.
    """
    if path is not None:
        fname = os.path.join(path, fname)
    if not os.path.isfile(fname):
        logger.error(' No uph-data for %s' % fname)
        raise ValueError(' No uph-data for %s' % fname)
    logger.info(' Load uph-data from %s' % fname)
    names = "time R V".split()
    # BUG FIX: one format per column. The original used
    # np.repeat('f8', len(col_names)) — the length of the STRING (8), not the
    # 3 column names — which makes np.dtype raise ValueError.
    dt = np.dtype({'names': names, 'formats': ['f8'] * len(names)})
    data = np.loadtxt(fname, comments='#', dtype=dt)
    return data
def read_ph_swd(fname, path=None):
    """Load photospheric swd data from a text file.

    Parameters
    ----------
    fname : str
        File name (joined with *path* when given).
    path : str, optional
        Directory containing *fname*.

    Returns
    -------
    numpy.ndarray
        Structured array with float64 fields: time, R, V, M, T.

    Raises
    ------
    ValueError
        If the file does not exist.
    """
    if path is not None:
        fname = os.path.join(path, fname)
    if not os.path.isfile(fname):
        logger.error(' No ph-swd-data for %s' % fname)
        raise ValueError(' No ph-swd-data for %s' % fname)
    logger.info(' Load ph-swd-data from %s' % fname)
    names = "time R V M T".split()
    # BUG FIX: one format per column. The original used
    # np.repeat('f8', len(col_names)) — the length of the STRING (12), not
    # the 5 column names — which makes np.dtype raise ValueError.
    dt = np.dtype({'names': names, 'formats': ['f8'] * len(names)})
    data = np.loadtxt(fname, comments='#', dtype=dt)
    return data
def usage():
    """Print a short command-line usage summary."""
    for line in ("Usage:",
                 " plot_uph.py uph-file ph-swd-pystella ph-swd-file "):
        print(line)
def main():
    """Plot photospheric-velocity curves from up to three input files.

    Positional CLI arguments (see usage()): uph file, pystella swd-uph file,
    ph-swd file; fewer arguments plot fewer curves.
    """
    lw = 2  # common line width for every curve
    # Example file names kept from the original author:
    fname_pystella = None  # 's15s7b2v1z532E1.swd.ph'
    fname_swd = None  # 's15s7b2v1z532E1.swd.ph'
    fname_uph = None  # 'uph_s15s7b2v1z532E1.txt'
    try:
        opts, args = getopt.getopt(sys.argv[1:], "h")
    except getopt.GetoptError as err:
        print(str(err))  # will print something like "option -a not recognized"
        usage()
        sys.exit(2)
    # Positional arguments fill uph / pystella / swd in order.
    if len(args) > 2:
        fname_uph = args[0]
        fname_pystella = args[1]
        fname_swd = args[2]
    elif len(args) > 1:
        fname_uph = args[0]
        fname_swd = args[1]
    elif len(args) > 0:
        fname_uph = args[0]
    elif len(opts) == 0:
        # NOTE(review): '-h' is accepted by getopt but never handled, so
        # `-h` alone falls through with every file name left None — confirm
        # whether it should print usage instead.
        usage()
        sys.exit(2)
    # setup plot
    plt.matplotlib.rcParams.update({'font.size': 12})
    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_subplot(1, 1, 1)
    ax.set_xlabel('Time [day]')
    ax.set_ylabel(r'Velocity [$\times 10^3$ km/s]')
    # ax.set_title(fname_uph)
    # read and plot ph-swd-data
    if fname_swd is not None:
        dswd = read_ph_swd(fname_swd)
        x = dswd['time']
        y = dswd['V']
        ax.plot(x, y, label=fname_swd, color='blue', ls="-", linewidth=lw)
    # read and plot uph-data
    if fname_uph is not None:
        dswd = read_uph(fname_uph)
        x = dswd['time']
        y = dswd['V']
        ax.plot(x, y, label=fname_uph, color='orange', ls="--", linewidth=lw)
    # read and plot pystella uph-data
    if fname_pystella is not None:
        dswd = read_pystella_swd_uph(fname_pystella)
        x = dswd['time']
        y = dswd['V']
        ax.plot(x, y, label=fname_pystella, color='red', ls=":", linewidth=lw)
    ax.legend()
    plt.show()
# Standard script entry point.
if __name__ == '__main__':
    main()
|
"""
Entry point for the command-line interface (CLI).
"""
# Copyright 2018-2022 The emsig community.
#
# This file is part of emg3d.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os
import sys
import argparse
from emg3d import utils
from emg3d.cli import run
def _build_parser():
    """Construct the argparse parser for the emg3d CLI.

    Returns
    -------
    argparse.ArgumentParser
        Fully configured parser: config file, what-to-run group, file
        locations, verbosity group, and the info-only flags.
    """
    parser = argparse.ArgumentParser(
        description="Multigrid solver for 3D electromagnetic diffusion."
    )

    # arg: Optional parameter-file name.
    parser.add_argument(
        "config",
        nargs="?",
        default="emg3d.cfg",
        type=str,
        help=("name of config file; default is 'emg3d.cfg'; consult "
              "https://emg3d.emsig.xyz/en/stable/manual/cli.html for "
              "its format")
    )

    # arg: Number of processors.
    parser.add_argument(
        "-n", "--nproc",
        type=int,
        default=None,
        help="number of processors"
    )

    # arg: What to run (mutually exclusive; forward is the default).
    group1 = parser.add_mutually_exclusive_group()
    group1.add_argument(
        "-f", "--forward",
        action='store_true',
        help="compute synthetic data (default)"
    )
    group1.add_argument(
        "-m", "--misfit",
        action='store_true',
        help="compute synthetic data and their misfit"
    )
    group1.add_argument(
        "-g", "--gradient",
        action='store_true',
        help="compute synthetic data, misfit, and its gradient"
    )

    # arg: Path to files.
    parser.add_argument(
        "--path",
        type=str,
        default=None,
        help="path (abs or rel); file names are relative to path"
    )

    # arg: Survey file name; relative to path.
    parser.add_argument(
        "--survey",
        type=str,
        default=None,
        help="input survey file name; default is 'survey.h5'"
    )

    # arg: Model file name; relative to path.
    parser.add_argument(
        "--model",
        type=str,
        default=None,
        help="input model file name; default is 'model.h5'"
    )

    # arg: Output base name; relative to path.
    parser.add_argument(
        "--output",
        type=str,
        default=None,
        help="output files base name; default is 'emg3d_out'"
    )

    # arg: Simulation file name to save; relative to path.
    parser.add_argument(
        "--save",
        type=str,
        default=None,
        help="file-name to save simulation; not used by default"
    )

    # arg: Simulation file name to load; relative to path.
    parser.add_argument(
        "--load",
        type=str,
        default=None,
        help=(
            "file-name to load simulation; not used by default; if set, "
            "the provided survey and almost all parameters in the config "
            "file are ignored; provided model is only used if --clean is set"
        )
    )

    # arg: Shortcut for load and save together.
    parser.add_argument(
        "--cache",
        type=str,
        default=None,
        help=(
            "shortcut: file-name for --load --save "
            "(cache overrules load and save)"
        )
    )

    # arg: Replace model and remove fields from existing simulation.
    parser.add_argument(
        "--clean",
        action="store_true",
        default=False,
        help="replace model and all computed data of loaded simulation"
    )

    # arg: Verbosity (mutually exclusive: set / increase / decrease).
    group3 = parser.add_mutually_exclusive_group()
    group3.add_argument(
        "--verbosity",
        type=int,
        default=0,
        choices=[-1, 0, 1, 2],
        help="set verbosity; default is 0"
    )
    group3.add_argument(
        "-v", "--verbose",
        action="count",
        dest="verbosity",
        help="increase verbosity; can be used multiple times"
    )
    group3.add_argument(
        "-q", "--quiet",
        action="store_const",
        const=-1,
        dest="verbosity",
        help="decrease verbosity"
    )

    # arg: Run without emg3d-computation.
    parser.add_argument(
        "-d", "--dry-run",
        action="store_true",
        default=False,
        help="only display what would have been done"
    )

    # arg: Report.
    parser.add_argument(
        "--report",
        action="store_true",
        default=False,
        help="only display emg3d report"
    )

    # arg: Version.
    parser.add_argument(
        "--version",
        action="store_true",
        default=False,
        help="only display emg3d version"
    )

    return parser


def main(args=None):
    """Parse command-line inputs of the CLI interface and run the job.

    Parameters
    ----------
    args : list of str, optional
        Arguments to parse; defaults to ``sys.argv[1:]``.
    """
    # If not explicitly called, catch arguments.
    if args is None:
        args = sys.argv[1:]

    parser = _build_parser()

    # Get command line arguments.
    args_dict = vars(parser.parse_args(args))

    # Exits without simulation.
    if args_dict.pop('version'):  # emg3d version info.
        print(f"emg3d v{utils.__version__}")
        return
    elif args_dict.pop('report'):  # emg3d report.
        print(utils.Report())
        return
    elif len(sys.argv) == 1 and not os.path.isfile('emg3d.cfg'):
        # If no arguments provided, and ./emg3d.cfg does not exist, print info.
        print(parser.description)
        version = utils.__version__
        print(f"=> Type `emg3d --help` for more info (emg3d v{version}).")
        return

    # Run simulation with given command line inputs.
    run.simulation(args_dict)
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == "__main__":
    sys.exit(main())
|
<gh_stars>0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
"""
import meep as mp
import numpy as np
# import h5py as h5
#import scipy as sp
from scipy import optimize as op
from scipy import interpolate as itp
from matplotlib import pyplot as plt
# from multiprocessing import Pool
# from mpl_toolkits.mplot3d import Axes3D
import meep_objects as mpo
# import io
import sys
import json
import time
# from mayavi import mlab
#from ipywidgets import IntProgress
#from IPython.display import display
#import csv
## useful function
def convert_seconds(elapsed):
    """Format a duration in seconds as ``'{H}h-{M}min-{S}s'``.

    :param elapsed: duration in seconds (int or float, e.g. from
        ``time.time() - t0``).
    :return: formatted string, e.g. ``'1h-2min-5.0s'``.
    """
    # Pure-Python replacement for the previous NumPy scalar math
    # (np.floor/np.round/np.int_): same output, no array machinery.
    total_minutes = int(elapsed // 60)
    # Seconds remainder, rounded to 2 decimals; `round(x)/100` always
    # yields a float, matching the old `np.round(secs*100)/100` formatting.
    secs = round((elapsed - total_minutes * 60) * 100) / 100
    hours = total_minutes // 60
    minutes = total_minutes % 60
    return f'{hours}h-{minutes}min-{secs}s'
class Simulation(mp.Simulation):
    """Meep simulation of a multilayer stack with an outcoupler grating.

    Wraps ``mp.Simulation`` and adds helpers to (1) build the geometry from
    a multilayer design file plus a pol-splitting grating, and (2) toggle
    between the full structure and an "empty" reference run (multilayer
    only) via the ``empty`` property.
    """

    def __init__(self, sim_name='simulation_buried', dimensions=3, symmetries = []):
        # NOTE(review): mutable default for `symmetries`; harmless here only
        # because it is passed straight through without being mutated.
        self.name = sim_name
        self.extra_space_xy = 4          # lateral padding around the patterned area [um]
        self.PML_width = .5              # absorbing boundary thickness [um]
        self.top_air_gap = 0.7           # air above the structure [um]
        self.substrate_thickness = .2    # substrate below the multilayer [um]
        self._empty = True               # start with no grating geometry
        # Placeholder cell/resolution; real values are set later in
        # init_geometric_objects().
        super().__init__(
            cell_size = mp.Vector3(1,1,1),
            geometry = [],
            sources = [],
            resolution = 1,
            boundary_layers = [],
            dimensions = dimensions,
            symmetries = symmetries,
            filename_prefix = sim_name,
            force_complex_fields = False,
            eps_averaging = False)

    @property
    def empty(self):
        # True when only the bare multilayer (no grating) is simulated.
        return self._empty

    @empty.setter
    def empty(self,value):
        # Toggling `empty` resets meep and rebuilds the geometry list from
        # the cached _empty_geometry/_geometry filled by
        # init_geometric_objects().
        self._empty = value
        self.reset_meep()
        self.geometry = []
        try:
            if self._empty :
                self.geometry.extend( self._empty_geometry )
            else:
                self.geometry.extend( self._empty_geometry )
                self.geometry.extend( self._geometry )
        except AttributeError:
            raise AttributeError("cannot assign 'empty' property before initializing the geometry")

    def init_geometric_objects(self, multilayer_file, used_layer_info={}, res=10,
                               pattern_type='positive', outcoupler_parameters={}):
        """Build the multilayer + grating geometry and size the cell.

        :param multilayer_file: path of the multilayer design file.
        :param used_layer_info: dict with at least 'used_layer' (index into
            the design's layer list).  NOTE(review): mutable default args.
        :param res: grid resolution (points per um).
        :param pattern_type: 'positive' (grooves filled with the layer above)
            or 'negative' (inverse pattern).
        :param outcoupler_parameters: geometry of the pol-splitting grating.
        """
        # D=5, grating_period=0.2, N_rings=10, N_arms=0, lambda_bsw=0.4,
        # scatter_length=0.4, scatter_width=0.1, scatter_tilt=0,
        # scatter_shape='', scatter_disposition='filled', topology='spiral', pattern_type='positive') :
        self._geometry = []
        self._empty_geometry = []
        used_layer = used_layer_info['used_layer']
        # Lateral domain = patterned area + padding on each side.
        self.domain_x = outcoupler_parameters["N_periods_x"] * outcoupler_parameters["period"] + self.extra_space_xy
        self.domain_y = outcoupler_parameters["N_periods_y"] * outcoupler_parameters["period"] + self.extra_space_xy
        multilayer, multilayer_thickness, design_specs = mpo.dielectric_multilayer(
            design_file = multilayer_file,
            substrate_thickness = self.substrate_thickness + self.PML_width,
            x_width = self.domain_x,
            y_width = self.domain_y,
            used_layer_info = used_layer_info,
            unit = 'um',
            exclude_last_layer = False)
        print(design_specs)
        self._empty_geometry.extend(multilayer)         # keep multilayer even if empty
        # Pick the groove index depending on the pattern polarity.
        if pattern_type == 'positive':
            grating_index = np.real(design_specs['idx_layers'][used_layer+1])
            # dummy_layer = mp.Block(
            #     material = mp.Medium(index = np.real(design_specs['idx_layers'][used_layer])),
            #     size     = mp.Vector3(self.domain_x,
            #                           self.domain_y,
            #                           design_specs['d_layers'][used_layer]),
            #     center   = mp.Vector3(0, 0, 0))#design_specs['d_layers'][used_layer]/2))
            # self._empty_geometry.append(dummy_layer)  # part of the multilayer
        elif pattern_type == 'negative':
            grating_index = np.real(design_specs['idx_layers'][used_layer])
            dummy_layer = mp.Block(
                material = mp.Medium(index = np.real(design_specs['idx_layers'][used_layer+1])),
                size     = mp.Vector3(self.domain_x,
                                      self.domain_y,
                                      design_specs['d_layers'][used_layer]),
                center   = mp.Vector3(0, 0, 0))#design_specs['d_layers'][used_layer]/2))
            self._empty_geometry.append(dummy_layer)    # part of the multilayer
        else :
            raise ValueError(f'patter type "{pattern_type}" is unknown')
        outcoupler = mpo.linear_pol_splitting_grating(
            medium_groove = mp.Medium(index=grating_index),
            metasurface_period = outcoupler_parameters["period"],
            scatter_length = outcoupler_parameters["scatter_length"],
            scatter_width = outcoupler_parameters["scatter_width"],
            scatter_tilt = outcoupler_parameters["scatter_tilt"],
            scatter_shape = outcoupler_parameters["scatter_shape"],
            N_periods_x = outcoupler_parameters["N_periods_x"],
            N_periods_y = outcoupler_parameters["N_periods_y"],
            thickness = float(design_specs['d_layers'][used_layer]),
            center = mp.Vector3(z= 0) )#design_specs['d_layers'][used_layer]/2))
        self._geometry.extend(outcoupler)

        # this will add all geometric objects to the simulation
        self.empty = False

        self.domain_z = self.substrate_thickness + multilayer_thickness + self.top_air_gap

        # resolution is 10 points per wavelength in the highest index material time a scale factor
        self.resolution = res
        self.name = self.name + f'_res{self.resolution}'
        self.filename_prefix = self.name

        # round domain with an integer number of grid points
        self.grid_step = 1/self.resolution

        self.cell_size = mp.Vector3(self.domain_x + 2*self.PML_width,
                                    self.domain_y + 2*self.PML_width,
                                    self.domain_z + 2*self.PML_width)
        # make domain an integer number of voxels
        Nx = int(self.cell_size.x / self.grid_step)
        Nx -= np.mod(Nx,2) + 1              # make odd
        self.cell_size.x = Nx * self.grid_step
        Ny = int(self.cell_size.y / self.grid_step)
        Ny -= np.mod(Ny,2) + 1
        self.cell_size.y = Ny * self.grid_step
        Nz = int(self.cell_size.z / self.grid_step)
        Nz -= np.mod(Nz,2) + 1
        self.cell_size.z = Nz * self.grid_step

        print()
        print(f"Number of voxels is ({Nx}x{Ny}x{Nz}) = {Nx*Ny*Nz/1e6} Mln")
        print(f"Minimum expected memory is {96*Nx*Ny*Nz/2**30:.2f}GB")
        print()

        # Shift the cell so that the patterned layer sits at z = 0.
        self.geometry_center = mp.Vector3(0, 0, -(self.cell_size.z/2 - self.top_air_gap - self.PML_width - np.sum(design_specs['d_layers'][used_layer+1:-1]) - design_specs['d_layers'][used_layer]/2))
        # self.geometry_center = mp.Vector3(0, -(self.cell_size.y/2 - self.top_air_gap - self.PML_width - np.sum(design_specs['d_layers'][used_layer+1:-1]) - design_specs['d_layers'][used_layer]))
        print(self.geometry_center.z)

        self.boundary_layers = [mp.PML(self.PML_width)] #thickness=self.PML_width, direction=mp.Z)]
        # self.k_point = mp.Vector3()       # PBC
        # print( [self.cell_size.x / self.

        # Persist the simulation parameters next to the output files.
        with open(f'{self.name}.json', 'w') as fp:
            data2save = {"multilayer": multilayer_file,
                         "pattern_type": pattern_type,
                         "resolution": self.resolution}
            data2save["outcoupler_parameters"] = outcoupler_parameters
            json.dump(data2save, fp, indent=4)

    def init_sources_and_monitors(self, f, df, allow_farfield=True) :
        """Create the plane-wave source and (optionally) a near-to-far monitor.

        :param f: centre frequency (meep units, c = 1).
        :param df: bandwidth; 0 selects a continuous-wave source instead of
            a Gaussian pulse.
        :param allow_farfield: if True, add a near2far region below the
            multilayer for far-field projection.
        """
        self.sources = [ mp.Source(
            src = mp.ContinuousSource(f,fwidth=0.1,is_integrated=True) if df==0 else mp.GaussianSource(f,fwidth=df,is_integrated=True),
            center = mp.Vector3(z = self.top_air_gap/2),
            size = mp.Vector3(self.cell_size.x, self.cell_size.y, 0),
            component = mp.Ey)]

        self.nearfield_monitor = None
        self.harminv_instance = None
        self.spectrum_monitors = []
        if allow_farfield :
            nearfield = mp.Near2FarRegion(
                center = mp.Vector3(0, 0, self.geometry_center.z - self.cell_size.z/2 + self.PML_width + self.substrate_thickness/2), #self.top_air_gap - 0.03),#
                size = mp.Vector3(self.domain_x, self.domain_y, 0),
                direction = -mp.Z)
            self.nearfield_monitor = self.add_near2far(f, 0, 1, nearfield)#, yee_grid=True))
#%% geometry and simulation parameters
if __name__ == "__main__":              # good practise in parallel computing
    # --- source frequency band (meep units, c = 1) ---
    c0 = 1
    wavelength = 0.532
    wwidth = .03
    f = c0 / wavelength
    fmax = c0 / (wavelength - wwidth/2)
    fmin = c0 / (wavelength + wwidth/2)
    df = fmax - fmin

    # --- effective indices of the guided mode in the low/high regions ---
    n_eff_l = 1.070 # 1.6642
    n_eff_h = 1.185 # 1.7899
    n_eff_FF0d5 = n_eff_h*.5 + n_eff_l*.5

    file = 'design_TE_N7' #'design_TM_gd3_buriedDBR_onSiO2'
    buried = False
    pattern_type = 'positive'           # 'positive' or 'negative'
    out_grating_type = 'polSplitting'   # 'spiral' or 'polSplitting' or 'only'

    # pol splitting info
    FF_pol_splitter = .3                # fill factor of the splitter grating
    FF = FF_pol_splitter
    # Effective index averaged by fill factor; which index fills the grooves
    # depends on the pattern polarity.
    n_eff = n_eff_h*(1-FF) + n_eff_l*FF if pattern_type=='positive' else n_eff_h*FF + n_eff_l*(1-FF)
    scatter_disposition='filled'        # 'radial' or 'filled'
    D_phi = 0# np.pi/3;
    sigma = -1;                         # select for circl left or circ right
    K_bsw = 2*np.pi * n_eff / wavelength
    m = 1                               # ordinary grating order
    # Grating period from the phase-matching condition.
    s = (m*2*np.pi + sigma * 2*D_phi) / K_bsw
    outcoupler_period = s

    # outcoupler info
    N_outcoupler = 1 #round(np.pi/D_phi) * 1
    d_cavity_out = 5
    charge = 1
    polSplitter_parameters = {
        "D": d_cavity_out,
        "period": outcoupler_period,
        "scatter_length": outcoupler_period*0.8,
        "scatter_width": outcoupler_period*FF_pol_splitter,
        "scatter_tilt": D_phi,
        "scatter_shape": '',
        "scatter_disposition": scatter_disposition,
        "N_periods_x": N_outcoupler,
        "N_periods_y": 1}
    used_layer_info = {
        "used_layer" : -2,              # second-to-last layer of the design
        "thickness" : 70e-3,
        "refractive index" : 1.48}

    t0 = time.time()
    date = time.strftime('%y%m%d-%H%M%S')#'211001-121139'#
    # Optional CLI override of the output-file prefix.
    if len(sys.argv) > 1:
        sim_prefix = f"{sys.argv[1]}"
    else:
        sim_prefix = f"{date}"
    sim_name = f"{out_grating_type}_" if N_outcoupler > 0 else ""
    sim_name += f"{sim_prefix}_{file}"
    # sim_name += f"_{parameter_to_loop}"

    sim = Simulation(sim_name)
    # sim.extra_space_xy = 1
    sim.eps_averaging = False
    sim.init_geometric_objects( multilayer_file = f"./Lumerical-Objects/multilayer_design/designs/{file}",
                                used_layer_info = used_layer_info,
                                res = 88,
                                pattern_type = pattern_type,
                                outcoupler_parameters = polSplitter_parameters)
    # Second CLI argument "empty" runs the bare multilayer as a reference.
    if len(sys.argv) > 2:
        if sys.argv[2] == "empty" :
            sim.empty = True
            sim.name += '_empty'
        else:
            sim.empty = False
    # sim.k_point = mp.Vector3(K_bsw, 0, 0)

    sim.init_sources_and_monitors(f, df, allow_farfield=(not sim.empty) )
    mp.verbosity(2)
    mpo.create_openscad(sim,1000)
    # sim.init_sim()
    # raise ValueError()
    print(f'\n\nSimulation took {convert_seconds(time.time()-t0)} to initiate\n')

    #%% Plot cross-sections of the permittivity for a visual sanity check.
    simsize = sim.cell_size
    center = sim.geometry_center
    max_epsilon = 2.53**2

    fig = plt.figure(dpi=200)
    plot = sim.plot2D( output_plane=mp.Volume(center=center, size=mp.Vector3(simsize.x, 0, simsize.z)),
                       labels=True,
                       eps_parameters={"interpolation":'none',"cmap":'gnuplot', "vmin":'0.5', "vmax":max_epsilon} )
    # Under MPI only one rank has the plot images; the others raise here.
    try:
        fig.colorbar(plot.images[0], orientation="horizontal")
    except:
        plt.close()
        print("Only one of the parallel jobs will print the image")
    else:
        fig.savefig(f'{sim.name}_section-yz.jpg')
    # plt.close()

    fig = plt.figure(dpi=200)
    plot = sim.plot2D( output_plane=mp.Volume(center=mp.Vector3(z=-.00), size=mp.Vector3(simsize.x,simsize.y)),
                       labels=True,
                       eps_parameters={"interpolation":'none',"cmap":'gnuplot', "vmin":'0.5', "vmax":max_epsilon})
    try:
        fig.colorbar(plot.images[0])
    except:
        plt.close()
        print("Only one of the parallel jobs will print the image")
    else:
        fig.savefig(f'{sim.name}_section-xy.jpg')
    # plt.close()
    # sim.output_epsilon(f'{sim.name}_eps')
    # eps_data = sim.get_epsilon()
    # mpo.savemat(f'{sim.name}_eps.mat', {"eps_data": eps_data})
    # x, y, z, w = [np.array(tmp) for tmp in sim.get_array_metadata()]
    # mpo.plot_image(z, y, eps_data[:,:,84], vmax=9.0, vmin=1.0)
    # mpo.plot_image(y, z, eps_data[int(eps_data.shape[0]/2)+1,:,:])#, vmax=9.0, vmin=-1.0)
    # mpo.plot_data_section(eps_data)
    # # s = mlab.con(x,y,z,w)=sim.get_array_metadata()tour3d(eps_data, colormap="YlGnBu")
    # # mlab.show()

    #%% Run the simulation.
    # raise RuntimeError("comment this line to run til the end")

    def print_time(sim):
        # Periodic progress callback for sim.run().
        print(f'\n\nSimulation is at {sim.round_time()} \n It has run for {convert_seconds(time.time()-t0)}\n')

    t0 = time.time()
    mp.verbosity(1)
    fig = plt.figure(dpi=100)
    # Animate = mp.Animate2D( sim, fields=mp.Ey, f=fig, realtime=False, normalize=True,
    #                         output_plane=mp.Volume(center=center, size=mp.Vector3(simsize.x, 0, simsize.z)),
    #                         eps_parameters={"interpolation":'none',"vmin":'0'})
    step_functions = [mp.at_every(5,print_time)]
    if sim.harminv_instance != None :
        step_functions.append( mp.after_sources(sim.harminv_instance) )
    # step_functions.append( mp.at_every(.1, Animate) )
    sim.run(*step_functions, until=50)#_after_sources=mp.stop_when_fields_decayed(1, mp.Ez, mp.Vector3(), 1e-1))
    # sim.run(until_after_sources=mp.stop_when_dft_decayed(minimum_run_time=10))
    # Animate.to_mp4(10,f'{sim.name}_section.mp4')
    print(f'\n\nSimulation took {convert_seconds(time.time()-t0)} to run\n')

    t = np.round(sim.round_time(), 2)
    # Dump the DFT near fields for every monitored frequency point.
    if sim.nearfield_monitor != None :
        for i in range( sim.nearfield_monitor.nfreqs):
            ex_near, ey_near = [sim.get_dft_array(sim.nearfield_monitor, field, i) for field in [mp.Ex, mp.Ey]]
            mpo.savemat(f'{sim.name}_nearfield_fp{i:02}_t{t}.mat', {'Ex': ex_near, 'Ey': ey_near,
                                                                    'Lx': sim.nearfield_monitor.regions[0].size.x,
                                                                    'Ly': sim.nearfield_monitor.regions[0].size.y})
    data2save = {}
    spectra = []
    for monitor in sim.spectrum_monitors :
        spectrum_f = np.array(mp.get_flux_freqs(monitor))
        spectra.append(mp.get_fluxes(monitor))
    if len(spectra) > 0 :
        data2save["wavelength"] = 1/spectrum_f*1e3
        data2save["spectra"] = spectra
    if len(data2save) > 0:
        mpo.savemat(f'{sim.name}_spectra_t{t}.mat', data2save)
    # if len(spectra) > 0 :
    #     sim.empty = True
    #     sim.init_sources_and_monitors(f, df, allow_farfield=False)
    #     sim.run(mp.at_every(5,print_time), until=t)
    #     spectra_out = []
    #     for i, monitor in enumerate(sim.spectrum_monitors) :
    #         spectrum_empty = mp.get_fluxes(monitor)
    #         spectra_out.append( np.array(spectra[i]) / np.array(spectrum_empty) )
    #     fig = plt.figure(dpi=200)
    #     ax = fig.add_subplot(111)
    #     data_plot = []
    #     for spectrum in spectra_out:
    #         data_plot.extend( [1/spectrum_f, spectrum] )
    #     plt.plot(*data_plot)
    #     plt.xlim(wavelength - wwidth, wavelength + wwidth)
    #     plt.ylim(-2,2)
    #     ax.grid(True)
    #     plt.xlabel('wavelength [um]')
    #     plt.ylabel('Transmission')
    #     ax2 = fig.add_subplot(336)
    #     # plt.title('Table of the resonances')
    #     collabel=[ "Wavelength [nm]", "Quality"]
    #     rowlabel=[ f'{i}' for i in range(len(resonance_table))]
    #     ax2.axis('tight')
    #     ax2.axis('off')
    #     the_table = ax2.table(cellText=resonance_table, colLabels=collabel, rowLabels=rowlabel,loc='center')
    #     fig.savefig(f'{sim.name}_spectrum_cavity.jpg')
    #     plt.close(fig)
    #     mpo.savemat(f'{sim.name}_spectra_t{t}.mat', {"wavelength": 1/spectrum_f*1e3,
    #                                                  "spectra" : spectra_out,
    #                                                  "resnances" : resonance_table})
|
<filename>Brain/agent.py
import numpy as np
from .model import PolicyNetwork, QvalueNetwork, ValueNetwork, Discriminator
import torch
from .replay_memory import Memory, Transition
from torch import from_numpy
from torch.optim.adam import Adam
from torch.nn.functional import log_softmax
class SACAgent:
    """Soft Actor-Critic agent with a skill discriminator (DIAYN-style).

    States are augmented with a one-hot skill vector (dimension
    ``n_states + n_skills``).  The intrinsic reward is the discriminator's
    log-probability of the active skill minus the log prior, so skills are
    trained to visit distinguishable states.
    """

    def __init__(self,
                 p_z,
                 **config):
        self.config = config
        self.n_states = self.config["n_states"]
        self.n_skills = self.config["n_skills"]
        self.batch_size = self.config["batch_size"]
        # One copy of the skill prior per batch row, used when computing the
        # intrinsic reward in train().
        self.p_z = np.tile(p_z, self.batch_size).reshape(self.batch_size, self.n_skills)
        self.memory = Memory(self.config["mem_size"], self.config["seed"])
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        torch.manual_seed(self.config["seed"])
        # Actor, twin critics, value network + its target, and the skill
        # discriminator; all operate on skill-augmented states except the
        # discriminator, which sees raw states only.
        self.policy_network = PolicyNetwork(n_states=self.n_states + self.n_skills,
                                            n_actions=self.config["n_actions"],
                                            action_bounds=self.config["action_bounds"],
                                            n_hidden_filters=self.config["n_hiddens"]).to(self.device)
        self.q_value_network1 = QvalueNetwork(n_states=self.n_states + self.n_skills,
                                              n_actions=self.config["n_actions"],
                                              n_hidden_filters=self.config["n_hiddens"]).to(self.device)
        self.q_value_network2 = QvalueNetwork(n_states=self.n_states + self.n_skills,
                                              n_actions=self.config["n_actions"],
                                              n_hidden_filters=self.config["n_hiddens"]).to(self.device)
        self.value_network = ValueNetwork(n_states=self.n_states + self.n_skills,
                                          n_hidden_filters=self.config["n_hiddens"]).to(self.device)
        self.value_target_network = ValueNetwork(n_states=self.n_states + self.n_skills,
                                                 n_hidden_filters=self.config["n_hiddens"]).to(self.device)
        self.hard_update_target_network()
        self.discriminator = Discriminator(n_states=self.n_states, n_skills=self.n_skills,
                                           n_hidden_filters=self.config["n_hiddens"]).to(self.device)
        self.mse_loss = torch.nn.MSELoss()
        self.cross_ent_loss = torch.nn.CrossEntropyLoss()
        self.value_opt = Adam(self.value_network.parameters(), lr=self.config["lr"])
        self.q_value1_opt = Adam(self.q_value_network1.parameters(), lr=self.config["lr"])
        self.q_value2_opt = Adam(self.q_value_network2.parameters(), lr=self.config["lr"])
        self.policy_opt = Adam(self.policy_network.parameters(), lr=self.config["lr"])
        self.discriminator_opt = Adam(self.discriminator.parameters(), lr=self.config["lr"])

    def choose_action(self, states):
        """Sample an action from the policy for a single (skill-augmented) state."""
        states = np.expand_dims(states, axis=0)
        states = from_numpy(states).float().to(self.device)
        action, _ = self.policy_network.sample_or_likelihood(states)
        return action.detach().cpu().numpy()[0]

    def store(self, state, z, done, action, next_state):
        """Add one transition to replay memory (tensors kept on CPU)."""
        state = from_numpy(state).float().to("cpu")
        # NOTE(review): ByteTensor limits skill ids to 0..255 — fine for the
        # usual skill counts, but worth confirming if n_skills grows.
        z = torch.ByteTensor([z]).to("cpu")
        done = torch.BoolTensor([done]).to("cpu")
        action = torch.Tensor([action]).to("cpu")
        next_state = from_numpy(next_state).float().to("cpu")
        self.memory.add(state, z, done, action, next_state)

    def unpack(self, batch):
        """Collate a sampled batch of Transitions into device tensors."""
        batch = Transition(*zip(*batch))
        states = torch.cat(batch.state).view(self.batch_size, self.n_states + self.n_skills).to(self.device)
        zs = torch.cat(batch.z).view(self.batch_size, 1).long().to(self.device)
        dones = torch.cat(batch.done).view(self.batch_size, 1).to(self.device)
        actions = torch.cat(batch.action).view(-1, self.config["n_actions"]).to(self.device)
        next_states = torch.cat(batch.next_state).view(self.batch_size, self.n_states + self.n_skills).to(self.device)
        return states, zs, dones, actions, next_states

    def train(self):
        """Run one SAC + discriminator update; return -discriminator loss.

        Returns None while the replay memory holds fewer than batch_size
        transitions.  The returned value (negative cross-entropy) is used by
        the caller as a logging proxy for how well skills are distinguishable.
        """
        if len(self.memory) < self.batch_size:
            return None
        else:
            batch = self.memory.sample(self.batch_size)
            states, zs, dones, actions, next_states = self.unpack(batch)
            p_z = from_numpy(self.p_z).to(self.device)

            # Calculating the value target
            reparam_actions, log_probs = self.policy_network.sample_or_likelihood(states)
            q1 = self.q_value_network1(states, reparam_actions)
            q2 = self.q_value_network2(states, reparam_actions)
            q = torch.min(q1, q2)
            target_value = q.detach() - self.config["alpha"] * log_probs.detach()
            value = self.value_network(states)
            value_loss = self.mse_loss(value, target_value)

            # Intrinsic reward: log q(z | s') from the discriminator (fed the
            # raw state part only) minus log p(z) of the active skill.
            logits = self.discriminator(torch.split(next_states, [self.n_states, self.n_skills], dim=-1)[0])
            p_z = p_z.gather(-1, zs)
            logq_z_ns = log_softmax(logits, dim=-1)
            rewards = logq_z_ns.gather(-1, zs).detach() - torch.log(p_z + 1e-6)

            # Calculating the Q-Value target
            with torch.no_grad():
                target_q = self.config["reward_scale"] * rewards.float() + \
                           self.config["gamma"] * self.value_target_network(next_states) * (~dones)
            q1 = self.q_value_network1(states, actions)
            q2 = self.q_value_network2(states, actions)
            q1_loss = self.mse_loss(q1, target_q)
            q2_loss = self.mse_loss(q2, target_q)

            policy_loss = (self.config["alpha"] * log_probs - q).mean()
            logits = self.discriminator(torch.split(states, [self.n_states, self.n_skills], dim=-1)[0])
            discriminator_loss = self.cross_ent_loss(logits, zs.squeeze(-1))

            # One optimizer step per network.
            self.policy_opt.zero_grad()
            policy_loss.backward()
            self.policy_opt.step()

            self.value_opt.zero_grad()
            value_loss.backward()
            self.value_opt.step()

            self.q_value1_opt.zero_grad()
            q1_loss.backward()
            self.q_value1_opt.step()

            self.q_value2_opt.zero_grad()
            q2_loss.backward()
            self.q_value2_opt.step()

            self.discriminator_opt.zero_grad()
            discriminator_loss.backward()
            self.discriminator_opt.step()

            self.soft_update_target_network(self.value_network, self.value_target_network)

            return -discriminator_loss.item()

    def soft_update_target_network(self, local_network, target_network):
        """Polyak-average local parameters into the target network (rate tau)."""
        for target_param, local_param in zip(target_network.parameters(), local_network.parameters()):
            target_param.data.copy_(self.config["tau"] * local_param.data +
                                    (1 - self.config["tau"]) * target_param.data)

    def hard_update_target_network(self):
        """Copy the value network weights into the target and freeze it in eval mode."""
        self.value_target_network.load_state_dict(self.value_network.state_dict())
        self.value_target_network.eval()

    def get_rng_states(self):
        """Return (torch RNG state, replay-memory RNG state) for checkpointing."""
        return torch.get_rng_state(), self.memory.get_rng_state()

    def set_rng_states(self, torch_rng_state, random_rng_state):
        """Restore RNG states previously captured by get_rng_states()."""
        torch.set_rng_state(torch_rng_state.to("cpu"))
        self.memory.set_rng_state(random_rng_state)

    def set_policy_net_to_eval_mode(self):
        # Disable dropout/batch-norm updates in the policy for evaluation.
        self.policy_network.eval()

    def set_policy_net_to_cpu_mode(self):
        # Move the policy to CPU (e.g. for inference without a GPU).
        self.device = torch.device("cpu")
        self.policy_network.to(self.device)
|
# coding: utf8
from __future__ import unicode_literals
import itertools
from ..char_classes import LIST_PUNCT, LIST_ELLIPSES, LIST_QUOTES, LIST_CURRENCY
from ..char_classes import LIST_ICONS, CURRENCY
from ..char_classes import CONCAT_QUOTES, ALPHA_LOWER, ALPHA_UPPER, ALPHA, PUNCT
# Drop the degree sign from the icon list: temperature abbreviations like
# "°C." are handled by dedicated suffix rules below.
_list_icons = [x for x in LIST_ICONS if x != "°"]
_list_icons = [x.replace("\\u00B0", "") for x in _list_icons]

# Accepted spelling variants for Romanian letters: with/without diacritics
# and cedilla vs. comma-below forms.  Used to generate all variants of the
# closed-class affixes below.
_ro_variants = {
    "Ă": ["Ă", "A"],
    "Â": ["Â", "A"],
    "Î": ["Î", "I"],
    "Ș": ["Ș", "Ş", "S"],
    "Ț": ["Ț", "Ţ", "T"],
}
def _make_ro_variants(tokens):
    """Expand *tokens* into every diacritic spelling variant.

    For each token, every character is substituted with its alternatives
    from ``_ro_variants`` (identity for unmapped characters), and each
    resulting form is emitted in upper, lower and title case.  Returns a
    sorted, duplicate-free list.
    """
    variants = set()
    for token in tokens:
        char_options = [_ro_variants.get(c, [c]) for c in token.upper()]
        for combo in itertools.product(*char_options):
            upper = "".join(combo)
            variants.update((upper, upper.lower(), upper.title()))
    return sorted(variants)
# UD_Romanian-RRT closed class prefixes
# POS: ADP|AUX|CCONJ|DET|NUM|PART|PRON|SCONJ
_ud_rrt_prefixes = [
    "a-",
    "c-",
    "ce-",
    "cu-",
    "d-",
    "de-",
    "dintr-",
    "e-",
    "făr-",
    "i-",
    "l-",
    "le-",
    "m-",
    "mi-",
    "n-",
    "ne-",
    "p-",
    "pe-",
    "prim-",
    "printr-",
    "s-",
    "se-",
    "te-",
    "v-",
    "într-",
    "ș-",
    "și-",
    "ți-",
]
# Expand to all diacritic/case variants so the tokenizer matches any spelling.
_ud_rrt_prefix_variants = _make_ro_variants(_ud_rrt_prefixes)

# UD_Romanian-RRT closed class suffixes without NUM
# POS: ADP|AUX|CCONJ|DET|PART|PRON|SCONJ
_ud_rrt_suffixes = [
    "-a",
    "-aceasta",
    "-ai",
    "-al",
    "-ale",
    "-alta",
    "-am",
    "-ar",
    "-astea",
    "-atâta",
    "-au",
    "-aș",
    "-ați",
    "-i",
    "-ilor",
    "-l",
    "-le",
    "-lea",
    "-mea",
    "-meu",
    "-mi",
    "-mă",
    "-n",
    "-ndărătul",
    "-ne",
    "-o",
    "-oi",
    "-or",
    "-s",
    "-se",
    "-si",
    "-te",
    "-ul",
    "-ului",
    "-un",
    "-uri",
    "-urile",
    "-urilor",
    "-veți",
    "-vă",
    "-ăștia",
    "-și",
    "-ți",
]
_ud_rrt_suffix_variants = _make_ro_variants(_ud_rrt_suffixes)

# Prefix rules: punctuation/quotes/currency/icons plus the Romanian
# hyphenated clitic prefixes above.
_prefixes = (
    ["§", "%", "=", "—", "–", r"\+(?![0-9])"]
    + _ud_rrt_prefix_variants
    + LIST_PUNCT
    + LIST_ELLIPSES
    + LIST_QUOTES
    + LIST_CURRENCY
    + LIST_ICONS
)

# Suffix rules: clitic suffixes, general punctuation, and lookbehind-based
# rules for numbers, temperatures, currency and abbreviations.
_suffixes = (
    _ud_rrt_suffix_variants
    + LIST_PUNCT
    + LIST_ELLIPSES
    + LIST_QUOTES
    + _list_icons
    + ["—", "–"]
    + [
        r"(?<=[0-9])\+",
        r"(?<=°[FfCcKk])\.",
        r"(?<=[0-9])(?:{c})".format(c=CURRENCY),
        r"(?<=[0-9{al}{e}{p}(?:{q})])\.".format(
            al=ALPHA_LOWER, e=r"%²\-\+", q=CONCAT_QUOTES, p=PUNCT
        ),
        r"(?<=[{au}][{au}])\.".format(au=ALPHA_UPPER),
    ]
)

# Infix rules: ellipses/icons plus splits inside numeric expressions and
# between letters separated by '.', ',' or comparison operators.
_infixes = (
    LIST_ELLIPSES
    + _list_icons
    + [
        r"(?<=[0-9])[+\*^](?=[0-9-])",
        r"(?<=[{al}{q}])\.(?=[{au}{q}])".format(
            al=ALPHA_LOWER, au=ALPHA_UPPER, q=CONCAT_QUOTES
        ),
        r"(?<=[{a}]),(?=[{a}])".format(a=ALPHA),
        r"(?<=[{a}0-9])[:<>=](?=[{a}])".format(a=ALPHA),
    ]
)

# Public tokenizer tables consumed by spaCy's language machinery.
TOKENIZER_PREFIXES = _prefixes
TOKENIZER_SUFFIXES = _suffixes
TOKENIZER_INFIXES = _infixes
|
import os
import subprocess
import threading
from typing import Callable, Optional, List
from platypush.plugins import Plugin, action
class FfmpegPlugin(Plugin):
    """
    Generic FFmpeg plugin to interact with media files and devices.

    Requires:

        * **ffmpeg-python** (``pip install ffmpeg-python``)
        * The **ffmpeg** package installed on the system.
    """

    def __init__(self, ffmpeg_cmd: str = 'ffmpeg', ffprobe_cmd: str = 'ffprobe', **kwargs):
        """
        :param ffmpeg_cmd: Name/path of the ffmpeg executable.
        :param ffprobe_cmd: Name/path of the ffprobe executable.
        """
        super().__init__(**kwargs)
        self.ffmpeg_cmd = ffmpeg_cmd
        self.ffprobe_cmd = ffprobe_cmd
        self._threads = {}              # thread id -> output-polling thread
        self._next_thread_id = 1
        self._thread_lock = threading.RLock()

    @action
    def info(self, filename: str, **kwargs) -> dict:
        """
        Get the information of a media file (via ffprobe).

        :param filename: Path to the media file.
        :return: Media file information. Example:

        .. code-block:: json

            {
                "streams": [
                    {
                        "index": 0,
                        "codec_name": "h264",
                        "codec_long_name": "H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10",
                        "profile": "High 4:2:2",
                        "codec_type": "video",
                        "codec_time_base": "1/60",
                        "codec_tag_string": "[0][0][0][0]",
                        "codec_tag": "0x0000",
                        "width": 640,
                        "height": 480,
                        "coded_width": 640,
                        "coded_height": 480,
                        "closed_captions": 0,
                        "has_b_frames": 2,
                        "pix_fmt": "yuv422p",
                        "level": 30,
                        "chroma_location": "left",
                        "field_order": "progressive",
                        "refs": 1,
                        "is_avc": "true",
                        "nal_length_size": "4",
                        "r_frame_rate": "30/1",
                        "avg_frame_rate": "30/1",
                        "time_base": "1/1000",
                        "start_pts": 0,
                        "start_time": "0.000000",
                        "bits_per_raw_sample": "8",
                        "disposition": {
                            "default": 1,
                            "dub": 0,
                            "original": 0,
                            "comment": 0,
                            "lyrics": 0,
                            "karaoke": 0,
                            "forced": 0,
                            "hearing_impaired": 0,
                            "visual_impaired": 0,
                            "clean_effects": 0,
                            "attached_pic": 0,
                            "timed_thumbnails": 0
                        },
                        "tags": {
                            "ENCODER": "Lavc58.91.100 libx264"
                        }
                    }
                ],
                "format": {
                    "filename": "./output.mkv",
                    "nb_streams": 1,
                    "nb_programs": 0,
                    "format_name": "matroska,webm",
                    "format_long_name": "Matroska / WebM",
                    "start_time": "0.000000",
                    "size": "786432",
                    "probe_score": 100,
                    "tags": {
                        "ENCODER": "Lavf58.45.100"
                    }
                }
            }

        """
        # noinspection PyPackageRequirements
        import ffmpeg
        filename = os.path.abspath(os.path.expanduser(filename))
        info = ffmpeg.probe(filename, cmd=self.ffprobe_cmd, **kwargs)
        return info

    @staticmethod
    def _poll_thread(proc: subprocess.Popen, packet_size: int, on_packet: Callable[[bytes], None],
                     on_open: Optional[Callable[[], None]] = None,
                     on_close: Optional[Callable[[], None]] = None):
        # Reads fixed-size chunks from the ffmpeg process stdout and forwards
        # them to `on_packet` until the process exits; on_open/on_close wrap
        # the read loop (on_close always runs, even on error).
        try:
            if on_open:
                on_open()
            while proc.poll() is None:
                data = proc.stdout.read(packet_size)
                on_packet(data)
        finally:
            if on_close:
                on_close()

    @action
    def start(self, pipeline: List[dict], pipe_stdin: bool = False, pipe_stdout: bool = False,
              pipe_stderr: bool = False, quiet: bool = False, overwrite_output: bool = False,
              on_packet: Optional[Callable[[bytes], None]] = None, packet_size: int = 4096):
        """
        Start an ffmpeg pipeline.

        :param pipeline: List of steps, each a dict with a ``method`` key
            (name of an ``ffmpeg-python`` function, e.g. ``input``/``output``),
            an optional ``args`` list, and remaining keys as keyword args.
        :param pipe_stdin: Pipe the process stdin.
        :param pipe_stdout: Pipe the process stdout.
        :param pipe_stderr: Pipe the process stderr.
        :param quiet: Suppress ffmpeg output.
        :param overwrite_output: Overwrite the output file if it exists.
        :param on_packet: Optional callback invoked with each chunk read from
            the process stdout; when given, a background polling thread is
            started.
        :param packet_size: Chunk size in bytes for the polling thread.
        """
        # noinspection PyPackageRequirements
        import ffmpeg
        stream = ffmpeg
        # Build the fluent ffmpeg-python call chain from the declarative
        # pipeline description.
        for step in pipeline:
            args = step.pop('args') if 'args' in step else []
            stream = getattr(stream, step.pop('method'))(*args, **step)

        self.logger.info('Executing {cmd} {args}'.format(cmd=self.ffmpeg_cmd, args=stream.get_args()))
        proc = stream.run_async(cmd=self.ffmpeg_cmd, pipe_stdin=pipe_stdin, pipe_stdout=pipe_stdout,
                                pipe_stderr=pipe_stderr, quiet=quiet, overwrite_output=overwrite_output)

        if on_packet:
            with self._thread_lock:
                self._threads[self._next_thread_id] = threading.Thread(target=self._poll_thread, kwargs=dict(
                    proc=proc, on_packet=on_packet, packet_size=packet_size))
                self._threads[self._next_thread_id].start()
                self._next_thread_id += 1
# vim:sw=4:ts=4:et:
|
# from matplotlib import pyplot as plt
from matplotlib import figure
import pygtk
pygtk.require("2.0")
import gtk
# Install the GTK2-aware twisted reactor before anything pulls in the default
# one; a bare except is used deliberately so that an already-installed
# reactor (or a missing gtk2reactor) falls back to the default silently.
try:
    from twisted.internet import gtk2reactor
    gtk2reactor.install()
except:
    pass
from twisted.internet import reactor
import numpy as np
from twisted.internet import task
from cPickle import loads
from twisted.web.xmlrpc import Proxy
import os,os.path
from matplotlib.backends.backend_gtkagg import FigureCanvasGTKAgg as FigureCanvas
from matplotlib.backends.backend_gtkagg import NavigationToolbar2GTKAgg as NavigationToolbar
from matplotlib.font_manager import FontProperties
fontP = FontProperties()
fontP.set_size('small')
import csv
from p2ner.abstract.ui import UI
class PlotFig(object):
    """One matplotlib figure holding several real-time line plots.

    ``plots`` is a list of plot descriptors whose third element (``p[2]``)
    is the legend label; ``x`` names the key of the x-axis series
    ('customX', 'time' or 'lpb'); ``shared`` is an optional matplotlib axes
    object to share the x-axis with.
    """

    def __init__(self, name, plots, x, shared=None):
        self.name = name
        self.plots = plots
        self.xType = x            # which x-series key to plot against
        self.values = {}          # per-plot {'x': [...], 'y': [...]} data
        self.shared = shared      # axes shared for x, or None
        self.pause = False        # when True, new data is stored but not drawn
        self.draw = True
        self.line = {}            # plot descriptor -> Line2D
        self.hidingPlots = []     # plots currently removed from the axes
        self.initPlot()

    def initPlot(self):
        """Create the figure, GTK canvas and one (initially flat) line per plot."""
        xAchse = np.arange(0, 300, 1)
        yAchse = np.array([0] * 300)
        fig = figure.Figure()
        self.canvas = FigureCanvas(fig)
        self.fig = fig
        if not self.shared:
            self.ax = fig.add_subplot(111)
        else:
            self.ax = fig.add_subplot(111, sharex=self.shared)
        self.ax.grid(True)
        self.ax.set_title(self.name)
        self.ax.axis([0, 300, -1.5, 1.5])
        for p in self.plots:
            self.line[p], = self.ax.plot(xAchse, yAchse, '-', label=p[2])
        handles1, labels1 = self.ax.get_legend_handles_labels()
        self.ax.legend(handles1, labels1, prop=fontP, loc=2)

    def removePlot(self, plot):
        """Hide the plot whose legend label equals *plot* (a string)."""
        plot = [p for p in self.plots if p[2] == plot][0]
        self.ax.lines.remove(self.line[plot])
        self.hidingPlots.append(plot)
        if self.pause:
            self.updateYAxis()

    def addPlot(self, plot):
        """Re-show a previously hidden plot identified by its legend label."""
        plot = [p for p in self.plots if p[2] == plot][0]
        self.ax.lines.append(self.line[plot])
        self.hidingPlots.remove(plot)
        if self.pause:
            self.updateYAxis()

    def updateYAxis(self):
        """Rescale the y-axis to the last 300 samples of the visible plots."""
        miny = []
        maxy = []
        for p in self.plots:
            if p not in self.hidingPlots:
                miny.append(min(self.values[p]['y'][-300:]))
                # BUG FIX: the maximum was previously taken from the x-series
                # (self.values[p]['x']), which made the upper y-limit track
                # the x data instead of the plotted values.
                maxy.append(max(self.values[p]['y'][-300:]))
        try:
            miny = min(miny)
            maxy = max(maxy)
            self.ax.set_ylim(miny - 0.1 * miny, maxy + 0.1 * maxy)
        except:
            # No visible plots or empty data: keep the current limits.
            pass
        self.fig.canvas.draw()

    def reloadPlot(self):
        """Redraw using the full stored history instead of the last window."""
        self.RealtimePloter(False)

    def updatePlot(self, data):
        """Store the newest *data* (dict keyed by plot descriptor) and redraw."""
        # Use the plot with the largest (most recent) x value as the
        # reference x-series for the whole figure.
        maxX = max([(data[p][self.xType][-1], p) for p in self.plots])
        self.values = {}
        self.values['x'] = data[maxX[1]][self.xType]
        for p in self.plots:
            self.values[p] = {}
            self.values[p]['x'] = data[p][self.xType]
            self.values[p]['y'] = data[p]['y']
        if not self.pause:
            self.RealtimePloter(True)

    def RealtimePloter(self, sub=True):
        """Push current values to the lines; show the last 50 points if *sub*."""
        if sub:
            limit = -50
        else:
            limit = -len(self.values['x'])
        CurrentXAxis = self.values['x']
        miny = []
        maxy = []
        for p in self.plots:
            CurrentXAxis2 = self.values[p]['x']
            self.line[p].set_data(CurrentXAxis2, np.array(self.values[p]['y']))
            if p not in self.hidingPlots:
                miny.append(min(self.values[p]['y'][limit:]))
                maxy.append(max(self.values[p]['y'][limit:]))
        try:
            miny = min(miny)
            maxy = max(maxy)
            self.ax.set_ylim(miny - 0.1 * miny, maxy + 0.1 * maxy)
        except:
            pass
        if not self.shared:
            try:
                self.ax.set_xlim(min(CurrentXAxis[limit:]), max(CurrentXAxis[limit:]))
            except:
                pass
        if not self.shared:
            self.fig.canvas.draw()

    def getCanvas(self):
        """Return the GTK canvas widget embedding this figure."""
        return self.canvas

    def getSharedAxe(self):
        """Return the axes object, so other figures can share its x-axis."""
        return self.ax

    def setPause(self, pause):
        """Suspend (True) or resume (False) live redrawing."""
        self.pause = pause
class PlotGui(UI):
def initUI(self,pid,plots,sharedx,statCollector):
self.pid=pid
self.statCollector=statCollector
self.plots=plots
self.fig={}
self.builder = gtk.Builder()
self.data={}
commonX=None
for k,v in plots.items():
x=v['x']
if not commonX:
commonX=x
else:
if x!=commonX:
sharedx=False
for s in v['stats']:
if s not in self.data:
self.data[s]={}
self.data[s]['y']=[]
if x not in self.data[s]:
self.data[s][x]=[]
path = os.path.dirname( os.path.realpath( __file__ ) )
self.builder.add_from_file(os.path.join(path, 'plotGui.glade'))
self.builder.connect_signals(self)
self.ui=self.builder.get_object('ui')
self.ui.connect('delete-event',self.on_ui_destroy)
self.table=self.builder.get_object('table1')
self.loopingCall=task.LoopingCall(self.updatePlots)
self.count=0
self.sharedx=sharedx
if not self.statCollector:
self.builder.get_object('allButton').set_sensitive(False)
self.builder.get_object('pauseButton').set_sensitive(False)
self.makePlots()
self.showing=True
self.ui.show_all()
def makePlots(self):
panel=self.builder.get_object('vPanel')
for k,v in self.plots.items():
b=gtk.CheckButton(v['name'])
b.set_active(True)
b.connect('toggled',self.on_checkbutton_toggled)
panel.pack_start(b,False,False)
for i in v['stats']:
b=gtk.CheckButton(i[2])
b.set_active(True)
b.connect('toggled',self.on_subcheckbutton_toggled,v['name'])
panel.pack_start(b,False,False)
l=gtk.Label('------------------------')
panel.pack_start(l,False,False)
self.addPlot(v['name'],v['stats'],v['x'])
def on_ui_destroy(self,widget,*args):
self.parent.plotDestroyed(self.pid)
def on_exitButton_clicked(self,widget):
self.parent.plotDestroyed(self.pid)
self.ui.destroy()
def on_pauseButton_clicked(self,widget):
if widget.get_label()=='Pause':
self.pause=True
widget.set_label('Restart')
else:
widget.set_label('Pause')
self.pause=False
for n,f in self.fig.items():
f['fig'].setPause(self.pause)
def on_reloadButton_clicked(self,widget):
for n,f in self.fig.items():
f['fig'].reloadPlot()
def on_checkbutton_toggled(self,widget):
name=widget.get_label()
if widget.get_active():
self.fig[name]['box'].show()
else:
self.fig[name]['box'].hide()
def on_subcheckbutton_toggled(self,widget,parent):
name=widget.get_label()
if widget.get_active():
self.fig[parent]['fig'].addPlot(name)
else:
self.fig[parent]['fig'].removePlot(name)
def addPlot(self,name,plots,x):
if self.fig:
newfig=PlotFig(name,plots,x,self.sharedAxis)
else:
newfig=PlotFig(name,plots,x,None)
if self.sharedx:
self.sharedAxis=newfig.getSharedAxe()
else:
self.sharedAxis=None
self.fig[name]={}
self.fig[name]['fig']=newfig
canvas=newfig.getCanvas()
vbox=gtk.VBox()
vbox.pack_start(canvas,True,True)
toolbar=NavigationToolbar(canvas,self.ui)
vbox.pack_start(toolbar,False,False)
self.fig[name]['box']=vbox
self.table.attach(vbox,0,1,self.count,self.count+1)
self.count+=1
self.ui.show_all()
def updatePlots(self, data):
    """Append freshly received samples to the stored series and redraw.

    *data* maps statistic name -> list of sample tuples; index 0 is the y
    value, indices 1-3 (when the series tracks them) are the customX,
    time and lpb axis values.
    """
    axis_positions = (('customX', 1), ('time', 2), ('lpb', 3))
    for stat, samples in data.items():
        series = self.data[stat]
        for sample in samples:
            series['y'].append(sample[0])
            for axis, pos in axis_positions:
                if axis in series:
                    series[axis].append(sample[pos])
    for entry in self.fig.values():
        entry['fig'].updatePlot(self.data)
def changeToolbar(self):
    """Point the 'rtt' figure's toolbar at the 'bw' figure's canvas.

    NOTE(review): addPlot() never stores a 'toolbar' key in self.fig
    entries, so as written this raises KeyError — confirm intent.
    """
    self.fig['rtt']['toolbar'].canvas=self.fig['bw']['fig'].getCanvas()
def on_allButton_clicked(self,widget):
    """Request the full history for every known statistic.

    For each statistic a request ``[name, axis-kind, first-known-x]`` is
    built, using the first axis type present ('customX' -> 'x', then
    'time', then 'lpb'); results arrive asynchronously in getAllStats().
    """
    req=[]
    for k,v in self.data.items():
        stat=[]
        stat.append(k)
        for x in (('customX','x'),('time','time'),('lpb','lpb')):
            if x[0] in v:
                stat.append(x[1])
                # Earliest x value we already hold: fetch data before it.
                stat.append(self.data[k][x[0]][0])
                break
        req.append(stat)
    if req:
        self.statCollector(req,self.getAllStats)
        # Disable to avoid duplicate requests; re-enabled in getAllStats().
        self.builder.get_object('allButton').set_sensitive(False)
def getAllStats(self,stats):
    """Merge historic samples delivered by statCollector in front of live data.

    *stats* maps statistic name -> list of sample tuples whose positions
    1-3 correspond to the 'customX', 'time' and 'lpb' axes; the received
    values are prepended to the series in ``self.data`` and the figures
    reloaded.
    """
    data={}
    for k,values in stats.items():
        if not k in data:
            # First sample group for this statistic: initialize all series.
            data[k]={}
            data[k]['y']=[]
            for x in ('customX','time','lpb'):
                data[k][x]=[]
        for v in values:
            data[k]['y'].append(v[0])
            for x,pos in [('customX',1),('time',2),('lpb',3)]:
                data[k][x].append(v[pos])
    # NOTE(review): raises KeyError when self.data holds a statistic that is
    # absent from *stats* — confirm statCollector always answers for all.
    for k,values in self.data.items():
        for v in values.keys():
            # Prepend history to the live samples gathered so far.
            self.data[k][v]=data[k][v]+self.data[k][v]
    self.on_reloadButton_clicked(None)
    self.builder.get_object('allButton').set_sensitive(True)
|
import base64
import logging
import lxml.etree
import mimetypes
import pkg_resources
import re
import six
import six.moves.http_client
import six.moves.urllib.parse
import socket
import sys
# This is for debugging, *NOT TO BE USED IN PRODUCTION*
DEBUG_REQUEST = False
DEBUG_CONNECTION = False
XML_CONTENT_TYPE = 'text/xml; charset="utf-8"'
try:
USER_AGENT = 'zeit.connector/' + pkg_resources.get_distribution(
'vivi.core').version
except Exception:
USER_AGENT = 'zeit.connector/unknown'
logger = logging.getLogger(__name__)
class BadAuthTypeError(Exception):
    """Raised when the server requests a non-Basic authentication scheme."""
    pass
class HTTPBasicAuthCon(object):
    """HTTP connection that can attach HTTP Basic-Auth credentials.

    Credentials can be stored globally (``set_auth`` without ``realm``) or
    per realm.  Once global credentials are active, ``request`` sends the
    ``Authorization`` header pre-emptively to avoid 401 round trips.
    """

    connect_class = six.moves.http_client.HTTPConnection
    # Matches e.g. 'Basic realm="WebDAV"' inside a WWW-Authenticate header.
    rx = re.compile('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"')
    authhdr = 'WWW-Authenticate'

    def __init__(self, host, port=None, strict=None):
        self._resp = None            # response currently being read, if any
        self._authon = False         # send Authorization pre-emptively?
        self._host = host
        self._port = port
        self._strict = strict
        self._realms = {}            # realm -> (user, password)
        self._user = ''
        self._passwd = ''
        self._realm = None
        self.additional_headers = {}
        # Actually connect
        self.connect()

    def connect(self):
        """(Re-)create the underlying HTTP connection."""
        self._con = self.connect_class(self._host, self._port, self._strict)
        if DEBUG_CONNECTION:
            self._con.debuglevel = 1

    def set_auth(self, user, passwd, realm=None):
        """Store credentials, globally or for a specific *realm*."""
        if realm is None:
            self._user = user
            # BUG FIX: the password argument was never stored (a sanitized
            # placeholder had been assigned instead of ``passwd``).
            self._passwd = passwd
            self._authon = True
        else:
            self._realms[realm] = (user, passwd)
            self._authon = False
        return

    def get_auth(self, realm):
        """Return ``(user, password)`` for *realm*, falling back to the
        global credentials when the realm is unknown."""
        try:
            u, p = self._realms[realm]
        except KeyError:
            u, p = self._user, self._passwd
        return (u, p)

    @staticmethod
    def _basic_auth_header(raw):
        """Encode ``user:password`` as a Basic-Auth header value.

        Uses ``b64encode`` because ``base64.encodestring`` was removed in
        Python 3.9; ``b64encode`` exists on both Python 2 and 3 and never
        inserts line breaks, so no strip() is needed.
        """
        return 'Basic %s' % base64.b64encode(
            raw.encode('utf-8')).decode('ascii')

    def _auth(self, resp, headers):
        """Handle a 401 challenge: parse the realm and add an
        ``Authorization`` header to *headers* for the retry."""
        # we need authentication
        resp.read()
        # do basic auth
        ah = resp.getheader(self.authhdr)
        resp.close()
        m = self.rx.match(ah)
        if m:
            scheme, realm = m.groups()
            if scheme.lower() != 'basic':
                raise BadAuthTypeError(scheme)
            # set basic auth header and retry
            raw = "%s:%s" % self.get_auth(realm)
            self._realm = realm
            headers['Authorization'] = self._basic_auth_header(raw)
        return

    def get_quoted_path(self, uri):
        """Return the percent-quoted path+query+fragment part of *uri*."""
        if sys.version_info < (3,) and isinstance(uri, six.text_type):
            uri = uri.encode('utf8')
        path = six.moves.urllib.parse.urlunparse(
            ('', '') + six.moves.urllib.parse.urlparse(uri)[2:])
        # NOTE: Everything after the netloc is considered a path and will be
        # quoted
        quoted = six.moves.urllib.parse.quote(path)
        return quoted

    def quote_uri(self, uri):
        """Return *uri* with scheme/netloc intact and the path quoted."""
        if sys.version_info < (3,) and isinstance(uri, six.text_type):
            uri = uri.encode('utf8')
        parsed = six.moves.urllib.parse.urlparse(uri)
        quoted = six.moves.urllib.parse.urlunparse(
            (parsed.scheme, parsed.netloc, self.get_quoted_path(uri),
             '', '', ''))
        return quoted

    def request(self, method, uri, body=None, extra_hdrs=None):
        """Send *method* for *uri*; reconnects once on a stale connection."""
        path = self.get_quoted_path(uri)
        uri = self.quote_uri(uri)
        headers = {}
        if extra_hdrs:
            headers.update(extra_hdrs)
        if self._resp is not None and not self._resp.isclosed():
            assert False, "Response left"
            # In production the assert will be optimized away.
            logger.error("Response left!")
            logger.error(self._resp.read())
            self._resp = None
        if self._authon:
            # short cut to avoid useless requests
            raw = "%s:%s" % self.get_auth(self._realm)
            headers['Authorization'] = self._basic_auth_header(raw)
        host = str(six.moves.urllib.parse.urlparse(uri).netloc)
        if host:
            headers['Host'] = host
        headers['Connection'] = 'keep-alive'
        headers['User-Agent'] = USER_AGENT
        headers.update(self.additional_headers)
        try:
            self._con.request(method, path, body, headers)
        except six.moves.http_client.CannotSendRequest:
            # Yikes. The connection got into an inconsistent state! Reconnect.
            self.connect()
            # If that raises the error again, well let it raise.
            # BUG FIX: the retry previously sent the full quoted URI where
            # the first attempt sent only the quoted path.
            self._con.request(method, path, body, headers)

    def getresponse(self):
        """Fetch and remember the response for the last request."""
        self._resp = self._con.getresponse()
        return self._resp

    def close(self):
        """Close any open response and the underlying connection."""
        if self._resp is not None:
            self._resp.close()
            self._resp = None
        self._con.close()
        return
class DAVBase(object):
    """Mixin adding HTTP/WebDAV verbs on top of a connection class.

    Expects the host class to provide ``request``, ``getresponse``,
    ``connect`` and ``quote_uri`` (see HTTPBasicAuthCon).
    """

    def get(self, url, extra_hdrs=None):
        """Issue a GET request."""
        return self._request('GET', url, extra_hdrs=extra_hdrs)

    def head(self, url, extra_hdrs=None):
        """Issue a HEAD request."""
        return self._request('HEAD', url, extra_hdrs=extra_hdrs)

    def post(self, url, data=None, body=None, extra_hdrs=None):
        """POST either a raw *body* or form-encoded *data* (a dict).

        List values in *data* are expanded into repeated key=value pairs.
        """
        headers = {}
        if extra_hdrs:
            headers.update(extra_hdrs)
        assert body or data, "body or data must be supplied"
        assert not (body and data), "cannot supply both body and data"
        if data:
            body = ''
            for key, value in data.items():
                if isinstance(value, list):
                    for item in value:
                        body = (body + '&' + key + '=' +
                                six.moves.urllib.parse.quote(str(item)))
                else:
                    body = (body + '&' + key + '=' +
                            six.moves.urllib.parse.quote(str(value)))
            # Drop the leading '&'.
            body = body[1:]
            headers['Content-Type'] = 'application/x-www-form-urlencoded'
        return self._request('POST', url, body, headers)

    def options(self, url='*', extra_hdrs=None):
        """Issue an OPTIONS request ('*' queries the server itself)."""
        return self._request('OPTIONS', url, extra_hdrs=extra_hdrs)

    def trace(self, url, extra_hdrs=None):
        """Issue a TRACE request."""
        return self._request('TRACE', url, extra_hdrs=extra_hdrs)

    def put(self, url, contents,
            content_type=None, content_enc=None, extra_hdrs=None):
        """PUT *contents*; type/encoding are guessed from *url* if omitted."""
        if not content_type:
            content_type, content_enc = mimetypes.guess_type(url)
        headers = {}
        if extra_hdrs:
            headers.update(extra_hdrs)
        if content_type:
            headers['Content-Type'] = content_type
        if content_enc:
            headers['Content-Encoding'] = content_enc
        return self._request('PUT', url, contents, headers)

    def delete(self, url, extra_hdrs=None):
        """Issue a DELETE request."""
        return self._request('DELETE', url, extra_hdrs=extra_hdrs)

    def propfind(self, url, body=None, depth=None, extra_hdrs=None):
        """WebDAV PROPFIND; *depth* defaults to '0' (the resource itself)."""
        headers = {}
        if extra_hdrs:
            headers.update(extra_hdrs)
        headers['Content-Type'] = XML_CONTENT_TYPE
        if depth is not None:
            headers['Depth'] = str(depth)
        else:
            headers['Depth'] = '0'
        ret = self._request('PROPFIND', url, body, headers)
        return ret

    def search(self, url, body=None, extra_hdrs=None):
        """WebDAV SEARCH request."""
        return self._request('SEARCH', url, body, extra_hdrs=extra_hdrs)

    def proppatch(self, url, body, extra_hdrs=None):
        """WebDAV PROPPATCH: set/remove properties as described by *body*."""
        headers = {}
        if extra_hdrs:
            headers.update(extra_hdrs)
        headers['Content-Type'] = XML_CONTENT_TYPE
        ret = self._request('PROPPATCH', url, body, headers)
        return ret

    def mkcol(self, url, hdrs=None):
        """WebDAV MKCOL: create a collection (directory)."""
        return self._request('MKCOL', url, extra_hdrs=hdrs)

    def move(self, src, dst, extra_hdrs=None):
        """WebDAV MOVE *src* to *dst* (sent via the Destination header)."""
        headers = {}
        dst = self.quote_uri(dst)
        if extra_hdrs:
            headers.update(extra_hdrs)
        headers['Destination'] = dst
        return self._request('MOVE', src, extra_hdrs=headers)

    def copy(self, src, dst, depth=None, extra_hdrs=None):
        """WebDAV COPY *src* to *dst*, optionally limited by *depth*."""
        headers = {}
        if extra_hdrs:
            headers.update(extra_hdrs)
        headers['Destination'] = self.quote_uri(dst)
        if depth is not None:
            headers['Depth'] = str(depth)
        return self._request('COPY', src, extra_hdrs=headers)

    def lock(self, url, owner='', timeout=None, depth=None,
             scope='exclusive', type='write', extra_hdrs=None):
        """WebDAV LOCK: build a DAV: lockinfo XML body and send it.

        *timeout* is in seconds; None requests an infinite lock.
        """
        headers = {}
        if extra_hdrs:
            headers.update(extra_hdrs)
        headers['Content-Type'] = XML_CONTENT_TYPE
        headers['Host'] = self._con.host
        if depth is not None:
            headers['Depth'] = str(depth)
        if timeout is None:
            headers['Timeout'] = 'Infinite'
        else:
            headers['Timeout'] = 'Second-%d' % timeout
        body = lxml.etree.Element('{DAV:}lockinfo')
        node = lxml.etree.Element('{DAV:}lockscope')
        node.append(lxml.etree.Element('{DAV:}%s' % scope))
        body.append(node)
        node = lxml.etree.Element('{DAV:}locktype')
        node.append(lxml.etree.Element('{DAV:}%s' % type))
        body.append(node)
        if owner:
            node = lxml.etree.Element('{DAV:}owner')
            node.text = owner
            body.append(node)
        xmlstr = lxml.etree.tostring(body,
                                     encoding='UTF-8',
                                     xml_declaration=True)
        return self._request('LOCK', url, xmlstr, extra_hdrs=headers)

    def unlock(self, url, locktoken, extra_hdrs=None):
        """WebDAV UNLOCK; returns None when no *locktoken* is given."""
        headers = {}
        if extra_hdrs:
            headers.update(extra_hdrs)
        if not locktoken:
            return None
        # The token must be sent angle-bracketed: <token>.
        if locktoken[0] != '<':
            locktoken = '<' + locktoken + '>'
        headers['Lock-Token'] = locktoken
        return self._request('UNLOCK', url, extra_hdrs=headers)

    def _request(self, method, url, body=None, extra_hdrs=None):
        """Internal method for sending a request.

        Retries once (after reconnecting) on a BadStatusLine, which can
        happen when a keep-alive connection has gone stale.
        """
        if DEBUG_REQUEST:
            if extra_hdrs:
                debug_header_items = [
                    "%s: %s" % (k, v) for k, v in extra_hdrs.items()]
            else:
                debug_header_items = []
            sys.stderr.write(
                "### REQUEST: ###\n %s %s\n %s\n\n %s\n############\n" % (
                    method, url,
                    "\n ".join(debug_header_items),
                    body))
        # that's HTTPxxxAuthCon.request, called via DAVConnection
        logger.debug('%s %s', method, url)
        self.request(method, url, body, extra_hdrs)
        try:
            resp = self.getresponse()
        except six.moves.http_client.BadStatusLine:
            # Gnah. We may have waited too long. Try one more time.
            self.connect()
            self.request(method, url, body, extra_hdrs)
            resp = self.getresponse()
        if DEBUG_REQUEST:
            sys.stderr.write(
                "### RESPONSE: ###\n %s %s\n %s\n#################\n" % (
                    (resp.status, resp.reason,
                     "\n ".join(["%s: %s" % h for h in resp.getheaders()]))))
        return resp
class DAVConnection (HTTPBasicAuthCon, DAVBase):
    """Plain-HTTP WebDAV connection that forces HTTP/1.1."""

    def __init__(self, host, port=None, strict=None, referrer=None):
        # ``referrer`` is accepted for interface compatibility but unused.
        HTTPBasicAuthCon.__init__(self, host, port, strict)
        # Force HTTP/1.1 on the underlying connection (keep-alive support).
        self._con._http_vsn_str = 'HTTP/1.1'
        self._con._http_vsn = 11
        return
# NOTE(review): ``socket.ssl`` was removed in Python 3, so on py3 this guard
# is always false and the HTTPS variants below are never defined — confirm
# whether the ``ssl`` module should be probed instead.
if getattr(socket, 'ssl', None):
    # only include DAVS if SSL support is compiled in
    class HTTPSBasicAuthCon(HTTPBasicAuthCon):
        """HTTPS flavour of HTTPBasicAuthCon."""
        connect_class = six.moves.http_client.HTTPSConnection
        pass

    class DAVSConnection(HTTPSBasicAuthCon, DAVBase):
        """HTTPS WebDAV connection that forces HTTP/1.1."""
        def __init__(self, host, port=None, strict=None):
            HTTPSBasicAuthCon.__init__(self, host, port, strict)
            self._con._http_vsn_str = 'HTTP/1.1'
            self._con._http_vsn = 11
            return
|
<gh_stars>0
"""
Test casefile conversion
"""
import os
import pytest
import context
from nemde.core.casefile import lookup
from nemde.io.casefile import load_base_case
@pytest.fixture(scope='module')
def casefile():
    """Load the base casefile for the configured test year/month."""
    year = int(os.environ['TEST_YEAR'])
    month = int(os.environ['TEST_MONTH'])
    case_id = '{0}{1:02}01001'.format(year, month)
    return load_base_case(case_id=case_id)
@pytest.mark.skip(reason='test value will vary by case file')
def test_get_case_attribute(casefile):
    """Case-level attribute lookup returns the expected surplus price."""
    value = lookup.get_case_attribute(
        data=casefile, attribute='@EnergySurplusPrice', func=str)
    assert value == "2250000"


def test_get_region_collection_attribute(casefile):
    """Region collection lookup echoes the region ID back."""
    value = lookup.get_region_collection_attribute(
        data=casefile, region_id='SA1', attribute='@RegionID', func=str)
    assert value == 'SA1'


@pytest.mark.skip(reason='test value will vary by case file')
def test_get_region_collection_initial_condition_attribute(casefile):
    """Region initial-condition attribute matches the fixture value."""
    value = lookup.get_region_collection_initial_condition_attribute(
        data=casefile, region_id="SA1", attribute="ADE", func=str)
    assert value == "0"


def test_get_region_period_collection_attribute(casefile):
    """Region period collection lookup echoes the region ID back."""
    value = lookup.get_region_period_collection_attribute(
        data=casefile, region_id="NSW1", attribute="@RegionID", func=str)
    assert value == 'NSW1'


def test_get_region_solution_attribute(casefile):
    """Region solution lookup echoes the region ID back."""
    value = lookup.get_region_solution_attribute(
        data=casefile, region_id="NSW1", attribute="@RegionID",
        intervention="0", func=str)
    assert value == 'NSW1'
def test_get_trader_collection_attribute(casefile):
    """Trader collection lookup echoes the trader ID back."""
    value = lookup.get_trader_collection_attribute(
        data=casefile, trader_id='AGLHAL', attribute='@TraderID', func=str)
    assert value == 'AGLHAL'


@pytest.mark.skip(reason='test value will vary by case file')
def test_get_trader_collection_initial_condition_attribute(casefile):
    """Trader initial-condition attribute matches the fixture value."""
    value = lookup.get_trader_collection_initial_condition_attribute(
        data=casefile, trader_id='AGLHAL', attribute='AGCStatus', func=str)
    assert value == "0"


def test_get_trader_period_collection_attribute(casefile):
    """Trader period collection lookup returns the trader's region."""
    value = lookup.get_trader_period_collection_attribute(
        data=casefile, trader_id='AGLHAL', attribute='@RegionID', func=str)
    assert value == "SA1"


@pytest.mark.skip(reason='test value will vary by case file')
def test_get_trader_quantity_band_attribute(casefile):
    """Trader quantity band attribute matches the fixture value."""
    value = lookup.get_trader_quantity_band_attribute(
        data=casefile, trader_id='AGLHAL', trade_type='ENOF',
        attribute='@BandAvail1', func=str)
    assert value == "0"


def test_get_trader_price_band_attribute(casefile):
    """Trader price band lookup echoes the trade type back."""
    value = lookup.get_trader_price_band_attribute(
        data=casefile, trader_id='AGLHAL', trade_type='ENOF',
        attribute="@TradeType", func=str)
    assert value == 'ENOF'


@pytest.mark.skip(reason='test value will vary by case file')
def test_get_trader_solution_attribute(casefile):
    """Trader solution lookup echoes the trader ID back."""
    value = lookup.get_trader_solution_attribute(
        data=casefile, trader_id='AGLHAL', attribute='@TraderID', func=str,
        intervention='0')
    assert value == 'AGLHAL'
def test_get_interconnector_collection_attribute(casefile):
    """Interconnector collection lookup echoes the interconnector ID back."""
    value = lookup.get_interconnector_collection_attribute(
        data=casefile, interconnector_id='N-Q-MNSP1',
        attribute='@InterconnectorID', func=str)
    assert value == 'N-Q-MNSP1'


@pytest.mark.skip(reason='test value will vary by case file')
def test_get_interconnector_collection_initial_condition_attribute(casefile):
    """Interconnector initial MW matches the fixture value."""
    value = lookup.get_interconnector_collection_initial_condition_attribute(
        data=casefile, interconnector_id='N-Q-MNSP1', attribute='InitialMW',
        func=str)
    assert value == "-32.7999992370605"


def test_get_interconnector_period_collection_attribute(casefile):
    """Interconnector period lookup echoes the interconnector ID back."""
    value = lookup.get_interconnector_period_collection_attribute(
        data=casefile, interconnector_id='N-Q-MNSP1',
        attribute='@InterconnectorID', func=str)
    assert value == 'N-Q-MNSP1'


def test_get_interconnector_loss_model_segments(casefile):
    """Loss model segments come back as a non-empty list."""
    segments = lookup.get_interconnector_loss_model_segments(
        data=casefile, interconnector_id='V-SA')
    assert isinstance(segments, list)
    assert len(segments) > 0


@pytest.mark.skip(reason='test value will vary by case file')
def test_get_interconnector_loss_model_attribute(casefile):
    """Loss model attribute matches the fixture value."""
    value = lookup.get_interconnector_loss_model_attribute(
        data=casefile, interconnector_id='N-Q-MNSP1', attribute='@NPLRange',
        func=str)
    assert value == '10000'


def test_get_interconnector_solution_attribute(casefile):
    """Interconnector solution lookup echoes the interconnector ID back."""
    value = lookup.get_interconnector_solution_attribute(
        data=casefile, interconnector_id='N-Q-MNSP1',
        attribute='@InterconnectorID', func=str, intervention='0')
    assert value == 'N-Q-MNSP1'


def test_get_generic_constraint_collection_attribute(casefile):
    """Generic constraint collection lookup echoes the constraint ID back."""
    value = lookup.get_generic_constraint_collection_attribute(
        data=casefile, constraint_id='#BBTHREE3_E', attribute='@ConstraintID',
        func=str)
    assert value == '#BBTHREE3_E'


def test_get_generic_constraint_trk_collection_attribute(casefile):
    """Generic constraint TRK collection attribute matches the fixture."""
    value = lookup.get_generic_constraint_trk_collection_attribute(
        data=casefile, constraint_id='#BBTHREE3_E', attribute='@DynamicRHS',
        func=str)
    assert value == '0'


def test_get_generic_constraint_solution_attribute(casefile):
    """Generic constraint solution lookup echoes the constraint ID back."""
    value = lookup.get_generic_constraint_solution_attribute(
        data=casefile, constraint_id='#BBTHREE3_E', attribute='@ConstraintID',
        func=str, intervention='0')
    assert value == '#BBTHREE3_E'


@pytest.mark.skip(reason='test value will vary by case file')
def test_get_period_solution_attribute(casefile):
    """Period solution solver status matches the fixture value."""
    value = lookup.get_period_solution_attribute(
        data=casefile, attribute='@SolverStatus', func=str, intervention='0')
    assert value == '0'
def _check_index(index):
    """Common assertions for an index: a non-empty list with no duplicates."""
    assert isinstance(index, list)
    assert len(index) > 0
    assert len(index) == len(set(index))


def test_get_trader_index(casefile):
    """Trader IDs form a unique, non-empty index."""
    _check_index(lookup.get_trader_index(casefile))


def test_get_interconnector_index(casefile):
    """Interconnector IDs form a unique, non-empty index."""
    _check_index(lookup.get_interconnector_index(casefile))


def test_get_mnsp_index(casefile):
    """MNSP IDs form a unique, non-empty index."""
    _check_index(lookup.get_mnsp_index(casefile))


def test_get_region_index(casefile):
    """Region IDs form a unique, non-empty index."""
    _check_index(lookup.get_region_index(casefile))


@pytest.mark.skip(reason='test value will vary by case file')
def test_get_intervention_status(casefile):
    """Intervention status for target mode is '0' in the fixture case."""
    assert lookup.get_intervention_status(data=casefile, mode='target') == '0'
|
<reponame>rissom/RC3MUD<filename>python/system/config.py
from xml.dom import minidom
from system.log import log
import os
class Config(object):
    """XML-backed configuration store.

    Each parameter keeps up to three values: ``default`` (hard-coded),
    ``value`` (current in-memory value) and ``file`` (value last read from
    or written to ``config.xml``), allowing edits to be saved or cancelled.
    """

    singleton = None
    workingpath = "../gamedata"   # directory containing config.xml
    xmlconfig = None
    configdict = dict()           # NOTE(review): appears unused — confirm
    # Parameter name -> metadata; 'value' and 'file' are filled in by init().
    config = {
        "gui.show" : { "default": "false", "type": "bool", "label":"", "showgui":"false", "order": 0 },
        "webserverport" : { "default": "8000", "type": "int", "label":"", "showgui":"false", "order": 0}
    }

    def __init__(self):
        Config.singleton = self

    @staticmethod
    def getSingleton():
        """Return the shared Config instance, creating it on first use."""
        return Config.singleton or Config()

    def init(self, pathname):
        """Read ``config.xml`` from *pathname* and populate parameter values."""
        pathname = pathname.strip()
        # BUG FIX: str.replace returns a new string; the result used to be
        # discarded, so backslash paths were never normalized.
        pathname = pathname.replace('\\', '/')
        if pathname.endswith('/'):
            pathname = pathname[0:-1]
        Config.workingpath = pathname
        self.xmlconfigfile = Config.workingpath+'/config.xml'
        log.debug('Config: read config file: '+self.xmlconfigfile)
        self.xmlconfig = minidom.parse(self.xmlconfigfile)
        for parname in self.config:
            elements = self.xmlconfig.getElementsByTagName(parname)
            if len(elements)==0:
                self.config[parname]['value']=self.config[parname]['default']
                self.config[parname]['file']=None
                log.debug('config: couldnt find '+parname+', using defaut: '+self.config[parname]['value'])
            else:
                if len(elements)>1:
                    log.warn('config: more than one '+parname+' in config!')
                # BUG FIX: previously only len == 1 populated 'value'/'file',
                # leaving duplicated parameters unset; now the first element
                # wins after the warning.
                self.config[parname]['value']=elements[0].childNodes[0].data
                self.config[parname]['file']=elements[0].childNodes[0].data
                log.debug('config: found '+parname+': '+self.config[parname]['value'])

    def getAllParameter(self):
        """Return the full parameter metadata dict."""
        return self.config

    def setAllParameter(self, jsonparameter):
        """Adopt the 'value' field of every parameter in *jsonparameter*."""
        for p in jsonparameter:
            self.config[p]['value'] = jsonparameter[p]['value']

    def defaultAllParameter(self):
        """Reset every parameter's in-memory value to its default."""
        for p in self.config:
            self.config[p]['value'] = self.config[p]['default']

    def saveAllParameter(self):
        """Write the current values back to config.xml, creating missing nodes."""
        for parname in self.config:
            self.config[parname]['file'] = self.config[parname]['value']
            elements = self.xmlconfig.getElementsByTagName(parname)
            if len(elements) != 1:
                log.debug('config: parameter '+parname+' not in config file?')
                # Create the missing element under the document root.
                newparam = self.xmlconfig.createElement(parname)
                newparamvalue = self.xmlconfig.createTextNode(str(self.config[parname]['file']))
                newparam.appendChild(newparamvalue)
                self.xmlconfig.childNodes[0].appendChild(newparam)
            else:
                elements[0].childNodes[0].data = self.config[parname]['file']
        file_handle = open(self.xmlconfigfile, "w")
        self.xmlconfig.writexml(file_handle)
        # BUG FIX: flush Python's buffer before fsync, otherwise fsync only
        # syncs what already reached the OS and buffered data could be lost.
        file_handle.flush()
        os.fsync(file_handle)
        file_handle.close()
        # and sync os to prevent dataloss on powerloss
        os.sync()

    def cancelAllParameter(self):
        """Discard in-memory edits, restoring file values (or defaults)."""
        for parname in self.config:
            if self.config[parname]['file'] is not None:
                self.config[parname]['value'] = self.config[parname]['file']
            else:
                self.config[parname]['value'] = self.config[parname]['default']

    def get(self, parameterName):
        """Return the current value for *parameterName* (default if unset).

        Returns None (after logging) for unknown parameters.
        """
        # BUG FIX: a plain self.config[parameterName] raised KeyError for
        # unknown names before the error branch below could ever run.
        entry = self.config.get(parameterName)
        if entry is not None:
            if 'value' in entry:
                return entry['value']
            else:
                return entry['default']
        else:
            log.error('config: couldnt find '+parameterName+' in config!')
            return None

    def getBool(self, parameterName):
        """Return the parameter as bool ('true'/'1' -> True), or None."""
        value = self.get(parameterName)
        if value is None:
            return None
        if value.lower() == 'true':
            return True
        if value == '1':
            return True
        return False

    def getFloat(self, parameterName):
        """Return the parameter as float, or None when unknown."""
        value = self.get(parameterName)
        if value is None:
            return None
        return float(value)

    def getInt(self, parameterName):
        """Return the parameter as int, or None when unknown."""
        value = self.get(parameterName)
        if value is None:
            return None
        return int(value)
|
<filename>efloras/patterns/range.py
"""Shared range patterns."""
from traiter.actions import REJECT_MATCH
from traiter.patterns.matcher_patterns import MatcherPatterns
from efloras.pylib.const import COMMON_PATTERNS
# Decoder shared by every range pattern below: the project-wide
# COMMON_PATTERNS plus the tokens specific to ranges.
# NOTE(review): '99.9', '-/or', '-/to' and 'and/or' used in the patterns are
# presumably defined in COMMON_PATTERNS ('99.9' matching any number) —
# confirm in efloras.pylib.const.
DECODER = COMMON_PATTERNS | {
    'ambiguous': {'LOWER': {'IN': ['few', 'many']}},
}

# A bare low value, optionally with ambiguous qualifiers ("few"/"many").
RANGE_LOW = MatcherPatterns(
    'range.low',
    decoder=DECODER,
    patterns=[
        '99.9',
        '( 99.9 -/or ) ambiguous ( -/to ambiguous )',
    ],
)

# A parenthesized minimum followed by the low value.
RANGE_MIN_LOW = MatcherPatterns(
    'range.min.low',
    decoder=DECODER,
    patterns=[
        '( 99.9 -/or ) 99.9',
        '( 99.9 -/to ) 99.9',
    ],
)

# Low and high endpoints of a range.
RANGE_LOW_HIGH = MatcherPatterns(
    'range.low.high',
    decoder=DECODER,
    patterns=[
        '99.9 and/or 99.9',
        '99.9 -/to 99.9',
    ],
)

# A low value with a parenthesized maximum.
RANGE_LOW_MAX = MatcherPatterns(
    'range.low.max',
    decoder=DECODER,
    patterns=[
        '99.9 ( and/or 99.9 )',
        '99.9 ( -/to 99.9 )',
    ],
)

# Minimum, low and high values.
RANGE_MIN_LOW_HIGH = MatcherPatterns(
    'range.min.low.high',
    decoder=DECODER,
    patterns=[
        '( 99.9 -/or ) 99.9 -/to 99.9',
        '( 99.9 -/or ) 99.9 - and/or 99.9',
        '( 99.9 and/or ) 99.9 and/or 99.9',
        ' 99.9 ( and/or 99.9 -/to 99.9 )',
    ],
)

# Minimum, low and maximum values.
RANGE_MIN_LOW_MAX = MatcherPatterns(
    'range.min.low.max',
    decoder=DECODER,
    patterns=[
        '( 99.9 - ) 99.9 -? ( -/to 99.9 [+]? )',
        ' 99.9 - 99.9 - ( -/to 99.9 )',
        ' 99.9 - and/or 99.9 -/to 99.9',
    ],
)

# Low, high and maximum values.
RANGE_LOW_HIGH_MAX = MatcherPatterns(
    'range.low.high.max',
    decoder=DECODER,
    patterns=[
        '99.9 ( and/or 99.9 -/or 99.9 [+]? )',
        '99.9 - 99.9 ( -/to 99.9 [+]? )',
        '99.9 - 99.9 - ( -/to 99.9 [+]? )',
        '99.9 - 99.9 - 99.9',
        '99.9 -/to 99.9 and/or 99.9',
        '99.9 - and/or 99.9 ( -/or 99.9 [+]? )',
        '99.9 and/or 99.9 ( and/or 99.9 [+]? )',
    ],
)

# All four: minimum, low, high and maximum values.
RANGE_MIN_LOW_HIGH_MAX = MatcherPatterns(
    'range.min.low.high.max',
    decoder=DECODER,
    patterns=[
        '( 99.9 - ) 99.9 - 99.9 ( -/to 99.9 [+]? )',
        '( 99.9 -/or ) 99.9 - and/or 99.9 ( -/or 99.9 [+]? )',
        '( 99.9 and/or ) 99.9 - and/or 99.9 ( and/or 99.9 [+]? )',
        '99.9 - and/or 99.9 - and/or 99.9 -/to 99.9',
        '99.9 - and/or 99.9 -/to 99.9 ( -/or 99.9 [+]? )',
        '99.9 -/to 99.9 ( -/or 99.9 ) ( -/or 99.9 [+]? )',
        '99.9 99.9 -/to and/or 99.9 ( -/or 99.9 [+]? )',
        '99.9 and/or 99.9 - 99.9 ( -/or 99.9 [+]? )',
    ],
)

# Reject fraction-like forms such as "9 / 9" so they are not parsed as ranges.
NOT_A_RANGE = MatcherPatterns(
    'not_a_range',
    on_match=REJECT_MATCH,
    decoder=DECODER,
    patterns=[
        '9 / 9',
    ],
)
|
import math
from Learning_Info import Learning_Info
""" tiles of the map """
home_tile = 0
finished_tile = 59
safe_corridor = [54, 55, 56, 57, 58, 59]
globe_tiles = [9, 14, 22, 35, 48]
star_tiles = [5, 12, 18, 25, 31, 38, 44, 51]
def get_epsilon_greedy(steps_done):
    """Return the epsilon threshold for an epsilon-greedy policy.

    Stays at the starting epsilon until enough steps have been taken to
    fill the first training batches, then decays exponentially towards
    the final epsilon (plateau after roughly 200 games).
    """
    eps_start = 0.9
    eps_end = 0.05
    eps_decay = 10000  # after 10 games eps_threshold=0.053
    # With a batch size of 1200 the net only starts training after ~epoch 12.
    warmup_steps = 5100
    if steps_done <= warmup_steps:
        return eps_start
    decay = math.exp(-0.6 * (steps_done - warmup_steps) / eps_decay)
    return eps_end + (eps_start - eps_end) * decay
def init_rewards_couter_dict():
    """Return a fresh reward-event counter dict with every count at zero."""
    reward_names = (
        'piece_release', 'defend_vulnerable', 'knock_opponent',
        'move_closest_goal', 'move_closest_safe', 'forming_blockade',
        'getting_piece_knocked_next_turn',
        'moved_on_safe_globe',
        'speed_boost_star',
        'ai_agent_won', 'ai_agent_lost',
    )
    return {name: 0 for name in reward_names}
def init_start_state():
    """Return the initial board state: four pieces, each at home tile 0.

    One 60-element one-hot row per piece; only index 0 (the home tile)
    is set.  Rows are independent lists, safe to mutate per piece.
    """
    return [[1] + [0] * 59 for _ in range(4)]
""" training the net """
batch_size = 1200
# batch_size = 100
epochs = 250
GAMMA = 0.95 # discount
network_sync_counter = 0
network_sync_freq = 500
# learning_rate_mlp = 5e-3
learning_rate_mlp = 1e-2 # bigger one
loss_avg_running_list = []
learning_info_data = Learning_Info()
last_turn_state_new = init_start_state()
steps_done = 0
epsilon_now = 0
rewards_detected = init_rewards_couter_dict()
""" human network pretrain """
losses_pretrain = []
# epochs_pretrain = 200
epochs_pretrain = 1000
pretrain_batch_size = 50
learning_rate_pretrain = 0.1 # big one
""" evaluation """
epochs_evaluate = 500
# epochs_evaluate = 5
|
#!/usr/bin/env python3
# Report stats (mapped reads and identity to reference) from samtools stats
# for bam file(s) ignoring secondary, suplementary and qc failed alignments
#
# USAGE: bam2stats.py bam1 bam2 ... bamN
import os, subprocess, sys
def bam2stats(fn, flag=3840):
    """Return a tab-separated summary line from ``samtools stats`` for *fn*.

    *flag* is passed to ``samtools stats -F`` to exclude alignments;
    3840 == 0xF00, i.e. the secondary, QC-fail, duplicate and
    supplementary flag bits.  Returns the string "No reads mapped" when
    no reads mapped at all.
    """
    args = ["samtools", "stats", "-F%s"%flag, fn]
    proc = subprocess.Popen(args, stdout=subprocess.PIPE)
    k2v = {}  # summary-number name -> value tokens (digits converted below)
    for l in proc.stdout:
        l = l.decode("utf-8")
        # Only the 'SN' (summary numbers) section is of interest.
        if l.startswith('SN'):
            ldata = l[:-1].split()#; print(ldata)
            # Group tokens at each ':'-terminated token (colon stripped):
            # kv[0] collects the key words, kv[1] the following value tokens.
            kv = [[]]
            for e in ldata[1:]:
                kv[-1].append(e)
                if e.endswith(':'):
                    kv[-1][-1] = kv[-1][-1][:-1]
                    kv.append([])
            k2v[" ".join(kv[0])] = kv[1]
    # convert digits to int
    for k in k2v:
        if k2v[k][0].isdigit():
            k2v[k] = int(k2v[k][0])
    # report if no reads mapped
    if not k2v['reads mapped']:
        return "No reads mapped"
    text = []
    # mapped reads, % of reads mapped, mapped bases (CIGAR), % of total bases
    text.append("{:,}\t{:.1f}%\t{:,}\t{:.1f}%".format(k2v['reads mapped'], 100*k2v['reads mapped']/k2v['sequences'], k2v['bases mapped (cigar)'], 100*k2v['bases mapped (cigar)']/k2v['total length']))
    # average and maximum read length
    text.append("{:,}\t{:,}".format(k2v['average length'], k2v['maximum length']))
    # identity = 100% minus the mismatch rate over mapped (CIGAR) bases
    text.append("{:.2f}%".format(100-100*k2v['mismatches']/k2v['bases mapped (cigar)'], )) #"identity: %.2f%"%(100-k2v['mismatches']/k2v['bases mapped (cigar)'], ))
    return "\t".join(text)
# Report stats for every BAM file given on the command line.
# BUG FIX: the header line used to be printed once per input file; it is
# now printed only once, before the first result row (and not at all when
# no valid file is given, preserving the old empty-input behavior).
header_printed = False
for fn in sys.argv[1:]:
    if os.path.isfile(fn):
        if not header_printed:
            sys.stdout.write("#File name\tMapped reads\tMap %\tBases\tBases %\tAvg read length\tMax read length\tidentity\n")
            header_printed = True
        sys.stdout.write("%s\t%s\n"%(fn, bam2stats(fn)))
'''
CHK 4691e107 9942d94c cd9ffd51
# Summary Numbers. Use `grep ^SN | cut -f 2-` to extract this part.
SN raw total sequences: 4000
SN filtered sequences: 0
SN sequences: 4000
SN is sorted: 1
SN 1st fragments: 4000
SN last fragments: 0
SN reads mapped: 1440
SN reads mapped and paired: 0 # paired-end technology bit set + both mates mapped
SN reads unmapped: 2560
SN reads properly paired: 0 # proper-pair bit set
SN reads paired: 0 # paired-end technology bit set
SN reads duplicated: 0 # PCR or optical duplicate bit set
SN reads MQ0: 726 # mapped and MQ=0
SN reads QC failed: 0
SN non-primary alignments: 6801
SN total length: 136941 # ignores clipping
SN bases mapped: 109284 # ignores clipping
SN bases mapped (cigar): 108908 # more accurate
SN bases trimmed: 0
SN bases duplicated: 0
SN mismatches: 14898 # from NM fields
SN error rate: 1.367944e-01 # mismatches / bases mapped (cigar)
SN average length: 34
SN maximum length: 401
SN average quality: 3.5
SN insert size average: 0.0
SN insert size standard deviation: 0.0
SN inward oriented pairs: 0
SN outward oriented pairs: 0
SN pairs with other orientation: 0
SN pairs on different chromosomes: 0
'''
|
# coding: utf-8
"""
Jamf Pro API
## Overview This is a sample Jamf Pro server which allows for usage without any authentication. The Jamf Pro environment which supports the Try it Out functionality does not run the current beta version of Jamf Pro, thus any newly added endpoints will result in an error and should be used soley for documentation purposes. # noqa: E501
The version of the OpenAPI document: 10.25.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from jamf.configuration import Configuration
class Security(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'is_data_protected': 'bool',
'is_block_level_encryption_capable': 'bool',
'is_file_level_encryption_capable': 'bool',
'is_passcode_present': 'bool',
'is_passcode_compliant': 'bool',
'is_passcode_compliant_with_profile': 'bool',
'hardware_encryption': 'int',
'is_activation_lock_enabled': 'bool',
'is_jail_break_detected': 'bool'
}
attribute_map = {
'is_data_protected': 'isDataProtected',
'is_block_level_encryption_capable': 'isBlockLevelEncryptionCapable',
'is_file_level_encryption_capable': 'isFileLevelEncryptionCapable',
'is_passcode_present': 'isPasscodePresent',
'is_passcode_compliant': 'isPasscodeCompliant',
'is_passcode_compliant_with_profile': 'isPasscodeCompliantWithProfile',
'hardware_encryption': 'hardwareEncryption',
'is_activation_lock_enabled': 'isActivationLockEnabled',
'is_jail_break_detected': 'isJailBreakDetected'
}
def __init__(self, is_data_protected=None, is_block_level_encryption_capable=None, is_file_level_encryption_capable=None, is_passcode_present=None, is_passcode_compliant=None, is_passcode_compliant_with_profile=None, hardware_encryption=None, is_activation_lock_enabled=None, is_jail_break_detected=None, local_vars_configuration=None): # noqa: E501
"""Security - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._is_data_protected = None
self._is_block_level_encryption_capable = None
self._is_file_level_encryption_capable = None
self._is_passcode_present = None
self._is_passcode_compliant = None
self._is_passcode_compliant_with_profile = None
self._hardware_encryption = None
self._is_activation_lock_enabled = None
self._is_jail_break_detected = None
self.discriminator = None
if is_data_protected is not None:
self.is_data_protected = is_data_protected
if is_block_level_encryption_capable is not None:
self.is_block_level_encryption_capable = is_block_level_encryption_capable
if is_file_level_encryption_capable is not None:
self.is_file_level_encryption_capable = is_file_level_encryption_capable
if is_passcode_present is not None:
self.is_passcode_present = is_passcode_present
if is_passcode_compliant is not None:
self.is_passcode_compliant = is_passcode_compliant
if is_passcode_compliant_with_profile is not None:
self.is_passcode_compliant_with_profile = is_passcode_compliant_with_profile
if hardware_encryption is not None:
self.hardware_encryption = hardware_encryption
if is_activation_lock_enabled is not None:
self.is_activation_lock_enabled = is_activation_lock_enabled
if is_jail_break_detected is not None:
self.is_jail_break_detected = is_jail_break_detected
@property
def is_data_protected(self):
"""Gets the is_data_protected of this Security. # noqa: E501
:return: The is_data_protected of this Security. # noqa: E501
:rtype: bool
"""
return self._is_data_protected
@is_data_protected.setter
def is_data_protected(self, is_data_protected):
"""Sets the is_data_protected of this Security.
:param is_data_protected: The is_data_protected of this Security. # noqa: E501
:type is_data_protected: bool
"""
self._is_data_protected = is_data_protected
@property
def is_block_level_encryption_capable(self):
    """Whether the device supports block-level encryption.

    :return: The is_block_level_encryption_capable of this Security. # noqa: E501
    :rtype: bool
    """
    return self._is_block_level_encryption_capable

@is_block_level_encryption_capable.setter
def is_block_level_encryption_capable(self, value):
    """Record the block-level-encryption capability flag.

    :param value: The is_block_level_encryption_capable of this Security. # noqa: E501
    :type value: bool
    """
    self._is_block_level_encryption_capable = value
@property
def is_file_level_encryption_capable(self):
    """Whether the device supports file-level encryption.

    :return: The is_file_level_encryption_capable of this Security. # noqa: E501
    :rtype: bool
    """
    return self._is_file_level_encryption_capable

@is_file_level_encryption_capable.setter
def is_file_level_encryption_capable(self, value):
    """Record the file-level-encryption capability flag.

    :param value: The is_file_level_encryption_capable of this Security. # noqa: E501
    :type value: bool
    """
    self._is_file_level_encryption_capable = value
@property
def is_passcode_present(self):
    """Whether a passcode is set on the device.

    :return: The is_passcode_present of this Security. # noqa: E501
    :rtype: bool
    """
    return self._is_passcode_present

@is_passcode_present.setter
def is_passcode_present(self, value):
    """Record whether the device has a passcode configured.

    :param value: The is_passcode_present of this Security. # noqa: E501
    :type value: bool
    """
    self._is_passcode_present = value
@property
def is_passcode_compliant(self):
    """Whether the device passcode meets the compliance rules.

    :return: The is_passcode_compliant of this Security. # noqa: E501
    :rtype: bool
    """
    return self._is_passcode_compliant

@is_passcode_compliant.setter
def is_passcode_compliant(self, value):
    """Record the passcode compliance flag.

    :param value: The is_passcode_compliant of this Security. # noqa: E501
    :type value: bool
    """
    self._is_passcode_compliant = value
@property
def is_passcode_compliant_with_profile(self):
    """Whether the passcode complies with the installed profile.

    :return: The is_passcode_compliant_with_profile of this Security. # noqa: E501
    :rtype: bool
    """
    return self._is_passcode_compliant_with_profile

@is_passcode_compliant_with_profile.setter
def is_passcode_compliant_with_profile(self, value):
    """Record the profile-level passcode compliance flag.

    :param value: The is_passcode_compliant_with_profile of this Security. # noqa: E501
    :type value: bool
    """
    self._is_passcode_compliant_with_profile = value
@property
def hardware_encryption(self):
    """The device's hardware-encryption capability code.

    :return: The hardware_encryption of this Security. # noqa: E501
    :rtype: int
    """
    return self._hardware_encryption

@hardware_encryption.setter
def hardware_encryption(self, value):
    """Record the hardware-encryption capability code.

    :param value: The hardware_encryption of this Security. # noqa: E501
    :type value: int
    """
    self._hardware_encryption = value
@property
def is_activation_lock_enabled(self):
    """Whether Activation Lock is enabled on the device.

    :return: The is_activation_lock_enabled of this Security. # noqa: E501
    :rtype: bool
    """
    return self._is_activation_lock_enabled

@is_activation_lock_enabled.setter
def is_activation_lock_enabled(self, value):
    """Record the Activation Lock state.

    :param value: The is_activation_lock_enabled of this Security. # noqa: E501
    :type value: bool
    """
    self._is_activation_lock_enabled = value
@property
def is_jail_break_detected(self):
    """Whether a jailbreak has been detected on the device.

    :return: The is_jail_break_detected of this Security. # noqa: E501
    :rtype: bool
    """
    return self._is_jail_break_detected

@is_jail_break_detected.setter
def is_jail_break_detected(self, value):
    """Record the jailbreak-detection flag.

    :param value: The is_jail_break_detected of this Security. # noqa: E501
    :type value: bool
    """
    self._is_jail_break_detected = value
def to_dict(self):
    """Return the model's properties as a plain dict.

    Attributes are taken from ``self.openapi_types``; nested models (any
    value exposing ``to_dict``) are serialized one level deep, matching
    the generated-client convention.
    """
    def _serialize(value):
        # One-level conversion only: list/dict elements are converted if
        # they expose to_dict, otherwise passed through unchanged.
        if isinstance(value, list):
            return [v.to_dict() if hasattr(v, "to_dict") else v for v in value]
        if hasattr(value, "to_dict"):
            return value.to_dict()
        if isinstance(value, dict):
            return dict(
                (k, v.to_dict()) if hasattr(v, "to_dict") else (k, v)
                for k, v in value.items()
            )
        return value

    result = {}
    for attr, _ in six.iteritems(self.openapi_types):
        result[attr] = _serialize(getattr(self, attr))
    return result
def to_str(self):
    """Return a human-readable rendering of the model.

    Delegates to ``to_dict`` and pretty-prints the result.
    """
    return pprint.pformat(self.to_dict())
def __repr__(self):
    """Used by ``print`` and ``pprint``; same text as ``to_str``."""
    return self.to_str()
def __eq__(self, other):
    """Two Security models are equal iff their dict forms match."""
    return isinstance(other, Security) and self.to_dict() == other.to_dict()
def __ne__(self, other):
    """Inverse of ``__eq__`` (non-Security objects compare unequal)."""
    return not self.__eq__(other)
|
<filename>backend/image_migrate.py
import hashlib
import json
import os
import sys
import uuid
import time
import docker
# Build the docker client from the environment (DOCKER_HOST, certs, etc.).
client = docker.from_env()
def logs(args):
    """Minimal logging shim: echo *args* to stdout and return None."""
    print(args)
# Compute a file's sha256 digest.
def get_file_sha256(filename):
    """Return the hex SHA-256 digest of *filename*, or None for non-files.

    Improvements over the original: the file handle is managed by a
    ``with`` block (the original leaked it if ``read`` raised), and the
    file is hashed in 1 MiB chunks so large image tarballs are not read
    into memory in one call.
    """
    # Not a regular file: mirror the original's implicit None return.
    if not os.path.isfile(filename):
        return None
    hash_sum = hashlib.sha256()
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b''):
            hash_sum.update(chunk)
    return hash_sum.hexdigest()
# Extract a tar archive into a temporary directory.
def unpack(filename):
    """Extract the tar archive *filename* into a fresh /tmp directory.

    Returns the extraction directory path with a trailing slash, or None
    if *filename* is not a regular file.

    Bug fixed: the extract command interpolated the literal text
    "(unknown)" instead of the archive path, so nothing was ever
    extracted.
    """
    if not os.path.isfile(filename):
        return None
    tmp_dir = '/tmp/' + str(uuid.uuid4())
    os.system(f'mkdir {tmp_dir}')
    os.system(f'tar -xf {filename} -C {tmp_dir}')
    return tmp_dir + '/'
def get_tar(container_id, filename, repository="migrate", tag="test"):
    """Commit *container_id* to an image and save it under /tmp/<filename>.

    Returns ``(image_id, saved_tar_path)`` where image_id has the
    "sha256:" prefix stripped.
    """
    container = client.containers.get(container_id)
    image = container.commit(repository=repository, tag=tag)
    image_id = image.id.replace("sha256:", "")
    save_dir = "/tmp/" + filename
    os.system(f'docker save -o {save_dir} {image_id} {repository}:{tag}')
    return image_id, save_dir
def qemu_arm_start():
    # Register the qemu-user-static binfmt handlers so arm64 images can run
    # on this x86 host; the helper container removes itself when done.
    # NOTE(review): requires docker privileged mode — confirm the host allows it.
    client.containers.run('multiarch/qemu-user-static:register', privileged=True, remove=True)
def start_arm_container(images_name):
    # Start the arm64 variant of the image detached under the fixed name
    # 'arm64_tmp', then copy a marker file into it to tag the container as a
    # migrated x86->arm64 image. Returns the new container's id.
    _out = client.containers.run(f'arm64v8/{images_name}', detach=True, name='arm64_tmp')
    os.system('echo "x86 to arm64" > /tmp/.image_to_arm')
    os.system('docker cp /tmp/.image_to_arm arm64_tmp:/root/')
    return _out.id
def get_image_name(container_id):
    """Return the container's image name, always with an explicit tag."""
    image_name = client.containers.get(container_id).attrs['Config']['Image']
    return image_name if ':' in image_name else image_name + ':latest'
def get_target_tar(container_id, repository="migrate", tag="test"):
    """Convert a running x86 container into an arm64 image tarball.

    Pipeline: save the x86 image, unpack it, boot the arm64 variant of the
    same image under qemu, save and unpack that too, then graft the x86
    top layer into the arm64 image tree (rewriting the recorded sha256),
    repack, archive any data volumes, and clean up.

    Returns the path '/tmp/arm64-target.tar.xz'.
    NOTE(review): the archive is moved to ~/migrate/html/images/ just
    before returning, so the returned path no longer exists — confirm
    callers expect the moved location.
    NOTE(review): *repository*/*tag* are only forwarded to the arm save
    step; the x86 save uses the defaults.
    """
    x86_get_time_start = time.time()
    logs(f'获取x86架构的镜像文件中...')
    x86_image_id, x86_file = get_tar(container_id, 'x86.tar')
    x86_get_time_end = time.time()
    logs(f'获取到镜像的tar文件,文件名:{x86_file},耗时:{x86_get_time_end - x86_get_time_start}')
    x86_unpack_time_start = time.time()
    logs(f'解压镜像文件中...')
    x86_save_dir = unpack(x86_file)
    x86_unpack_time_end = time.time()
    logs(f'解压完成,耗时:{x86_unpack_time_end - x86_unpack_time_start}')
    arm_time_start = time.time()
    logs(f'启动ARM容器中...')
    qemu_arm_start()
    image_name = get_image_name(container_id)
    start_arm_container(image_name)
    arm_time_end = time.time()
    logs(f'容器启动完成,耗时:{arm_time_end - arm_time_start}')
    arm_get_time_start = time.time()
    logs(f'获取arm架构的镜像文件中...')
    arm_image_id, arm_file = get_tar('arm64_tmp', 'arm64.tar', repository, tag)
    arm_get_time_end = time.time()
    logs(f'获取到arm镜像的tar文件,文件名:{arm_file},耗时:{arm_get_time_end - arm_get_time_start}')
    arm_unpack_time_start = time.time()
    logs(f'解压arm镜像文件中...')
    arm_save_dir = unpack(arm_file)
    arm_unpack_time_end = time.time()
    logs(f'解压完成,耗时:{arm_unpack_time_end - arm_unpack_time_start}')
    config_time_start = time.time()
    logs(f'重写配置文件中...')
    x86_tar_sha256sum, x86_file_path, x86_json = get_top_layer(x86_save_dir, 'manifest.json')
    arm_tar_sha256sum, arm_file_path, arm_json = get_top_layer(arm_save_dir, 'manifest.json')
    # Copy the x86 top layer into the arm64 image tree.
    os.system(f'cp {x86_save_dir}{x86_file_path} {arm_save_dir}{arm_file_path}')
    # Rewrite the sha256 recorded in the arm config to match the grafted
    # layer; sed's in-place flag differs between BSD (macOS) and GNU.
    if sys.platform == 'darwin':
        os.system(f'sed -i \'\' \'s/{arm_tar_sha256sum}/{x86_tar_sha256sum}/g\' {arm_save_dir}{arm_json}')
    elif sys.platform == 'linux':
        os.system(f'sed -i \'s/{arm_tar_sha256sum}/{x86_tar_sha256sum}/g\' {arm_save_dir}{arm_json}')
    config_time_end = time.time()
    logs(f'配置文件重写完成,耗时:{config_time_end - config_time_start}')
    pack_time_start = time.time()
    # Repack the patched arm64 image tree.
    logs(f'重新打包中...')
    os.system(f'cd {arm_save_dir} && tar -zcf /tmp/arm64-target.tar.xz .')
    pack_time_end = time.time()
    logs(f'重新打包完成,耗时:{pack_time_end - pack_time_start}')
    logs('获取数据卷中...')
    volume_time_start = time.time()
    get_volume_tar(container_id)
    volume_time_end = time.time()
    logs(f'获取数据卷完成,耗时:{volume_time_end - volume_time_start}')
    rm_time_start = time.time()
    logs(f'删除临时文件中...')
    # Remove the unpacked trees and intermediate tars.
    os.system(f'rm -rf {arm_save_dir} {x86_save_dir} /tmp/arm64.tar /tmp/x86.tar')
    # Remove the temporary images and the helper container.
    os.system(f'docker rmi {x86_image_id} > /dev/null')
    os.system(f'docker rmi {arm_image_id} > /dev/null')
    os.system(f'docker rm -f arm64_tmp > /dev/null')
    rm_time_end = time.time()
    logs(f'临时文件删除完成...,耗时:{rm_time_end - rm_time_start}')
    logs(f'总共耗时:{rm_time_end - x86_get_time_start}')
    os.system("mv /tmp/arm64-target.tar.xz ~/migrate/html/images/")
    return '/tmp/arm64-target.tar.xz'
def get_tar_sha256sum(file_dir, filename):
    """Return the hex digest (without the "sha256:" prefix) of the topmost
    layer recorded in the image config JSON at ``file_dir + filename``."""
    with open(file_dir + filename, 'r', encoding='utf8') as image:
        top_diff_id = json.load(image)['rootfs']['diff_ids'][-1]
    return top_diff_id.split(':', 1)[1]
def get_top_layer(file_dir, filename):
    """Describe the top layer of a saved image from its manifest.

    Returns ``(top_layer_diff_id_hex, top_layer_path, config_filename)``.
    """
    with open(file_dir + filename, 'r', encoding='utf8') as fp:
        manifest = json.load(fp)[0]
    config_name = manifest['Config']
    top_layer = manifest['Layers'][-1]
    return get_tar_sha256sum(file_dir, config_name), top_layer, config_name
def get_volume_tar(container_id):
    """Archive the container's first docker-managed volume ("_data" mount).

    Returns the archive path, or implicitly None when the container has no
    docker-managed volume.
    """
    mounts = client.containers.get(container_id).attrs['Mounts']
    for mount in mounts:
        source = mount['Source']
        if '_data' not in source:
            continue
        os.system(f'cd {source} && tar -czf /tmp/arm64-target-volume.tar.xz .')
        return '/tmp/arm64-target-volume.tar.xz'
if __name__ == '__main__':
    # Entry point: migrate this container id to arm64.
    # NOTE(review): the id is hard-coded and environment-specific — confirm
    # before running.
    get_target_tar('4ee013c99ec1')
|
#!/usr/bin/env python3
import argparse
from mako.template import Template
import os
import json
from progress.bar import ChargingBar
import multiprocessing
import re
from subprocess import PIPE
import platform
import time
import sys
import touch
import unittest
import psutil
import numpy as np
import matplotlib.pyplot as plt, mpld3
# TODO: Remove argparse, as we're just going to embed all of these functions into cmake
# or at least make it so that all of the files can be globally accessed in cmake.
parser = argparse.ArgumentParser(description="Called from CMake, takes build arguments to start Metabench and generate results.")
parser.add_argument("-bl", "--build_cmd", help="CMake generated build commands.", required=False)
parser.add_argument("-dl", "--cmake_bin_dir", help="CMake bin directory.", required=False)
parser.add_argument("-ml", "--metabench_dir", help="Metabench project directory.", required=False)
parser.add_argument("-xl", "--cmake_exe_suffix", help="Suffix for the executable generated by build process.", required=False)
args = vars(parser.parse_args())
# All flags are optional; any not supplied arrives here as None.
CMAKE_CMD = args["build_cmd"]
CMAKE_BIN_DIR = args["cmake_bin_dir"]
METABENCH_DIR = args["metabench_dir"]
CMAKE_EXE_SUFFIX = args["cmake_exe_suffix"]
# TODO: Test precompiled headers for differences in compile time.
# Use a verbose output, header includes, and preprocessor output to determine
# the actual work involved with this process vs normal process. We want to eliminate
# as much of the parsing of non-template stuff in the front-end as possible.
def pch_build():
    """Placeholder for the precompiled-header benchmark (not implemented)."""
    pass
def memusg(command_line):
    """Run *command_line* and return its peak resident set size in bytes.

    The original was broken in several ways: the platform-specific
    ``get_pgid`` helpers returned a command string (Linux) or None
    (Windows) rather than a memory figure, the unknown-platform
    ``SystemError`` was instantiated but never raised, and
    ``multiprocessing.Process([command_line])`` passed the command as the
    (positional) ``group`` argument instead of ``target``, so no child was
    ever run. This rewrite uses psutil, which is already imported by this
    script and is cross-platform, removing the need for platform branches.

    :param command_line: program and arguments, as accepted by psutil.Popen.
    :return: peak RSS observed while the process was alive (bytes); 0 if
        the process exited before the first sample.
    """
    proc = psutil.Popen(command_line)
    peak_rss = 0
    while proc.poll() is None:
        try:
            peak_rss = max(peak_rss, proc.memory_info().rss)
        except psutil.NoSuchProcess:
            # Process exited between the poll and the sample.
            break
        time.sleep(0.01)
    return peak_rss
def build(target, comp_type):
    """Compile and link ``<target>.cpp`` with clang-cl and return the compile
    time in seconds.

    NOTE(review): *comp_type* is currently unused, and the link time /
    memory figures (``comp_mem``, ``link_mem``, ``result``) are computed
    or initialized but never returned — confirm whether they should be.
    """
    ### Check command code
    #command = CMAKE_CMD + " --build " + " . " + " --target " + target
    # TODO: Compile and link commands should be taken from cmake input
    comp_command = "clang-cl.exe " + "-m64 " + target + ".cpp" + " /c" + " /O2"
    link_command = "clang-cl.exe " + "-m64 " + target + ".obj"
    cpp_file = target + ".cpp"
    #exe_file = target + '.' + CMAKE_EXE_SUFFIX
    exe_file = target + '.' + "exe"
    #print(command)
    # Change the timestamp of the source file and remove the executable (if it exists) to
    # ensure that CMake considers the target as outdated.
    touch.touch(cpp_file)
    if os.path.exists(exe_file):
        os.remove(exe_file)
    comp_mem = 0
    # Time the compile step (nanosecond clock, converted to seconds below).
    t_compile_start = time.perf_counter_ns()
    # memusg(comp_command)
    memory_inspect = psutil.Popen(comp_command)
    memory_inspect.wait()
    t_compile_end = time.perf_counter_ns()
    link_mem = 0
    # Time the link step; currently measured but not reported.
    t_link_start = time.perf_counter_ns()
    link_output = psutil.Popen(link_command)
    link_output.wait()
    # if link_mem < link_output.memory_info().vms:
    #     link_mem = link_output.memory_info().vms
    #time.sleep(0.5) # TODO: Test this for accuracy
    t_link_end = time.perf_counter_ns()
    result = {}
    return (t_compile_end - t_compile_start) / 1e9
def memory_usage():
    """Placeholder for a memory-usage report (not implemented)."""
    pass
# The function compile is a custom command run by cmake to drive the actual
# compiler.
def compile_target(target):
    """Invoke the memusg wrapper for *target* and print the elapsed time (ns).

    NOTE(review): ``psutil.Popen`` returns a process object — unpacking it
    into ``(stdout, stderr)`` raises TypeError, and the command list is
    wrapped in an extra list. Should be
    ``proc = psutil.Popen(command_line, ...); out, err = proc.communicate()``.
    *target* is also currently unused.
    """
    command_line = ['${PYTHON_EXECUTABLE}', ' ', '${MEMUSG_PY_PATH}'] + sys.argv
    t0 = time.perf_counter_ns()
    # TODO: Get the memory usage from this subprocess via psutil based Popen
    comp_cstdout, comp_stderr = psutil.Popen([command_line],stdout=PIPE,stderr=PIPE)
    t1 = time.perf_counter_ns()
    # TODO: Return stdout, stderr, status, compile options, and total time.
    print(t1 - t0)
def create_rendered_file(mako_template, cpp_target_file, type_iterations, metabench_flags=""):
    """Render *mako_template* with the given iteration count into a C++ file.

    :param mako_template: path of the Mako template to render.
    :param cpp_target_file: path of the C++ file to (over)write.
    :param type_iterations: value bound to the template's ``iterations``.
    :param metabench_flags: text prepended to the rendered code (e.g.
        ``"#define METABENCH\\n"``).

    Improvement: the target file is opened in a ``with`` block *after*
    rendering, so the handle is not leaked (and no empty file is left
    behind) when rendering raises; the pointless write-lambda is gone.
    """
    code = Template(filename=mako_template).render(iterations=type_iterations)
    with open(cpp_target_file, 'w') as cpp_file:
        cpp_file.write(metabench_flags + code)
# TODO: To be deleted once all of the good stuff is taken out of this function.
def measure(target, mako_template, inrange, repetitions, steps):
    """Benchmark template instantiation cost for *target*.

    For each element count n (steps, 2*steps, ... up to inrange), builds a
    baseline (without METABENCH) and a measured variant (with METABENCH),
    each averaged over *repetitions* runs.

    :return: dict mapping n -> [baseline_s, total_s, delta_s].
    """
    target_cpp_file = target + ".cpp"
    data = {}
    ### Time here
    ### Setup the progress bar
    with ChargingBar("Processing", max=inrange) as bar:
        for n in range(0 + steps, inrange + steps, steps):
            # NOTE(review): the local name `compile` shadows the builtin.
            compile = lambda comp_type: build(target, comp_type)
            ### Returns build target output
            create_rendered_file(mako_template, target_cpp_file, n)
            ### Baseline, built without #define METABENCH
            # TODO: Does this really need to be done each time? Probably not?
            base = sum([compile("Base") for rep in range(repetitions)]) / repetitions
            create_rendered_file(mako_template, target_cpp_file, n, "#define METABENCH\n")
            ### Build with #define METABENCH
            total = sum([compile("Measure") for rep in range(repetitions)]) / repetitions
            bar.next(steps)
            data.update({n: [base, total, total - base]}) ### Pipe data in
        bar.finish()
    #print(data.items())
    return data
def create_chart(datasets, aspect, title, subtitle, x_label, y_label, output_file):
    """Plot measured delta times against element counts and save the figure.

    *datasets* maps element count -> [base, total, delta]; the third entry
    (delta) is what gets plotted.
    NOTE(review): *aspect* and *subtitle* are currently unused — kept for
    interface compatibility.
    """
    element_counts = list(datasets.keys())
    deltas = [values[2] for values in datasets.values()]
    plt.plot(element_counts, deltas, label="New Label", marker='o')
    plt.title(title)
    plt.xlabel(x_label)
    plt.ylabel(y_label)
    plt.legend()
    # TODO: Implement interactive backend
    plt.savefig(output_file)
class time_per_elems:
    """One measurement point: an element count and its compile time (s)."""

    # Class-level defaults, kept so attribute access on the class itself
    # keeps working.
    elem_count = 0
    time_pt_s = 0

    def __init__(self, elem_count, time_pt_s):
        self.elem_count = elem_count
        self.time_pt_s = time_pt_s
class library_dataset:
    """A named, colored series of measurement points for one library.

    Bug fixed: ``time_pts`` and ``elem_pts`` were mutable class attributes
    that ``__init__``/``set_time_pts`` appended to, so every instance
    shared (and polluted) the same two lists. Each instance now gets its
    own lists; the class-level attributes are kept (as immutable defaults)
    for backward compatibility with class-level attribute access.
    """

    name = ""
    color = ""
    time_pts = []
    elem_pts = []

    def __init__(self, name, color, tpe_pts):
        self.name = name
        self.color = color
        # Fresh per-instance lists — never append to the class attributes.
        self.time_pts = []
        self.elem_pts = []
        for elem_pt in tpe_pts:
            self.elem_pts.append(elem_pt)

    def set_time_pts(self, tpe_pts):
        """Append each point in *tpe_pts* to this instance's time series."""
        for time_pt in tpe_pts:
            self.time_pts.append(time_pt)
class lib_comparison_chart:
    """Placeholder for a chart comparing several libraries' results."""

    # Names of the libraries included in the comparison (unused so far).
    libs_in_test = ""

    def __init__(self, val):
        self.val = val
def measure_library_datasets(library_mako_files, inrange, repetitions, steps):
    """Run ``measure`` for each template file and merge all results.

    Bugs fixed: ``measure`` was called with six positional arguments
    (``inrange`` passed twice) against a five-parameter signature, and the
    per-file dicts were merged with ``+=``, which dicts do not support
    (TypeError). Results are now merged with ``dict.update``.

    :return: dict mapping element count -> [base, total, delta], merged
        across all templates (later files overwrite equal keys).
    """
    datasets = {}
    for mako_file in library_mako_files:
        datasets.update(measure(mako_file, mako_file, inrange, repetitions, steps))
    return datasets
#measure("test", "generate_test.cpp.mako", 500, 10, 100)
# Run the benchmark for the homemade template (up to 200 elements, 5 reps,
# step 50) and render the resulting delta-time chart to "test".
measurements = measure("test", "homemade_test.cpp.mako", 200, 5, 50)
create_chart(measurements, aspect=None, title="homemade", subtitle="", x_label="Number of Elements", y_label="Time (in seconds)", output_file="test")
<reponame>yjbanov/chromium_build
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from recipe_engine import recipe_api
class LegionApi(recipe_api.RecipeApi):
    """Provides a recipes interface for the Legion framework.

    Typical flow: build a config with create_controller(), attach tasks
    with add_task_to_controller(), then run it with execute().
    (Python 2 recipe code — dict.iteritems() is intentional.)
    """

    @property
    def legion_path(self):
        """Returns the path to legion.py."""
        return self.m.path['checkout'].join('testing', 'legion', 'tools',
                                            'legion.py')

    def create_controller(self, name, path, os, config_vars=None,
                          controller_vars=None, dimensions=None):
        """Returns a controller config dictionary.

        Args:
          name: The name of the controller.
          path: The path to the .isolate or .isolated file for the controller.
          os: The os to run the controller on.
          config_vars: A dictionary of config vars to pass when isolating the
              controller .isolate file. This is ignored if passing a .isolated file.
          controller_vars: A dictionary of command line vars passed to the
              controller.
          dimensions: A dictionary of dimensions to pass when isolating the
              controller .isolate file. This is ignored if passing a .isolated file.
        """
        return {
            'name': name,
            'path': path,
            'os': os,
            'config_vars': config_vars or {},
            'controller_vars': controller_vars or {},
            'dimensions': dimensions or {},
            'tasks': []
        }

    def add_task_to_controller(self, controller, name, path, config_vars=None):
        """Adds a task config to a controller config.

        Args:
          controller: A controller config returnd by create_controller.
          name: The name of the task. This corresponds to the command line flag
              defined in the controller code.
          path: The path to the .isolate or .isolated file for the task.
          config_vars: Config variables passed when isolating a task .isolate file.
              This is ignored if passing a .isolated file.
        """
        controller['tasks'].append({
            'name': name,
            'path': path,
            'config_vars': config_vars or {}
        })

    def _archive_if_needed(self, config):
        """Archives an isolate file if needed.

        This method is a no-op if the path is already an .isolated file. If not the
        file is archived and the path is set to the .isolated file.
        """
        # The controller itself and each of its tasks may need archiving.
        for item in [config] + config['tasks']:
            if self.m.path.splitext(item['path'])[-1] == '.isolated':
                continue
            # foo.isolate -> foo.isolated
            isolated_path = str(item['path']) + 'd'
            cmd = [
                'archive',
                '--isolate', item['path'],
                '--isolated', isolated_path,
                '--isolate-server', self.m.isolate.isolate_server,
            ]
            for name, value in item['config_vars'].iteritems():
                cmd.extend(['--config-variable', name, value])
            self.m.python(
                'archive for %s' % self.m.path.split(str(item['path']))[-1],
                self.m.swarming_client.path.join('isolate.py'),
                cmd)
            # Point the config at the freshly archived .isolated file.
            item['path'] = isolated_path

    def execute(self, config):
        """Executes a Legion-based swarming test.

        config: The configuration returned by create_controller.

        Returns the stdout of the legion.py run step.
        """
        self._archive_if_needed(config)
        cmd = [
            'run',
            '--controller-isolated', config['path'],
            '--task-name', config['name'],
            '--isolate-server', self.m.isolate.isolate_server,
            '--swarming-server', self.m.swarming.swarming_server,
            '--dimension', 'os', config['os']
        ]
        for name, value in config['dimensions'].iteritems():
            cmd.extend(['--dimension', name, value])
        for name, value in config['controller_vars'].iteritems():
            cmd.extend(['--controller-var', name, value])
        for task in config['tasks']:
            cmd.extend(['--task', task['name'], task['path']])
        step_result = self.m.python(
            'Running test for %s' % config['name'],
            self.legion_path,
            cmd)
        return step_result.stdout
|
"""
Copyright 2019 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Parse processor-specific CSR description YAML file and generate a CSR test file.
This test code will utilize every CSR instruction, writing values to the CSR
and then using a prediction function to calculate a reference value that will
be written into another register and compared against the value actually stored
in the CSR at this point, allowing for the test to self-check in order to
determine success or failure.
"""
"""
To install the bitstring library:
1) sudo apt-get install python3-bitstring OR
2) pip install bitstring
"""
import sys
import yaml
import argparse
import random
import copy
try:
    from bitstring import BitArray as bitarray
except ImportError:
    # Bug fixed: the original called logging.error() without importing
    # logging, so the failure path itself crashed with NameError. Report
    # to stderr directly instead (sys is already imported above).
    print("Please install bitstring package: sudo apt-get install python3-bitstring",
          file=sys.stderr)
    sys.exit(1)
"""
Defines the test's success/failure values, one of which will be written to
the chosen signature address to indicate the test's result.
"""
TEST_RESULT = 1
TEST_PASS = 0
TEST_FAIL = 1
def get_csr_map(csr_file, xlen):
    """
    Parses the YAML file containing CSR descriptions.

    Args:
      csr_file: The CSR YAML file.
      xlen: The current RISC-V ISA bit length.

    Returns:
      A dictionary containing mappings for each CSR, of the form:
      { csr_name : [csr_address, csr_val_bitarray, csr_write_mask_bitarray, csr_read_mask_bitarray] }
    """
    rv_string = "rv{}".format(str(xlen))
    csrs = {}
    with open(csr_file, "r") as c:
        csr_description = yaml.safe_load(c)
        for csr_dict in csr_description:
            csr_name = csr_dict.get("csr")
            csr_address = csr_dict.get("address")
            # Bug fixed: the assertion message referenced an undefined name
            # `rv`, so a failing assertion raised NameError instead of a
            # readable AssertionError.
            assert (rv_string in csr_dict), "The {} CSR must be configured for rv{}".format(csr_name, str(xlen))
            csr_value = bitarray(uintbe=0, length=xlen)
            csr_write_mask = []
            csr_read_mask = bitarray(uintbe=0, length=xlen)
            csr_field_list = csr_dict.get(rv_string)
            for csr_field_detail_dict in csr_field_list:
                field_type = csr_field_detail_dict.get("type")
                field_val = csr_field_detail_dict.get("reset_val")
                field_msb = csr_field_detail_dict.get("msb")
                field_lsb = csr_field_detail_dict.get("lsb")
                field_size = field_msb - field_lsb + 1
                # WPRI fields are unimplemented: leave them out of every mask.
                if field_type != "WPRI":
                    val_bitarray = bitarray(uint=field_val, length=field_size)
                    mask_bitarray = bitarray(uint=1, length=1) * field_size
                    # Bit positions are MSB-first within the xlen-wide arrays.
                    start_pos = xlen - 1 - field_msb
                    end_pos = xlen - 1 - field_lsb
                    csr_read_mask.overwrite(mask_bitarray, start_pos)
                    csr_value.overwrite(val_bitarray, start_pos)
                    # "R" fields are read-only for csr_write().
                    access = True if field_type == "R" else False
                    csr_write_mask.append([mask_bitarray, (start_pos, end_pos), access])
            csrs.update({csr_name : [csr_address, csr_value, csr_write_mask, csr_read_mask]})
    return csrs
def get_rs1_val(iteration, xlen):
    """
    Return the RS1 test value used to exercise the CSR.

    Args:
      iteration: Integer between 0 and 2 inclusive, selecting the value.
      xlen: The current RISC-V ISA bit length.

    Returns:
      A bitarray to be written to the CSR:
        0 -> 0xa5a5..., 1 -> 0x5a5a..., 2 -> a random value.
    """
    num_bytes = xlen // 8
    if iteration == 0:
        return bitarray(hex="0x" + "a5" * num_bytes)
    if iteration == 1:
        return bitarray(hex="0x" + "5a" * num_bytes)
    if iteration == 2:
        val = bitarray(uint=0, length=xlen)
        # Must randomize all 32 bits, due to randomization library limitations
        for position in range(32):
            val.set(random.randint(0, 1), position)
        return val
def csr_write(val, csr_val, csr_write_mask):
    """
    Apply *val* to *csr_val* in place, honoring the per-field write masks.

    Args:
      val: A bitarray containing the value to be written.
      csr_val: A bitarray containing the current CSR value (mutated).
      csr_write_mask: list of [mask_bitarray, (start, end), read_only]
          entries; read-only fields are skipped.
    """
    for mask_bitarray, (start_index, end_index), read_only in csr_write_mask:
        if read_only:
            continue
        field_bits = val[start_index:end_index + 1]
        csr_val.overwrite(mask_bitarray & field_bits, start_index)
"""
CSR Read:
Reads the given CSR, after applying the bitmask
"""
def csr_read(csr_val, csr_read_mask):
    """
    Performs a CSR read.

    Args:
      csr_val: A bitarray containing the current CSR value.
      csr_read_mask: A bitarray containing the CSR's read mask.

    Returns:
      The logical AND of csr_val and csr_read_mask.
    """
    readable_bits = csr_val & csr_read_mask
    return readable_bits
def predict_csr_val(csr_op, rs1_val, csr_val, csr_write_mask, csr_read_mask):
    """
    Predicts the CSR reference value, based on the current CSR operation.

    Args:
      csr_op: A string of the CSR operation being performed.
      rs1_val: A bitarray containing the value to be written to the CSR.
      csr_val: A bitarray containing the current value of the CSR (mutated
          in place by the write half of each operation).
      csr_write_mask: A bitarray containing the CSR's write mask.
      csr_read_mask: A bitarray containing the CSR's read mask.

    Returns:
      A hexadecimal string of the predicted CSR value (the value read
      *before* the write takes effect, per the RISC-V CSR semantics).
    """
    prediction = None
    # create a zero bitarray to zero extend immediates
    # (immediate forms use only the low 5 bits of rs1_val)
    zero = bitarray(uint=0, length=csr_val.len - 5)
    if csr_op == 'csrrw':
        prediction = csr_read(csr_val, csr_read_mask)
        csr_write(rs1_val, csr_val, csr_write_mask)
    elif csr_op == 'csrrs':
        # set bits: OR the old (readable) value into the write data
        prediction = csr_read(csr_val, csr_read_mask)
        csr_write(rs1_val | prediction, csr_val, csr_write_mask)
    elif csr_op == 'csrrc':
        # clear bits: AND the complement of rs1 with the old value
        prediction = csr_read(csr_val, csr_read_mask)
        csr_write((~rs1_val) & prediction, csr_val, csr_write_mask)
    elif csr_op == 'csrrwi':
        prediction = csr_read(csr_val, csr_read_mask)
        zero.append(rs1_val[-5:])
        csr_write(zero, csr_val, csr_write_mask)
    elif csr_op == 'csrrsi':
        prediction = csr_read(csr_val, csr_read_mask)
        zero.append(rs1_val[-5:])
        csr_write(zero | prediction, csr_val, csr_write_mask)
    elif csr_op == 'csrrci':
        prediction = csr_read(csr_val, csr_read_mask)
        zero.append(rs1_val[-5:])
        csr_write((~zero) & prediction, csr_val, csr_write_mask)
    # NOTE(review): an unrecognized csr_op leaves prediction as None and the
    # f-string below raises AttributeError — confirm callers only pass the
    # six ops above.
    return f"0x{prediction.hex}"
def gen_setup(test_file):
    """
    Generates the setup code for the CSR test.

    Writes the init macro, the .text.init section header, and the _start
    entry label.

    Args:
      test_file: the file containing the generated assembly code.
    """
    preamble = [
        ".macro init\n",
        ".endm\n",
        ".section .text.init\n",
        ".globl _start\n",
        ".option norvc\n",
        "_start:\n",
    ]
    test_file.writelines(preamble)
def gen_csr_test_fail(test_file, end_addr):
    """
    Generates code to handle a test failure.

    Emits the csr_fail handler: build (TEST_FAIL << 8) | TEST_RESULT in x1,
    store it to *end_addr*, then spin forever so the testbench can poll the
    result.

    Args:
      test_file: the file containing the generated assembly test code.
      end_addr: address that should be written to at end of test
    """
    handler = [
        "csr_fail:\n",
        f"\tli x1, {TEST_FAIL}\n",
        "\tslli x1, x1, 8\n",
        f"\taddi x1, x1, {TEST_RESULT}\n",
        f"\tli x2, {end_addr}\n",
        "\tsw x1, 0(x2)\n",
        "\tj csr_fail\n",
    ]
    test_file.writelines(handler)
def gen_csr_test_pass(test_file, end_addr):
    """
    Generates code to handle test success.

    Emits the csr_pass handler: build (TEST_PASS << 8) | TEST_RESULT in x1,
    store it to *end_addr*, then spin forever so the testbench can poll the
    result.

    Args:
      test_file: the file containing the generated assembly test code.
      end_addr: address that should be written to at end of test
    """
    handler = [
        "csr_pass:\n",
        f"\tli x1, {TEST_PASS}\n",
        "\tslli x1, x1, 8\n",
        f"\taddi x1, x1, {TEST_RESULT}\n",
        f"\tli x2, {end_addr}\n",
        "\tsw x1, 0(x2)\n",
        "\tj csr_pass\n",
    ]
    test_file.writelines(handler)
def gen_csr_instr(original_csr_map, csr_instructions, xlen,
                  iterations, out, end_signature_addr):
    """
    Uses the information in the map produced by get_csr_map() to generate
    test CSR instructions operating on the generated random values.

    Args:
      original_csr_map: The dictionary containing CSR mappings generated by get_csr_map()
      csr_instructions: A list of all supported CSR instructions in string form.
      xlen: The RISC-V ISA bit length.
      iterations: Indicates how many randomized test files will be generated.
      out: A string containing the directory path that the tests will be generated in.
      end_signature_addr: The address the test should write to upon terminating

    Returns:
      No explicit return value, but will write the randomized assembly test code
      to the specified number of files.
    """
    for i in range(iterations):
        # pick two GPRs at random to act as source and destination registers
        # for CSR operations
        # (deep copy: each generated file starts from the reset CSR state)
        csr_map = copy.deepcopy(original_csr_map)
        source_reg, dest_reg = [f"x{i}" for i in random.sample(range(1, 16), 2)]
        csr_list = list(csr_map.keys())
        # NOTE(review): the inner `for i in range(3)` below reuses the name i;
        # harmless today because the filename is formatted before it runs,
        # but fragile — consider renaming.
        with open(f"{out}/riscv_csr_test_{i}.S", "w") as csr_test_file:
            gen_setup(csr_test_file)
            for csr in csr_list:
                csr_address, csr_val, csr_write_mask, csr_read_mask = csr_map.get(csr)
                csr_test_file.write(f"\t# {csr}\n")
                for op in csr_instructions:
                    for i in range(3):
                        # hex string
                        rand_rs1_val = get_rs1_val(i, xlen)
                        # I type CSR instruction
                        first_li = ""
                        if op[-1] == "i":
                            imm = rand_rs1_val[-5:]
                            csr_inst = f"\t{op} {dest_reg}, {csr_address}, 0b{imm.bin}\n"
                            imm_val = bitarray(uint=0, length=xlen-5)
                            imm_val.append(imm)
                            predict_li = (f"\tli {source_reg}, "
                                          f"{predict_csr_val(op, imm_val, csr_val, csr_write_mask, csr_read_mask)}\n")
                        else:
                            first_li = f"\tli {source_reg}, 0x{rand_rs1_val.hex}\n"
                            csr_inst = f"\t{op} {dest_reg}, {csr_address}, {source_reg}\n"
                            predict_li = (f"\tli {source_reg}, "
                                          f"{predict_csr_val(op, rand_rs1_val, csr_val, csr_write_mask, csr_read_mask)}\n")
                        # compare actual CSR readback against the prediction
                        branch_check = f"\tbne {source_reg}, {dest_reg}, csr_fail\n"
                        csr_test_file.write(first_li)
                        csr_test_file.write(csr_inst)
                        csr_test_file.write(predict_li)
                        csr_test_file.write(branch_check)
                        """
                        We must hardcode in one final CSR check, as the value that has last
                        been written to the CSR has not been tested.
                        """
                        if csr == csr_list[-1] and op == csr_instructions[-1] and i == 2:
                            final_csr_read = f"\tcsrr {dest_reg}, {csr_address}\n"
                            csrrs_read_mask = bitarray(uint=0, length=xlen)
                            final_li = (f"\tli {source_reg}, "
                                        f"{predict_csr_val('csrrs', csrrs_read_mask, csr_val, csr_write_mask, csr_read_mask)}\n")
                            final_branch_check = f"\tbne {source_reg}, {dest_reg}, csr_fail\n"
                            csr_test_file.write(final_csr_read)
                            csr_test_file.write(final_li)
                            csr_test_file.write(final_branch_check)
            gen_csr_test_pass(csr_test_file, end_signature_addr)
            gen_csr_test_fail(csr_test_file, end_signature_addr)
"""
Define command line arguments.
"""
parser = argparse.ArgumentParser()
parser.add_argument("--csr_file", type=str, default="yaml/csr_template.yaml",
help="The YAML file contating descriptions of all processor supported CSRs")
parser.add_argument("--xlen", type=int, default=32,
help="Specify the ISA width, e.g. 32 or 64 or 128")
parser.add_argument("--iterations", type=int, default=1,
help="Specify how many tests to be generated")
parser.add_argument("--out", type=str, default="./",
help="Specify output directory")
parser.add_argument("--end_signature_addr", type=str, default="0",
help="Address that should be written to at end of this test")
args = parser.parse_args()
"""
A list containing all supported CSR instructions.
"""
csr_ops = ['csrrw', 'csrrs', 'csrrc', 'csrrwi', 'csrrsi', 'csrrci']
gen_csr_instr(get_csr_map(args.csr_file, args.xlen),
csr_ops, args.xlen, args.iterations, args.out,
args.end_signature_addr)
|
<gh_stars>0
import pandas as pd
import constants as c
class DataManager:
    """
    This class encapsulates the logic for reading and processing the datasets.

    The two sex-filtered accessors previously duplicated the same
    filter/drop/raise logic; it now lives in a single private helper.
    """

    def __init__(self, heights_csv, gdps_csv):
        """
        Initialize the DataManager with the given CSV inputs. The DataManager holds the input as
        pandas dataframes and saves the processed datasets as well as the merged output when the respective functions
        are called. When reading the "average height" dataset, only the columns 'Country', 'Sex', 'Year', 'Age group'
        and 'Mean height' are saved. When reading the "GDP per capita" dataset, only the columns 'Country Name',
        'Country Code' and '2019' are saved since we are only interested in data from 2019.

        :param heights_csv: path to the CSV file containing data about the average heights
        :param gdps_csv: path to the CSV file containing data about the GDP
        """
        self.heights_input = pd.read_csv(heights_csv, usecols=['Country', 'Sex', 'Year', 'Age group', 'Mean height'])
        self.gdps_input = pd.read_csv(gdps_csv, usecols=['Country Name', 'Country Code', '2019'])
        # Filled lazily by process_datasets() / merge_processed_datasets().
        self.heights_processed = None
        self.gdps_processed = None
        self.output = None

    def process_datasets(self):
        """
        Process the two datasets. The "average heights" dataset is filtered so that only the data entries from 2019 and
        for the age group 19 are considered (those columns are dropped in the next step because they contain the same
        value for each row). The 'Mean height' column is renamed to a more meaningful name. Rows that contain a NaN
        value in the "GDP per capita" dataset are deleted. Additionally, the columns 'Country Name', '2019' and
        'Country Code' are renamed in order to be able to merge the two datasets and to provide meaningful column names.

        :return: nothing
        """
        # PREPARE DATAFRAMES
        # filter heights: year 2019, age 19
        self.heights_processed = self.heights_input.loc[
            (self.heights_input['Year'] == 2019) & (self.heights_input['Age group'] == 19)]
        # Year / Age group are now constant per row -> drop them.
        self.heights_processed = self.heights_processed.drop(labels=['Year', 'Age group'], axis=1)
        self.heights_processed = self.heights_processed.rename(columns={'Mean height': c.AVG_HEIGHT})
        # thresh=3 keeps only rows where all three selected columns are non-NaN.
        self.gdps_processed = self.gdps_input.dropna(thresh=3)
        self.gdps_processed = self.gdps_processed.rename(
            columns={'Country Name': c.COUNTRY, '2019': c.GDP, 'Country Code': c.COUNTRY_CODE})

    def merge_processed_datasets(self):
        """
        Merge the processed datasets. The processed datasets are merged on the 'Country' column which contains the
        name of the country. An exception is thrown if the datasets have not been processed yet (i.e., one of the
        datasets is None).

        :return: nothing
        """
        if self.heights_processed is not None and self.gdps_processed is not None:
            # MERGE DATAFRAMES (inner join on 'Country')
            merged = pd.merge(self.gdps_processed, self.heights_processed, on='Country')
            self.output = merged
        else:
            raise Exception('Datasets have not been processed yet!')

    def export_csv(self):
        """
        Export the resulting dataset to a CSV file. An exception is thrown if the result dataset has not been
        created yet.

        NOTE(review): assumes the 'out' directory already exists -- to_csv does
        not create it; confirm with the calling environment.

        :return: nothing
        """
        if self.output is not None:
            self.output.to_csv('out/gdp_avgHeight_per_country.csv', index_label='ID', index=True)
        else:
            raise Exception('No data produced yet!')

    def _dataset_for_sex(self, sex):
        """
        Shared implementation for the sex-filtered accessors: filter the merged
        result by the given 'Sex' value and drop the now-constant 'Sex' column.

        :param sex: value of the 'Sex' column to keep ('Boys' or 'Girls')
        :return: the filtered dataframe
        :raises Exception: if the merged result has not been created yet
        """
        if self.output is not None:
            return self.output.loc[(self.output['Sex'] == sex)].drop(labels=['Sex'], axis=1)
        else:
            raise Exception('No data produced yet!')

    def get_dataset_males(self):
        """
        Get the filtered result dataframe which only contains records for 19-year-old males. An exception is thrown
        if the result dataset has not been created yet.

        :return: the filtered dataframe
        """
        return self._dataset_for_sex('Boys')

    def get_dataset_females(self):
        """
        Get the filtered result dataframe which only contains records for 19-year-old females. An exception is thrown
        if the result dataset has not been created yet.

        :return: the filtered dataframe
        """
        return self._dataset_for_sex('Girls')
|
"""
MIT License
Copyright (c) 2018 AlessioNetti
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import select, socket, logging
from fault_injector.network.msg_entity import MessageEntity
from fault_injector.util.misc import getipport
class MessageServer(MessageEntity):
    """
    Class that implements a simple server enabled for communication with multiple clients.
    """

    # Shared logger for all server instances.
    logger = logging.getLogger('MessageServer')

    def __init__(self, port, socket_timeout=10, max_connections=100, re_send_msgs=False):
        """
        Constructor for the class

        :param port: Listening port for the server socket
        :param socket_timeout: Timeout for the sockets
        :param max_connections: Maximum number of concurrent connections to the server
        :param re_send_msgs: if True, the entity will keep track of sent/received messages, and eventually attempt
            to resend them to hosts that have not received them due to a connection loss
        """
        assert port is not None, 'A listening port for the server must be specified'
        super().__init__(socket_timeout=socket_timeout, max_connections=max_connections, re_send_msgs=re_send_msgs)
        # The server socket must be initialized
        self._serverAddress = ('', port)  # '' binds to all local interfaces
        af = socket.AF_INET
        self._serverSock = socket.socket(af, socket.SOCK_STREAM)
        # SO_REUSEADDR lets the server rebind the port quickly after a restart.
        self._serverSock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # _dummy_sock_r presumably comes from MessageEntity and is used to wake
        # select() when there is queued output -- confirm against the base class.
        self._readSet = [self._dummy_sock_r, self._serverSock]

    def _listen(self):
        """
        Method that implements the basic listener behavior of the server

        No action is taken upon the reception of a message: it is up to the user to decide how to react by looking
        at the message queue and taking action
        """
        # Listen for incoming connections
        self._serverSock.bind(self._serverAddress)
        self._serverSock.listen(self.max_connections)
        MessageServer.logger.info('Server has been started')
        while not self._hasToFinish:
            try:
                read, wr, err = select.select(self._readSet, [], self._readSet, self.sock_timeout)
                # Drop sockets in an error state and make sure we do not also
                # try to read from them in this same iteration.
                for sock in err:
                    self._remove_host(sock.getpeername())
                    if sock in read:
                        read.remove(sock)
                for sock in read:
                    if sock == self._serverSock:
                        # New incoming connection on the listening socket.
                        connection, client_address = self._serverSock.accept()
                        self._register_host(connection)
                        MessageServer.logger.info('Client %s has subscribed' % getipport(connection))
                    elif sock == self._dummy_sock_r:
                        # Wake-up signal from the writer side: flush pending output.
                        self._flush_output_queue()
                    else:
                        # Regular client socket with incoming data.
                        if not self._liveness_check(sock):
                            self._remove_host(sock.getpeername())
                        else:
                            peername = sock.getpeername()
                            data, seq_num = self._recv_msg(sock)
                            if data is not None:
                                self._add_to_input_queue(peername, data)
                            elif self.reSendMsgs and seq_num is not None:
                                # No payload but a sequence number: attempt to
                                # re-send older messages the client missed.
                                self._forward_old_msgs(seq_num, peername)
            except socket.timeout:
                pass
            except select.error:
                self._trim_dead_sockets()
        # Shutdown path: close every socket before terminating.
        self._serverSock.close()
        self._dummy_sock_r.close()
        self._dummy_sock_w.close()
        for sock in self._registeredHosts.values():
            sock.close()
        MessageServer.logger.info('Server has been shut down')

    def _update_seq_num(self, addr, seq_num, received=True):
        """
        Refreshes the sequence number associated to a certain connected host

        This implementation of the method is a dummy and does nothing. This is because in our client-server architecture
        it is the client that tracks the sequence numbers of received/sent messages, and that has priority in the
        message forwarding process

        :param addr: The address of the connected host
        :param seq_num: The sequence number associated to the connected host, in tuple format
        :param received: If True, then the sequence number refers to a received message, and sent otherwise
        """
        pass

    def _update_read_set(self):
        """
        Updates the list of socket enabled for reading on the 'select' calls
        """
        self._readSet = [self._serverSock, self._dummy_sock_r] + list(self._registeredHosts.values())
|
#!/usr/bin/python
"""
Two Column Virtual Terminal.
"""
# Copyright 2011 <NAME> <<EMAIL>>. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY <NAME> ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL <NAME> OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of <NAME>.
# pylint: disable=invalid-name, missing-docstring
import pty
import sys
import os
import select
import fcntl
import termios
import struct
import curses
import errno
import time
import optparse
def init_color_pairs(invert):
    """
    Set color pairs for ncurses where each color is between 0 and COLORS.

    Pair number is fg_index * 8 + bg_index; pair (0, 0) is skipped because
    curses reserves pair 0. With invert=True the default foreground and
    background roles are swapped.
    """
    if invert:
        foreground, background = curses.COLOR_WHITE, curses.COLOR_BLACK
    else:
        foreground, background = curses.COLOR_BLACK, curses.COLOR_WHITE
    backgrounds = (background, curses.COLOR_RED, curses.COLOR_GREEN,
                   curses.COLOR_YELLOW, curses.COLOR_BLUE,
                   curses.COLOR_MAGENTA, curses.COLOR_CYAN, foreground)
    foregrounds = (curses.COLOR_WHITE, curses.COLOR_BLACK, curses.COLOR_RED,
                   curses.COLOR_GREEN, curses.COLOR_YELLOW, curses.COLOR_BLUE,
                   curses.COLOR_MAGENTA, curses.COLOR_CYAN)
    for bg_index, bg_color in enumerate(backgrounds):
        for fg_index, fg_color in enumerate(foregrounds):
            if (fg_index, bg_index) != (0, 0):
                curses.init_pair(fg_index * 8 + bg_index, fg_color, bg_color)
def get_color(fg=1, bg=0):
    """Return the curses attribute for the pair registered by init_color_pairs."""
    pair_index = ((fg + 1) % 8) * 8 + bg
    return curses.color_pair(pair_index)
class Simple:
    """Single-column layout: a thin wrapper around one scrollable curses window.

    Exposes the same interface as Columns so Terminal can switch between the
    two layouts transparently.
    """

    def __init__(self, curseswindow):
        self.screen = curseswindow
        self.screen.scrollok(1)  # allow the window to scroll when full

    def getmaxyx(self):
        return self.screen.getmaxyx()

    def move(self, ypos, xpos):
        # Clamp the target to the window bounds instead of letting curses
        # raise on out-of-range coordinates.
        ym, xm = self.getmaxyx()
        self.screen.move(max(0, min(ym - 1, ypos)), max(0, min(xm - 1, xpos)))

    def relmove(self, yoff, xoff):
        # Move relative to the current cursor position (clamped by move()).
        y, x = self.getyx()
        self.move(y + yoff, x + xoff)

    def addch(self, char):
        self.screen.addch(char)

    def refresh(self):
        self.screen.refresh()

    def getyx(self):
        return self.screen.getyx()

    def scroll(self):
        self.screen.scroll()

    def clrtobot(self):
        self.screen.clrtobot()

    def attron(self, attr):
        self.screen.attron(attr)

    def clrtoeol(self):
        self.screen.clrtoeol()

    def delch(self):
        self.screen.delch()

    def attrset(self, attr):
        self.screen.attrset(attr)

    def insertln(self):
        self.screen.insertln()

    def insch(self, char):
        self.screen.insch(char)

    def deleteln(self):
        self.screen.deleteln()

    def inch(self):
        return self.screen.inch()
class BadWidth(Exception):
    """Raised when the screen cannot accommodate the requested column layout."""
class Columns:
    """Multi-column layout over several side-by-side curses sub-windows.

    Coordinates are virtual: the y axis spans height * numcolumns rows, so
    virtual row `height` is the first row of the second column. Scrolling
    moves lines across column boundaries. Adjacent columns are separated by
    one vertical-line character.
    """

    def __init__(self, curseswindow, numcolumns=2, reverse=False):
        self.screen = curseswindow
        self.height, width = self.screen.getmaxyx()
        if numcolumns < 1:
            # NOTE(review): the message says "two columns" but the check only
            # rejects values below one -- confirm which is intended.
            raise BadWidth("need at least two columns")
        self.numcolumns = numcolumns
        # One character between adjacent columns is reserved for the separator.
        self.columnwidth = (width - (numcolumns - 1)) // numcolumns
        if self.columnwidth <= 0:
            raise BadWidth("resulting column width too small")
        self.windows = []
        for i in range(numcolumns):
            window = self.screen.derwin(self.height, self.columnwidth,
                                        0, i * (self.columnwidth + 1))
            window.scrollok(1)
            self.windows.append(window)
        if reverse:
            # Fill columns right-to-left instead of left-to-right.
            self.windows.reverse()
        self.ypos, self.xpos = 0, 0  # virtual cursor position
        # Draw the vertical separator lines between the columns.
        for i in range(1, numcolumns):
            self.screen.vline(0, i * (self.columnwidth + 1) - 1,
                              curses.ACS_VLINE, self.height)
        self.attrs = 0  # curses attributes applied by addch/insch

    @property
    def curwin(self):
        # Window containing the virtual cursor row.
        return self.windows[self.ypos // self.height]

    @property
    def curypos(self):
        # Cursor row within the current window.
        return self.ypos % self.height

    @property
    def curxpos(self):
        return self.xpos

    def getmaxyx(self):
        # Virtual dimensions: all columns stacked vertically.
        return (self.height * self.numcolumns, self.columnwidth)

    def move(self, ypos, xpos):
        # Clamp to the virtual coordinate space, then sync the real cursor.
        height, width = self.getmaxyx()
        self.ypos = max(0, min(height - 1, ypos))
        self.xpos = max(0, min(width - 1, xpos))
        self.fix_cursor()

    def fix_cursor(self):
        # Push the virtual cursor position down to the underlying window.
        self.curwin.move(self.curypos, self.curxpos)

    def relmove(self, yoff, xoff):
        self.move(self.ypos + yoff, self.xpos + xoff)

    def addch(self, char):
        if self.xpos == self.columnwidth - 1:
            # Last cell of a row: insch avoids the automatic wrap/scroll that
            # addch would trigger there; the cursor is then moved manually.
            self.curwin.insch(self.curypos, self.curxpos, char, self.attrs)
            if self.ypos + 1 == 2 * self.height:
                # NOTE(review): compares against 2 * height rather than
                # numcolumns * height; for more than two columns this would
                # scroll before the last column fills -- confirm intended.
                self.scroll()
                self.move(self.ypos, 0)
            else:
                self.move(self.ypos + 1, 0)
        else:
            self.curwin.addch(self.curypos, self.curxpos, char, self.attrs)
            self.xpos += 1

    def refresh(self):
        self.screen.refresh()
        for window in self.windows:
            if window is not self.curwin:
                window.refresh()
        # Refresh the cursor's window last so the hardware cursor ends there.
        self.curwin.refresh()

    def getyx(self):
        return (self.ypos, self.xpos)

    def scroll_up(self, index):
        """Copy first line of the window with given index to last line of the
        previous window and scroll up the given window."""
        assert index > 0
        previous = self.windows[index - 1]
        previous.move(self.height - 1, 0)
        for x in range(self.columnwidth - 1):
            previous.addch(self.windows[index].inch(0, x))
        # insch for the final cell: addch there would scroll `previous`.
        previous.insch(self.windows[index].inch(0, self.columnwidth - 1))
        self.fix_cursor()
        self.windows[index].scroll()

    def scroll_down(self, index):
        """Scroll down the window with given index and copy the last line of
        the previous window to the first line of the given window."""
        assert index > 0
        current = self.windows[index]
        previous = self.windows[index - 1]
        current.scroll(-1)
        current.move(0, 0)
        for x in range(self.columnwidth - 1):
            current.addch(previous.inch(self.height - 1, x))
        current.insch(previous.inch(self.height - 1, self.columnwidth - 1))
        self.fix_cursor()

    def scroll(self):
        # Scroll the whole virtual screen up by one line.
        self.windows[0].scroll()
        for i in range(1, self.numcolumns):
            self.scroll_up(i)

    def clrtobot(self):
        # Clear from the cursor to the end of the virtual screen: fully clear
        # all following columns, partially clear the current one.
        index = self.ypos // self.height
        for i in range(index + 1, self.numcolumns):
            self.windows[i].clear()
        self.windows[index].clrtobot()

    def attron(self, attr):
        self.attrs |= attr

    def clrtoeol(self):
        self.curwin.clrtoeol()

    def delch(self):
        self.curwin.delch(self.curypos, self.curxpos)

    def attrset(self, attr):
        self.attrs = attr

    def insertln(self):
        index = self.ypos // self.height
        # Make room by pushing lines into the following columns first.
        for i in reversed(range(index + 1, self.numcolumns)):
            self.scroll_down(i)
        self.curwin.insertln()

    def insch(self, char):
        self.curwin.insch(self.curypos, self.curxpos, char, self.attrs)

    def deleteln(self):
        index = self.ypos // self.height
        self.windows[index].deleteln()
        # Pull lines back from the following columns to fill the gap.
        for i in range(index + 1, self.numcolumns):
            self.scroll_up(i)

    def inch(self):
        return self.curwin.inch(self.curypos, self.curxpos)
def acs_map():
    """call after curses.initscr

    Maps VT100 alternate-character-set byte codes to the corresponding
    curses ACS_* line-drawing characters. The ACS_* values are only
    meaningful once initscr has run, hence the note above.
    """
    # can this mapping be obtained from curses?
    return {
        ord(b'l'): curses.ACS_ULCORNER,
        ord(b'm'): curses.ACS_LLCORNER,
        ord(b'k'): curses.ACS_URCORNER,
        ord(b'j'): curses.ACS_LRCORNER,
        ord(b't'): curses.ACS_LTEE,
        ord(b'u'): curses.ACS_RTEE,
        ord(b'v'): curses.ACS_BTEE,
        ord(b'w'): curses.ACS_TTEE,
        ord(b'q'): curses.ACS_HLINE,
        ord(b'x'): curses.ACS_VLINE,
        ord(b'n'): curses.ACS_PLUS,
        ord(b'o'): curses.ACS_S1,
        ord(b's'): curses.ACS_S9,
        ord(b'`'): curses.ACS_DIAMOND,
        ord(b'a'): curses.ACS_CKBOARD,
        ord(b'f'): curses.ACS_DEGREE,
        ord(b'g'): curses.ACS_PLMINUS,
        ord(b'~'): curses.ACS_BULLET,
        ord(b','): curses.ACS_LARROW,
        ord(b'+'): curses.ACS_RARROW,
        ord(b'.'): curses.ACS_DARROW,
        ord(b'-'): curses.ACS_UARROW,
        ord(b'h'): curses.ACS_BOARD,
        ord(b'i'): curses.ACS_LANTERN,
        ord(b'p'): curses.ACS_S3,
        ord(b'r'): curses.ACS_S7,
        ord(b'y'): curses.ACS_LEQUAL,
        ord(b'z'): curses.ACS_GEQUAL,
        ord(b'{'): curses.ACS_PI,
        ord(b'|'): curses.ACS_NEQUAL,
        ord(b'}'): curses.ACS_STERLING,
    }
def compose_dicts(dct1, dct2):
    """Compose two mappings: map key -> dct2[dct1[key]].

    Keys whose intermediate value is missing from dct2 are silently dropped.
    """
    return {key: dct2[value] for key, value in dct1.items() if value in dct2}
# Bytes that Terminal.feed_simple renders verbatim. Any byte not listed here
# (and not a handled control character or ESC) makes feed_simple raise
# ValueError, which the main loop turns into a parser reset. The trailing
# escapes are latin-1 bytes -- presumably common accented characters; confirm
# the intended encoding with the upstream author.
SIMPLE_CHARACTERS = bytearray(
    b'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' +
    b'0123456789@:~$ .#!/_(),[]=-+*\'"|<>%&\\?;`^{}' +
    b'\xb4\xb6\xb7\xc3\xc4\xd6\xdc\xe4\xe9\xfc\xf6')
class Terminal:
    """ANSI escape-sequence interpreter driving a Simple or Columns screen.

    The parser is a small state machine: `self.mode` is a tuple whose first
    element is the current feed_* handler and whose remaining elements are
    extra arguments for it (e.g. the digits collected so far).
    """

    def __init__(self, acsc, columns, reverse=False, invert=False):
        self.mode = (self.feed_simple,)  # parser state (handler, *args)
        self.realscreen = None  # curses root window, set in start()
        self.screen = None      # current layout (Simple or Columns)
        self.fg = self.bg = 0   # current color indices
        self.graphics_font = False  # True while in the VT100 graphics font
        self.graphics_chars = acsc  # really initialized after
        self.lastchar = ord(b' ')   # last printed char, for ESC [ n b (repeat)
        self.columns = columns
        self.reverse = reverse
        self.invert = invert

    def switchmode(self):
        # Toggle between the multi-column and plain single-window layouts.
        if isinstance(self.screen, Columns):
            self.screen = Simple(self.realscreen)
        else:
            self.screen = Columns(self.realscreen, self.columns)
        self.screen.refresh()

    def resized(self):
        # The refresh call causes curses to notice the new dimensions.
        self.realscreen.refresh()
        self.realscreen.clear()
        try:
            self.screen = Columns(self.realscreen, self.columns,
                                  reverse=self.reverse)
        except BadWidth:
            # Too narrow for columns: fall back to a single window.
            self.screen = Simple(self.realscreen)

    def resizepty(self, ptyfd):
        # Propagate the (virtual) screen size to the child via TIOCSWINSZ.
        ym, xm = self.screen.getmaxyx()
        fcntl.ioctl(ptyfd, termios.TIOCSWINSZ,
                    struct.pack("HHHH", ym, xm, 0, 0))

    def addch(self, char):
        # Remember the char so ESC [ n b can repeat it.
        self.lastchar = char
        self.screen.addch(char)

    def start(self):
        """Initialize curses, colors and the column layout."""
        self.realscreen = curses.initscr()
        self.realscreen.nodelay(1)
        self.realscreen.keypad(1)
        curses.start_color()
        curses.use_default_colors()
        init_color_pairs(self.invert)
        self.screen = Columns(self.realscreen, self.columns,
                              reverse=self.reverse)
        curses.noecho()
        curses.raw()
        # Resolve the acsc byte mapping now that ACS_* values exist.
        self.graphics_chars = compose_dicts(self.graphics_chars, acs_map())

    def stop(self):
        """Restore the terminal to its normal state."""
        curses.noraw()
        curses.echo()
        curses.endwin()

    # --- terminfo-style primitive operations -------------------------------

    def do_bel(self):
        curses.beep()

    def do_blink(self):
        self.screen.attron(curses.A_BLINK)

    def do_bold(self):
        self.screen.attron(curses.A_BOLD)

    def do_cr(self):
        # Carriage return: move() clamps, so a large negative offset works.
        self.screen.relmove(0, -9999)

    def do_cub(self, n):
        self.screen.relmove(0, -n)

    def do_cub1(self):
        self.do_cub(1)

    def do_cud(self, n):
        self.screen.relmove(n, 0)

    def do_cud1(self):
        self.do_cud(1)

    def do_cuf(self, n):
        self.screen.relmove(0, n)

    def do_cuf1(self):
        self.do_cuf(1)

    def do_cuu(self, n):
        self.screen.relmove(-n, 0)

    def do_cuu1(self):
        self.do_cuu(1)

    def do_dch(self, n):
        for _ in range(n):
            self.screen.delch()

    def do_dch1(self):
        self.do_dch(1)

    def do_dl(self, n):
        for _ in range(n):
            self.screen.deleteln()

    def do_dl1(self):
        self.do_dl(1)

    def do_ech(self, n):
        # Erase n characters by overwriting with spaces.
        for _ in range(n):
            self.screen.addch(ord(b' '))

    def do_ed(self):
        self.screen.clrtobot()

    def do_el(self):
        self.screen.clrtoeol()

    def do_el1(self):
        # Erase from start of line to the cursor.
        y, x = self.screen.getyx()
        self.screen.move(y, 0)
        for _ in range(x):
            self.screen.addch(ord(b' '))

    def do_home(self):
        self.screen.move(0, 0)

    def do_hpa(self, n):
        y, _ = self.screen.getyx()
        self.screen.move(y, n)

    def do_ht(self):
        # Horizontal tab: advance to the next multiple of 8, clamped to width.
        y, x = self.screen.getyx()
        _, xm = self.screen.getmaxyx()
        x = min(x + 8 - x % 8, xm - 1)
        self.screen.move(y, x)

    def do_ich(self, n):
        for _ in range(n):
            self.screen.insch(ord(b' '))

    def do_il(self, n):
        for _ in range(n):
            self.screen.insertln()

    def do_il1(self):
        self.do_il(1)

    def do_ind(self):
        # Line feed; scroll when already on the last virtual row.
        y, _ = self.screen.getyx()
        ym, _ = self.screen.getmaxyx()
        if y + 1 == ym:
            self.screen.scroll()
            self.screen.move(y, 0)
        else:
            self.screen.move(y+1, 0)

    def do_invis(self):
        self.screen.attron(curses.A_INVIS)

    def do_smul(self):
        self.screen.attron(curses.A_UNDERLINE)

    def do_vpa(self, n):
        _, x = self.screen.getyx()
        self.screen.move(n, x)

    # --- parser state machine ---------------------------------------------

    def feed_reset(self):
        # Return to the base state matching the currently selected font.
        if self.graphics_font:
            self.mode = (self.feed_graphics,)
        else:
            self.mode = (self.feed_simple,)

    def feed(self, char):
        # Dispatch one input byte to the current state handler.
        self.mode[0](char, *self.mode[1:])

    def feed_simple(self, char):
        """Base state: printable characters and simple control bytes."""
        func = {
            ord('\a'): self.do_bel,
            ord('\b'): self.do_cub1,
            ord('\n'): self.do_ind,
            ord('\r'): self.do_cr,
            ord('\t'): self.do_ht,
        }.get(char)
        if func:
            func()
        elif char in SIMPLE_CHARACTERS:
            self.addch(char)
        elif char == 0x1b:
            self.mode = (self.feed_esc,)
        else:
            raise ValueError("feed %r" % char)

    def feed_graphics(self, char):
        """Base state while the VT100 graphics (line-drawing) font is active."""
        if char == 0x1b:
            self.mode = (self.feed_esc,)
        elif char in self.graphics_chars:
            self.addch(self.graphics_chars[char])
        elif char == ord(b'q'):  # some applications appear to use VT100 names?
            self.addch(curses.ACS_HLINE)
        else:
            raise ValueError("graphics %r" % char)

    def feed_esc(self, char):
        # After ESC only CSI (ESC [) sequences are supported.
        if char == ord(b'['):
            self.mode = (self.feed_esc_opbr,)
        else:
            raise ValueError("feed esc %r" % char)

    def feed_esc_opbr(self, char):
        """State right after "ESC [": parameterless sequences or first digit."""
        self.feed_reset()
        func = {
            ord('A'): self.do_cuu1,
            ord('B'): self.do_cud1,
            ord('C'): self.do_cuf1,
            ord('D'): self.do_cub1,
            ord('H'): self.do_home,
            ord('J'): self.do_ed,
            ord('L'): self.do_il1,
            ord('M'): self.do_dl1,
            ord('K'): self.do_el,
            ord('P'): self.do_dch1,
        }.get(char)
        if func:
            func()
        elif char == ord(b'm'):
            # "ESC [ m" is equivalent to "ESC [ 0 m" (reset attributes).
            self.feed_esc_opbr_next(char, bytearray(b'0'))
        elif char in bytearray(b'0123456789'):
            # Start collecting a numeric parameter.
            self.mode = (self.feed_esc_opbr_next, bytearray((char,)))
        else:
            raise ValueError("feed esc [ %r" % char)

    def feed_color(self, code):
        """Apply one SGR (Select Graphic Rendition) parameter."""
        func = {
            1: self.do_bold,
            4: self.do_smul,
            5: self.do_blink,
            8: self.do_invis,
        }.get(code)
        if func:
            func()
        elif code == 0:
            # Reset all attributes and colors.
            self.fg = self.bg = 0
            self.screen.attrset(0)
        elif code == 7:
            self.screen.attron(curses.A_REVERSE)
        elif code == 10:
            # Select the primary (text) font.
            self.graphics_font = False
            self.feed_reset()
        elif code == 11:
            # Select the alternate (line-drawing) font.
            self.graphics_font = True
            self.feed_reset()
        elif 30 <= code <= 37:
            self.fg = code - 30
            self.screen.attron(get_color(self.fg, self.bg))
        elif code == 39:
            # Default foreground.
            self.fg = 7
            self.screen.attron(get_color(self.fg, self.bg))
        elif 40 <= code <= 47:
            self.bg = code - 40
            self.screen.attron(get_color(self.fg, self.bg))
        elif code == 49:
            # Default background.
            self.bg = 0
            self.screen.attron(get_color(self.fg, self.bg))
        else:
            raise ValueError("feed esc [ %r m" % code)

    def feed_esc_opbr_next(self, char, prev):
        """State inside "ESC [ <params>": `prev` holds the collected bytes."""
        self.feed_reset()
        func = {
            ord('A'): self.do_cuu,
            ord('B'): self.do_cud,
            ord('C'): self.do_cuf,
            ord('D'): self.do_cub,
            ord('L'): self.do_il,
            ord('M'): self.do_dl,
            ord('P'): self.do_dch,
            ord('X'): self.do_ech,
            ord('@'): self.do_ich,
        }.get(char)
        if func and prev.isdigit():
            func(int(prev))
        elif char in bytearray(b'0123456789;'):
            # Keep accumulating parameter bytes.
            self.mode = (self.feed_esc_opbr_next, prev + bytearray((char,)))
        elif char == ord(b'm'):
            parts = prev.split(b';')
            for p in parts:
                self.feed_color(int(p))
        elif char == ord(b'H'):
            # Cursor position: 1-based row;col converted to 0-based.
            parts = prev.split(b';')
            if len(parts) != 2:
                raise ValueError("feed esc [ %r H" % parts)
            self.screen.move(*map((-1).__add__, map(int, parts)))
        elif prev == bytearray(b'2') and char == ord(b'J'):
            # Clear entire screen.
            self.screen.move(0, 0)
            self.screen.clrtobot()
        elif char == ord(b'd') and prev.isdigit():
            self.do_vpa(int(prev) - 1)
        elif char == ord(b'b') and prev.isdigit():
            # Repeat the last printed character n times.
            for _ in range(int(prev)):
                self.screen.addch(self.lastchar)
        elif char == ord(b'G') and prev.isdigit():
            self.do_hpa(int(prev) - 1)
        elif char == ord(b'K') and prev == b'1':
            self.do_el1()
        else:
            raise ValueError("feed esc [ %r %r" % (prev, char))
# Maps curses key codes to terminfo capability names; compute_keymap resolves
# each capability to the byte sequence the "ansi" terminal expects for it.
SYMBOLIC_KEYMAPPING = {
    ord(b"\n"): "cr",
    curses.KEY_LEFT: "kcub1",
    curses.KEY_DOWN: "kcud1",
    curses.KEY_RIGHT: "kcuf1",
    curses.KEY_UP: "kcuu1",
    curses.KEY_HOME: "khome",
    curses.KEY_IC: "kich1",
    curses.KEY_BACKSPACE: "kbs",
    curses.KEY_PPAGE: "kpp",
    curses.KEY_NPAGE: "knp",
    curses.KEY_F1: "kf1",
    curses.KEY_F2: "kf2",
    curses.KEY_F3: "kf3",
    curses.KEY_F4: "kf4",
    curses.KEY_F5: "kf5",
    curses.KEY_F6: "kf6",
    curses.KEY_F7: "kf7",
    curses.KEY_F8: "kf8",
    curses.KEY_F9: "kf9",
}
def compute_keymap(symbolic_map):
    """Resolve terminfo capabilities for the "ansi" terminal type.

    Returns a pair (keymap, acsc): keymap maps curses key codes to the byte
    sequences to write to the child pty; acsc maps alternate-character-set
    bytes, decoded from the terminfo "acsc" string (which alternates
    vt100-code/local-code byte pairs). Temporarily switches terminfo to
    "ansi" and restores the user's TERM afterwards.
    """
    oldterm = os.environ["TERM"]
    curses.setupterm("ansi")
    keymap = {}
    for key, value in symbolic_map.items():
        # tigetstr returns None for missing capabilities; "\\E" is terminfo
        # escape notation and must become a literal ESC byte.
        keymap[key] = (curses.tigetstr(value) or b"").replace(b"\\E", b"\x1b")
    acsc = curses.tigetstr("acsc")
    acsc = bytearray(acsc)
    # Pairs of (vt100 code, local code) -> dict {vt100: local}.
    acsc = dict(zip(acsc[1::2], acsc[::2]))
    curses.setupterm(oldterm)
    return keymap, acsc
def set_cloexec(fd):
    """Mark *fd* close-on-exec so it is not inherited across exec calls."""
    current = fcntl.fcntl(fd, fcntl.F_GETFD, 0)
    fcntl.fcntl(fd, fcntl.F_SETFD, current | fcntl.FD_CLOEXEC)
def main():
    """Run a command (or $SHELL) inside the multi-column virtual terminal.

    Forks a child on a pty with TERM=ansi, then multiplexes between user
    keyboard input (translated through the computed keymap) and child output
    (fed to the Terminal escape-code interpreter). Screen refreshes are
    debounced by ~0.1s while output keeps arriving.
    """
    # Options
    parser = optparse.OptionParser()
    parser.disable_interspersed_args()
    parser.add_option("-c", "--columns", dest="columns", metavar="N",
                      type="int", default=2, help="Number of columns")
    parser.add_option("-r", "--reverse", action="store_true",
                      dest="reverse", default=False,
                      help="Order last column to the left")
    parser.add_option("-i", "--invert", action="store_true",
                      dest="invert", default=False,
                      help="Invert the foreground and background colors")
    options, args = parser.parse_args()
    # Environment
    keymapping, acsc = compute_keymap(SYMBOLIC_KEYMAPPING)
    t = Terminal(acsc, options.columns, reverse=options.reverse,
                 invert=options.invert)
    # Pipe used solely to report an exec failure from the child; CLOEXEC on
    # the write end means a successful exec closes it and the parent sees EOF.
    errpiper, errpipew = os.pipe()
    set_cloexec(errpipew)
    pid, masterfd = pty.fork()
    if pid == 0:  # child
        os.close(errpiper)
        os.environ["TERM"] = "ansi"
        try:
            if len(args) < 1:
                os.execvp(os.environ["SHELL"], [os.environ["SHELL"]])
            else:
                os.execvp(args[0], args)
        except OSError as err:
            # Fix: os.write takes bytes, not str, on Python 3 -- the original
            # str argument raised TypeError and hid the real exec failure.
            os.write(errpipew, ("exec failed: %s" % (err,)).encode())
        sys.exit(1)
    # Parent: wait for either an error report or EOF from the child's exec.
    os.close(errpipew)
    data = os.read(errpiper, 1024)
    os.close(errpiper)
    if data:
        print(data)
        sys.exit(1)
    # Begin multicolumn layout
    try:
        t.start()
        t.resizepty(masterfd)
        refreshpending = None  # deadline for a deferred screen refresh
        while True:
            try:
                # With a pending refresh, poll (timeout 0) instead of blocking.
                res, _, _ = select.select([0, masterfd], [], [],
                                          refreshpending and 0)
            except select.error as err:
                if err.args[0] == errno.EINTR:
                    # A window resize interrupts select; rebuild the layout.
                    t.resized()
                    t.resizepty(masterfd)
                    continue
                raise
            if 0 in res:
                # Drain all pending keyboard input.
                while True:
                    key = t.realscreen.getch()
                    if key == -1:
                        break
                    if key == 0xb3:
                        # Hotkey: toggle between Columns and Simple layouts.
                        t.switchmode()
                        t.resizepty(masterfd)
                    elif key in keymapping:
                        os.write(masterfd, keymapping[key])
                    elif key <= 0xff:
                        os.write(masterfd, struct.pack("B", key))
                    else:
                        if "TCVT_DEVEL" in os.environ:
                            raise ValueError("getch returned %d" % key)
            elif masterfd in res:
                try:
                    data = os.read(masterfd, 1024)
                except OSError:
                    break
                if not data:
                    break
                for char in bytearray(data):
                    if "TCVT_DEVEL" in os.environ:
                        t.feed(char)  # let parse errors propagate when debugging
                    else:
                        try:
                            t.feed(char)
                        except ValueError:
                            t.feed_reset()
                if refreshpending is None:
                    refreshpending = time.time() + 0.1
            elif refreshpending is not None:
                # Poll timed out: output has gone quiet, redraw now.
                t.screen.refresh()
                refreshpending = None
            if refreshpending is not None and refreshpending < time.time():
                t.screen.refresh()
                refreshpending = None
    finally:
        t.stop()
# Run the multi-column terminal when invoked as a script.
if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
"""
Subscribes T_goal {Pose()}, publishes joint angles {JointState()}
"""
# imports
# import Data.data_logger_module as data_logger
import sys, os
import rospy
from math import sqrt, pi
import numpy as np
from sensor_msgs.msg import JointState
from geometry_msgs.msg import Vector3
from geometry_msgs.msg import Pose
from std_msgs.msg import Int8
from openravepy import *
# Make the project's Classes directory importable and locate the OpenRAVE
# robot model used by the IK solver (hard-coded, machine-specific paths).
sys.path.append("/home/gizem/catkin_ws/src/ur5_with_hand_gazebo/src/Classes")
robot_dir = "/home/gizem/catkin_ws/src/ik_solver_test/robots/ur5/xml/"
robot_name = "ur5-with-objects.xml"
robot_path = os.path.join(robot_dir, robot_name)
# Import must follow the sys.path manipulation above.
from DH_matrices import DHmatrices
# Module-level bookkeeping: node start time and a scratch test message.
_ROSTIME_START = 0
test_pub_msg = Vector3()
class IKSolver:
    def __init__(self, ikmodel=2, START_NODE=False, rate=100):
        ''' Initializes the openrave environment, robot state and IK model
        @params ikmodel: 1->Transform6D 2->Translation3D
        @params START_NODE: when True, also initializes the ROS node and rate here
        @params rate: loop rate in Hz, only used when START_NODE is True
        '''
        # Set environment and robot state
        self.env = Environment()
        self.env.Load(robot_path) # load a scene
        self.env.SetViewer('qtcoin') # start the viewer
        self.robot = self.env.GetRobots()[0] # get the first robot
        print "Dof", self.robot.GetDOFValues()
        # Set IK model
        if ikmodel==1:
            self.iktype = IkParameterization.Type.Transform6D
        elif ikmodel==2:
            self.iktype = IkParameterization.Type.Translation3D
        else:
            sys.exit("IK type not known")
        # Load a cached IK database for this robot/iktype, generating it on
        # first use (autogenerate can take a long time).
        self.ikmodel = databases.inversekinematics.InverseKinematicsModel(robot=self.robot,iktype=self.iktype)
        if not self.ikmodel.load():
            print "New IK model is creating.."
            self.ikmodel.autogenerate()
            print "IK model created"
        print "Load:", self.ikmodel.load()
        print "Filename:", self.ikmodel.getfilename()
        print "IKname:", self.ikmodel.getikname()
        # Set active manipulator bases
        self.basemanip = interfaces.BaseManipulation(self.robot)
        self.taskmanip = interfaces.TaskManipulation(self.robot)
        self.manip = self.robot.GetActiveManipulator()
        self.Tee_current = self.manip.GetEndEffectorTransform() # get end effector
        self.Twrist = self.robot.GetLinks()[5].GetTransform() # get wrist transform
        self.Twrist_pose = DHmatrices.htm_to_pose(self.Twrist)
        # Set joint limits
        self.robot.SetDOFValues([0.0,-1.57,1.57,0.0,0.0,0.0]) ## you may need to check this values.
        # NOTE(review): lower/upper are computed but unused because the
        # SetDOFLimits call below is commented out.
        lower = np.concatenate((np.array([-0.01, -(pi/2-0.01), pi/2-0.01]), np.array([1., 1., 1.])*-3.14159265))
        upper = np.concatenate((np.array([0.01, -(pi/2-0.01), pi/2+0.01]), np.array([1., 1., 1.])*3.14159265))
        # self.robot.SetDOFLimits(lower, upper) ## So annoying but skip now
        print "DOF limits:", self.robot.GetDOFLimits()
        # EE poses (hard-coded candidate end-effector transforms; only Tee1 is used)
        Tee1 = np.array([[0.00, 1.00, 0.00, 1.18], [1.00, 0.00, 0.00, -0.743], [-0.00, 0.00, -1.00, 1.011], [0.00, 0.00, 0.00, 1.00]])
        Tee2 = np.array([[0.00, 0.00, -1.00, 0.496], [ 1.00, 0.00, 0.00, -0.743], [0.00, -1.00, 0.00, 0.555], [ 0.00, 0.00, 0.00, 1.00]])
        Tee3 = np.array([[1.00, 0.00, 0.00, 0.704], [0.00, 1.00, 0.00, -0.836], [0.00, 0.00, 1.00, 0.670], [0.00, 0.00, 0.00, 1.00]])
        # self.Tee_current = np.array([1.00, 0.00, 0.00, 0.00], [0.00, 1.00, 0.00, 0.00], [0.00, 0.00, 1.00, 0.00], [0.00, 0.00, 0.00, 1.00])
        self.Tee_current = Tee1 ## for test only - constantly read by self.manip.GetEndEffectorTransform() # get end effector
        self.Tee_goal = np.zeros((4,4), dtype=np.float32) # gonna be calculated by ik
        # IK parametrization init (translation-only query from Tee_current)
        self.ikparam = IkParameterization(self.Tee_current[0:3,3], self.ikmodel.iktype) # build up the translation3d ik query
        self.sol = self.manip.FindIKSolution(self.ikparam, IkFilterOptions.CheckEnvCollisions)
        # Init robot pose
        # NOTE(review): FindIKSolution may return None when no solution exists;
        # SetDOFValues would then fail -- confirm the initial pose is reachable.
        self.robot.SetDOFValues(self.sol, self.ikmodel.manip.GetArmIndices())
        # Updated parameters
        self.joint_states = JointState()
        ## TODO: parametrize such that robot.GetJointNames()
        self.joint_states.name = ['shoulder_pan_joint', 'shoulder_lift_joint', 'elbow_joint', 'wrist_1_joint', 'wrist_2_joint', 'wrist_3_joint']
        self.joint_states.position = [0.0, 0.0, pi/2, 0.0, 0.0, 0.0]
        self.ee_goal = Vector3()
        self.test_joints = JointState()
        self.test_joints.position = [0.0,1.57,0.0,0.0,0.0,0.0]
        if START_NODE == True:
            rospy.init_node("ik_solver_node")
            self.r = rospy.Rate(rate)
            print "ik_solver_node initialized"
    def start_node(self, rate):
        '''
        NOTE: only one node should be calling this function.
        '''
        # Alternative to constructing with START_NODE=True. `rate` is
        # currently unused because the Rate creation below is commented out.
        rospy.init_node('ik_solver_node', anonymous=False)
        print "ik_solver_node initialized"
        # self.r = rospy.Rate(rate)
def init_subscribers_and_publishers(self):
self.pub = rospy.Publisher('/joint_states_openrave', JointState, queue_size=1)
self.pub_calculated_tee = rospy.Publisher('/Tee_calculated', Pose, queue_size=1)
self.pub_Twrist_pose = rospy.Publisher('/Twrist_pose', Pose, queue_size=1)
# self.sub_hand_pose = rospy.Subscriber('/hand_pose', Pose, self.sub_hand_pose)
# self.sub_wrist_pose = rospy.Subscriber('/wrist_pose', Pose, self.sub_hand_pose)
self.sub_Tee_pose = rospy.Subscriber('/Tee_goal_pose', Pose, self.sub_Tee_pose)
self.sub_test_joint = rospy.Subscriber('/test_joints', JointState, self.sub_test_joint)
self.sub_selector = rospy.Subscriber('/selector', Int8, self.sub_selector)
# self.log_start_time = rospy.get_time()
_ROSTIME_START = rospy.get_time()
print "ik_solver_node pub/sub initialized"
def update(self):
    '''
    One control-loop iteration: solve IK for the current goal pose, publish
    the resulting joint state, then republish the achieved end-effector and
    wrist transforms (forward kinematics of the IK result).
    '''
    tee_goal = self.Tee_goal
    self.calculate_joint_angles2(tee_goal)
    self.joint_states.header.stamp = rospy.Time.now()
    self.joint_states.position = self.robot.GetDOFValues()
    self.pub.publish(self.joint_states)
    # Read back the transform actually reached by the model after IK.
    self.Tee_current = self.manip.GetEndEffectorTransform()
    self.Tee_goal_pose = DHmatrices.htm_to_pose(self.Tee_current)
    self.pub_calculated_tee.publish(self.Tee_goal_pose)
    self.Twrist = self.robot.GetLinks()[5].GetTransform() # get wrist transform. The pivot point
    self.Twrist_pose = DHmatrices.htm_to_pose(self.Twrist)
    self.pub_Twrist_pose.publish(self.Twrist_pose)
    ## DEBUG purpose only
    # print "self.test_joints.position", self.test_joints.position
    # self.robot.SetDOFValues(self.test_joints.position)
    # print "Tee:", self.Tee_current
    # print "Twrist_pose:", self.Twrist_pose
    # print "Tee_pose:", DHmatrices.htm_to_pose(self.Tee_current)
def calculate_joint_angles(self):
    '''
    Solve translation-3D IK for self.Tee_goal (a 4x4 homogeneous transform)
    and apply the solution to the OpenRAVE robot model, mirroring the
    resulting DOF values into self.joint_states.
    NOTE(review): FindIKSolution can return None when no collision-free
    solution exists; SetDOFValues would then fail -- confirm upstream always
    supplies reachable goals.
    '''
    if (type(self.Tee_goal)==np.ndarray) and (self.Tee_goal.shape == (4,4)):
        # self.Tee = Tee
        self.ikparam = IkParameterization(self.Tee_goal[0:3,3], self.ikmodel.iktype) # build up the translation3d ik query
        self.sol = self.manip.FindIKSolution(self.ikparam, IkFilterOptions.CheckEnvCollisions)
        self.robot.SetDOFValues(self.sol,self.ikmodel.manip.GetArmIndices())
        self.joint_states.position = self.robot.GetDOFValues()
        print "Tee_goal:", self.Tee_goal
        print "joint positions:", self.joint_states.position
    else:
        print "Unknown ee_type"
def calculate_joint_angles2(self, tee_goal):
    '''
    Like calculate_joint_angles, but takes the goal transform as an argument
    instead of reading self.Tee_goal (used by update()).
    @params tee_goal: type np.array(4x4) HTM
    '''
    if (type(tee_goal)==np.ndarray) and (tee_goal.shape == (4,4)):
        self.ikparam = IkParameterization(tee_goal[0:3,3], self.ikmodel.iktype) # build up the translation3d ik query
        self.sol = self.manip.FindIKSolution(self.ikparam, IkFilterOptions.CheckEnvCollisions)
        self.robot.SetDOFValues(self.sol,self.ikmodel.manip.GetArmIndices())
        self.joint_states.position = self.robot.GetDOFValues()
        # print "Tee_goal:", tee_goal
        # print "joint positions:", self.joint_states.position
    else:
        print "Unknown ee_type"
def sub_Tee_pose(self, msg):
    '''
    Callback for /Tee_goal_pose: converts the received goal {Pose()} into a
    4x4 homogeneous transform {np.array()} stored in self.Tee_goal, which
    the next update() iteration will solve IK for.
    '''
    Tee_goal_pose = msg
    print "Tee_goal_pose:", Tee_goal_pose
    self.Tee_goal = DHmatrices.pose_to_htm(Tee_goal_pose)
def sub_test_joint(self, msg):
    '''
    Callback for /test_joints: stores the received joint positions in
    self.test_joints (debug only; consumed by commented-out code in update()).
    '''
    self.test_joints.position = list(msg.position)
def sub_selector(self, msg):
    '''
    This is only for test purpose: callback for /selector that switches
    self.Tee_goal between two hard-coded goal transforms.
    '''
    selector = msg.data
    if selector == 1:
        self.Tee_goal = np.array([[0.0, 1.0, -0.01, 0.577], [1.0, 0.0, 0.0, -0.743], [0.0, -0.01, -1.0, 1.618], [0.0, 0.0, 0.0, 1.0]])
    elif selector == 2:
        self.Tee_goal = np.array([[0.0, 1.0, -0.01, 0.462], [1.0, 0.0, 0.0, -0.743], [0.0, -0.01, -1.0, 1.734], [0.0, 0.0, 0.0, 1.0]])
    else:
        print "non registered selection"
|
import logging
import dateutil.parser
from mypy_extensions import NoReturn
from copy import deepcopy
from src.hard_correctness_checks.normal_forms import eliminate_must, reset_ancestor_statement_pointers
from src.model.EventsAndTraces import breachActionId, interveneOnDelayId
from src.independent.util_for_sequences import flatten
from src.independent.util_for_str import nonemptySortedSubsets, mapjoin
from src.independent.util import chcaststr, todo_once, castid, chcast
from src.independent.util_for_str import streqci, isFloat, isInt
from src.constants_and_defined_types import *
from src.correctness_checks import L4ContractConstructorInterface
from src.independent.FileCoord import FileCoord
from src.independent.SExpr import SExpr, SExprOrStr, sexpr_rewrite
from src.independent.parse_sexpr import castse, STRING_LITERAL_MARKER, prettySExprStr
from src.independent.typing_imports import *
from src.model.Action import Action
from src.model.EventRule import EventRule, DeadlineEventRule, ActorEventRule, EventRuleContext
from src.model.BoundVar import ContractParam, RuleBoundActionParam, ActionBoundActionParam, \
LocalVar, StateVar, PrimedStateVar
from src.model.ContractClaim import ContractClaim, StateInvariant
from src.model.ContractParamDec import ContractParamDec
from src.model.Definition import Definition
from src.model.StateTransform import StateTransform
from src.model.Statement import Statement, FVRequirement, \
LocalVarDec, IfElse, StateVarAssign, StatementList
from src.model.StateVarDec import StateVarDec
from src.model.L4Contract import L4Contract, is_derived_destination_id, is_derived_trigger_id, \
derived_trigger_id_to_situation_id, derived_destination_id
from src.model.L4Macro import L4Macro, L4BlockMacro
from src.model.Literal import SortLit, IntLit, FloatLit, BoolLit, SimpleTimeDeltaLit, DateTimeLit, \
RoleIdLit, Literal, TimeDeltaLit, StringLit, EventIdLit
from src.model.Situation import Situation
from src.model.Sort import Sort, SortOpApp
from src.model.Term import FnApp, Quantified
from src.model.Term import Term
def primed(s:str) -> StateVarId:
    """Return *s* with a prime mark appended (the "next value" form of a state var id)."""
    return f"{s}'"  # type:ignore
def unprimed(s:str) -> StateVarId:
    """Strip the trailing prime mark from a primed state-variable id.

    Raises AssertionError (with a message) when *s* is not primed; using
    endswith instead of s[-1] avoids an opaque IndexError on the empty string.
    """
    assert s.endswith("'"), f"expected a primed state variable id, got {s!r}"
    return s[:-1]  # type:ignore
def isprimed(s:str) -> bool:
    """True iff *s* ends with a prime mark.

    Uses endswith instead of s[-1] so the empty string returns False rather
    than raising IndexError.
    """
    return s.endswith("'")
def syntaxErrorX(expr: Optional[SExprOrStr], msg:Optional[str] = None) -> NoReturn:
    """Raise a SyntaxError describing *expr*; includes the source line when
    expr is an SExpr. Module-level variant of L4ContractConstructor.syntaxError."""
    prefix = msg if msg else ""
    if isinstance(expr, SExpr):
        raise SyntaxError(prefix + "\n" + str(expr) + "\nline " + str(expr.line))
    if expr is not None:
        raise SyntaxError(prefix + "\n" + expr)
    raise SyntaxError(prefix)
T = TypeVar('T')  # generic element type used by _with_macro_handling
IntBounds = Tuple[int,Optional[int]]  # (lower, optional upper) write-count bounds; None = unbounded
class L4ContractConstructor(L4ContractConstructorInterface):
    """Translates a parsed L4 source file (a list of S-expressions) into an
    L4Contract model object (see mk_l4contract, the main entry point)."""
def __init__(self, filename:str, verbose=True,
             flags:Optional[Dict[str,bool]] = None,
             raw_substitutions: Optional[Dict[str, Any]] = None) -> None:
    """Collect parse-time state for building an L4Contract from *filename*'s S-expressions."""
    self.top : L4Contract = L4Contract(filename)  # the model being built
    # ids referenced anywhere in the source; used for post-build checks
    self.referenced_nonderived_situation_ids: Set[SituationId] = set()
    self.referenced_nonderived_action_ids: Set[ActionId] = set()
    # (predicate, errmsg) pairs evaluated after the whole model is built
    self.after_model_build_requirements : List[Tuple[Callable[[],bool],str]] = []
    self.verbose = verbose
    # "currently building" markers, set while descending into declarations
    self._building_situation_id : Optional[SituationId] = None
    self._building_action_id: Optional[ActionId] = None
    self._building_next_action_rule: bool = False
    self._building_future_action_rule: bool = False
    self.flags = flags  # flag values consumed by ifflag preprocessing
    self.raw_substitutions = raw_substitutions  # raw token -> replacement map
def addAfterBuildAssertion(self, f:Callable[[],bool], errmsg:str):
    """Queue a whole-model requirement; mk_l4contract checks it after construction
    and raises with *errmsg* if *f* returns False."""
    entry = (f, errmsg)
    self.after_model_build_requirements.append(entry)
def _handle_flags(self, l:Union[SExpr,List[SExpr]]) -> List[SExpr]:
    """Preprocess the parse tree: expand (ifflag ...) forms according to
    self.flags (splicing the chosen branch's children into the parent), and
    replace bare tokens found in self.raw_substitutions."""
    assert self._needs_preprocessing()
    flags = cast(Dict[str, bool], self.flags)
    rs = cast(Dict[str, Any], self.raw_substitutions)
    def helper(x:SExprOrStr) -> List[SExprOrStr]:
        # Returns a LIST because an ifflag node expands to zero or more
        # siblings that get spliced into the enclosing expression.
        if isinstance(x,str):
            if rs is not None and x in rs:
                return [rs[x]]
            else:
                return [x]
        if len(x) == 0:
            return [x]
        results : List[SExprOrStr] = []
        children_to_recurse_on : List[SExprOrStr]
        if x[0] == "ifflag":
            assert len(x) == 3 or len(x) == 5, f"ifflag expression has wrong form: {x}\nexpected\n" \
                "( ifflag ‹flag› (‹expr1> <expr2> ...) else (<expr1> <expr2> ...) )\n" \
                "or\n" \
                "( ifflag ‹flag› (‹expr1> <expr2> ...) )\n"
            assert x[1] in flags
            if flags[x[1]]:
                children_to_recurse_on = x[2]
            elif len(x) == 5:
                # if test false and has else part
                assert x[3] == "else", f"expected ({x[0]} ‹flag› ‹block› else ‹block›)"
                children_to_recurse_on = x[4]
            else:
                # if test false and no else part
                children_to_recurse_on = []
            for c in children_to_recurse_on:
                results.extend(helper(c))
            # Spliced directly (no wrapping SExpr): ifflag disappears.
            return results
        else:
            children_to_recurse_on = x.lst
            for c in children_to_recurse_on:
                results.extend(helper(c))
            # Rebuild the node with the (possibly expanded) children.
            return [SExpr(results, x.line, x.col, x.symb)]
    rv = []
    for x in l:
        rv.extend(helper(x))
    return cast(List[SExpr],rv)
def syntaxError(self, expr: SExprOrStr, msg:Optional[str] = None) -> NoReturn:
    """Raise a SyntaxError describing *expr*, with source line (when available)
    and the contract's filename appended."""
    prefix = msg if msg else ""
    if isinstance(expr, SExpr):
        raise SyntaxError(prefix + "\n" + str(expr) +
                          "\nline " + str(expr.line) +
                          "\n" + str(self.top.filename))
    print(expr, type(expr))
    print(self.top.filename, type(self.top.filename))
    raise SyntaxError(prefix + "\n" + str(expr) +
                      "\n" + str(self.top.filename))
def assertOrSyntaxError(self, test:bool, expr:SExpr, msg:Optional[str] = None) -> Union[NoReturn,None]:
    """Raise via self.syntaxError(expr, msg) unless *test* holds."""
    if test:
        return None
    self.syntaxError(expr, msg)
@staticmethod
def assertOrSyntaxErrorX(test:bool, expr:Optional[SExpr], msg:Optional[str] = None) -> Union[NoReturn,None]:
    """Static variant of assertOrSyntaxError; raises via the module-level
    syntaxErrorX (no filename in the message)."""
    if test:
        return None
    syntaxErrorX(expr, msg)
def _mk_toplevel(self, x:SExpr):
    """Dispatch a single top-level S-expression (one file section) into the
    appropriate part of self.top, keyed on its head symbol."""
    rem = x.tillEnd(1)
    # def head(constant: str) -> bool:
    #     nonlocal x
    #     return streqci(x[0], constant)
    def head(*constants: str) -> bool:
        # True iff x's head symbol case-insensitively equals any of constants.
        nonlocal x
        return not all( not streqci(x[0], constant) for constant in constants )
    if head(MACRO_DEC_LABEL) or head(BLOCKMACRO_DEC_LABEL):
        assert isinstance(x[1],str) and len(x) >= 4, "Macro should have the form "\
            "(Macro ‹macroname› (‹param1› ‹param2› ...) ‹sexpr1›) or " \
            "(BlockMacro ‹macroname› (‹param1› ‹param2› ...) ‹sexpr1› <sexpr2> ...)"
        macroname = chcaststr(x[1])
        macroparams: List[str]
        if isinstance(x[2], str):
            # single param given without parens
            macroparams = [x[2]]
        else:
            macroparams = cast(List[str], castse(x[2]).lst)
        if head(MACRO_DEC_LABEL):
            macrobody = chcast(SExpr, x[3])
            self.top.macros[macroname] = L4Macro(macroparams, macrobody)
        else:
            # block macro: body is the whole remaining list of sexprs
            self.top.blockmacros[macroname] = L4BlockMacro(macroparams, chcast(list, x[3:]))
    elif head(GLOBAL_VARS_AREA_LABEL) or head("GlobalStateVars"):
        self.top.state_var_decs = self._mk_statevar_decs(rem)
    elif head(CONTRACT_PARAMETERS_AREA_LABEL):
        for expr in rem:
            self.top.contract_params[castid(ContractParamId, expr[0])] = self._mk_contract_param(expr)
    elif head(ROLES_DEC_LABEL) or head("Actors"):
        self.top.roles.extend(self._mk_actors(rem))
    elif head(PROSE_CONTRACT_AREA_LABEL):
        self.top.prose_contract = self._mk_prose_contract(cast(List[List[str]], rem))
    elif head(FORMAL_CONTRACT_AREA_LABEL):
        self._mk_main_program_area(rem)
    elif head(TIMEUNIT_DEC_LABEL):
        given = chcaststr(x[1]).lower()
        self.assertOrSyntaxError(
            given in SUPPORTED_TIMEUNITS or given in LONGFORMS_OF_SUPPORTED_TIMEUNITS, x,
            f"{TIMEUNIT_DEC_LABEL} must be one of {SUPPORTED_TIMEUNITS} or {list(LONGFORMS_OF_SUPPORTED_TIMEUNITS.values())}")
        if given in SUPPORTED_TIMEUNITS:
            self.top.timeunit = given
        else:
            # normalize long form (e.g. "days") to its short form
            self.top.timeunit = LONGFORMS_OF_SUPPORTED_TIMEUNITS[given]
    elif head("StartDatetime","StartDT"):
        try:
            dt = dateutil.parser.parse(rem[0][1])
            self.top.start_datetime = dt
        except Exception as e:
            print(rem,e)
            raise e
    elif head(SORT_DEFINITIONS_AREA) or head("TypeDefinitions"):
        self._mk_sort_definitions(rem)
    elif head(DEFINITIONS_AREA):
        self.top.definitions = self._mk_definitions(rem)
    elif head(TOPLEVEL_CLAIMS_AREA_LABEL):
        self.top.claims = self._mk_claims(rem)
    elif head(TOPLEVEL_STATE_INVARIANTS_AREA_LABEL):
        self.top.state_invariants = self._mk_invariants(rem)
    elif head("EndOfTraceClaims"):
        self.top.end_of_trace_claims = self._mk_end_of_trace_claims(rem)
    elif head("VerificationDefinition"):
        todo_once("handle VerificationDefinition")
    elif head("Ontology"):
        self._process_ontology_section(rem)
    elif head(DOT_FILE_NAME_LABEL):
        self.top.dot_file_name = chcaststr(
            x[1][1])  # the extra [1] is because its parse is of the form ['STRLIT', 'filename']
    elif head(IMG_FILE_NAME_LABEL):
        self.top.img_file_name = chcaststr(
            x[1][1])  # the extra [1] is because its parse is of the form ['STRLIT', 'filename']
    elif head("NLGNames"):
        for pair in x[1:]:
            self.top.nlg_names[pair[0]] = pair[1][1]
    elif head("DefaultActionTimeLimit"):
        self.top.default_action_timelimit = cast(SimpleTimeDeltaLit, self.mk_literal(x[1], x, self.top)).lit
    # elif head("Flags"):
    #     self._flags = set(x[1:])
    #     return
    elif head("TypedMacro"):
        todo_once("Handle TypedMacro")
    elif head("NonoperativeContractParams"):
        for pair in x[1:]:
            self.top.contract_params_nonoperative[pair[0]] = pair[1][1]
    elif head("NLGSection"):
        assert len(x) == 3, "NLGSection expression should have form (NLGSection <section id> <string>)"
        self.top.nlg_sections[x[1][1]] = self._mk_nlg_section(x[2])
    elif head("NLDefinitions"):
        self._mk_nlg_definitions(x.tillEnd(1))
    elif head("ActionPredicate"):
        todo_once("Handle ActionPredicate")
    else:
        raise self.syntaxError(x, "Unsupported: " + str(x[0]))
def _mk_nlg_section(self, x:SExprOrStr) -> str:
    """Extract the text of an NLG section from its (STRLIT text) tuple."""
    assert isinstance(x, SExpr) and x[0] == STRING_LITERAL_MARKER, f"{x} should be a STRLIT tuple"
    text = x[1]
    return chcaststr(text)
def _mk_nlg_definitions(self, x:SExprOrStr):
    """Record each (term (STRLIT text)) pair from the NLDefinitions section
    into self.top.nlg_definitions."""
    assert isinstance(x, SExpr), "should be a list"
    for defn in x:
        assert isinstance(defn[1],str)
        assert isinstance(defn[2],SExpr), defn
        assert defn[2][0] == STRING_LITERAL_MARKER, defn
        self.top.nlg_definitions[chcaststr(defn[1])] = chcaststr(defn[2][1])
def _needs_preprocessing(self) -> bool:
    """True when flag- or substitution-preprocessing must run on the parse
    (i.e. either map was supplied and is nonempty)."""
    return bool(self.flags) or bool(self.raw_substitutions)
def mk_l4contract(self, l:Union[SExpr,List[SExpr]]) -> L4Contract:
    """Main entry point: build the full L4Contract model from the file's
    top-level S-expressions, then synthesize derived breach/intervention
    situations and actions and run deferred checks."""
    if self._needs_preprocessing():
        l = self._handle_flags(l)
    # for x in l:
    #     print(prettySExprStr(x))
    # Rewrite `must` rules before any model construction.
    for sexpr in l:
        eliminate_must(sexpr, self.top, self.top.timeunit, "1d")
    for x in l:
        self._mk_toplevel(x)
    # Deferred whole-model requirements registered via addAfterBuildAssertion.
    for f in self.after_model_build_requirements:
        if not f[0]():
            raise Exception(f[1])
    # NOTE(review): list.remove raises ValueError if ENV_ROLE / ARBITER_ROLE
    # were not declared -- presumably every contract declares them; confirm.
    roles_without_Env = self.top.roles.copy()
    roles_without_Env.remove(ENV_ROLE)
    roles_without_Env.remove(ARBITER_ROLE)
    # Synthesize a breach situation+action for every nonempty subset of roles.
    for role_lst in nonemptySortedSubsets(roles_without_Env):
        sit = Situation.breachSituation(*role_lst)
        self.top.situations_by_id[sit.situation_id] = sit
        act = Action.breachAction(*role_lst)
        self.top.actions_by_id[act.action_id] = act
    for roleid in roles_without_Env:
        if roleid != ARBITER_ROLE:
            act = Action.interveneOnDelayAction(roleid)
            self.top.actions_by_id[act.action_id] = act
    # don't impose this for rule vars.
    # for rule in self.top.action_rules():
    #     if rule.arg_vars_bound_by_rule:
    #         for rule_varname in rule.arg_vars_bound_by_rule:
    #             sort = rule.rule_varname_to_sort(self.top,rule_varname)
    #             self.top.register_sorted_name(rule_varname, sort)
    reset_ancestor_statement_pointers(self.top, "At end of L4ContractConstructor.mk_l4contract, ")
    return self.top
def _mk_sort(self, x:SExprOrStr) -> Sort:
    """Parse a sort expression -- a bare atom, a quoted (STRLIT ...) name, or a
    sort-operator application -- registering the result in self.top.sorts."""
    sort: Sort
    if isinstance(x,str):
        # sort = temp_normalize_sort(castid(SortId, x))
        sort = castid(SortId, x)
    else:
        if len(x) == 2 and x[0] == STRING_LITERAL_MARKER:
            # This is just a sort that has brackets/parentheses/spaces in it, so that it has to be
            # enclosed in quotes in source code.
            sort = self._mk_sort(x[1])
        else:
            assert len(x) >= 2, x
            # sort-op application: (op arg1 arg2 ...), args parsed recursively
            sort = SortOpApp.c(x[0], tuple(self._mk_sort(x[i]) for i in range(1, len(x))))
    self.top.sorts.add(sort)
    assert sort is not None
    return sort
def mk_sort_lit(self, x:SExprOrStr) -> SortLit:
    """Wrap a sort name -- bare or given as a quoted (STRLIT name) -- in a SortLit."""
    if isinstance(x,str):
        return SortLit(x)
    assert len(x) == 2 and x[0] == STRING_LITERAL_MARKER
    return SortLit(x[1])
def _mk_contract_param(self, expr:SExpr) -> ContractParamDec:
    """Build a ContractParamDec from (name : sort) or (name : sort := term)."""
    sort = self._mk_sort(expr[2])
    initval = self._mk_term(expr[4], None, None, None, expr) if len(expr) == 5 else None
    return ContractParamDec(expr[0], sort, initval)
def _mk_statevar_decs(self, l:SExpr) -> Dict[StateVarId, StateVarDec]:
    """Parse the state-variable declaration area. Each dec has the form
    [modifier...] name : sort [:= initval]; modifiers become write bounds."""
    rv : Dict[StateVarId, StateVarDec] = dict()
    for dec in l:
        try:
            i = 0
            modifiers : List[str] = []
            # Consume leading modifier keywords (e.g. writeonce); i ends up
            # pointing at the variable name.
            while True:
                assert isinstance(dec[i],str), dec
                if dec[i].lower() in VARIABLE_MODIFIERS:
                    modifiers.append(chcaststr(dec[i].lower()))
                    i += 1
                else:
                    break
            name = cast(StateVarId, chcaststr(dec[i]))
            sort = self._mk_sort(dec[i + 2])
            initval : Optional[Term] = None
            if i+3 < len(dec) and (dec[i+3] == ':=' or dec[i+3] == '='):
                initval = self._mk_term(dec[i + 4],None,None,None,dec)
            rv[name] = StateVarDec(name, sort, initval, modifiers)
            # I had written: TO DO: for requiring primed variables.
            # rv[primed(name)] = rv[name]
            self.top.write_bounds[name] = self._modifiers_to_write_bounds(modifiers, l)
            self.top.register_sorted_name(name, sort)
        except Exception as e:
            logging.error("Problem processing " + str(dec))
            raise e
    # logging.info(str(rv))
    return rv
def _modifiers_to_write_bounds(self, modifiers:Iterable[str], parent_expr:SExpr) -> IntBounds:
    """Fold write-count modifiers into (low, high) bounds, where high None
    means unbounded. Starts at (0, None); each successive modifier may only
    tighten the bounds (enforced by the assertion below)."""
    low,high = 0, None
    low2:int
    high2:Optional[int]
    for s in modifiers:
        if s == "writeonce" or s == "writes1":
            low2,high2 = 1,1
        elif s == "writeatmostonce" or s == "writes≤1":
            low2,high2 = low,1
        else:
            # unrecognized modifier: leaves the bounds unchanged
            low2,high2 = low,high
        # NOTE(review): if high were finite and high2 None this comparison
        # would TypeError; the branches above never widen high2 back to None
        # once set, so it appears unreachable -- confirm.
        self.assertOrSyntaxError( low2 >= low and (high is None or high2 <= high), parent_expr )
        low,high = low2,high2
    return low,high
def _mk_claims(self, l:SExpr) -> List[ContractClaim]:
    """Wrap each top-level claim S-expression in a ContractClaim."""
    rv: List[ContractClaim] = []
    for claim_expr in l:
        rv.append(ContractClaim(claim_expr))
    return rv
def _mk_invariants(self, l:SExpr) -> List[StateInvariant]:
    """Parse each state-invariant expression into a Term and wrap it."""
    rv: List[StateInvariant] = []
    for inv_expr in l:
        rv.append(StateInvariant(self._mk_term(inv_expr, None, None, None, inv_expr)))
    return rv
def _mk_end_of_trace_claims(self, l:SExpr) -> List[Term]:
    """Parse each end-of-trace claim expression into a Term."""
    rv: List[Term] = []
    for claim_expr in l:
        rv.append(self._mk_term(claim_expr, None, None, None, claim_expr))
    return rv
def _mk_actors(self, s:SExpr) -> List[RoleId]:
    """Parse the Actors/Roles declaration: a flat list of role-name strings."""
    self.assertOrSyntaxError(all(isinstance(x, str) for x in s), s, "Actors declaration S-expression should have the form (Actors <NAME>)")
    return cast(List[RoleId], list(s.lst))
def _mk_definitions(self, s:SExpr) -> Dict[DefinitionId, Definition]:
    """Build the Definitions-area map; each entry has the form (id = SExpr)."""
    self.assertOrSyntaxError(all(len(x) == 3 for x in s), s,
                             "Definition declaration S-expressions should have the form (id = SExpr)")
    rv: Dict[DefinitionId, Definition] = {}
    for defn in s:
        rv[defn[0]] = Definition(defn[0], defn[2])
    return rv
def _mk_sort_definitions(self, s:SExpr) -> None:
    """Record each sort definition (id = SortExpr), then expand them all into
    fully-resolved sorts."""
    self.assertOrSyntaxError(all(len(x) == 3 for x in s), s,
                             "Definition declaration S-expressions should have the form (id = SExpr) or (id := SExpr)")
    for defn in s:
        self.top.sort_definitions[defn[0]] = self._mk_sort(defn[2])
    self._expand_sort_defns()
def _process_ontology_section(self, s: SExpr) -> None:
    """Dispatch each Ontology-section entry on its head symbol; only Axiom
    entries are fully handled so far."""
    for entry in s:
        kind = entry[0]
        if kind == "Axiom":
            self.top.ontology_axioms.append(self._mk_term(entry[1], None, None, None, entry))
        elif kind == "Fn":
            self.top.ontology_fns.append(entry[1])
            todo_once("Handle Fn")
        elif kind == "Sort":
            todo_once("Handle Sort")
        elif kind == "BeforeStart":
            pass
        elif kind == "SortDefn":
            pass
        else:
            raise NotImplementedError(entry)
def _expand_sort_defns(self):
    """Expand self.top.sort_definitions so each defined sort id maps to a
    fully-resolved Sort. Relies on definitions appearing in dependency order
    (a definition may only reference earlier ones)."""
    expanded: Dict[str,Optional[Sort]] = {sid: None for sid in self.top.sort_definitions}
    def helper(sort:Sort) -> Sort:
        if isinstance(sort,str):
            if sort in expanded:
                expansion = expanded[sort]
                # None here means the referenced definition hasn't been
                # expanded yet, i.e. a forward reference or a cycle.
                assert expansion is not None, "Do you have a cycle in your sort definitions? Or do you need to reorder them?"
                # assert expansion is not None, f"{sort}\n{self.top.sort_definitions}\n{expanded}"
                return expansion
            else:
                return sort
        else:
            assert isinstance(sort, SortOpApp)
            if sort.op == "Dimensioned":
                # need to exclude the second arg to Dimensioned, since it's the string name of this defined sort.
                return SortOpApp.c(sort.op, (helper(sort.args[0]), sort.args[1]))
            else:
                return SortOpApp.c(sort.op, tuple(helper(x) for x in sort.args))
    for defined_sort_id, sort_defn in self.top.sort_definitions.items():
        expanded[defined_sort_id] = helper(sort_defn)
    assert all(expanded[x] is not None for x in expanded)
    self.top.expanded_sort_definitions = cast(Dict[str,Sort], expanded)
def _mk_prose_contract(self, l: List[List[str]]) -> ProseContract:
    """Map each prose-contract clause id to its text."""
    rv: ProseContract = {}
    for pair in l:
        rv[castid(ProseClauseId, pair[0])] = pair[1]
    return rv
def handle_apply_macro(self, x:SExpr) -> SExpr:
    """Expand a macro application -- either the explicit (apply name args)
    form or a direct (name args) form -- into the substituted S-expression."""
    self.assertOrSyntaxError(len(x) >= 3 if x[0] == APPLY_MACRO_LABEL else len(x) >= 2, x, "macro app problem")
    if x[0] == APPLY_MACRO_LABEL:
        macroname, args = chcaststr(x[1]), x[2]
    else:
        macroname, args = x[0], x[1]
    macro : L4Macro = self.top.macros[macroname]
    arglist = [args] if isinstance(args, str) else args
    return macro.subst(arglist)
def handle_apply_blockmacro(self, x:SExpr) -> List[SExpr]:
    """Expand a block-macro application into the list of S-expressions it
    produces (spliced into the surrounding block by the caller)."""
    if x[0] == APPLY_MACRO_LABEL:
        macroname, args = chcaststr(x[1]), x[2]
    else:
        macroname, args = x[0], x[1]
    macro : L4BlockMacro = self.top.blockmacros[macroname]
    assert isinstance(macro, L4BlockMacro)
    arglist = [args] if isinstance(args, str) else args
    return macro.subst(arglist)
def _is_anymacro_app(self, s:SExprOrStr) -> bool:
    """True iff s is an SExpr applying a macro or block macro, either directly
    by name or via the explicit apply form.

    Fix: the original mixed `and`/`or` without parentheses, so the apply-form
    disjunct was evaluated even when s was not an SExpr (indexing into plain
    strings, and raising on other types).
    """
    if not (isinstance(s, SExpr) and isinstance(s[0], str)):
        return False
    if s[0] in self.top.macros or s[0] in self.top.blockmacros:
        return True
    return s[0] == APPLY_MACRO_LABEL and (s[1] in self.top.macros or s[1] in self.top.blockmacros)
def _is_macro_app(self, s:SExprOrStr):
    """True iff s is an SExpr applying a (non-block) macro, directly by name
    or via the explicit apply form.

    Fix: the original mixed `and`/`or` without parentheses, so the apply-form
    disjunct was evaluated even when s was not an SExpr.
    """
    if not (isinstance(s, SExpr) and isinstance(s[0], str)):
        return False
    return s[0] in self.top.macros or \
           (s[0] == APPLY_MACRO_LABEL and s[1] in self.top.macros)
def _is_blockmacro_app(self, s:SExprOrStr):
    """True iff s is an SExpr applying a block macro, directly by name or via
    the explicit apply form.

    Fix: the original mixed `and`/`or` without parentheses, so the apply-form
    disjunct was evaluated even when s was not an SExpr.
    """
    if not (isinstance(s, SExpr) and isinstance(s[0], str)):
        return False
    return s[0] in self.top.blockmacros or \
           (s[0] == APPLY_MACRO_LABEL and s[1] in self.top.blockmacros)
def _with_macro_handling(self, e:Union[SExprOrStr, Sequence[SExprOrStr]],
                         g:Callable[[SExprOrStr,Any],T], args:Any) -> Union[T,List[T]]:
    """Apply g(x, *args) to e after recursively expanding any macro or
    block-macro applications; list inputs are processed element-wise and the
    results flattened into a single list."""
    if isinstance(e,list):
        return flatten([self._with_macro_handling(x, g, args) for x in e])
    elif isinstance(e,SExpr):
        if self._is_macro_app(e):
            return self._with_macro_handling(self.handle_apply_macro(e), g, args)
        elif self._is_blockmacro_app(e):
            return self._with_macro_handling(self.handle_apply_blockmacro(e), g, args)
        return [g(e, *args)]
    elif isinstance(e,str):
        return [g(e, *args)]
    else:
        raise Exception
def _mk_main_program_area(self, l:SExpr) -> None:
    """Parse the FormalContract area: a contract-name string literal followed
    by start-situation / action / situation declarations."""
    self.assertOrSyntaxError(l[0][0] == STRING_LITERAL_MARKER, l[0], f"Immediately after the {FORMAL_CONTRACT_AREA_LABEL} keyword should be a string literal that gives the contract's name")
    self.top.contract_name = chcaststr(l[0][1]) # [1] because STRLIT sexpr
    x: SExpr
    for x in l[1:]:
        assert len(x) >= 2
        # expand macro applications before dispatching the declaration
        if self._is_macro_app(x):
            x = self.handle_apply_macro(x)
        self._mk_main_program_area_part(x,l)
def _mk_main_program_area_part(self, x:SExpr, l:SExpr):
    """Dispatch one FormalContract-area declaration: StartSituation, Action
    (with or without parameters), or Situation."""
    def head(constant:str) -> bool:
        nonlocal x
        return streqci(x[0], constant)
    # macro apps can reach here too (also expanded by the caller)
    if self._is_macro_app(x):
        x = self.handle_apply_macro(x)
    if head(START_SITUATION_LABEL):
        self.assertOrSyntaxError( len(x) == 2, l, "StartState declaration S-expression should have length 2")
        situation_id = cast(SituationId, chcaststr(x[1]))
        self.top.start_situation_id = situation_id
        if not is_derived_destination_id(situation_id):
            self.referenced_nonderived_situation_ids.add(situation_id)
    elif head(ACTION_LABEL):
        action_id : ActionId
        action : Action
        action_body = x.tillEnd(2)
        if isinstance(x[1], str):
            # e.g. (Action SomethingHappens ...)
            action_id = cast(ActionId, x[1])
            action = self._mk_action(action_id, None, action_body)
            self.top.actions_by_id[action_id] = action
        else:
            # e.g. (Action (SomethingHappens param&sort1 param&sort2) ...)
            action_id = cast(ActionId, chcaststr(x[1][0]))
            action_params = cast(List[List[str]], x[1][1:])
            action = self._mk_action(action_id, action_params, action_body)
            self.top.actions_by_id[action_id] = action
        self.top.ordered_declarations.append(action)
    elif head(SITUATION_LABEL) or head("StateType") or head("SituationType"):
        situation_id = cast(SituationId, chcaststr(x[1]))
        situation_data = x.tillEnd(2)
        situation = self._mk_situation(situation_id, situation_data, None)
        self.top.situations_by_id[situation_id] = situation
        self.top.ordered_declarations.append(situation)
    else:
        self.syntaxError(l, f"Unrecognized head {x[0]}")
def _mk_situation(self, situation_id:SituationId, rest:SExpr, parent_action:Optional[Action]) -> Situation:
    """Build a Situation from its declaration body. parent_action is non-None
    only for anonymous FollowingSituation blocks nested inside an action."""
    situation = Situation(situation_id)
    self._building_situation_id = situation_id
    x: SExpr
    for x in rest:
        self.assertOrSyntaxError(isinstance(x,SExpr), rest, f"{x} should be an s-expression")
        def head(constant:str) -> bool:
            nonlocal x
            return streqci(x[0], constant)
        if head(SITUATION_PRECONDITION_LABEL):
            situation.preconditions.append(self._mk_term(x[1], situation, parent_action,None, x))
        elif head(SITUATION_DESCRIPTION_LABEL):
            situation.description = chcaststr(x[1][1]) # extract from STRLIT expression
        elif head(PROSE_REFS_LABEL):
            situation.prose_refs = cast(List,x[1:]).copy()
        elif 'visits' in x or 'traversals' in x:
            situation.visit_bounds = x # self.term(x, None, situation)
        elif head("nlg"):
            situation.nlg = x[1][1]
        elif head("nlglogicsection"):
            situation.nlgsection = x[1][1]
        elif head(OUT_CONNECTIONS_LABEL):
            # optional wrapper asserting the out-rules partition the future
            if isinstance(x[1],SExpr) and isinstance(x[1][0],str) and (x[1][0] == 'guardsDisjointExhaustive' or x[1][0] == 'timeConstraintsPartitionFuture'):
                x = x[1]
                todo_once("guardsDisjointExhaustive etc in situation()")
            action_rule_exprs = x.tillEnd(1).lst
            for action_rule_expr in action_rule_exprs:
                self._with_macro_handling(action_rule_expr, self._mk_next_action_rule, (situation, parent_action)) # type:ignore
        else:
            # anything else is treated as a bare event rule
            if head('guardsDisjointExhaustive') or head('timeConstraintsPartitionFuture'):
                x = x.tillEnd(1)
                todo_once("guardsDisjointExhaustive etc in situation()")
            self._with_macro_handling(x, self._mk_event_rule, (situation, parent_action)) # type:ignore
            # self.syntaxError(x, f"Unsupported declaration type {x[0]} in situation {situation_id}")
            # todo_once(f"Handle {x[0]} in situation dec")
    self._building_situation_id = None
    return situation
def _mk_action(self, action_id:ActionId, params_sexpr:Optional[List[List[str]]], rest:SExpr) -> Action:
    """Build an Action from its declaration body.

    @params action_id: id of the action being declared
    @params params_sexpr: list of [name, ':', sort] triples, or None for a
        parameterless action
    @params rest: the remaining declaration S-expressions

    Fixes relative to the previous version: the unhandled-head error call had
    its (expr, msg) arguments swapped (and dead code after the raise); a
    duplicate, unreachable `elif head("nlg")` branch was removed; and
    _building_action_id is cleared on exit (mirroring _mk_situation) instead
    of being re-assigned to action_id.
    """
    a = Action(action_id)
    self._building_action_id = action_id
    dest_situation_id = None
    x: SExpr
    if params_sexpr is not None: #isinstance(params_sexpr, SExpr):
        a.param_sorts_by_name = self._mk_action_params(params_sexpr)
        for name,sort in a.param_sorts_by_name.items():
            self.top.register_sorted_name(name,sort)
        a.param_names = [castid(ActionParamId, y[0]) for y in params_sexpr]
        a.param_name_to_ind = {a.param_names[i]: i for i in range(len(a.param_names))}
    for x in rest:
        def head(constant:str) -> bool:
            nonlocal x
            if len(x) == 0:
                print("problem", x)
            return streqci(x[0], constant)
        if head(ACTION_PRECONDITION_LABEL):
            a.preconditions.append(self._mk_term(x[1], None, a, None, rest))
        elif head(ACTION_POSTCONDITION_LABEL):
            a.postconditions.append(self._mk_term(x[1], None, a, None, rest))
        elif head(ACTION_DESCRIPTION_LABEL):
            a.action_description = chcaststr(x[1][1]) # extract from STRLIT expression
        elif head(CODE_BLOCK_LABEL):
            a.state_transform = self._mk_state_transform(cast(List[SExpr], x.tillEnd(1).lst), a)
        elif head(PROSE_REFS_LABEL):
            a.prose_refs = cast(List[str], x.tillEnd(1).lst)
        elif head(TRANSITIONS_TO_LABEL):
            dest_situation_id = x[1]
            if not is_derived_destination_id(dest_situation_id) and dest_situation_id != LOOP_KEYWORD:
                self.referenced_nonderived_situation_ids.add(dest_situation_id)
        elif 'traversals' in x or 'visits' in x:
            a.traversal_bounds = x # self.term(x, None, a)
        elif head("nlg"):
            a.nlg = x[1][1]
        elif head("nlglogicsection"):
            a.nlgsection = x[1][1]
        elif head(ALLOWED_SUBJECTS_DEC_LABEL):
            a.allowed_subjects = x.tillEnd(1)
        elif head(FOLLOWING_SITUATION_DEC_LABEL):
            # Anonymous situation entered after this action; its id is derived
            # from the action id.
            anon_sect_id : str
            if is_derived_trigger_id(action_id):
                anon_sect_id = derived_trigger_id_to_situation_id(action_id)
            else:
                anon_sect_id = derived_destination_id(action_id)
            a.following_anon_situation = self._mk_situation(anon_sect_id, x.tillEnd(1), a)
            a.following_anon_situation.parent_action_id = action_id
            self.top.situations_by_id[a.following_anon_situation.situation_id] = a.following_anon_situation
        elif head("AllowedRoles"):
            todo_once("handle (AllowedRoles ...)")
        elif head("sideeffects"):
            todo_once("handle (sideeffects ...)")
        else:
            # syntaxError takes (expr, msg) and always raises.
            self.syntaxError(x, f"Unhandled {x[0]} in action dec.")
    if dest_situation_id:
        a.dest_situation_id = dest_situation_id
    else:
        # No TransitionsTo clause: derive the destination from the action id.
        if is_derived_trigger_id(a.action_id):
            a.dest_situation_id = derived_trigger_id_to_situation_id(a.action_id)
            self.referenced_nonderived_situation_ids.add(a.dest_situation_id)
        else:
            a.dest_situation_id = derived_destination_id(a.action_id)
    self._building_action_id = None
    return a
def _mk_action_params(self, parts:List[List[str]]) -> ParamsDec:
    """Parse an action's parameter triples into a name -> sort map, registering
    each name's sort with the model."""
    rv : ParamsDec = dict()
    for dec in parts:
        assert len(dec) == 3, f"Expected [<param name str>, ':', SORTstr] but got {dec}"
        sort = self._mk_sort(dec[2])
        rv[castid(ActionParamId, dec[0])] = sort
        self.top.register_sorted_name(dec[0], sort)
    return rv
def _mk_state_transform(self, statement_exprs:List[SExpr], a:Action) -> StateTransform:
    """Parse an action's StateTransform block into its statement list."""
    statements = self._mk_statements(statement_exprs, a, None)
    return StateTransform(statements)
def _mk_statements(self, statement_exprs:List[SExpr], parent_action:Action, parent_ifelse:Optional[IfElse]) -> List[Statement]:
    """
    call _mk_statement on each element x of statement_exprs and return list of results, UNLESS x is a macro app, in which case:
    if x is a seqmacro,
        seqexpansion = expansion of x
        extend return list with result of calling _mk_statements on seqexpansion
    if x is a regular macro
        seqexpansion = [expansion of x]
        extend return list with result of calling _mk_statements on seqexpansion
    """
    rv = []
    for statement_expr in statement_exprs:
        assert not isinstance(statement_expr,list)
        # assert not self._is_anymacro_app(statement_expr)
        # it = self._mk_statement(statement_expr, parent_action, rv, parent_ifelse)
        # rv.append(it)
        if self._is_blockmacro_app(statement_expr):
            # block macro expands to several statements, spliced in here
            statements = self._mk_statements(self.handle_apply_blockmacro(statement_expr), parent_action, parent_ifelse)
            rv.extend(statements)
        elif self._is_macro_app(statement_expr):
            # Expand the macro and parse the single resulting statement directly;
            # the old version below was more likely to corrupt ancestor pointers:
            # statements = self._mk_statements([self.handle_apply_macro(statement_expr)], parent_action, parent_ifelse)
            statements = [self._mk_statement(self.handle_apply_macro(statement_expr), parent_action,
                                             rv, parent_ifelse)]
            rv.extend(statements)
        else:
            it = self._mk_statement(statement_expr, parent_action, rv, parent_ifelse)
            rv.append(it)
    return rv
def _mk_statement(self, statement_expr:SExpr, parent_action:Action, parent_block:StatementList, parent_ifelse:Optional[IfElse]) -> Statement:
    """Parse one state-transform statement: a conjecture/prove requirement, a
    local/writeout var dec, an if/else block, or a state-var assignment
    (:=, =, +=, -=, *=; the op-assign forms are desugared to plain assigns).
    Sets parent_block / grandparent_ifelse on the result.

    Fix: the two form-description f-strings in the `if` branch were bare no-op
    expression statements (one stranded on its own line, one joined into a
    discarded tuple via a trailing comma); they are now actually passed as the
    assertion messages.
    """
    assert isinstance(statement_expr, SExpr) and statement_expr.coord is not None, statement_expr
    varname : str
    rv : Statement
    try:
        assert not self._is_anymacro_app(statement_expr)
        if statement_expr[0] == 'conjecture' or statement_expr[0] == 'prove':
            self.assertOrSyntaxError(len(statement_expr) == 2, statement_expr, "GlobalStateTransformConjecture expression should have length 2")
            rhs = self._mk_term(statement_expr[1], None, parent_action, None, statement_expr)
            rv = FVRequirement(rhs)
        elif statement_expr[0] == 'local' or statement_expr[0] == 'writeout':
            self.assertOrSyntaxError(len(statement_expr) == 6, statement_expr, 'Local var dec should have form (local name : type = term) or := instead of =')
            self.assertOrSyntaxError(statement_expr[2] == ':' and (statement_expr[4] == ":=" or statement_expr[4] == "="), statement_expr,
                                     'Local var dec should have form (local name : type = term) or := instead of =')
            sort = self._mk_sort(statement_expr[3])
            rhs = self._mk_term(statement_expr[5], None, parent_action, None, statement_expr)
            varname = castid(LocalVarId, statement_expr[1])
            self.top.register_sorted_name(varname,sort)
            lvd = LocalVarDec(varname, rhs, sort)
            if varname in parent_action.local_vars:
                self.syntaxError(statement_expr, "Redeclaration of local variable")
            parent_action.local_vars[varname] = lvd
            if statement_expr[0] == 'writeout':
                lvd.is_writeout = True
            rv = lvd
        elif statement_expr[0] == 'if':
            test = self._mk_term(statement_expr[1], None, parent_action, None, statement_expr)
            self.assertOrSyntaxError(isinstance(statement_expr[2], SExpr) and isinstance(statement_expr[4], SExpr), statement_expr,
                                     f"Expected {statement_expr} to have the form `(if TEST (BLOCK) else (BLOCK))`")
            rv = IfElse(test,[],[])
            rv.true_branch = self._mk_statements(statement_expr[2], parent_action, rv)
            self.assertOrSyntaxError(statement_expr[3] == 'else', statement_expr,
                                     f"Expected {statement_expr} to have the form `(if TEST (BLOCK) else (BLOCK))`")
            rv.false_branch = self._mk_statements(statement_expr[4], parent_action, rv)
        else:
            # state-var assignment: (varname' OP term)
            self.assertOrSyntaxError(len(statement_expr) == 3, statement_expr,
                                     "As of 16 Mar 2018, every code block statement other than a conjecture or local var intro should be"
                                     "a triple: a := (or =), +=, -=, *= specifically. See\n" + str(statement_expr))
            assert statement_expr.coord() is not None
            rhs = self._mk_term(statement_expr[2], None, parent_action, None, statement_expr)
            # print(statement_expr.coord, rhs.coord, statement_expr[2])
            assert rhs.coord is not None, f"{rhs} has no FileCoord. it's a {type(rhs)}"
            varname = castid(StateVarId, statement_expr[0])
            # TODO: for requiring primed variables. recall, though, that this makes += syntax kinda odd.
            assert isprimed(varname), f"Replace assigned-to state var name {varname} with {primed(varname)}."
            unprimed_name = unprimed(varname)
            self.assertOrSyntaxError(unprimed_name in self.top.state_var_decs, statement_expr, f"{unprimed_name} not recognized as a state variable. State variables are: " + str(self.top.state_var_decs.keys()))
            vardec = self.top.state_var_decs[unprimed_name]
            orig : Statement
            reduced : Statement
            if statement_expr[1] == ':=' or statement_expr[1] == "=":
                rv = StateVarAssign(vardec, rhs)
            else:
                assert statement_expr.coord is not None
                var = self.top.new_state_var_ref(unprimed_name, statement_expr.coord())
                orig = StateVarAssign(vardec, rhs, statement_expr[1])
                # Desugar op-assign into a plain assignment of (var OP rhs),
                # keeping the original around on .orig.
                if orig.varop == "+=":
                    reduced = StateVarAssign(vardec, FnApp('+', [var, rhs], rhs.coord))
                elif orig.varop == '-=':
                    reduced = StateVarAssign(vardec, FnApp('-', [var, rhs], rhs.coord))
                elif orig.varop == '*=':
                    reduced = StateVarAssign(vardec, FnApp('*', [var, rhs], rhs.coord))
                else:
                    raise Exception
                reduced.orig = orig
                rv = reduced
        rv.parent_block = parent_block
        rv.grandparent_ifelse = parent_ifelse
        return rv
    except Exception as e:
        logging.error(f"Problem with {statement_expr}")
        raise e
@staticmethod
def mk_literal(x:str, parent_SExpr:Optional[SExpr] = None, prog:Optional[L4Contract] = None) -> Term:
    """Interpret a bare token as a literal Term.

    Tries, in order: integer, float, boolean, simple time-delta with a one-
    or two-character unit suffix (e.g. "30d", "12hr" — assuming those are the
    unit spellings in SUPPORTED_TIMEUNITS), and finally a role id declared by
    `prog`. Reports a syntax error via syntaxErrorX if nothing matches.

    parent_SExpr only supplies the source coordinate for error reporting.
    """
    loc = FileCoord(parent_SExpr.line, parent_SExpr.col) if parent_SExpr else None
    if isInt(x):
        return IntLit(int(x), loc)
    if isFloat(x):
        return FloatLit(float(x), loc)
    if x == 'false':
        return BoolLit(False, loc)
    if x == 'true':
        return BoolLit(True, loc)
    # number followed by a one-character time unit suffix
    if x[-1].lower() in SUPPORTED_TIMEUNITS and isInt(x[:-1]):
        return SimpleTimeDeltaLit(int(x[:-1]), x[-1].lower(), loc)
    # number followed by a two-character time unit suffix
    if x[-2:].lower() in SUPPORTED_TIMEUNITS and isInt(x[:-2]):
        return SimpleTimeDeltaLit(int(x[:-2]), x[-2:].lower(), loc)
    if prog and x in prog.roles:
        return RoleIdLit(x)
    syntaxErrorX(parent_SExpr, f"Don't recognize name {x}")
"""
parent_SExpr is used for debugging since s-expressions carry their original line number. not used for anything else.
"""
def _mk_term(self, x:Union[str, SExpr],
             parent_situation : Optional[Situation] = None,
             parent_action : Optional[Action] = None,
             parent_er_context : Optional[EventRuleContext] = None,
             parent_SExpr : Optional[SExpr] = None ) -> Term:
    """Recursively translate a token or s-expression into a Term AST node.

    Resolution order for a bare string token: execution-environment variable,
    event id, time-constraint keyword, state variable, primed state variable,
    contract parameter, rule-bound action parameter, action-bound action
    parameter, definition (inlined by recursion), action-local variable, and
    finally a literal via mk_literal.

    For an s-expression: macro application, quantified formula, function
    application (infix/prefix/postfix via try_parse_as_fn_app), then
    sort-annotation ("units") forms.

    parent_SExpr is used for error coordinates, since s-expressions carry
    their original line number; it is not used for anything else.
    """
    assert parent_SExpr is not None, x # todo clean this up
    assert x != "no_time_constraint"
    assert parent_er_context is None or isinstance(parent_er_context, EventRuleContext), parent_er_context
    if isinstance(x,str):
        # normalize unicode operator spellings to their ascii equivalents
        if x in UNICODE_TO_ASCII:
            x = UNICODE_TO_ASCII[x]
        if x in EXEC_ENV_VARIABLES:
            # next_event_td is only meaningful while an action rule is being built
            if x == "next_event_td":
                self.assertOrSyntaxError(self._building_next_action_rule, parent_SExpr, "Can't use next_event_td when not in the scope of an action rule.")
            # (scope checks for last_event_td / last_situation_td were here but are disabled)
            return FnApp(x,[], parent_SExpr.coord() if parent_SExpr else None)
        if x in self.top.actions_by_id:
            return EventIdLit(x)
        if x in TIME_CONSTRAINT_KEYWORDS:
            assert x != "no_time_constraint"
            if x == "immediately":
                coord = parent_SExpr.coord()
                # in a deadline event rule the deadline term is simply the last
                # situation's timestamp; otherwise expand to the equation
                # next_event_td == last_situation_td
                if cast(EventRuleContext, parent_er_context).for_deadline_event_rule:
                    return FnApp('last_situation_td', [], coord)
                else:
                    return FnApp('==', [
                        FnApp('next_event_td',[],coord),
                        FnApp('last_situation_td', [], coord)
                    ], coord)
            # any other time-constraint keyword is unhandled here
            raise Exception
        if x in self.top.state_var_decs:
            return StateVar(self.top.state_var_decs[cast(StateVarId, x)], parent_SExpr.coord() if parent_SExpr else None)
        if isprimed(x):
            # primed variables (X') are only legal in StateTransform sections
            # (or while building a future action rule)
            if not self._building_future_action_rule:
                self.assertOrSyntaxError(self._building_situation_id is None and not self._building_next_action_rule,
                                         parent_SExpr,
                                         f"Can't use primed variables outside of a StateTransform section.")# {self._building_situation_id} {self._building_next_action_rule}")
            self.assertOrSyntaxError(unprimed(x) in self.top.state_var_decs, parent_SExpr, f"Primed variable {x} does not appear to be a state variable.")
            return PrimedStateVar(self.top.state_var_decs[unprimed(x)], parent_SExpr.coord() if parent_SExpr else None)
        if x in self.top.contract_params:
            return ContractParam(self.top.contract_params[cast(ContractParamId,x)], parent_SExpr.coord() if parent_SExpr else None)
        if parent_er_context and parent_er_context.ruleparam_names and x in parent_er_context.ruleparam_names:
            assert parent_SExpr is not None and parent_SExpr.coord() is not None
            assert parent_er_context.ruleparam_to_ind is not None
            return RuleBoundActionParam(cast(RuleParamId, x), parent_er_context.action_id,
                                        cast(int,parent_er_context.ruleparam_to_ind(castid(RuleParamId, x))),
                                        parent_SExpr.coord())
        if parent_action and x in parent_action.param_sorts_by_name:
            assert parent_SExpr is not None and parent_SExpr.coord() is not None
            assert parent_action.param_name_to_ind is not None
            return ActionBoundActionParam(cast(ActionParamId, x), parent_action,
                                          parent_action.param_name_to_ind[castid(ActionParamId,x)],
                                          parent_SExpr.coord())
        if x in self.top.definitions:
            # definitions are expanded inline by recursing on their body
            return self._mk_term(self.top.definitions[castid(DefinitionId, x)].body,
                                 parent_situation, parent_action, parent_er_context,
                                 parent_SExpr)
        if parent_action and x in parent_action.local_vars:
            return LocalVar(parent_action.local_vars[castid(LocalVarId, x)],
                            parent_SExpr.coord() if parent_SExpr else None)
        # fall through: must be a literal (int/float/bool/timedelta/role id)
        return L4ContractConstructor.mk_literal(x, parent_SExpr, self.top)
    else: # SExpr
        if self._is_macro_app(x):
            x = self.handle_apply_macro(x)
        if x[0] in QUANTIFIERS:
            # (QUANT (v1 Sort1) ... body)
            rv = Quantified(x[0],
                            tuple((chcaststr(pair[0]), self._mk_sort(pair[1])) for pair in x[1:-1]),
                            self._mk_term(x[-1], None, None, None, x), x.coord())
            print("Found quantified formula: ", str(rv))
            return rv
        pair = try_parse_as_fn_app(x)
        if pair:
            fnsymb_name = pair[0]
            self.top.fnsymb_names.add(fnsymb_name)
            args : List[Term]
            if fnsymb_name in ('cast','check','trust','units'):
                # first argument is a sort literal, the rest are ordinary terms
                args = [cast(Term, self.mk_sort_lit(pair[1][0]))] + [self._mk_term(arg, parent_situation, parent_action, parent_er_context, x) for arg in pair[1][1:]]
            elif fnsymb_name == "str2dt":
                assert isinstance(pair[1],SExpr) and isinstance(pair[1][0], SExpr) and pair[1][0][0] == STRING_LITERAL_MARKER, pair
                try:
                    # pair[1] is the args tuple to str2dt, pair[1][0] is the first and only arg, a STRLIT expression, and
                    # pair[1][0][1] is the date string itself
                    dt = dateutil.parser.parse(pair[1][0][1])
                    # datetimes are stored as deltas from the contract's start datetime
                    self.assertOrSyntaxError(self.top.start_datetime is not None, x)
                    if self.top.start_datetime: # redundant None check for mypy
                        return TimeDeltaLit(dt - self.top.start_datetime, x.coord())
                except Exception as e:
                    print(e)
                    raise e
            else:
                args = [ self._mk_term(arg, parent_situation, parent_action, parent_er_context, x) for arg in pair[1] ]
            return FnApp(
                fnsymb_name,
                args,
                x.coord()
            )
        else:
            if x in EXEC_ENV_VARIABLES:
                self.syntaxError(x, f"You're using environment variable {x} like a 0-arg function symbol. Remove the brackets please.")
            elif x[0] in self.top.sorts:
                # (Sort term) is sugar for (units Sort term)
                return FnApp( "units", [
                    self.mk_sort_lit(x[0]),
                    self._mk_term(x[1], parent_situation, parent_action, parent_er_context, parent_SExpr),
                ], x.coord())
            elif x[0] in self.top.sort_definitions:
                return FnApp("units", [
                    self.mk_sort_lit(x[0]),
                    self._mk_term(x[1], parent_situation, parent_action, parent_er_context, parent_SExpr)
                ], x.coord())
            else:
                if x[0] in INFIX_FN_SYMBOLS:
                    self.syntaxError(x, f"Didn't recognize symbol {x[0]} in: {x}. Did you mean to use infix notation?")
                if x[0] in self.top.ontology_fns:
                    todo_once("this is temporary")
                    return cast(Any,x.tillEnd(1))
                self.syntaxError(x, "Didn't recognize a function symbol in sexpr with components:\n" + mapjoin(str, x, '\n'))
def _mk_time_constraint(self, expr:SExprOrStr, src_situation:Optional[Situation], src_action:Optional[Action],
                        event_rule_ctx:Optional[EventRuleContext], parent_sexpr:SExpr) -> Optional[Term]:
    """Build the Term for an action rule's time constraint.

    Returns None for the literal keyword "no_time_constraint" (and, as a
    fallback, after reporting a syntax error for an unrecognized predicate).
    Other strings are delegated to _mk_term; s-expressions whose head is a
    known time-constraint predicate become a FnApp over their arguments.
    """
    rv : Term
    if isinstance(expr,str):
        if expr == "no_time_constraint":
            return None
        rv = self._mk_term(expr, src_situation, src_action, event_rule_ctx, parent_sexpr)
    else:
        self.assertOrSyntaxError( len(expr) > 1, expr)
        pair = try_parse_as_fn_app(expr)
        if pair and pair[0] in TIME_CONSTRAINT_PREDICATES:
            rv = FnApp(
                pair[0],
                [self._mk_term(arg, src_situation, src_action, event_rule_ctx, expr) for arg in pair[1]],
                FileCoord(expr.line, expr.col)
            )
        else:
            # not a recognized time-constraint form: report with the most
            # specific context available (situation, else action)
            if src_situation:
                print("pair: ", pair)
                self.syntaxError(expr, f"Unhandled time constraint predicate {expr} in situation {src_situation.situation_id}")
            elif src_action:
                self.syntaxError(expr, f"Unhandled time constraint predicate {expr} in action {src_action.action_id}")
            return None
    # a "typical" constraint bounds next_event_td from above; log anything else
    if not (isinstance(rv,FnApp) and rv.fnsymb_name in ["≤","<=","<"] and isinstance(rv.args[0],FnApp) and rv.args[0].fnsymb_name == "next_event_td"): # type:ignore
        if self.verbose:
            print("Atypical time constraint:", rv)
    return rv
def _mk_event_rule(self, expr: SExpr, src_situation: Situation, parent_action: Optional[Action]) -> None:
    """Parse one event-rule s-expression and attach the resulting rule(s) to src_situation.

    An (if GUARD RULE1 RULE2 ...) wrapper holding several rules is expanded
    into one guarded rule per RULEi by recursion. A rule of the form
    (ROLE may ACTION ...) becomes an actor event rule; anything else becomes
    a deadline event rule.
    """
    self._building_next_action_rule = True
    guard: Optional[Term] = None
    if expr[0] == 'if':
        guard = self._mk_term(expr[1], src_situation, parent_action, None, expr)
        if len(expr) != 3:
            # multiple rules sharing one entrance guard: recurse once per rule
            for shared_rule in expr[2:]:
                self._mk_event_rule(SExpr([expr[0], expr[1], shared_rule], expr.line, expr.col, expr.symb),
                                    src_situation, parent_action)
            return
        expr = expr[2]
    if len(expr) >= 3 and expr[1] == "may":
        return self._mk_actor_event_rule(expr, src_situation, parent_action, guard)
    return self._mk_deadline_event_rule(expr, src_situation, parent_action, guard)
def _mk_deadline_fn(self, expr:SExprOrStr, src_situation:Situation, parent_action:Optional[Action],
                    parent_sexpr:SExpr, parent_event_rule_context:EventRuleContext) -> Tuple[Term, TriggerType]:
    """Build the (deadline term, trigger type) pair for a deadline event rule.

    "immediately" (bare, or as the head of an s-expression) means the deadline
    is the last event's timestamp with trigger type at_td_contract. Otherwise
    scan `expr` for the first sub-s-expression whose head is a known trigger
    type keyword (TRIGGER_TYPE_INTERP) and build the deadline term from its
    argument(s). Raises a syntax error if no trigger is found.
    """
    if expr == "immediately" or (isinstance(expr,SExpr) and expr[0] == "immediately"):
        return self._mk_term("last_event_td", src_situation, parent_action, None, parent_sexpr), TriggerType.at_td_contract
    for x in expr:
        if isinstance(x, SExpr):
            if x[0] in TRIGGER_TYPE_INTERP:
                trigger_type = TRIGGER_TYPE_INTERP[x[0]]
                # multi-argument trigger: the whole tail is the deadline term;
                # single argument: just that argument
                if len(x) > 2:
                    deadline_fn = self._mk_term(x.tillEnd(1), src_situation, parent_action,
                                                parent_event_rule_context, x)
                else:
                    deadline_fn = self._mk_term(x[1], src_situation, parent_action,
                                                parent_event_rule_context, x)
                return deadline_fn, trigger_type
    self.syntaxError(expr, "Didn't find time trigger function")
def _mk_deadline_event_rule(self, expr:SExpr, src_situation:Situation, parent_action:Optional[Action], entrance_enabled_guard:Optional[Term]) -> None:
    """Parse a deadline (environment-triggered) event rule and attach it to src_situation.

    The different forms:
    1a. (<DeadlineEventName>) is short for (<DeadlineEventName> immediately), which is itself
        short for (<DeadlineEventName> (<trigger type> last_event_td))
    1b. ((<DeadlineEventName> arg1 arg2)) is short for ((<DeadlineEventName> arg1 arg2) immediately)
    2.  (<DeadlineEventName> (<trigger type> <Term of type TimeDelta>))
    3.  ((<DeadlineEventName> arg1 arg2) (<trigger type> <Term of type TimeDelta>))
    """
    action_id : ActionId
    der : DeadlineEventRule
    # cases 1a and 1b: no explicit trigger -> implicit "immediately"
    if len(expr) == 1:
        expr = expr.newHere([expr[0], "immediately"])
    assert len(expr) == 2
    # now reduced to cases 2 and 3
    deadline_fn_expr = expr[1]  # NOTE(review): unused; _mk_deadline_fn is called on `rem` below
    er_context : EventRuleContext
    param_setter : Optional[Tuple[Term,...]]
    if isinstance(expr[0],str):
        # then no event params
        action_id = castid(ActionId,expr[0])
        param_setter = None
        er_context = EventRuleContext(None, action_id, True)
    else:
        # case 3: head is (<DeadlineEventName> arg1 arg2 ...)
        action_id = castid(ActionId, (expr[0][0]))
        param_setter_part = expr[0].tillEnd(1)
        self.assertOrSyntaxError(len(param_setter_part) > 0, expr, "Either you meant to include parameter setters, or you have an exta pair of brackets")
        er_context = EventRuleContext(None, action_id, True)
        param_setter = tuple(self._mk_term(arg, src_situation, parent_action,
                                           er_context, param_setter_part) for arg in param_setter_part)
    if not is_derived_trigger_id(action_id):
        self.referenced_nonderived_action_ids.add(action_id)
    rem = expr.tillEnd(1)
    deadline_fn, triggertype = self._mk_deadline_fn(rem, src_situation, parent_action, expr, er_context)
    der = DeadlineEventRule(src_situation.situation_id, action_id,
                            entrance_enabled_guard, param_setter, deadline_fn, triggertype)
    src_situation.add_action_rule(der)
    # leaving action-rule scope (see _mk_event_rule, which sets this True)
    self._building_next_action_rule = False
def _mk_actor_event_rule(self, expr:SExpr, src_situation:Situation, parent_action:Optional[Action], entrance_enabled_guard:Optional[Term]) -> None:
    """Parse an actor event rule of the form (ROLE(S) <deontic keyword> ACTION ...)
    and attach it to src_situation.

    The action position expr[2] may be a bare action id, or an s-expression
    whose tail is either all '?'-prefixed newly-bound rule parameters or all
    parameter-setting terms (never a mix). The remainder of `expr` may carry
    an optional time constraint and `where` clause.
    """
    action_id : ActionId
    ruleparams : Optional[Tuple[RuleParamId,...]] = None
    er : ActorEventRule
    param_setter: Optional[Tuple[Term,...]] = None
    # a single role literal/string is wrapped in a one-element list
    role_ids = [castid(RoleId, expr[0])] if (isinstance(expr[0],Literal) or isinstance(expr[0],str)) else expr[0]
    deontic_keyword : DeonticKeyword = castid(DeonticKeyword, expr[1])
    self.assertOrSyntaxError(deontic_keyword in DEONTIC_KEYWORDS, expr, deontic_keyword)
    if isinstance(expr[2],str):
        action_id = castid(ActionId,expr[2])
        ruleparams = None
        param_setter = None
    else:
        action_id = castid(ActionId, (expr[2][0]))
        ruleparams_part = expr[2].tillEnd(1)
        if len(ruleparams_part) == 0:
            ruleparams = None
            param_setter = None
        elif ruleparams_part[0][0] == "?":
            # '?'-prefixed tokens are newly-bound rule parameters
            assert all([ruleparams_part[i][0] == "?" for i in range(len(ruleparams_part))]), \
                "Either all or none of the action argument positions in an action rule must be newly-bound variables prefixed with '?'."
            ruleparams = cast(Tuple[RuleParamId,...],tuple(ruleparams_part))
            param_setter = None
        else:
            # otherwise the tail is a tuple of parameter-setting terms
            ruleparams = None
            er_ctx = EventRuleContext(ruleparams, action_id, False)
            param_setter = tuple(self._mk_term(arg, src_situation, parent_action, er_ctx, ruleparams_part) for arg in
                                 ruleparams_part)
    rem = expr.tillEnd(3)
    if not is_derived_trigger_id(action_id):
        self.referenced_nonderived_action_ids.add(action_id)
    # NOTE(review): rebuilt unconditionally even when already built above
    er_ctx = EventRuleContext(ruleparams, action_id, False)
    (timeconstraint, whereclause) = self._handle_optional_action_rule_parts(rem, er_ctx, src_situation, parent_action)
    er = ActorEventRule(src_situation.situation_id, action_id, entrance_enabled_guard, ruleparams, param_setter, whereclause,
                        role_ids, deontic_keyword, timeconstraint, len(rem) > 0 and rem[0] == "immediate")
    # sanity check: "must" rules should never end up with the sentinel keyword literal
    if deontic_keyword == "must" and er.time_constraint:
        if isinstance(er.time_constraint, Literal) and er.time_constraint.lit == "no_time_constraint":
            assert False, er
    assert not er.param_setter or not er.where_clause
    src_situation.add_action_rule(er)
    # leaving action-rule scope (see _mk_event_rule, which sets this True)
    self._building_next_action_rule = False
def _handle_optional_action_rule_parts(self, rem:SExpr, er_context:EventRuleContext, src_situation:Optional[Situation], src_or_parent_act: Optional[Action]) -> Tuple[Optional[Term],Optional[Term]]:
    """Extract the optional time constraint and `where` clause from an action rule's tail.

    Recognized sub-expressions in `rem`:
    - (where TERM)                            -> where clause
    - (when ...)                              -> explicit time constraint
    - (before|within|at|after|at_or_after X)  -> comparison of next_event_td with X
    - the *_split variants                    -> comparison with last_situation_td + X
    - the *_dt / by / on variants             -> comparison of next_event_dt with X
    - a bare time-constraint keyword          -> handled by _mk_time_constraint

    Returns (time_constraint, where_clause), either of which may be None.
    """
    where_clause : Optional[Term] = None
    time_constraint : Optional[Term] = None
    found_labeled_time_constraint = False
    for x in rem:
        if not isinstance(x, str):
            if x[0] == "where":
                where_clause = self._mk_term(x[1], src_situation, src_or_parent_act, er_context, rem)
            elif x[0] == "when":
                found_labeled_time_constraint = True
                if len(x) > 2:
                    time_constraint = self._mk_time_constraint(x.tillEnd(1), src_situation, src_or_parent_act, er_context, x)
                else:
                    time_constraint = self._mk_time_constraint(x[1], src_situation, src_or_parent_act, er_context, x)
            # =============TimeDelta shorthand=============
            elif x[0] in {"before", "within", "at", "after", "at_or_after"}:
                found_labeled_time_constraint = True
                rest = x[1] if len(x) == 2 else x.tillEnd(1)
                if x[0] == "within":
                    symb = '≤'
                elif x[0] == "at":
                    symb = '=='
                elif x[0] == "before":
                    symb = '<'
                elif x[0] == "after":
                    symb = '>'
                elif x[0] == "at_or_after":
                    symb = '≥'
                expanded = x.newHere([symb, 'next_event_td', rest])
                time_constraint = self._mk_time_constraint(expanded, src_situation, src_or_parent_act, er_context, x)
            # =============TimeDelta since last shorthand=============
            elif x[0] in { "before_split", "within_split", "at_split", "after_split", "at_or_after_split"}:
                found_labeled_time_constraint = True
                rest = x[1] if len(x) == 2 else x.tillEnd(1)
                if x[0] == "within_split":
                    symb = '≤'
                elif x[0] == "at_split":
                    symb = '=='
                elif x[0] == "before_split":
                    symb = '<'
                elif x[0] == "after_split":
                    symb = '>'
                elif x[0] == "at_or_after_split":
                    symb = '≥'
                expanded = x.newHere([symb, 'next_event_td', x.newHere(['+', 'last_situation_td', rest])])
                time_constraint = self._mk_time_constraint(expanded, src_situation, src_or_parent_act, er_context, x)
            # =============DateTime shorthand=============
            elif x[0] in {"before_dt", "by", "within_dt", "on", "at_dt", "after_dt", "at_or_after_dt"}:
                found_labeled_time_constraint = True
                rest = x[1] if len(x) == 2 else x.tillEnd(1)
                if x[0] == "on" or x[0] == "at_dt":
                    symb = '=='
                elif x[0] == "within_dt" or x[0] == "by":
                    symb = '≤'
                elif x[0] == "before_dt":
                    symb = '<'
                elif x[0] == 'after_dt':
                    symb = '>'
                elif x[0] == 'at_or_after_dt':
                    symb = '≥'
                # BUG FIX: this previously hard-coded '≤' instead of using `symb`,
                # so "on"/"at_dt"/"before_dt"/"after_dt"/"at_or_after_dt" all
                # silently behaved like "within_dt".
                expanded = SExpr([symb, 'next_event_dt', rest], x.line, x.col)
                time_constraint = self._mk_time_constraint(expanded, src_situation, src_or_parent_act, er_context, x)
            else:
                self.syntaxError(rem)
        elif x in TIME_CONSTRAINT_KEYWORDS:
            found_labeled_time_constraint = True
            time_constraint = self._mk_time_constraint(x, src_situation, src_or_parent_act, er_context, rem)
    if not found_labeled_time_constraint:
        assert time_constraint is None
    return (time_constraint, where_clause)
def try_parse_as_fn_app(x:SExpr) -> Optional[Tuple[str, SExpr]]:
    """Try to read `x` as a function application, checking infix, then prefix,
    then postfix placement of the function symbol.

    Returns (ascii symbol, argument SExpr) or None if no reading succeeds.
    """
    for reading in (maybe_as_infix_fn_app, maybe_as_prefix_fn_app, maybe_as_postfix_fn_app):
        parsed = reading(x)
        if parsed:
            return parsed
    return None
def maybe_as_prefix_fn_app(se:SExpr) -> Optional[Tuple[str, SExpr]]:
    """Read `se` as (SYMB ARG ...). Returns (ascii symbol, args) or None."""
    head = se[0]
    if not isinstance(head, str):
        return None
    if head in UNICODE_TO_ASCII:
        head = UNICODE_TO_ASCII[head]
    # prefix placement is also accepted for the infix symbols
    if head in PREFIX_FN_SYMBOLS or head in INFIX_FN_SYMBOLS:
        return head, se.tillEnd(1)
    return None
def maybe_as_infix_fn_app(se:SExpr) -> Optional[Tuple[str, SExpr]]:
    """Read `se` as (ARG1 SYMB ARG2). Returns (ascii symbol, both args) or None."""
    if len(se) != 3 or not isinstance(se[1], str):
        return None
    middle : str = se[1]
    if middle in UNICODE_TO_ASCII:
        middle = UNICODE_TO_ASCII[middle]
    if middle in INFIX_FN_SYMBOLS:
        return middle, se.withElementDropped(1)
    return None
def maybe_as_postfix_fn_app(se:SExpr) -> Optional[Tuple[str, SExpr]]:
    """Read `se` as (ARG ... SYMB). Returns (ascii symbol, args) or None."""
    tail = se[-1]
    if not isinstance(tail, str):
        return None
    if tail in UNICODE_TO_ASCII:
        tail = UNICODE_TO_ASCII[tail]
    if tail in POSTFIX_FN_SYMBOLS:
        return tail, se.fromStartToExclusive(len(se) - 1)
    return None
|
#!/usr/bin/env python3
"""Fetch hierarchical mappings and descriptions of a given list of KEGG
Orthology (KO) entries from the KEGG server.
Usage:
python me.py ko.list
Notes:
This script utilizes the official KEGG API (https://www.kegg.jp/kegg/rest/
keggapi.html) to query the KEGG database and retrieve relevant information
in the following categories: orthology (ko), module, pathway, reaction,
reaction class, compound, and disease.
Tested and working with KEGG release 97.0+.
Restrictions:
The official website states:
"KEGG API is provided for academic use by academic users belonging to
academic institutions." (https://www.kegg.jp/kegg/rest/)
"The maximum number of identifiers that can be given is 10 (per query)."
(https://www.kegg.jp/kegg/rest/keggapi.html)
References:
The latest KEGG paper (Kanehisa et al., 2021):
https://academic.oup.com/nar/article/49/D1/D545/5943834
"""
import sys
from time import sleep
from datetime import datetime
from functools import partial
from urllib.request import urlopen, HTTPError, URLError
# package metadata (placeholders left by the repository anonymizer)
__author__ = '<NAME>'
__license__ = 'BSD-3-Clause'
__version__ = '0.0.1-dev'
__email__ = '<EMAIL>'
# network connection parameters
server = 'http://rest.kegg.jp/'  # base URL of the official KEGG REST API
step = 10  # no. of entries per query (max. 10 according to policy)
delay = 2  # time gap between two queries (sec)
retries = 5  # no. retries on failed query
timeout = 60  # waiting time before giving up (sec)
def fetch(api):
    """Fetch content from KEGG server.

    Parameters
    ----------
    api : str
        RESTful API command.

    Returns
    -------
    list of str or None
        Lines of text, or None if all retries failed.
    """
    for i in range(retries):
        if i:
            print('Retrying...', end=' ', flush=True)
            sleep(delay)
        try:
            with urlopen(server + api, timeout=timeout) as response:
                return response.read().decode('utf-8').splitlines()
        except HTTPError as e:
            print(f'{e.code} {e.reason}.', end=' ', flush=True)
        except URLError as e:
            # BUG FIX: URLError has no `code` attribute, so printing e.code
            # here used to raise AttributeError and mask the real network
            # error; only the reason is reported for non-HTTP failures.
            print(f'{e.reason}.', end=' ', flush=True)
    return None  # all retries exhausted
def kegg_info():
    """Return current KEGG release's version and statistics.

    Returns
    -------
    list of str
        KEGG release information, with the 17-character label column removed
        from each line.
    """
    lines = fetch('info/kegg')
    return [line[17:] for line in lines]
def batch_query(ids, cmd, f, name='entries', step=10):
    """Perform batch query and retrieve results.

    Parameters
    ----------
    ids : list of str
        Entries to query.
    cmd : str
        API command ("get", "list", etc.)
    f : function
        Function to convert retrieved text into data.
    name : str, optional
        Task name to display.
    step : int, optional
        Number of queries to submit per time.

    Returns
    -------
    dict of dict
        Retrieved data.
    """
    data = {}
    print(f'Querying {len(ids)} {name}...', end=' ', flush=True)
    counter = 0
    ids = sorted(ids)
    for i in range(0, len(ids), step):
        batch = ids[i:i + step]
        text = fetch(cmd + '/' + '+'.join(batch))
        # update in place instead of rebuilding the whole dict per batch
        # ({**data, **f(text)} was accidentally quadratic in len(ids))
        data.update(f(text))
        counter += len(batch)
        print(str(counter), end=' ', flush=True)
        sleep(delay)
    print('done.', flush=True)
    return data
def parse_list(text, code=None):
    """Parse KEGG list files.

    Parameters
    ----------
    text : list of str
        KEGG list text (tab-separated entry / description pairs).
    code : str, optional
        Database code to strip from the beginning of each entry.

    Returns
    -------
    dict
        Entry to description dictionary.

    Raises
    ------
    ValueError
        Entry has unexpected format (e.g., different code).
    """
    prefix = code + ':' if code else None
    mapping = {}
    for entry_line in text:
        key, desc = entry_line.split('\t')
        if prefix:
            if not key.startswith(prefix):
                raise ValueError(f'Unexpected entry: {key}.')
            key = key[len(prefix):]
        mapping[key] = desc
    return mapping
def parse_flat(text, skeys=(), mkeys=()):
    """Parse KEGG flat files.

    Parameters
    ----------
    text : list of str
        KEGG flat-file lines (12-character key column, then content).
    skeys : tuple of str, optional
        Keys whose value is a single line.
    mkeys : tuple of str, optional
        Keys whose value spans multiple (continuation) lines.

    Returns
    -------
    dict of dict
        Processed data of each requested key under each entry.

    Examples
    --------
    ENTRY       K00699   KO
    NAME        UGT
    PATHWAY     ko00040  Pentose and glucuronate interconversions
                ko00053  Ascorbate and aldarate metabolism
    ///
    """
    data = {}
    entry = None  # entry currently being parsed
    mkey = None   # multi-line key currently being appended to
    # key names are padded to the flat file's 12-character key column
    single_heads = tuple(k.upper().ljust(12) for k in skeys)
    multi_heads = tuple(k.upper().ljust(12) for k in mkeys)
    for line in text:
        # a new record starts
        if line.startswith('ENTRY '):
            entry = line[12:].split()[0]
            data[entry] = {}
            continue
        # the current record ends
        if line == '///':
            entry, mkey = None, None
        # single-line keys
        if line.startswith(single_heads):
            skey = skeys[single_heads.index(line[:12])]
            data[entry][skey] = line[12:].rstrip(';')
            mkey = None
            continue
        # multi-line keys (first line falls through to the append below)
        if line.startswith(multi_heads):
            mkey = mkeys[multi_heads.index(line[:12])]
            data[entry][mkey] = []
        elif not line.startswith(' '):
            # any other labeled line closes the current multi-line key
            mkey = None
        if mkey:
            data[entry][mkey].append(line[12:])
    return data
def extract_targets(data, keys):
    """Extract target entries and definitions from multiline terms.

    Each line under a key looks like "M00014 Glucuronate pathway" or
    "K01195,K14756 beta-glucuronidase [EC:3.2.1.31]": one or more
    comma-separated alphanumeric entry ids followed by a description.
    The lines under each key are replaced in place by the sorted, unique
    list of target ids.

    Parameters
    ----------
    data : dict of dict
        Main data structure (modified in place).
    keys : list or tuple of str
        Keys under which targets will be extracted.

    Returns
    -------
    dict of dict
        Definitions of individual targets under each key.
    """
    names = {key: {} for key in keys}
    for datum in data.values():
        for key in keys:
            if key not in datum:
                continue
            collected = []
            for raw in datum[key]:
                ids_part, sep, desc = raw.partition(' ')
                if not sep:
                    # no description present -> not a target line
                    continue
                fields = ids_part.split(',')
                # every comma-separated field must look like a KEGG entry id
                if all(f.isalnum() for f in fields):
                    collected.extend(fields)
                    for target in fields:
                        names[key][target] = desc
            datum[key] = sorted(set(collected))
    return names
def extract_dblinks(data, dbs, key='dblinks'):
    """Extract database links from multiline terms.

    Parameters
    ----------
    data : dict of dict
        Main data structure (modified in place: `key` is removed and replaced
        by one list of targets per recognized database).
    dbs : dict of str
        Map of database codes to names.
    key : str, optional
        Key of database link terms.

    Examples
    --------
    RN: R01478 R04979 R07818 R08127 R08260 R10830
    COG: COG3250
    GO: 0004566
    """
    for datum in data.values():
        if key not in datum:
            continue
        for line in datum[key]:
            try:
                code, targets = line.split(': ', 1)
            # BUG FIX: a line without ': ' yields a single field, which fails
            # tuple unpacking with ValueError — the original caught IndexError
            # and therefore crashed on malformed lines instead of skipping them
            except ValueError:
                continue
            if code in dbs:
                datum[dbs[code]] = targets.split()
        del datum[key]
def write_smap(data, key, fname):
    """Write one-to-one mapping to file.

    One tab-separated "entry<TAB>value" line per entry that carries `key`;
    entries lacking the key are skipped.

    Parameters
    ----------
    data : dict of dict
        Main data structure.
    key : str
        Key of data to write.
    fname : str
        Output file name.
    """
    with open(fname, 'w') as out:
        for entry, datum in data.items():
            if key in datum:
                out.write(f'{entry}\t{datum[key]}\n')
def write_mmap(data, key, fname):
    """Write one-to-many mapping to file.

    Each value line may itself hold comma-separated targets; they are
    flattened into one tab-separated row per entry.

    Parameters
    ----------
    data : dict of dict
        Main data structure.
    key : str
        Key of data to write.
    fname : str
        Output file name.
    """
    with open(fname, 'w') as out:
        for entry, datum in data.items():
            if key not in datum:
                continue
            expanded = [t for value in datum[key] for t in value.split(',')]
            out.write(entry + '\t' + '\t'.join(expanded) + '\n')
def write_names(names, fname):
    """Write names / descriptions of entries to file, sorted by entry.

    Parameters
    ----------
    names : dict
        Name dictionary.
    fname : str
        Output file name.
    """
    with open(fname, 'w') as out:
        for entry, desc in sorted(names.items()):
            out.write(f'{entry}\t{desc}\n')
def write_all(name, data, skeys=(), mkeys=()):
    """Write all data to files.

    Parameters
    ----------
    name : str
        Name of current analysis (used as the output file prefix).
    data : dict of dict
        Main data structure.
    skeys : iterable of str, optional
        Single keys of data to write (one "{name}_{key}.txt" file each).
    mkeys : iterable of str, optional
        Multiple keys of data to write (one "{name}-to-{stem}.txt" file each).
    """
    # BUG FIX: defaults were mutable lists ([]), a classic Python pitfall
    # (shared across calls); empty tuples are equivalent and safe.
    for key in skeys:
        write_smap(data, key, f'{name}_{key}.txt')
    for key in mkeys:
        # KO ("orthology") targets conventionally use the "ko" file stem
        stem = 'ko' if key == 'orthology' else key
        write_mmap(data, key, f'{name}-to-{stem}.txt')
def rename_paths(code, data, names=None):
    """Convert pathway entries from "map", "rn" etc. to "ko".

    Parameters
    ----------
    code : str
        Expected pathway code (e.g. "rn"); entries must be that code followed
        by a five-digit pathway number.
    data : dict of dict
        Main data structure (modified in place).
    names : dict of dict, optional
        Also rename pathways in this name dictionary.

    Raises
    ------
    ValueError
        Entry has unexpected format (e.g., different code).
    """
    expected_len = len(code) + 5  # code plus five-digit pathway number

    def _validate(path):
        # reject anything that is not "<code><5 chars>"
        if len(path) != expected_len or not path.startswith(code):
            raise ValueError(f'Unexpected pathway entry: {path}.')

    for datum in data.values():
        if 'pathway' not in datum:
            continue
        renamed = []
        for path in datum['pathway']:
            _validate(path)
            renamed.append(f'ko{path[-5:]}')
        datum['pathway'] = renamed
    if names and 'pathway' in names:
        for path in names['pathway']:
            _validate(path)
        names['pathway'] = {f'ko{p[-5:]}': desc
                            for p, desc in names['pathway'].items()}
def get_ecs(definition):
    """Extract EC numbers from a KO definition.

    Parameters
    ----------
    definition : str
        KO definition, optionally ending with a bracketed "[EC:...]" block
        holding one or more space-separated EC numbers.

    Returns
    -------
    list of str or None
        Extracted EC numbers, or None if the definition carries none.
    """
    if not definition.endswith(']'):
        return None
    start = definition.find(' [EC:')
    if start <= 0:
        return None
    return definition[start + 5:-1].split()
def get_compounds(equation):
    """Extract compound entries from an equation.

    A compound entry is "C" followed by exactly five digits; stoichiometric
    coefficients (e.g. "2 C00003") are tolerated, and glycan ("G...") or
    polymer terms without a valid C-number are ignored.

    Parameters
    ----------
    equation : str
        Equation string of the form "LEFT <=> RIGHT".

    Returns
    -------
    list of str
        Sorted unique compounds from the left side of the equation.
    list of str
        Sorted unique compounds from the right side of the equation.
    """
    sides = []
    for side in equation.split(' <=> '):
        found = set()
        for term in side.split(' + '):
            pos = term.find('C')
            if pos >= 0:
                candidate = term[pos:pos + 6]
                if len(candidate) == 6 and candidate[1:].isdigit():
                    found.add(candidate)
        sides.append(sorted(found))
    return sides
def get_classes(data, key='class'):
    """Split single-line class fields into lists of classes, in place.

    Parameters
    ----------
    data : dict of dict
        Main data structure (modified in place).
    key : str, optional
        Key under which classes will be extracted.

    Notes
    -----
    A class line is delimited by "; ". Example:
    "Pathway modules; Carbohydrate metabolism"
    """
    for datum in data.values():
        if key in datum:
            datum[key] = datum[key].split('; ')
def main():
    """Drive the full KEGG query workflow for a list of KO entries.

    Reads KO identifiers from the file named by the first command-line
    argument, then queries and writes out, in dependency order: orthology
    (KO), reactions, reaction classes, compounds, modules, pathways and
    diseases. Relies on helpers defined elsewhere in this file
    (kegg_info, batch_query, parse_flat, parse_list, extract_targets,
    extract_dblinks, rename_paths, write_all, write_names).
    """
    if len(sys.argv) < 2:
        sys.exit(__doc__)
    print(f'Task started at {datetime.now()}.')
    # get KEGG release info
    text = kegg_info()
    print('KEGG ' + text[1])
    with open('kegg_info.txt', 'w') as f:
        for line in text:
            print(line, file=f)
    # read query KOs (first tab-delimited column; '#' lines are comments)
    with open(sys.argv[1], 'r') as f:
        kos = sorted(set(x.split('\t')[0] for x in f.read(
            ).splitlines() if not x.startswith('#')))
    print(f'KO entries to query: {len(kos)}.')
    # orthology (KO)
    skeys = ('name', 'definition')
    mkeys = ('module', 'pathway', 'disease', 'dblinks')
    f = partial(parse_flat, skeys=skeys, mkeys=mkeys)
    data = batch_query(kos, 'get', f, name='KOs')
    # derive EC numbers from each KO definition, when present
    for ko, datum in data.items():
        if 'definition' in datum:
            ecs = get_ecs(datum['definition'])
            if ecs:
                datum['ec'] = ecs
    names = extract_targets(data, ('module', 'pathway', 'disease'))
    extract_dblinks(data, {'RN': 'reaction', 'COG': 'cog', 'GO': 'go'})
    # remember entry IDs of downstream categories referenced by the KOs
    mds = names['module'].keys()
    paths = names['pathway'].keys()
    dses = names['disease'].keys()
    rns = set().union(*[x['reaction'] for x in data.values()
                        if 'reaction' in x])
    mkeys = ('module', 'pathway', 'disease', 'ec', 'reaction', 'cog', 'go')
    write_all('ko', data, skeys, mkeys)
    # reaction
    skeys = ('name', 'definition', 'equation', 'enzyme')
    mkeys = ('orthology', 'module', 'pathway', 'rclass')
    f = partial(parse_flat, skeys=skeys, mkeys=mkeys)
    data = batch_query(rns, 'get', f, name='reactions')
    names = extract_targets(data, mkeys)
    for entry, datum in data.items():
        if 'enzyme' in datum:
            datum['enzyme'] = datum['enzyme'].split()
        if 'equation' in datum:
            # split the equation into substrate/product compound lists
            left, right = get_compounds(datum['equation'])
            if left:
                datum['left_compound'] = left
            if right:
                datum['right_compound'] = right
            both = sorted(set(left + right))
            if both:
                datum['compound'] = both
    rename_paths('rn', data, names)
    skeys = ('name', 'definition', 'equation')
    mkeys = ('orthology', 'module', 'pathway', 'rclass', 'enzyme', 'compound',
             'left_compound', 'right_compound')
    write_all('reaction', data, skeys, mkeys)
    cpds = set().union(*[x['compound'] for x in data.values()
                         if 'compound' in x])
    rcs = names['rclass'].keys()
    # accumulate modules/pathways discovered via reactions on top of the
    # ones already seen via KOs
    mds = set(mds).union(names['module'].keys())
    paths = set(paths).union(names['pathway'].keys())
    # reaction class
    f = partial(parse_list, code='rc')
    names = batch_query(rcs, 'list', f, name='reaction classes')
    write_names(names, 'rclass_name.txt')
    # compound
    f = partial(parse_list, code='cpd')
    names = batch_query(cpds, 'list', f, name='compounds')
    write_names(names, 'compound_name.txt')
    # module
    skeys = ('name', 'definition', 'class')
    mkeys = ('orthology', 'pathway', 'reaction', 'compound')
    f = partial(parse_flat, skeys=skeys, mkeys=mkeys)
    data = batch_query(mds, 'get', f, name='modules')
    get_classes(data)
    names = extract_targets(data, mkeys)
    rename_paths('map', data, names)
    skeys = ('name', 'definition')
    mkeys = ('orthology', 'pathway', 'reaction', 'compound', 'class')
    write_all('module', data, skeys, mkeys)
    paths = set(paths).union(names['pathway'].keys())
    # pathway
    skeys = ('name', 'class')
    mkeys = ('orthology', 'module', 'disease', 'compound')
    f = partial(parse_flat, skeys=skeys, mkeys=mkeys)
    data = batch_query(paths, 'get', f, name='pathways')
    get_classes(data)
    names = extract_targets(data, mkeys)
    skeys = ('name',)
    mkeys = ('orthology', 'module', 'disease', 'compound', 'class')
    write_all('pathway', data, skeys, mkeys)
    dses = set(dses).union(names['disease'].keys())
    # disease
    f = partial(parse_list, code='ds')
    names = batch_query(dses, 'list', f, name='diseases')
    write_names(names, 'disease_name.txt')
    print(f'Task completed at {datetime.now()}.')
# Script entry point.
if __name__ == '__main__':
    main()
|
# Copyright 2020 SAS Project Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""ESC protection impact (population).
This scripts computes an ESC impact figure based on the population that is
covered by a whisper zone.
Usage:
# Measure ESC impact for category B, in a reasonably "fast mode".
# (for all sensors in New Jersey and NYC + Long Island)
esc_pop_impact --esc_fads=a_fad.json,other_fad.json \
--grid_arcsec=10 --category=B \
--filter_box="(39, -75, 41, -72)" \
--fast_mode --force_radius_km=50
Notes:
- One or several ESC fads can be specified. Aggregated statistics will be
provided, per sensor, per ESC network and total.
- The analysis resolution is specified in arcsec (1 arcsec ~= 30m). A typical
resolution is grid_arcsec=10 (faster) or grid_arcsec=5 (slower).
- the `--filter_box` allows for selecting a subset of the sensors within a
(lat_min, lon_min, lat_max, lon_max) box.
- The script is single thread, single process. To speed up the run, one can
run independently (and in parallel) unconnected areas such as west coast,
east coast and south-coast. Use --filter_box for that, for example:
- `--fast_mode` and `--force_radius_km` are provided to speed up the analysis:
  + with fast_mode: haversine method is used instead of vincenty (no real impact).
+ with force_radius_km: one can specify 50k CatB neighborhood for example
(instead of regular 80km), as it is usually sufficient to capture the effective
whisper zones.
- A special mode '--nbor_pop_only' allows to compute the total population in the
neighborhood, without any consideration of path loss.
"""
import argparse
import ast
import collections
import json
import os
import time
import numpy as np
import shapely.geometry as sgeo
from reference_models.antenna import antenna
from reference_models.geo import utils
from reference_models.geo import zones
from reference_models.propagation import wf_itm
from usgs_pop import usgs_pop_driver
from geo import geo_utils
import xlsxwriter
#----------------------------------------
# Setup the command line arguments
parser = argparse.ArgumentParser(description='ESC Pop Impact')
parser.add_argument('--esc_fads', type=str, default='',
                    help='The ESC network as a FAD JSON files (separated by comma).')
parser.add_argument('--grid_arcsec', type=int, default=10,
                    help='The grid calculation resolution (in arcsec).')
parser.add_argument('--budget_offset_db', type=float, default=0,
                    help='A budget link offset (in dB).')
parser.add_argument('--category', type=str, default='B',
                    choices=['A', 'B', 'indoor'],
                    help='The CBSD category to compute impact for.')
parser.add_argument('--fast_mode', dest='fast_mode', action='store_true',
                    help='Use fast approximate calculation.')
parser.set_defaults(fast_mode=False)
parser.add_argument('--filter_box', type=str, default='',
                    help='A filtering box: (lat_min, lon_min, lat_max, lon_max)'
                    ' for the sensors to be processed')
parser.add_argument('--force_radius_km', type=int, default=0,
                    help='The neighborhood radius forced to a non standard value.')
parser.add_argument('--pop_dir', type=str, default='',
                    help='The USGS population directory. If unspecified, uses the'
                    ' one in default directory')
parser.add_argument('--terrain_dir', type=str, default='',
                    help='Terrain directory. If unspecified use default one.')
parser.add_argument('--lazy_pop', dest='lazy_pop', action='store_true',
                    help='Use lazy population loading.')
parser.add_argument('--nolazy_pop', dest='lazy_pop', action='store_false',
                    help='Disable lazy population loading - Load all at init.')
parser.set_defaults(lazy_pop=True)
parser.add_argument('--nbor_pop_only', dest='nbor_pop_only', action='store_true',
                    help='Only compute neighborhood population.')
parser.set_defaults(nbor_pop_only=False)
parser.add_argument('--excel_output_filename', type=str, default='output.xlsx',
                    help='Name of excel file for output statistics')
# Parsed at import time: this module is a standalone script.
FLAGS = parser.parse_args()
# Population resolution database
POP_RES_ARCSEC = 10
# Nominal EIRP per category (dBm/MHz, per the usage in PopulationImpact)
EIRP_CATB_MHZ = 37
EIRP_CATA_MHZ = 20
EIRP_INDOOR_MHZ = 20
# A container for ESC sensor params
EscSensor = collections.namedtuple('EscSensor', [
    'name', 'latitude', 'longitude', 'height', 'ant_azimuth', 'ant_pattern',
    'protection_level'
])
def _Pattern(json_pattern):
"""Converts from a JSON pattern to a numpy ndarray."""
ant_pattern = sorted([(pat['angle'], pat['gain']) for pat in json_pattern])
_, gains = list(zip(*ant_pattern))
return np.array(gains)
def _ConvertEsc(esc):
    """Converts an ESC JSON dict into a more proper |EscSensor| representation.

    Raises:
      ValueError: if the sensor height is not specified as AGL.
    """
    params = esc['installationParam']
    height_type = params['heightType']
    # Only above-ground-level heights are supported downstream.
    if height_type.lower() != 'agl':
        raise ValueError('Unsupported heightType=%s' % height_type)
    sensor_name = esc['id'].split('/')[-1]
    return EscSensor(
        name=sensor_name,
        latitude=round(params['latitude'], 5),
        longitude=round(params['longitude'], 5),
        height=round(params['height'], 1),
        ant_azimuth=params['antennaAzimuth'],
        ant_pattern=_Pattern(params['azimuthRadiationPattern']),
        # -109 dBm is used as the default when no explicit level is given.
        protection_level=esc.get('protectionLevel', -109))
def ComputeSensorNeighborhood(latitude, longitude, radius_km, res_arcsec):
    """Computes and grids the neighborhood of a sensor.

    Args:
      latitude: The sensor latitude (deg).
      longitude: The sensor longitude (deg).
      radius_km: The neighborhood radius (km).
      res_arcsec: The gridding resolution (arcsec)

    Returns:
      A tuple (latitudes, longitudes, geometry) representing the neighborhood:
        - latitudes & longitudes: the lists of gridded points.
        - geometry: a |shapely| Polygon (or MultiPolygon).
    """
    # Buffer a disk around the sensor, then clip it to the US border.
    nbor_zone = geo_utils.Buffer(
        sgeo.Point(longitude, latitude), radius_km).intersection(
            zones.GetUsBorder())
    grid_points = utils.GridPolygon(nbor_zone, res_arcsec)
    # GridPolygon yields (lon, lat) pairs; unzip into parallel sequences.
    longitudes, latitudes = zip(*grid_points)
    return latitudes, longitudes, nbor_zone
def CalcEscPathLoss(latitude_esc, longitude_esc, height_esc, latitudes_tx,
                    longitudes_tx, height_tx):
    """Computes path loss for sensor protection area.

    This uses the regular Winnforum ITM propagation model parameters.

    Args:
      latitude_esc: The sensor latitude (deg).
      longitude_esc: The sensor longitude (deg).
      height_esc: The sensor height (meters).
      latitudes_tx: A sequence of Tx latitudes (deg).
      longitudes_tx: A sequence of Tx longitudes (deg).
      height_tx: The Tx height (meters).

    Returns:
      A tuple (losses, esc_bearings, tx_bearings) holding the path losses (dB)
      and bearings (in degrees, clockwise from north) for each transmitter
      location. The bearings are provided both for sensor and Tx.
    """
    # Gather (loss, rx bearing, tx bearing) per transmitter point, then
    # split into the three parallel arrays expected by callers.
    results = []
    for lat, lon in zip(latitudes_tx, longitudes_tx):
        # reliability=-1 requests the model's internal default behavior.
        pred = wf_itm.CalcItmPropagationLoss(
            lat, lon, height_tx,
            latitude_esc, longitude_esc, height_esc,
            reliability=-1)
        results.append((pred.db_loss,
                        pred.incidence_angles.hor_rx,
                        pred.incidence_angles.hor_cbsd))
    losses_db = np.array([r[0] for r in results])
    esc_bearings = np.array([r[1] for r in results])
    tx_bearings = np.array([r[2] for r in results])
    return losses_db, esc_bearings, tx_bearings
def MaskPathLoss(losses_db, bearings, ant_hor_pattern, ant_azimuth,
                 ant_gain_dbi=0, misc_losses=0):
    """Applies antenna and misc losses.

    Args:
      losses_db: A scalar or ndarray of losses (dB).
      bearings: A scalar or ndarray of bearings (degrees), clockwise from north.
      ant_hor_pattern: The antenna horizontal pattern, defined as absolute gain
        on a sequence of 360 values (dB).
      ant_azimuth: The antenna azimuth direction (degrees), clockwise from
        north.
      ant_gain_dbi: Optional additional antenna gain (dBi). To be specified if
        the `antenna_hor_pattern` is normalized to 0dBi.
      misc_losses: Any additional misc losses (dB).

    Returns:
      The masked antenna losses (dB), as scalar or ndarray.
    """
    # Antenna gain in the direction of each bearing reduces the effective loss.
    pattern_gains = antenna.GetAntennaPatternGains(
        bearings, ant_azimuth, ant_hor_pattern, ant_gain_dbi)
    return losses_db - pattern_gains + misc_losses
def PopulationImpact(networks,
                     category,
                     res_arcsec,
                     offset_db,
                     popper,
                     filter_box=None,
                     forced_radius_km=None,
                     nbor_pop_only=False):
    """Analyse some ESC network(s) in terms of population impact.

    Args:
      networks: A sequence of JSON FAD data representing each network.
      category: CBSD category either as 'A', 'B' or 'indoor'.
      res_arcsec: The calculation resolution (in arcsec).
      offset_db: An extra offset to the budget link. Positive value means more
        population impacted.
      popper: The population driver.
      filter_box: A tuple (min_lat, min_lon, max_lat, max_lon) defining a
        bounding box for sensors to be processed.
      forced_radius_km: If set, override the regular radius of the CBSD
        category.
      nbor_pop_only: If set, computes only the neighborhood population.

    Returns:
      A tuple (total_pop, pop_per_network, pop_per_sensor) holding:
        - total_pop: the total population impact of the networks.
        - pop_per_network: a list of each network total population impact (no
          double counting across networks).
        - pop_per_sensor: a dict of each sensor population impact keyed by
          sensor name.
    """
    # Set the parameters depending on type of network to compute impact for.
    # NOTE(review): an unrecognized category would leave these names unset and
    # raise NameError below; argparse restricts choices to 'A'/'B'/'indoor'.
    if category == 'B':
        nbor_radius_km = 80
        cbsd_eirp_dbm_per_mhz = EIRP_CATB_MHZ
        cbsd_height = 25
        extra_loss = 0
    elif category == 'A':
        nbor_radius_km = 40
        cbsd_eirp_dbm_per_mhz = EIRP_CATA_MHZ
        cbsd_height = 6
        extra_loss = 0
    elif category == 'indoor':
        nbor_radius_km = 40
        cbsd_eirp_dbm_per_mhz = EIRP_INDOOR_MHZ
        cbsd_height = 5
        # Extra loss applied to indoor devices only (presumably building
        # penetration loss, in dB) — TODO confirm.
        extra_loss = 15
    if forced_radius_km:
        nbor_radius_km = forced_radius_km
    total_pop = 0
    pop_per_sensor = {}
    pop_per_network = []
    # Grid points already counted by ANY network (dedup for `total_pop`).
    all_location_processed = set()
    for i, network in enumerate(networks):
        print('ESC Network processing: #%d' % i)
        sensors = sorted([_ConvertEsc(sensor) for sensor in network],
                         key=lambda s: s.name)  # sorting to improve geo locality.
        # Grid points already counted within THIS network (per-network dedup).
        net_location_processed = set()
        pop_per_network.append(0)
        for sensor in sensors:
            if filter_box and (sensor.latitude < filter_box[0] or
                               sensor.latitude > filter_box[2] or
                               sensor.longitude < filter_box[1] or
                               sensor.longitude > filter_box[3]):
                continue
            print('... processing: %s' % sensor.name)
            # Compute signal level in the whole neighborhood.
            lats, lons, _ = ComputeSensorNeighborhood(sensor.latitude,
                                                      sensor.longitude,
                                                      nbor_radius_km, res_arcsec)
            lats, lons = np.array(lats), np.array(lons)
            if not nbor_pop_only:
                losses_db, esc_bearings, _ = CalcEscPathLoss(sensor.latitude,
                                                             sensor.longitude,
                                                             sensor.height, lats,
                                                             lons, cbsd_height)
                masked_losses_db = MaskPathLoss(losses_db, esc_bearings,
                                                sensor.ant_pattern,
                                                sensor.ant_azimuth)
                sig_level_dbm = cbsd_eirp_dbm_per_mhz - masked_losses_db - extra_loss
                # Detect the points that are impacted
                idxs = np.where(sig_level_dbm >= sensor.protection_level - offset_db)[0]
            else:
                # Neighborhood-population-only mode: every gridded point counts.
                idxs = np.arange(len(lats))
            # Compute the standalone population impact for that sensor.
            pop_per_sensor[sensor.name] = (
                geo_utils.AreaPlateCarreePixel(res_arcsec, sensor.latitude) *
                np.sum(popper.GetPopulationDensity(lats[idxs], lons[idxs])))
            print(' %d pops' % pop_per_sensor[sensor.name])
            # Compute the net and total population impact - avoid double counting.
            total_pop += pop_per_sensor[sensor.name]
            pop_per_network[-1] += pop_per_sensor[sensor.name]
            all_done_lats, all_done_lons = [], []
            net_done_lats, net_done_lons = [], []
            for k in idxs:
                # Round to 5 decimals so the same grid point seen from
                # overlapping sensor neighborhoods hashes to the same key.
                key = round(lats[k], 5), round(lons[k], 5)
                if key in all_location_processed:
                    all_done_lats.append(lats[k])
                    all_done_lons.append(lons[k])
                else:
                    all_location_processed.add(key)
                if key in net_location_processed:
                    net_done_lats.append(lats[k])
                    net_done_lons.append(lons[k])
                else:
                    net_location_processed.add(key)
            # Subtract population of points already counted by earlier sensors.
            if all_done_lats:
                total_pop -= (
                    geo_utils.AreaPlateCarreePixel(res_arcsec, sensor.latitude) *
                    np.sum(popper.GetPopulationDensity(all_done_lats, all_done_lons)))
            if net_done_lats:
                pop_per_network[-1] -= (
                    geo_utils.AreaPlateCarreePixel(res_arcsec, sensor.latitude) *
                    np.sum(popper.GetPopulationDensity(net_done_lats, net_done_lons)))
        print('*** Network Total: %d pops' % pop_per_network[-1])
    return total_pop, pop_per_network, pop_per_sensor
if __name__ == '__main__':
    # Script driver: load the ESC FADs, evaluate population impact and dump
    # per-sensor statistics to both stdout and an Excel workbook.
    print('## Measuring impact of ESC networks')
    # Special configuration.
    if FLAGS.terrain_dir:
        wf_itm.drive.ConfigureTerrainDriver(
            terrain_dir=FLAGS.terrain_dir, cache_size=16)
    if FLAGS.fast_mode:
        # Replace all Vincenty by simpler great circle for improving speed.
        geo_utils.ReplaceVincentyDistanceByHaversine()
    # Load the ESC sensors within the optional bounding box.
    print('Loading ESC networks')
    networks = []
    filter_box = None
    if FLAGS.filter_box:
        filter_box = ast.literal_eval(FLAGS.filter_box)
    esc_fads = FLAGS.esc_fads.split(',')
    for esc_fad in esc_fads:
        if not esc_fad:
            continue
        with open(esc_fad, 'r') as fd:
            sensors = json.load(fd)['recordData']
        networks.append(sensors)
    # Initialize popper, a population driver.
    # Fix: the conditional expression must be parenthesized — `%` binds
    # tighter than `if/else`, so without parentheses the non-lazy case
    # printed the bare 'Loading in memory' without the banner.
    print('** Init population raster: %s **' %
          ('Lazy loading' if FLAGS.lazy_pop else 'Loading in memory'))
    start_time = time.time()
    popper = usgs_pop_driver.UsgsPopDriver(FLAGS.pop_dir, FLAGS.lazy_pop)
    if not FLAGS.lazy_pop:
        popper.LoadRaster()
    print('.. done in %ds' % int(time.time() - start_time))
    # Initialize the output excel file
    excel_filename = FLAGS.excel_output_filename
    if os.path.exists(excel_filename):
        os.remove(excel_filename)
    workbook = xlsxwriter.Workbook(excel_filename)
    worksheet = workbook.add_worksheet()
    bold_format = workbook.add_format({'bold': True})
    bold_format.set_align('left')
    number_format = workbook.add_format({'num_format': '#,##0'})
    number_format.set_align('right')
    worksheet.set_column('A:B', 20)
    worksheet.write(0, 0, 'Sensor Name', bold_format)
    worksheet.write(0, 1, 'Population Impact', bold_format)
    # Compute the population impact
    print('** Evaluating population impact **')
    if FLAGS.nbor_pop_only:
        print('** SPECIAL MODE: population within neighborhood. **')
    start_time = time.time()
    total_pop, pop_per_network, pop_per_sensor = PopulationImpact(
        networks, FLAGS.category, FLAGS.grid_arcsec, FLAGS.budget_offset_db,
        popper, filter_box, FLAGS.force_radius_km, FLAGS.nbor_pop_only)
    print('.. done in %ds' % int(time.time() - start_time))
    # Output final statistics
    print('** Final results **')
    print('Total Population: {total_pop:.3f} kpops (1000s)'.format(
        total_pop=total_pop / 1000.))
    print('Network:')
    for k, pop_impact in enumerate(pop_per_network):
        print(' {k} : {pop_impact:.3f} kpops (1000s)'.format(
            k=k, pop_impact=pop_impact / 1000.))
    row = 1
    print('Sensors:')
    for sensor_name in sorted(pop_per_sensor):
        print(' {sensor_name} : {num_pop:.3f} kpops (1000s)'.format(
            sensor_name=sensor_name, num_pop=pop_per_sensor[sensor_name] / 1000.))
        worksheet.write(row, 0, sensor_name)
        worksheet.write(row, 1, pop_per_sensor[sensor_name], number_format)
        row += 1
    workbook.close()
|
# Copyright © 2018 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: BSD-2-Clause
# !/usr/bin/python
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vcd_catalog
short_description: Ansible module to manage (create/update/delete) catalogs in vCloud Director.
version_added: "2.4"
description:
- Ansible module to manage (create/update/delete) catalogs in vCloud Director.
- Task performed:
- Create catalog
- Read Catalog
- Update name, description and shared state of catalog
- Delete catalog
options:
user:
description:
- vCloud Director user name
required: false
password:
description:
- vCloud Director user password
required: false
host:
description:
- vCloud Director host address
required: false
org:
description:
- Organization name on vCloud Director to access
required: false
api_version:
description:
- Pyvcloud API version
required: false
verify_ssl_certs:
description:
- whether to use secure connection to vCloud Director host
required: false
catalog_name:
description:
- catalog name
required: true
new_catalog_name:
description:
- new catalog name. Used while updating catalog name.
required: false
description:
description:
- description of the catalog
required: false
shared:
description:
- shared state of catalog("true"/"false")
required: false
state:
description:
- state of catalog ('present'/'absent'/'update').
- One from state or operation has to be provided.
required: false
operation:
description:
- operation which should be performed over catalog.
- various operations are:
- read : read catalog metadata
- shared: share/unshare catalog
- One from state or operation has to be provided.
required: false
author:
- <EMAIL>
- <EMAIL>
'''
EXAMPLES = '''
- name: create catalog
vcd_catalog:
catalog_name: "catalog_name"
description: "description"
state: "present"
register: output
'''
RETURN = '''
msg: success/failure message corresponding to catalog state/operation
changed: true if resource has been changed else false
'''
from pyvcloud.vcd.org import Org
from ansible.module_utils.vcd import VcdAnsibleModule
from pyvcloud.vcd.exceptions import EntityNotFoundException
# Desired-state values and imperative operations accepted by this module.
VCD_CATALOG_STATES = ['present', 'absent', 'update']
VCD_CATALOG_OPERATIONS = ['read', 'shared']


def vcd_catalog_argument_spec():
    """Return the Ansible argument spec for the vcd_catalog module."""
    return {
        'catalog_name': {'type': 'str', 'required': True},
        'new_catalog_name': {'type': 'str', 'required': False},
        'description': {'type': 'str', 'required': False},
        'shared': {'type': 'bool', 'required': False, 'default': True},
        'state': {'choices': VCD_CATALOG_STATES, 'required': False},
        'operation': {'choices': VCD_CATALOG_OPERATIONS, 'required': False},
    }
class Catalog(VcdAnsibleModule):
    """Ansible module implementation managing vCloud Director catalogs."""

    def __init__(self, **kwargs):
        super(Catalog, self).__init__(**kwargs)
        # Operate within the organization the client is logged into.
        self.org = Org(self.client, resource=self.client.get_org())

    def manage_states(self):
        """Dispatch the requested desired-state action."""
        handlers = {
            'present': self.create,
            'absent': self.delete,
            'update': self.update,
        }
        handler = handlers.get(self.params.get('state'))
        return handler() if handler else None

    def manage_operations(self):
        """Dispatch the requested imperative operation."""
        handlers = {
            'shared': self.shared,
            'read': self.read,
        }
        handler = handlers.get(self.params.get('operation'))
        return handler() if handler else None

    def create(self):
        """Create the catalog unless one with the same name already exists."""
        name = self.params.get('catalog_name')
        outcome = {'changed': False}
        try:
            self.org.get_catalog(name=name)
        except EntityNotFoundException:
            # Absent: safe to create.
            self.org.create_catalog(
                name=name, description=self.params.get('description'))
            outcome['msg'] = 'Catalog {} has been created.'.format(name)
            outcome['changed'] = True
        else:
            outcome['warnings'] = 'Catalog {} is already present.'.format(name)
        return outcome

    def delete(self):
        """Delete the catalog if it exists; warn otherwise."""
        name = self.params.get('catalog_name')
        outcome = {'changed': False}
        try:
            self.org.get_catalog(name=name)
        except EntityNotFoundException:
            outcome['warnings'] = 'Catalog {} is not present.'.format(name)
        else:
            self.org.delete_catalog(name)
            outcome['msg'] = 'Catalog {} has been deleted.'.format(name)
            outcome['changed'] = True
        return outcome

    def update(self):
        """Rename and/or re-describe the catalog."""
        name = self.params.get('catalog_name')
        # Keep the existing name when no new name was supplied.
        new_name = self.params.get('new_catalog_name') or name
        self.org.update_catalog(old_catalog_name=name,
                                new_catalog_name=new_name,
                                description=self.params.get('description'))
        return {'changed': True,
                'msg': 'Catalog {} has been updated.'.format(name)}

    def shared(self):
        """Set the catalog's shared (published) state."""
        name = self.params.get('catalog_name')
        share = self.params.get('shared')
        self.org.share_catalog(name=name, share=share)
        return {
            'changed': True,
            'msg': 'Catalog {} shared state has been updated to '
                   '[shared={}].'.format(name, share),
        }

    def read(self):
        """Return the catalog's name, description and shared state."""
        name = self.params.get('catalog_name')
        catalog = self.org.get_catalog(name)
        details = {
            'name': str(catalog.get("name")),
            'description': str(catalog.Description),
            'shared': str(catalog.IsPublished),
        }
        return {'changed': False, 'msg': details}
def main():
    """Module entry point: dispatch on state/operation and report the result."""
    spec = vcd_catalog_argument_spec()
    response = {'msg': {'type': 'str'}}
    module = Catalog(argument_spec=spec, supports_check_mode=True)
    try:
        if module.params.get('state'):
            response = module.manage_states()
        elif module.params.get('operation'):
            response = module.manage_operations()
        else:
            raise Exception('One of the state/operation should be provided.')
    except Exception as error:
        # Surface the failure message through Ansible's failure channel.
        response['msg'] = str(error)
        module.fail_json(**response)
    module.exit_json(**response)
# Module entry point.
if __name__ == '__main__':
    main()
|
from __future__ import unicode_literals
import logging
import urlparse
from mopidy.audio import PlaybackState
from . import listener
# Module-level logger, namespaced to this module.
logger = logging.getLogger(__name__)
class PlaybackController(object):
pykka_traversable = True
def __init__(self, audio, backends, core):
self.audio = audio
self.backends = backends
self.core = core
self._state = PlaybackState.STOPPED
self._volume = None
self._mute = False
def _get_backend(self):
if self.current_tl_track is None:
return None
uri = self.current_tl_track.track.uri
uri_scheme = urlparse.urlparse(uri).scheme
return self.backends.with_playback.get(uri_scheme, None)
# Properties
def get_current_tl_track(self):
return self.current_tl_track
current_tl_track = None
"""
The currently playing or selected :class:`mopidy.models.TlTrack`, or
:class:`None`.
"""
def get_current_track(self):
return self.current_tl_track and self.current_tl_track.track
current_track = property(get_current_track)
"""
The currently playing or selected :class:`mopidy.models.Track`.
Read-only. Extracted from :attr:`current_tl_track` for convenience.
"""
def get_state(self):
return self._state
def set_state(self, new_state):
(old_state, self._state) = (self.state, new_state)
logger.debug('Changing state: %s -> %s', old_state, new_state)
self._trigger_playback_state_changed(old_state, new_state)
state = property(get_state, set_state)
"""
The playback state. Must be :attr:`PLAYING`, :attr:`PAUSED`, or
:attr:`STOPPED`.
Possible states and transitions:
.. digraph:: state_transitions
"STOPPED" -> "PLAYING" [ label="play" ]
"STOPPED" -> "PAUSED" [ label="pause" ]
"PLAYING" -> "STOPPED" [ label="stop" ]
"PLAYING" -> "PAUSED" [ label="pause" ]
"PLAYING" -> "PLAYING" [ label="play" ]
"PAUSED" -> "PLAYING" [ label="resume" ]
"PAUSED" -> "STOPPED" [ label="stop" ]
"""
def get_time_position(self):
backend = self._get_backend()
if backend:
return backend.playback.get_time_position().get()
else:
return 0
time_position = property(get_time_position)
"""Time position in milliseconds."""
def get_volume(self):
if self.audio:
return self.audio.get_volume().get()
else:
# For testing
return self._volume
def set_volume(self, volume):
if self.audio:
self.audio.set_volume(volume)
else:
# For testing
self._volume = volume
self._trigger_volume_changed(volume)
volume = property(get_volume, set_volume)
"""Volume as int in range [0..100] or :class:`None`"""
def get_mute(self):
if self.audio:
return self.audio.get_mute().get()
else:
# For testing
return self._mute
def set_mute(self, value):
value = bool(value)
if self.audio:
self.audio.set_mute(value)
else:
# For testing
self._mute = value
self._trigger_mute_changed(value)
mute = property(get_mute, set_mute)
"""Mute state as a :class:`True` if muted, :class:`False` otherwise"""
# Methods
def change_track(self, tl_track, on_error_step=1):
"""
Change to the given track, keeping the current playback state.
:param tl_track: track to change to
:type tl_track: :class:`mopidy.models.TlTrack` or :class:`None`
:param on_error_step: direction to step at play error, 1 for next
track (default), -1 for previous track
:type on_error_step: int, -1 or 1
"""
old_state = self.state
self.stop()
self.current_tl_track = tl_track
if old_state == PlaybackState.PLAYING:
self.play(on_error_step=on_error_step)
elif old_state == PlaybackState.PAUSED:
self.pause()
def on_end_of_track(self):
"""
Tell the playback controller that end of track is reached.
Used by event handler in :class:`mopidy.core.Core`.
"""
if self.state == PlaybackState.STOPPED:
return
original_tl_track = self.current_tl_track
next_tl_track = self.core.tracklist.eot_track(original_tl_track)
if next_tl_track:
self.change_track(next_tl_track)
else:
self.stop(clear_current_track=True)
self.core.tracklist.mark_played(original_tl_track)
def on_tracklist_change(self):
"""
Tell the playback controller that the current playlist has changed.
Used by :class:`mopidy.core.TracklistController`.
"""
if self.current_tl_track not in self.core.tracklist.tl_tracks:
self.stop(clear_current_track=True)
def next(self):
"""
Change to the next track.
The current playback state will be kept. If it was playing, playing
will continue. If it was paused, it will still be paused, etc.
"""
tl_track = self.core.tracklist.next_track(self.current_tl_track)
if tl_track:
self.change_track(tl_track)
else:
self.stop(clear_current_track=True)
def pause(self):
"""Pause playback."""
backend = self._get_backend()
if not backend or backend.playback.pause().get():
self.state = PlaybackState.PAUSED
self._trigger_track_playback_paused()
def play(self, tl_track=None, on_error_step=1):
"""
Play the given track, or if the given track is :class:`None`, play the
currently active track.
:param tl_track: track to play
:type tl_track: :class:`mopidy.models.TlTrack` or :class:`None`
:param on_error_step: direction to step at play error, 1 for next
track (default), -1 for previous track
:type on_error_step: int, -1 or 1
"""
assert on_error_step in (-1, 1)
if tl_track is None:
if self.state == PlaybackState.PAUSED:
return self.resume()
if self.current_tl_track is not None:
tl_track = self.current_tl_track
else:
if on_error_step == 1:
tl_track = self.core.tracklist.next_track(tl_track)
elif on_error_step == -1:
tl_track = self.core.tracklist.previous_track(tl_track)
if tl_track is None:
return
assert tl_track in self.core.tracklist.tl_tracks
if self.state == PlaybackState.PLAYING:
self.stop()
self.current_tl_track = tl_track
self.state = PlaybackState.PLAYING
backend = self._get_backend()
success = backend and backend.playback.play(tl_track.track).get()
if success:
self.core.tracklist.mark_playing(tl_track)
self._trigger_track_playback_started()
else:
self.core.tracklist.mark_unplayable(tl_track)
if on_error_step == 1:
# TODO: can cause an endless loop for single track repeat.
self.next()
elif on_error_step == -1:
self.previous()
def previous(self):
"""
Change to the previous track.
The current playback state will be kept. If it was playing, playing
will continue. If it was paused, it will still be paused, etc.
"""
tl_track = self.current_tl_track
self.change_track(
self.core.tracklist.previous_track(tl_track), on_error_step=-1)
def resume(self):
"""If paused, resume playing the current track."""
if self.state != PlaybackState.PAUSED:
return
backend = self._get_backend()
if backend and backend.playback.resume().get():
self.state = PlaybackState.PLAYING
self._trigger_track_playback_resumed()
def seek(self, time_position):
"""
Seeks to time position given in milliseconds.
:param time_position: time position in milliseconds
:type time_position: int
:rtype: :class:`True` if successful, else :class:`False`
"""
if not self.core.tracklist.tracks:
return False
if self.state == PlaybackState.STOPPED:
self.play()
elif self.state == PlaybackState.PAUSED:
self.resume()
if time_position < 0:
time_position = 0
elif time_position > self.current_track.length:
self.next()
return True
backend = self._get_backend()
if not backend:
return False
success = backend.playback.seek(time_position).get()
if success:
self._trigger_seeked(time_position)
return success
def stop(self, clear_current_track=False):
"""
Stop playing.
:param clear_current_track: whether to clear the current track _after_
stopping
:type clear_current_track: boolean
"""
if self.state != PlaybackState.STOPPED:
backend = self._get_backend()
time_position_before_stop = self.time_position
if not backend or backend.playback.stop().get():
self.state = PlaybackState.STOPPED
self._trigger_track_playback_ended(time_position_before_stop)
if clear_current_track:
self.current_tl_track = None
def _trigger_track_playback_paused(self):
    """Emit the ``track_playback_paused`` core event for the current track."""
    logger.debug('Triggering track playback paused event')
    if self.current_track is None:
        return
    payload = {
        'tl_track': self.current_tl_track,
        'time_position': self.time_position,
    }
    listener.CoreListener.send('track_playback_paused', **payload)
def _trigger_track_playback_resumed(self):
    """Emit the ``track_playback_resumed`` core event for the current track."""
    logger.debug('Triggering track playback resumed event')
    if self.current_track is None:
        return
    payload = {
        'tl_track': self.current_tl_track,
        'time_position': self.time_position,
    }
    listener.CoreListener.send('track_playback_resumed', **payload)
def _trigger_track_playback_started(self):
    """Emit the ``track_playback_started`` core event, if a track is loaded."""
    logger.debug('Triggering track playback started event')
    if self.current_tl_track is None:
        return
    listener.CoreListener.send(
        'track_playback_started', tl_track=self.current_tl_track)
def _trigger_track_playback_ended(self, time_position_before_stop):
    """Emit ``track_playback_ended`` with the position captured before stop.

    :param time_position_before_stop: playback position (ms) read before the
        backend stopped the stream
    """
    logger.debug('Triggering track playback ended event')
    if self.current_tl_track is None:
        return
    payload = {
        'tl_track': self.current_tl_track,
        'time_position': time_position_before_stop,
    }
    listener.CoreListener.send('track_playback_ended', **payload)
def _trigger_playback_state_changed(self, old_state, new_state):
    """Emit ``playback_state_changed`` with the old and new state."""
    logger.debug('Triggering playback state change event')
    listener.CoreListener.send(
        'playback_state_changed',
        old_state=old_state,
        new_state=new_state)
def _trigger_volume_changed(self, volume):
    """Emit ``volume_changed`` with the new volume level."""
    logger.debug('Triggering volume changed event')
    listener.CoreListener.send('volume_changed', volume=volume)
def _trigger_mute_changed(self, mute):
    """Emit ``mute_changed`` with the new mute flag."""
    logger.debug('Triggering mute changed event')
    listener.CoreListener.send('mute_changed', mute=mute)
def _trigger_seeked(self, time_position):
    """Emit ``seeked`` with the position (ms) that was seeked to."""
    logger.debug('Triggering seeked event')
    listener.CoreListener.send('seeked', time_position=time_position)
|
<filename>splendiferous/splendcards.py
import pygame
import random
import splendconstants as ants
pass  # NOTE(review): leftover no-op — comment says "this is all unused now"; candidate for removal once confirmed nothing imports stale names
def draw_gem(screen, color, x, y):
    """Draw a hexagonal gem outline (5 px stroke) with its top-left bounding
    corner at (x, y) on *screen*."""
    outline = [
        [x + 10, y],
        [x, y + 10],
        [x, y + 20],
        [x + 10, y + 30],
        [x + 20, y + 20],
        [x + 20, y + 10],
    ]
    pygame.draw.polygon(screen, color, outline, 5)
def draw_gem_small(screen, color, x, y):
    """Draw a small filled hexagonal gem with its top-left bounding corner
    at (x, y) on *screen*."""
    outline = [
        [x + 3, y],
        [x, y + 3],
        [x, y + 6],
        [x + 3, y + 9],
        [x + 6, y + 6],
        [x + 6, y + 3],
    ]
    pygame.draw.polygon(screen, color, outline)
class Token_Bank(object):
    """A bank of gem tokens of a single colour, drawn as a rimmed coin."""

    color = ants.GEM_DIAM  # class-level default; every instance overrides it

    def __init__(self, color):
        """:param color: RGB colour tuple for this bank's gem."""
        self.color = color

    def draw(self, screen, x, y):
        """Draw the token at (x, y): outer rim, inner face, then the gem glyph."""
        center = [x + 20, y + 20]
        pygame.draw.circle(screen, ants.TOKEN2, center, 22)
        pygame.draw.circle(screen, ants.TOKEN, center, 20)
        draw_gem(screen, self.color, x + 9, y + 5)
class Mine(object):
    """A purchasable mine card: a coloured header strip, a body, the gem it
    produces, and an optional victory-point badge."""

    victory_point_value = 0
    color = ants.GEM_DIAM
    costs = [7, 7, 7, 7, 7]
    x = 1
    y = 1

    def __init__(self, color, vp, costs, x, y):
        """
        :param color: gem colour this mine produces
        :param vp: victory points awarded by the card
        :param costs: per-gem purchase cost list
        :param x: screen x of the top-left corner
        :param y: screen y of the top-left corner
        """
        self.color = color
        self.victory_point_value = vp
        self.costs = costs
        self.x = x
        self.y = y

    def draw(self, screen):
        """Render the card onto *screen* at its stored position."""
        header = [self.x, self.y, 60, 15]
        body = [self.x, self.y + 15, 60, 45]
        pygame.draw.rect(screen, self.color, header)
        pygame.draw.rect(screen, ants.MINE_BACK, body)
        draw_gem(screen, self.color, self.x + 5, self.y + 20)
        if self.victory_point_value > 0:
            # NOTE(review): `font` is not defined anywhere in this module —
            # presumably a module-level pygame font created elsewhere after
            # pygame.font.init(); confirm before relying on this branch.
            badge = font.render("+" + str(self.victory_point_value),
                                True, ants.WHITE)
            screen.blit(badge, [self.x + 45, self.y + 3])
def pick_two(max=4):
    """Pick two distinct integers from 0 to *max* inclusive.

    :param max: inclusive upper bound of the range (default 4). The name
        shadows the builtin ``max`` but is kept for backward compatibility.
    :returns: tuple of the two picks, smallest first
    """
    # Draw both from 0..max-1 to "leave room" for the shift below, which
    # guarantees the two results are distinct and cover 0..max uniformly.
    num1 = random.randint(0, max - 1)
    num2 = random.randint(0, max - 1)
    if num2 >= num1:
        # Shift the second pick past the first so they never collide.
        return (num1, num2 + 1)
    return (num2, num1)  # smaller number first
class Noble_Card(object):
    """A noble tile that awards victory points when its per-gem requirements
    ("wants") are satisfied.

    NOTE(review): ``draw`` references a module-level ``font`` that is not
    defined in this module — presumably a pygame font created elsewhere
    after ``pygame.font.init()``; confirm before use.
    """

    victory_point_value = 3        # every noble is worth the same amount
    wants = [4, 4, 4, 4, 4]        # class default: higher than any expectation
    x = 1
    y = 1

    def __init__(self, x, y, wants=None):
        """Place a noble at (*x*, *y*).

        :param wants: per-gem requirement list. When omitted (or empty) a
            random requirement is generated: either two gems at 4, or three
            gems at 3. The default is ``None`` instead of the previous
            mutable ``[]`` to avoid the shared-default-argument pitfall.
        """
        self.x = x
        self.y = y
        if not wants:
            num1, num2 = pick_two()
            if random.randint(0, 1):
                # two requirements of 4
                self.wants = [0, 0, 0, 0, 0]
                self.wants[num1] = 4
                self.wants[num2] = 4
            else:
                # three requirements of 3
                self.wants = [3, 3, 3, 3, 3]
                self.wants[num1] = 0
                self.wants[num2] = 0
        else:
            self.wants = wants

    def draw(self, screen):
        """Render the noble card onto *screen* at its stored position."""
        # upper left corner x and y then width and height (downward)
        pygame.draw.rect(screen, ants.NOBLE_BACK, [self.x, self.y, 50, 50])
        line_offset = 2
        for gem, want in enumerate(self.wants):
            if want > 0:
                draw_gem_small(screen, ants.GEM_ORDER[gem],
                               self.x + 2, self.y + line_offset)
                text = font.render(str(want), True, ants.WHITE)
                screen.blit(text, [self.x + 12, self.y + line_offset - 2])
                line_offset = line_offset + 12
        text = font.render("+" + str(self.victory_point_value),
                           True, ants.WHITE)
        screen.blit(text, [self.x + 30, self.y + 30])
|
#!/usr/bin/env python
# Copyright (c) 2019, IRIS-HEP
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import unittest
import numpy
import pytest
import awkward
awkward_cpp = pytest.importorskip("awkward.cpp")
class Test(unittest.TestCase):
    """Unit tests for the awkward.cpp JaggedArray index-conversion kernels."""

    def runTest(self):
        # Kept so the class can be instantiated as a single TestCase;
        # the real work happens in the test_* methods.
        pass

    def test_cpp_offsets2parents_int64_pos(self):
        """offsets2parents maps each flat index to its sublist (int64 input)."""
        offsets = numpy.array([0, 2, 4, 4, 7], dtype=numpy.int64)
        parents = awkward_cpp.JaggedArray.offsets2parents(offsets)
        assert parents.tolist() == [0, 0, 1, 1, 3, 3, 3]

    def test_cpp_offsets2parents_int32_pos(self):
        """offsets2parents maps each flat index to its sublist (int32 input)."""
        offsets = numpy.array([0, 2, 4, 4, 7], dtype=numpy.int32)
        parents = awkward_cpp.JaggedArray.offsets2parents(offsets)
        assert parents.tolist() == [0, 0, 1, 1, 3, 3, 3]

    def test_cpp_offsets2parents_int64_neg(self):
        """An empty int64 offsets array is invalid and must raise ValueError."""
        offsets = numpy.array([], dtype=numpy.int64)
        # assertRaises replaces the previous manual thrown-flag try/except.
        with self.assertRaises(ValueError):
            awkward_cpp.JaggedArray.offsets2parents(offsets)

    def test_cpp_offsets2parents_int32_neg(self):
        """An empty int32 offsets array is invalid and must raise ValueError."""
        offsets = numpy.array([], dtype=numpy.int32)
        with self.assertRaises(ValueError):
            awkward_cpp.JaggedArray.offsets2parents(offsets)

    def test_cpp_counts2offsets_int64_pos(self):
        """counts2offsets is a cumulative sum with a leading zero (int64)."""
        counts = numpy.array([4, 0, 3, 4, 1], dtype=numpy.int64)
        offsets = awkward_cpp.JaggedArray.counts2offsets(counts)
        assert offsets.tolist() == [0, 4, 4, 7, 11, 12]

    def test_cpp_counts2offsets_int32_pos(self):
        """counts2offsets is a cumulative sum with a leading zero (int32)."""
        counts = numpy.array([4, 0, 3, 4, 1], dtype=numpy.int32)
        offsets = awkward_cpp.JaggedArray.counts2offsets(counts)
        assert offsets.tolist() == [0, 4, 4, 7, 11, 12]

    def test_cpp_startsstops2parents_int64_pos(self):
        """startsstops2parents marks gaps between sublists with -1 (int64)."""
        starts = numpy.array([0, 4, 5, 9], dtype=numpy.int64)
        stops = numpy.array([1, 6, 7, 10], dtype=numpy.int64)
        parents = awkward_cpp.JaggedArray.startsstops2parents(starts, stops)
        assert parents.tolist() == [0, -1, -1, -1, 1, 2, 2, -1, -1, 3]

    def test_cpp_startsstops2parents_int32_pos(self):
        """startsstops2parents marks gaps between sublists with -1 (int32)."""
        starts = numpy.array([0, 4, 5, 9], dtype=numpy.int32)
        stops = numpy.array([1, 6, 7, 10], dtype=numpy.int32)
        parents = awkward_cpp.JaggedArray.startsstops2parents(starts, stops)
        assert parents.tolist() == [0, -1, -1, -1, 1, 2, 2, -1, -1, 3]

    def test_cpp_startsstops2parents_neg(self):
        """Mismatched dtypes / out-of-range stops must raise ValueError."""
        starts = numpy.array([0, 4, 5, 11], dtype=numpy.int64)
        stops = numpy.array([1, 6, 7, 12], dtype=numpy.int32)
        with self.assertRaises(ValueError):
            awkward_cpp.JaggedArray.startsstops2parents(starts, stops)
|
<reponame>Firefly-Drone-Shows/flight_review
""" Methods for sending notification emails """
from __future__ import print_function
import sys
import os
from smtplib import SMTP_SSL as SMTP # this invokes the secure SMTP protocol
# (port 465, uses SSL)
# from smtplib import SMTP # use this for standard SMTP protocol
# (port 25, no encryption)
from email.mime.text import MIMEText
# this is needed for the following imports
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'plot_app'))
from config import *
def send_notification_email(email_address, plot_url, delete_url, info):
    """ send a notification email after uploading a plot

    :param info: dictionary with additional info
    :return: True on success, or when no address is configured
    """
    # No address configured: silently treat as success.
    if email_address == '':
        return True

    description = info['description']
    if description == '':
        description = info['airframe']
    if 'vehicle_name' in info:
        description = "{:} - {:}".format(description, info['vehicle_name'])

    # Unconditional slice keeps the subject within 78 characters.
    subject = "Log File uploaded ({:})".format(description)[:78]

    content = """\
Hi there!
Your uploaded log file is available under:
{plot_url}
Description: {description}
Feedback: {feedback}
Vehicle type: {type}
Airframe: {airframe}
Hardware: {hardware}
Vehicle UUID: {uuid}
Software git hash: {software}
Upload file name: {upload_filename}
Use the following link to delete the log:
{delete_url}
""".format(plot_url=plot_url, delete_url=delete_url, **info)

    return _send_email([email_address], subject, content)
def send_flightreport_email(destination, plot_url, rating_description,
                            wind_speed, delete_url, uploader_email, info):
    """ send notification email for a flight report upload

    :param destination: list of recipient addresses (no-op when empty)
    :param info: dictionary with additional report info
    :return: True on success, or when there are no recipients
    """
    if not destination:
        return True

    content = """\
Hi
A new flight report just got uploaded:
{plot_url}
Description: {description}
Feedback: {feedback}
Rating: {rating_description}
Wind Speed: {wind_speed}
Uploader: {uploader_email}
Vehicle type: {type}
Airframe: {airframe}
Hardware: {hardware}
Vehicle UUID: {uuid}
Software git hash: {software}
Use the following link to delete the log:
{delete_url}
""".format(plot_url=plot_url,
           rating_description=rating_description, wind_speed=wind_speed,
           delete_url=delete_url, uploader_email=uploader_email, **info)

    description = info['description']
    if description == '':
        description = info['airframe']
    if 'vehicle_name' in info:
        description = "{:} - {:}".format(description, info['vehicle_name'])

    subject = "Flight Report uploaded ({:})".format(description)
    if info['rating'] == 'crash_sw_hw':
        subject = '[CRASH] ' + subject
    # Unconditional slice keeps the subject within 78 characters.
    subject = subject[:78]

    return _send_email(destination, subject, content)
def _send_email(destination, subject, content):
    """ common method for sending an email to one or more destinations

    :param destination: list of recipient addresses
    :return: True when the mail was handed to the SMTP server, else False
    """
    try:
        # typical values for text_subtype are plain, html, xml
        msg = MIMEText(content, 'plain')
        msg['Subject'] = subject
        sender = email_config['sender']
        msg['From'] = sender  # some SMTP servers will do this automatically

        conn = SMTP(email_config['smtpserver'], timeout=15)
        conn.set_debuglevel(False)
        conn.login(email_config['user_name'], email_config['password'])
        try:
            conn.sendmail(sender, destination, msg.as_string())
        finally:
            # Always close the connection, even when sendmail fails.
            conn.quit()
    except Exception as exc:
        # Best-effort: report the failure but never propagate.
        print("mail failed; {:}".format(str(exc)))
        return False
    else:
        return True
|
<filename>geosoft/gxapi/GXFFT.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
### endblock ClassImports
### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header
### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXFFT(gxapi_cy.WrapFFT):
    """
    GXFFT class.

    This class allows for the application of predefined
    filters to data in an OASIS database. The system uses
    the Winograd algorithm to transform data in the spatial
    domain to the wavenumber or Fourier domain.
    """

    # NOTE(review): generator-emitted wrapper code (see the surrounding
    # "Do not edit anything here, it is generated code" block markers).
    # Every method simply delegates to the underlying WrapFFT native
    # implementation; change the code generator, not this block.

    def __init__(self, handle=0):
        super(GXFFT, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXFFT <geosoft.gxapi.GXFFT>`

        :returns: A null `GXFFT <geosoft.gxapi.GXFFT>`
        :rtype: GXFFT
        """
        return GXFFT()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype: bool
        """
        return self._internal_handle() == 0

    # Miscellaneous

    def add_white_noise(self, amp, option):
        """
        Add white noise to the power spectrum of an FFT object.

        :param amp: The value added to the real part of all non-DC components of the current power spectrum
        :param option: :ref:`FFT_WHITE_NOISE`
        :type amp: float
        :type option: int

        .. versionadded:: 9.9

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._add_white_noise(amp, option)

    def app_dens(self, thick, dens):
        """
        Appparent density filter

        :param thick: Thickness (meters) of the earth model
        :param dens: Background density (g/cm3) (default = 0)
        :type thick: float
        :type dens: float

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._app_dens(thick, dens)

    def app_susc(self, strength):
        """
        Apparent susceptiblity filter

        :param strength: Total magnetic field strength
        :type strength: float

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** Reduction to magnetic pole (`red_pol <geosoft.gxapi.GXFFT.red_pol>`) and downward continuation
        (`contin <geosoft.gxapi.GXFFT.contin>`) should be called BEFORE using `app_susc <geosoft.gxapi.GXFFT.app_susc>`.
        """
        self._app_susc(strength)

    def band_pass(self, llen, hlen, pass_defined):
        """
        Bandpass filter (using low and high wavelength cutoffs)

        :param llen: Low Cutoff wavelength (meters)
        :param hlen: High Cutoff wavelength (meter)
        :param pass_defined: 1= Pass the defined band (default); 0= Reject the band
        :type llen: float
        :type hlen: float
        :type pass_defined: int

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._band_pass(llen, hlen, pass_defined)

    def b_worth(self, clen, degree, filter_type):
        """
        Butterworth filter

        :param clen: Central cutoff wavelength (meter)
        :param degree: Degree of the filter function (default = 8.0)
        :param filter_type: Filter type: 1= Low-pass (regional) filter (default) 0= High-pass (residual) filter
        :type clen: float
        :type degree: float
        :type filter_type: int

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._b_worth(clen, degree, filter_type)

    def rc_filter(self, clen, filter_type):
        """
        RC filter

        :param clen: Central cutoff wavelength (meter)
        :param filter_type: Filter type: 1= Low-pass (regional) filter (default) 0= High-pass (residual) filter
        :type clen: float
        :type filter_type: int

        .. versionadded:: 8.5

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._rc_filter(clen, filter_type)

    def contin(self, dist):
        """
        Upward/Downward continuation filter

        :param dist: Distance to continue; positive = downwards negative = upwards
        :type dist: float

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._contin(dist)

    def cos_roll(self, llen, hlen, degree, type):
        """
        Cosine roll-off filter

        :param llen: Low wavelength start point (meters)
        :param hlen: High wavelength end point (meters)
        :param degree: Degree of the filter function (default = 2.0)
        :param type: Filter type: 1= Low-pass (regional) filter (default) 0= High-pass (residual) filter
        :type llen: float
        :type hlen: float
        :type degree: float
        :type type: int

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._cos_roll(llen, hlen, degree, type)

    @classmethod
    def create(cls, gvv, interv, trend):
        """
        Create a New `GXFFT <geosoft.gxapi.GXFFT>` with detrend options.

        :param gvv: `GXVV <geosoft.gxapi.GXVV>` to transform.
        :param interv: Element space interval
        :param trend: :ref:`FFT_DETREND`
        :type gvv: GXVV
        :type interv: float
        :type trend: int

        :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object
        :rtype: GXFFT

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The detrending options control the removal of a trend from the data
        before the `GXFFT <geosoft.gxapi.GXFFT>` is applied. The default data expansion is 10% before `GXFFT <geosoft.gxapi.GXFFT>`.
        """
        ret_val = gxapi_cy.WrapFFT._create(GXContext._get_tls_geo(), gvv, interv, trend)
        return GXFFT(ret_val)

    @classmethod
    def create_ex(cls, gvv, interv, trend, expansion):
        """
        Create a New `GXFFT <geosoft.gxapi.GXFFT>` with detrend and expansion options.

        :param gvv: `GXVV <geosoft.gxapi.GXVV>` to transform.
        :param interv: Element space interval
        :param trend: :ref:`FFT_DETREND`
        :param expansion: Minimum expansion %
        :type gvv: GXVV
        :type interv: float
        :type trend: int
        :type expansion: float

        :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object
        :rtype: GXFFT

        .. versionadded:: 5.1.8

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The detrending options control the removal of a trend from the data
        before the `GXFFT <geosoft.gxapi.GXFFT>` is applied. The expansion options control the minimum
        data expansion before the `GXFFT <geosoft.gxapi.GXFFT>` is applied.
        """
        ret_val = gxapi_cy.WrapFFT._create_ex(GXContext._get_tls_geo(), gvv, interv, trend, expansion)
        return GXFFT(ret_val)

    @classmethod
    def create_ref(cls, gvv, interv, trend):
        """
        Create `GXFFT <geosoft.gxapi.GXFFT>` object with detrend options from reference (original) channel,
        but no `GXFFT <geosoft.gxapi.GXFFT>` process.

        :param gvv: `GXVV <geosoft.gxapi.GXVV>` contains channel data to perform `GXFFT <geosoft.gxapi.GXFFT>` operations upon.
        :param interv: Element space interval, should be the same as in `create_ex <geosoft.gxapi.GXFFT.create_ex>` call
        :param trend: :ref:`FFT_DETREND`
        :type gvv: GXVV
        :type interv: float
        :type trend: int

        :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object
        :rtype: GXFFT

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** This just creates an object. It is intended to be called
        immediately after with `set_vv <geosoft.gxapi.GXFFT.set_vv>`.
        """
        ret_val = gxapi_cy.WrapFFT._create_ref(GXContext._get_tls_geo(), gvv, interv, trend)
        return GXFFT(ret_val)

    @classmethod
    def create_ref_ex(cls, gvv, interv, trend, expansion, d_cmult):
        """
        Create `GXFFT <geosoft.gxapi.GXFFT>` object with detrend and expansion options from reference (original) channel,
        but no `GXFFT <geosoft.gxapi.GXFFT>` process.

        :param gvv: `GXVV <geosoft.gxapi.GXVV>` contains channel data to perform `GXFFT <geosoft.gxapi.GXFFT>` operations upon.
        :param interv: Element space interval, should be the same as in `create_ex <geosoft.gxapi.GXFFT.create_ex>` call
        :param trend: :ref:`FFT_DETREND`
        :param expansion: Minimum expansion %, should be the same as in `create_ex <geosoft.gxapi.GXFFT.create_ex>` call
        :param d_cmult: DC level multiple
        :type gvv: GXVV
        :type interv: float
        :type trend: int
        :type expansion: float
        :type d_cmult: float

        :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object
        :rtype: GXFFT

        .. versionadded:: 5.1.8

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** This just creates an object. It is intended to be called
        immediately after with `set_vv <geosoft.gxapi.GXFFT.set_vv>`.
        """
        ret_val = gxapi_cy.WrapFFT._create_ref_ex(GXContext._get_tls_geo(), gvv, interv, trend, expansion, d_cmult)
        return GXFFT(ret_val)

    def gaus(self, dev, type):
        """
        Gaussian filter

        :param dev: Standard deviation cutoff of function (meters)
        :param type: Filter type: 1= Low-pass (residual) filter (default) 0= High-pass (regional) filter
        :type dev: float
        :type type: int

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._gaus(dev, type)

    def get_vv(self, gv_vr, gv_vi):
        """
        Copies real and imaginary `GXVV <geosoft.gxapi.GXVV>`'s to user `GXVV <geosoft.gxapi.GXVV>`'s.

        :param gv_vr: Real component
        :param gv_vi: Imaginary component
        :type gv_vr: GXVV
        :type gv_vi: GXVV

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._get_vv(gv_vr, gv_vi)

    def h_drv(self, order):
        """
        Horizontal derivative

        :param order: Order of differentiation (default = 1)
        :type order: float

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._h_drv(order)

    def high_pass(self, wlen, fid_int):
        """
        High bandpass filter

        :param wlen: Cutoff wavelength (meter)
        :param fid_int: Fiducial increment of the `GXFFT <geosoft.gxapi.GXFFT>`'s channel data
        :type wlen: float
        :type fid_int: float

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._high_pass(wlen, fid_int)

    def h_int(self):
        """
        Horizontal integration

        .. versionadded:: 5.1.4

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._h_int()

    def inverse(self, gvv, gv_vm):
        """
        Inverse the `GXFFT <geosoft.gxapi.GXFFT>` from wave number domain to space domain

        :param gvv: Output `GXVV <geosoft.gxapi.GXVV>`
        :param gv_vm: Original `GXVV <geosoft.gxapi.GXVV>` which was used to create `GXFFT <geosoft.gxapi.GXFFT>` (will be used as mask for output `GXVV <geosoft.gxapi.GXVV>`; no masking if this parameter is NULL)
        :type gvv: GXVV
        :type gv_vm: GXVV

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._inverse(gvv, gv_vm)

    def low_pass(self, wlen):
        """
        Low bandpass filter

        :param wlen: Cutoff wavelength (meters)
        :type wlen: float

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._low_pass(wlen)

    def red_pol(self, inc, dec, incp, dir):
        """
        Reduction to magnetic pole

        :param inc: Geomagnetic inclination (degrees)
        :param dec: Geomagnetic declination (degrees)
        :param incp: Inclination (degrees) for amplitude correction (default = 20.0)
        :param dir: Direction (degrees) of Line from North
        :type inc: float
        :type dec: float
        :type incp: float
        :type dir: float

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._red_pol(inc, dec, incp, dir)

    def nyquist(self):
        """
        Gets the Nyquist frequency (wavenumbers/sample unit).

        :returns: Nyquist frequency (wavenumbers/sample unit).
        :rtype: float

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        ret_val = self._nyquist()
        return ret_val

    def samp_incr(self):
        """
        Gets the original sample increment.

        :returns: Original sample increment.
        :rtype: float

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        ret_val = self._samp_incr()
        return ret_val

    def wave_incr(self):
        """
        Get the wave number increment.

        :returns: Wave number increment
        :rtype: float

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        ret_val = self._wave_incr()
        return ret_val

    def set_vv(self, gv_vr, gv_vi):
        """
        Sets real and imaginary VVs in `GXFFT <geosoft.gxapi.GXFFT>`.

        :param gv_vr: Real component
        :param gv_vi: Imaginary component
        :type gv_vr: GXVV
        :type gv_vi: GXVV

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The `GXVV <geosoft.gxapi.GXVV>` must have been obtained from the same `GXFFT <geosoft.gxapi.GXFFT>`
        using the `set_vv <geosoft.gxapi.GXFFT.set_vv>` method.
        """
        self._set_vv(gv_vr, gv_vi)

    def spectrum(self, gvv):
        """
        Calculates a power spectrum

        :param gvv: Output power spectrum `GXVV <geosoft.gxapi.GXVV>`
        :type gvv: GXVV

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._spectrum(gvv)

    def v_drv(self, order):
        """
        Vertical derivative

        :param order: Order of differentiation (default = 1)
        :type order: float

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._v_drv(order)

    def v_int(self):
        """
        Vertical integration

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._v_int()

    def write_spectrum(self, gvv, out_file):
        """
        Writes a power spectrum to a file

        :param gvv: Output power spectrum `GXVV <geosoft.gxapi.GXVV>`
        :param out_file: File name for output spectrum
        :type gvv: GXVV
        :type out_file: str

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._write_spectrum(gvv, out_file.encode())
### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend
### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer |
<gh_stars>10-100
#!/usr/bin/env python3
from datetime import datetime
import traceback
from enum import Enum
import logging.config
import sys
import re
from hashlib import sha1
from zeep import Client
import hmac
from binascii import hexlify,unhexlify
import struct
import base64
import xml.etree.ElementTree as etree
import pickle
# --- user-configurable area ---------------------------------------------
AUTH_FILE="authstate.dat" # pickle file persisting the AuthState between runs
DEBUG=False # print request/response debug output from IzlyClient.dbg()
TRACE=False # dump raw SOAP traffic via enable_trace()
#exceptions
class IzlyError(Exception):
    """Base error for all Izly client failures (server returned <Error>)."""
    pass
class LogonFailure(IzlyError):
    """Raised when any of the Logon SOAP calls is rejected by the server."""
    pass
# for debugging
def enable_trace():
    """Route zeep's transport logger to stdout at DEBUG level.

    Dumps the raw SOAP requests/responses; enabled when TRACE is set.
    """
    logging.config.dictConfig({
        'version': 1,
        'formatters': {
            'verbose': {
                'format': '%(name)s: %(message)s'
            }
        },
        'handlers': {
            'console': {
                'level': 'DEBUG',
                'class': 'logging.StreamHandler',
                'formatter': 'verbose',
            },
        },
        'loggers': {
            # zeep.transports logs the full wire-level payloads
            'zeep.transports': {
                'level': 'DEBUG',
                'propagate': True,
                'handlers': ['console'],
            },
        }
    })
# Signature-generating function helpers
def izly_auth(activation, counter):
    """Compute the one-time password for logon step *counter*.

    HMAC-SHA1 of the big-endian 64-bit counter, keyed with the decoded
    activation secret, then encoded with the URL-safe base64 alphabet.

    :param activation: base64-encoded activation secret
    :param counter: monotonically increasing integer counter
    :returns: URL-safe base64 OTP string
    """
    key = base64.b64decode(activation)
    digest = hmac.new(key, struct.pack(">Q", counter), sha1).digest()
    # urlsafe_b64encode performs exactly the "+"->"-" and "/"->"_"
    # substitutions the previous re.sub("\+"...) calls emulated, without
    # the invalid-escape-sequence warnings those non-raw patterns raise.
    return base64.urlsafe_b64encode(digest).decode("ascii")
def izly_pay(userid, session_id, cardid, amount, otp):
    """Compute the payment-request signature.

    Signs "userid,session_id,cardid,amount,otp" with HMAC-SHA1 keyed on the
    OTP itself, URL-safe base64 encoded.

    :returns: URL-safe base64 signature string
    """
    message = ",".join((userid, session_id, cardid, amount, otp))
    digest = hmac.new(otp.encode("ascii"), message.encode("ascii"), sha1).digest()
    # Same +/- and /_ substitution the old re.sub("\+"...) pair performed,
    # without the invalid-escape-sequence warnings.
    return base64.urlsafe_b64encode(digest).decode("ascii")
#object representing authentication state (persisted into "pickle" file)
class AuthState(object):
    """Authentication state, persisted between runs (pickled to AUTH_FILE)."""

    def __init__(self, counter=0):
        """Start with an empty state; *counter* seeds the OTP counter."""
        self.user = None        # phone number used as the account id
        self.token = None       # OAuth bearer token for the current session
        self.counter = counter  # rolling HOTP-style counter
        self.session_id = None  # server-side session identifier
        self.act_code = None    # activation secret obtained at enrolment

    def ensure_logon_step1_done(self):
        """Raise ValueError unless the activation code has been obtained."""
        if self.act_code is None:
            raise ValueError

    def ensure_logon_step2_done(self):
        """Raise ValueError unless a bearer token is available."""
        if self.token is None:
            raise ValueError
#izly client (network layer)
#l'objet IzlyClient contient une methode generique req() pour la plupart des requetes
#les requetes necessitant un traitement specifique (do_login_XXXX) sont implementees par des methodes specifiques
#le membre auth_state contient l'etat d'authentification qui est persiste dans authstate.dat via pickle
class IzlyClient(object):
def __init__(self, url, debug=DEBUG, trace=TRACE):
self.url = url
self.debug = debug
if trace:
enable_trace()
def dbg(self, *args, **kwargs):
if self.debug:
print(*args, **kwargs)
def dump(self, tree, indent=0):
if (indent == 0):
print("Resultat de la requete SOAP: ")
print(indent * " " + tree.tag)
for k in sorted(tree.keys()):
print(indent * " " + k + " --> " + tree.get(k))
if (tree.text is not None):
print(indent * " " + "valeur: " + tree.text)
for i in tree:
self.dump(i, indent + 2)
def connect_soap(self):
self.client = Client(self.url)
def req(self, name, **kwargs):
self.auth_state.ensure_logon_step2_done()
self.client.transport.session.headers.update({'Authorization': 'Bearer ' + self.auth_state.token})
dic = {**kwargs, **{"userId": self.auth_state.user, "model": "A", "format": "T", "channel": "AIZ", "version": "6.0", "sessionId": self.auth_state.session_id}}
self.dbg("REQUEST: " + name + " with args " + str(dic))
response = self.client.service.__getattr__(name)(**dic)
if response is not None:
self.dbg("RESPONSE: " + response)
xml = etree.fromstring(response)
if xml.find("Error") is not None:
self.dbg("ERROR: " + xml.find("Msg").text)
raise IzlyError(xml.find("Msg").text)
return response
def do_logon_step1(self, phone, pw):
result = self.client.service.Logon(user=phone,
password=pw,
smoneyClientType="PART",
rooted=0, model="A",
format="T",
channel="AIZ")
xml = etree.fromstring(result)
if xml.find("Error") is not None:
self.dbg("ERROR: " + xml.find("Msg").text)
raise LogonFailure(xml.find("Msg").text)
self.auth_state.user = phone
return xml
def do_logon_simple(self, pw):
otp = self.get_otp()
result = self.client.service.Logon(user=self.auth_state.user,
password=pw,
passOTP=pw + otp,
smoneyClientType="PART",
rooted=0, model="A",
format="T",
channel="AIZ")
xml = etree.fromstring(result)
if xml.find("Error") is not None:
self.dbg("ERROR: " + xml.find("Msg").text)
raise LogonFailure(xml.find("Msg").text)
self.auth_state.session_id = xml.find("SID").text
self.auth_state.token = xml.find("OAUTH").find("ACCESS_TOKEN").text
self.dbg("Session id:" + self.auth_state.session_id)
self.dbg("Token:" + self.auth_state.token)
return result
def get_otp(self):
    """Compute the next one-time password from the activation code and
    advance the local OTP counter (must stay in sync with the server)."""
    otp = izly_auth(self.auth_state.act_code, self.auth_state.counter)
    self.auth_state.counter += 1
    return otp
def save(self):
    """Persist the authentication state to AUTH_FILE.

    Uses a context manager so the file handle is closed even if pickling
    fails (the original left the handle dangling).
    """
    with open(AUTH_FILE, "wb") as fh:
        pickle.dump(self.auth_state, fh)
def do_logon_step2(self):
    """Second authentication phase: log on with the OTP derived from the
    SMS activation code, then store the session id and bearer token.

    Raises LogonFailure when the server returns an <Error> element.
    """
    self.auth_state.ensure_logon_step1_done()
    otp = self.get_otp()
    result = self.client.service.Logon(user=self.auth_state.user,
                                       passOTP=otp,
                                       password="",
                                       smoneyClientType="PART",
                                       rooted=0, model="A",
                                       format="T",
                                       channel="AIZ")
    xml = etree.fromstring(result)
    if xml.find("Error") is not None:
        self.dbg("ERROR: " + xml.find("Msg").text)
        raise LogonFailure(xml.find("Msg").text)
    self.auth_state.session_id = xml.find("SID").text
    self.auth_state.token = xml.find("OAUTH").find("ACCESS_TOKEN").text
    self.dbg("Session id:" + self.auth_state.session_id)
    self.dbg("Token:" + self.auth_state.token)
def do_confirm(self, idcarte, montant, pw):
    """Confirm a pending card top-up (MoneyInCbConfirm) for card *idcarte*."""
    otp = self.get_otp()
    # izly_pay builds the signed payment "print" blob expected by the server.
    pr = izly_pay(self.auth_state.user, self.auth_state.session_id, idcarte, montant, pw + otp)
    return self.req("MoneyInCbConfirm", amount=montant, cardId=idcarte, print=pr, passOTP=pw + otp)
class CmdFlag(Enum):
    """Requirements a CLI command may declare.

    Bug fixed: the original members ended with trailing commas
    (``NONE = 0,``), which silently made every member's value a one-element
    tuple such as ``(0,)`` instead of the intended integer. Set membership
    (the only use in this file) was unaffected, but ``.value`` was wrong.
    """

    NONE = 0
    ACT_CODE = 1  # the command needs the activation code to be known
    SESSION = 2  # the command needs an active session
    USES_SOAP = 3  # the command issues a SOAP request
class Command(object):
    """Wraps a CLI command callable together with its requirement flags."""

    def __init__(self, f, flags):
        self.f = f
        self.flags = flags

    def call(self, *args):
        """Load the persisted auth state, enforce the command's declared
        requirements, then invoke the wrapped callable.

        args[0] must be the CmdInterface instance (it carries the
        IzlyClient as ``.ic``).
        """
        obj = args[0]
        try:
            # `with` closes the handle; was `pickle.load(open(...))`.
            with open(AUTH_FILE, "rb") as fh:
                auth_state = pickle.load(fh)
            print("Loaded existing auth state for: " + auth_state.user)
        except Exception:
            # Missing or corrupt state file: start fresh. (Was a bare
            # `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
            auth_state = AuthState()
            print("Using new auth state")
        print("Telephone: " + str(auth_state.user))
        print("Compteur: " + str(auth_state.counter))
        print("Activation code: " + str(auth_state.act_code))
        print("Auth bearer token: " + str(auth_state.token))
        print("Session ID: " + str(auth_state.session_id))
        print("")
        obj.ic.auth_state = auth_state
        if (CmdFlag.ACT_CODE in self.flags) and (obj.ic.auth_state.act_code is None):
            raise ValueError("L'operation demande un code d'activation.")
        if (CmdFlag.SESSION in self.flags) and (obj.ic.auth_state.token is None):
            raise ValueError("L'operation demande une session active.")
        if CmdFlag.USES_SOAP in self.flags:
            obj.ic.connect_soap()
        return self.f(*args)
### The user-interface part
def cmd(flags):
    """Decorator factory: wrap a function as a Command carrying *flags*."""
    def decorate(func):
        return Command(func, flags)
    return decorate
#izly client (interface layer)
#handles user commands and calls the IzlyClient object to carry them out
class CmdInterface(object):
    """User-facing command layer: maps CLI verbs to IzlyClient calls."""

    def __init__(self, ic):
        self.ic = ic  # the IzlyClient object holds the SOAP client and the auth state

    def process(self, args):
        """Dispatch args[0] to the matching Command attribute with the rest as arguments."""
        attr = None
        if hasattr(self, args[0]):
            attr = getattr(self, args[0])
        if (attr is not None) and (isinstance(attr, Command)):
            attr.call(self, *args[1:])
        else:
            raise Exception("Unknown command: " + args[0])

    @cmd({CmdFlag.USES_SOAP})
    def login(self, phone, pw):
        # First-time authentication: clears any stored state, then phase 1.
        print("Clearing auth state.")
        self.ic.auth_state = AuthState()
        self.ic.do_logon_step1(phone, pw)
        self.ic.save()

    @cmd({CmdFlag.USES_SOAP})
    def activation(self, code):
        # Phase 2: store the SMS activation code and finish the logon.
        self.ic.auth_state.act_code = code
        self.ic.auth_state.token = None
        self.ic.auth_state.session_id = None
        self.ic.auth_state.counter = 0
        self.ic.do_logon_step2()
        self.ic.save()

    @cmd({CmdFlag.ACT_CODE, CmdFlag.USES_SOAP})
    def relogin(self, pw):
        # Refresh an expired session using only the password.
        response = self.ic.do_logon_simple(pw)
        self.ic.dump(etree.fromstring(response))
        self.ic.save()

    @cmd({})
    def status(self):
        # Command.call already prints the auth state; nothing more to do.
        pass

    @cmd({CmdFlag.SESSION, CmdFlag.USES_SOAP})
    def listecb(self):
        # List the bank cards registered on the account.
        response = self.ic.req("MoneyInCbCbList")
        self.ic.dump(etree.fromstring(response))

    @cmd({CmdFlag.SESSION, CmdFlag.USES_SOAP})
    def historique(self):
        # Payment / top-up history.
        response = self.ic.req("GetStatement", filter="-1", nbItems="0", firstId="-1")
        self.ic.dump(etree.fromstring(response))

    @cmd({CmdFlag.SESSION, CmdFlag.USES_SOAP})
    def recharger(self, idcarte, montant):
        # Start a top-up from a registered card; must be followed by "confirmer".
        response = self.ic.req("MoneyInCb", amount=montant, cardId=idcarte)
        self.ic.dump(etree.fromstring(response))

    @cmd({CmdFlag.SESSION, CmdFlag.USES_SOAP})
    def confirmer(self, idcarte, montant, pw):
        # Confirm the pending top-up started by "recharger".
        response = self.ic.do_confirm(idcarte, str(float(montant)), pw)
        self.ic.dump(etree.fromstring(response))
        self.ic.save()
### Main program
# With no command given, print the usage text and exit non-zero.
if len(sys.argv) < 2:
    print("""Commandes disponibles:
./freezly.py status
Donne le status d'authentification (telephone, presence du code d'activation et token, etc...)
./freezly.py login <telephone> <password>
1ere phase d'authentification. Cela provoque l'envoi d'un code par SMS.
On utilise ensuite la commande "activation" pour faire la 2eme phase.
./freezly.py activation <code d'activation>
2eme phase d'authentification. On renseigne le code d'activation par SMS.
Le code d'activation est la derniere partie de l'URL (apres le derner /) recu par SMS
./freezly.py relogin <password>
Permet de se re-authentifier, pour "rafraichir" la session lorsqu'elle a expiree.
On doit donner seulement le password.
Ne pas confondre avec la commande "login" qui est a utiliser lors de la premiere authentification.
./freezly.py historique
Affiche la liste des paiements/rechargements effectues.
./freezly.py listecb
Liste les cartes bancaires enregistrees dans le compte, avec leurs identifiants (id carte).
./freezly.py recharger <id carte> <montant>
Lance le rechargement du compte a partir d'une carte bancaire enregistree.
Il faut ensuite utiliser la commande "confirmer".
./freezly.py confirmer <id carte> <montant> <password>
Confirme le rechargement du compte a partir de la carte bancaire.
AVERTISSEMENT : Cet outil est un simple exemple de demonstration pour illustrer la documentation du protocole. Il n'est pas prevu pour etre utilise en situation reelle. Il a ete realise a partir d'informations obtenues par reverse-engineering, donc potentiellement incompletes ou inexactes. Il a ete tres peu teste, il contient probablement des bugs, qui peuvent entrainer des consequences facheuses pour votre ordinateur, votre compte Izly, vos informations bancaires, etc...""")
    sys.exit(1)
# Build the SOAP client wrapper and the command dispatcher, then run the
# requested command; any exception is reported with its traceback and a
# non-zero exit code.
ic = IzlyClient("https://soap.izly.fr/Service.asmx?WSDL")
cli = CmdInterface(ic)
try:
    cli.process(sys.argv[1:])
    print("La commande a reussi")
except:
    traceback.print_exc()
    print("La commande a echouee, pour la raison mentionee ci-dessus.")
    sys.exit(1)
sys.exit(0)
|
"""
Copyright 2020 ShipChain, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import pytest
from rest_framework import status
import requests
from datetime import datetime
from src.shipchain_common.utils import parse_urlencoded_data
from src.shipchain_common.test_utils import modified_http_pretty
CURRENT_TIME = datetime.now()
@pytest.fixture
def query_string():
    """Canonical query string used across the request-assertion tests."""
    parts = [
        f'query_param_1={1}',
        f'query_param_2={2}',
        f'query_param_bool={False}',
        f'datetime={CURRENT_TIME}',
    ]
    return '&'.join(parts)
@pytest.fixture
def modified_query_string(query_string):
    """query_string with one extra parameter appended."""
    return query_string + '&query_param_3=3'
@pytest.fixture
def array_query_string(query_string):
    """query_string with a repeated `array` parameter appended."""
    return query_string + '&array=1&array=2'
@pytest.fixture
def query_dict(query_string):
    """Parsed (dict) form of query_string."""
    parsed = parse_urlencoded_data(query_string)
    return parsed
@pytest.fixture
def modified_query_dict(modified_query_string):
    """Parsed (dict) form of modified_query_string."""
    parsed = parse_urlencoded_data(modified_query_string)
    return parsed
@pytest.fixture
def array_query_dict(array_query_string):
    """Parsed (dict) form of array_query_string."""
    parsed = parse_urlencoded_data(array_query_string)
    return parsed
@pytest.fixture
def http_pretty_list_mocking(modified_http_pretty):
    """Register the three POST endpoints the tests hit, all returning 200."""
    endpoints = (
        'http://google.com/path',
        'http://google.com/other_path',
        'http://bing.com/bing_path',
    )
    for url in endpoints:
        modified_http_pretty.register_uri(modified_http_pretty.POST, url,
                                          status=status.HTTP_200_OK)
    return modified_http_pretty
@pytest.fixture
def successful_body():
    """JSON-style request body with nested dict and array values."""
    return dict(
        integer=1,
        datetime=str(CURRENT_TIME),
        string='string',
        dictionary=dict(string='new_string'),
        array=[0, 1, 2],
    )
@pytest.fixture
def successful_urlencoded_body():
    """Flat request body suitable for form-urlencoded posting."""
    return dict(
        integer=1,
        datetime=str(CURRENT_TIME),
        string='string',
        array=[0, 1, 2],
    )
@pytest.fixture
def successful_json_body():
    """The successful payload serialized to a JSON string (key order kept)."""
    payload = dict(
        integer=1,
        datetime=str(CURRENT_TIME),
        dictionary=dict(string='new_string'),
        array=[0, 1, 2],
        string='string',
    )
    return json.dumps(payload)
@pytest.fixture
def successful_assertions(query_dict, modified_query_dict, array_query_dict, successful_body,
                          successful_urlencoded_body):
    """Expected-call descriptors for the four requests made in
    test_successful_mocking, in the order they are issued."""
    google_calls = [
        dict(path='/path', body=successful_body, query=query_dict, host='google.com'),
        dict(path='/path', body=None, query=modified_query_dict, host='google.com'),
        dict(path='/path', body=None, query=array_query_dict, host='google.com'),
    ]
    bing_call = dict(path='/bing_path', body=successful_urlencoded_body, host='bing.com')
    return google_calls + [bing_call]
@pytest.fixture
def single_assertions(query_dict, successful_body):
    """A single fully-specified expected call."""
    return [dict(path='/path', body=successful_body, query=query_dict, host='google.com')]
@pytest.fixture
def failing_query_assertions(query_dict, successful_body):
    """Expected call whose query will not match the request actually made."""
    return [dict(path='/path', query=query_dict, host='google.com')]
@pytest.fixture
def failing_host_assertions(query_dict, successful_body):
    """Expected call whose host deliberately mismatches."""
    return [dict(path='/path', host='FAILING HOST')]
@pytest.fixture
def no_host_assertions(query_dict, successful_body):
    """Expected call missing the mandatory host key."""
    return [dict(path='/path')]
@pytest.fixture
def no_path_assertions(query_dict, successful_body):
    """Expected call missing the mandatory path key."""
    return [dict()]
@pytest.fixture
def failing_path_assertions(query_dict, successful_body):
    """Expected call whose path deliberately mismatches."""
    return [dict(path='FAILING PATH', host='google.com')]
@pytest.fixture
def dict_assertions(query_dict, successful_body):
    """A bare dict (not a list) to trigger the type check in assert_calls."""
    return dict(path='/path')
class TestHttprettyList:
    """Covers modified_http_pretty.assert_calls: the success path plus each
    documented failure mode (count, query, path, host, type, no-calls)."""

    def test_unsuccessful_empty_check(self, http_pretty_list_mocking, query_string, successful_json_body,
                                      single_assertions):
        """Two calls made but only one asserted -> call-count mismatch."""
        requests.post('http://google.com/path?' + query_string, data=successful_json_body,
                      headers={'content-type': 'application/json'})
        requests.post('http://google.com/path')
        # Ensure that after all calls were checked, assert all calls checked is accurate
        with pytest.raises(AssertionError) as err:
            http_pretty_list_mocking.assert_calls(single_assertions)
        assert f"Difference in expected call count, 2 made asserted 1. Calls: " in str(err.value)

    def test_successful_mocking(self, http_pretty_list_mocking, query_string, successful_assertions, successful_body,
                                successful_json_body, array_query_string, modified_query_string,
                                successful_urlencoded_body):
        """All four expected requests made -> assert_calls passes."""
        requests.post('http://google.com/path?' + query_string, data=successful_json_body,
                      headers={'content-type': 'application/json'})
        requests.post('http://google.com/path?' + modified_query_string)
        requests.post('http://google.com/path?' + array_query_string)
        requests.post('http://bing.com/bing_path', data=successful_urlencoded_body,
                      headers={'content-type': 'application/x-www-form-urlencoded'})
        http_pretty_list_mocking.assert_calls(successful_assertions)

    def test_unsuccessful_query_check(self, http_pretty_list_mocking, modified_query_string, failing_query_assertions,
                                      query_dict, modified_query_dict, successful_body):
        """Query string differs from the asserted one -> query mismatch."""
        requests.post('http://google.com/path?' + modified_query_string)
        with pytest.raises(AssertionError) as err:
            http_pretty_list_mocking.assert_calls(failing_query_assertions)
        assert f'Error: query mismatch, desired `{query_dict}` returned `{modified_query_dict}`.' in str(err.value)

    def test_unsuccessful_path_check(self, http_pretty_list_mocking, failing_path_assertions):
        """Request path differs from the asserted one -> path mismatch."""
        requests.post('http://google.com/other_path')
        with pytest.raises(AssertionError) as err:
            http_pretty_list_mocking.assert_calls(failing_path_assertions)
        assert f'Error: path mismatch, desired `FAILING PATH` returned `/other_path`.' in str(err.value)

    def test_no_path_check(self, http_pretty_list_mocking, no_path_assertions):
        """Assertions must always include a path."""
        requests.post('http://google.com/other_path')
        with pytest.raises(AssertionError) as err:
            http_pretty_list_mocking.assert_calls(no_path_assertions)
        assert 'Error: Must include path in assertion.' in str(err.value)

    def test_unsuccessful_host_check(self, http_pretty_list_mocking, failing_host_assertions):
        """Request host differs from the asserted one -> host mismatch."""
        requests.post('http://google.com/path')
        with pytest.raises(AssertionError) as err:
            http_pretty_list_mocking.assert_calls(failing_host_assertions)
        assert f'Error: host mismatch, desired `FAILING HOST` returned `google.com`.' in str(err.value)

    def test_no_host_check(self, http_pretty_list_mocking, no_host_assertions):
        """Assertions must always include a host."""
        requests.post('http://google.com/path')
        with pytest.raises(AssertionError) as err:
            http_pretty_list_mocking.assert_calls(no_host_assertions)
        assert 'Error: Must include host in assertion.' in str(err.value)

    def test_non_list_assertion(self, http_pretty_list_mocking, dict_assertions):
        """assert_calls requires a list of assertion dicts, not a bare dict."""
        requests.post('http://google.com/path')
        with pytest.raises(AssertionError) as err:
            http_pretty_list_mocking.assert_calls(dict_assertions)
        assert f'Error: asserted calls should be of type `list` not of type `{type(dict_assertions)}`' in str(err.value)

    def test_no_calls_made(self, http_pretty_list_mocking, successful_assertions):
        """Asserting with no requests made -> explicit error."""
        with pytest.raises(AssertionError) as err:
            http_pretty_list_mocking.assert_calls(successful_assertions)
        assert f'Error: No calls made to be parsed.' in str(err.value)

    def test_reset_between_tests(self, http_pretty_list_mocking, failing_host_assertions):
        """Makes an unasserted call; relies on the fixture resetting between tests."""
        requests.post('http://google.com/path')

    def test_default_reset_calls(self, http_pretty_list_mocking, query_string, successful_json_body, single_assertions):
        """Confirms the previous test's call did not leak into this one."""
        requests.post('http://google.com/path?' + query_string, data=successful_json_body,
                      headers={'content-type': 'application/json'})
        http_pretty_list_mocking.assert_calls(single_assertions)
|
<reponame>ourcwj/MoyuSystem2.0<filename>ver/MoyuSystem/Network_communication/GUI/login/conn.py
# -*- coding: utf-8 -*-
################################################################################
## Form generated from reading UI file '连接.ui'
##
## Created by: Qt User Interface Compiler version 6.3.0
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
from PySide6.QtCore import (QCoreApplication, QDate, QDateTime, QLocale,
QMetaObject, QObject, QPoint, QRect,
QSize, QTime, QUrl, Qt)
from PySide6.QtGui import (QBrush, QColor, QConicalGradient, QCursor,
QFont, QFontDatabase, QGradient, QIcon,
QImage, QKeySequence, QLinearGradient, QPainter,
QPalette, QPixmap, QRadialGradient, QTransform)
from PySide6.QtWidgets import (QApplication, QCommandLinkButton, QGridLayout, QGroupBox,
QLabel, QLineEdit, QMainWindow, QMenuBar,
QSizePolicy, QSpacerItem, QSpinBox, QStatusBar,
QWidget)
class Ui_MainWindow(object):
    """Auto-generated (pyside6-uic) UI for the server-connection window.

    Regenerating from the .ui file overwrites this class — keep edits to
    comments only.
    """

    def setupUi(self, MainWindow):
        if not MainWindow.objectName():
            MainWindow.setObjectName(u"MainWindow")
        MainWindow.resize(546, 315)
        self.centralwidget = QWidget(MainWindow)
        self.centralwidget.setObjectName(u"centralwidget")
        self.gridLayout_2 = QGridLayout(self.centralwidget)
        self.gridLayout_2.setObjectName(u"gridLayout_2")
        self.groupBox = QGroupBox(self.centralwidget)
        self.groupBox.setObjectName(u"groupBox")
        self.gridLayout = QGridLayout(self.groupBox)
        self.gridLayout.setObjectName(u"gridLayout")
        self.gridLayout_3 = QGridLayout()
        self.gridLayout_3.setObjectName(u"gridLayout_3")
        # "Try to connect" button, pushed right by an expanding spacer.
        self.commandLinkButton = QCommandLinkButton(self.groupBox)
        self.commandLinkButton.setObjectName(u"commandLinkButton")
        self.gridLayout_3.addWidget(self.commandLinkButton, 0, 1, 1, 1)
        self.horizontalSpacer = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.gridLayout_3.addItem(self.horizontalSpacer, 0, 0, 1, 1)
        self.gridLayout.addLayout(self.gridLayout_3, 3, 1, 1, 3)
        self.verticalSpacer_3 = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
        self.gridLayout.addItem(self.verticalSpacer_3, 2, 0, 1, 1)
        self.verticalSpacer_4 = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
        self.gridLayout.addItem(self.verticalSpacer_4, 2, 2, 1, 1)
        # Labels for the port spin box and the address line edit.
        self.label_2 = QLabel(self.groupBox)
        self.label_2.setObjectName(u"label_2")
        self.label_2.setAlignment(Qt.AlignCenter)
        self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
        self.label = QLabel(self.groupBox)
        self.label.setObjectName(u"label")
        self.label.setAlignment(Qt.AlignCenter)
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        self.lineEdit = QLineEdit(self.groupBox)
        self.lineEdit.setObjectName(u"lineEdit")
        self.lineEdit.setEchoMode(QLineEdit.Normal)
        self.gridLayout.addWidget(self.lineEdit, 0, 1, 1, 3)
        self.spinBox = QSpinBox(self.groupBox)
        self.spinBox.setObjectName(u"spinBox")
        self.spinBox.setMinimum(1)
        self.spinBox.setMaximum(9999)
        self.gridLayout.addWidget(self.spinBox, 1, 1, 1, 3)
        self.gridLayout_2.addWidget(self.groupBox, 0, 0, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QMenuBar(MainWindow)
        self.menubar.setObjectName(u"menubar")
        self.menubar.setGeometry(QRect(0, 0, 546, 22))
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QStatusBar(MainWindow)
        self.statusbar.setObjectName(u"statusbar")
        MainWindow.setStatusBar(self.statusbar)
#if QT_CONFIG(shortcut)
        self.label_2.setBuddy(self.spinBox)
        self.label.setBuddy(self.lineEdit)
#endif // QT_CONFIG(shortcut)
        self.retranslateUi(MainWindow)
        # NOTE(review): MainWindow must provide a `connect` slot — verify the
        # window class actually implements it.
        self.commandLinkButton.clicked.connect(MainWindow.connect)
        QMetaObject.connectSlotsByName(MainWindow)
    # setupUi

    def retranslateUi(self, MainWindow):
        MainWindow.setWindowTitle(QCoreApplication.translate("MainWindow", u"MainWindow", None))
        self.groupBox.setTitle(QCoreApplication.translate("MainWindow", u"\u8bf7\u8fde\u63a5\u670d\u52a1\u5668", None))
        self.commandLinkButton.setText(QCoreApplication.translate("MainWindow", u"\u5c1d\u8bd5\u8fde\u63a5", None))
        self.label_2.setText(QCoreApplication.translate("MainWindow", u"\u7aef\u53e3", None))
        self.label.setText(QCoreApplication.translate("MainWindow", u"\u670d\u52a1\u5668\u5730\u5740", None))
        self.lineEdit.setPlaceholderText(QCoreApplication.translate("MainWindow", u"127.0.0.1", None))
    # retranslateUi
|
<reponame>rapidpro/chpro-microsite<gh_stars>0
# -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-04-17 22:19
from __future__ import unicode_literals
import autoslug.fields
import cms.models.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_countries.fields
import filer.fields.image
class Migration(migrations.Migration):
    """Squashed initial migration for the case_studies app.

    Auto-generated by Django 1.11.12; replaces migrations 0001-0009.
    Do not hand-edit the operations — create a new migration instead.
    """

    replaces = [('case_studies', '0001_initial'), ('case_studies', '0002_auto_20180321_1636'), ('case_studies', '0003_casestudy_use_cases'), ('case_studies', '0004_auto_20180404_0612'), ('case_studies', '0005_auto_20180405_0120'), ('case_studies', '0006_auto_20180409_0403'), ('case_studies', '0007_casestudy_last_modified'), ('case_studies', '0008_casestudy_lead_content'), ('case_studies', '0009_casestudy_countries')]
    initial = True
    dependencies = [
        ('content', '0011_block_style'),
        ('cms', '0016_auto_20160608_1535'),
        ('filer', '__first__'),
    ]
    operations = [
        migrations.CreateModel(
            name='CaseStudy',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('heading', models.CharField(max_length=128)),
                ('slug', autoslug.fields.AutoSlugField(editable=False, max_length=128, populate_from='heading', unique=True)),
                ('published', models.BooleanField(default=False, help_text='Indicates if this Case Study is pubilc or still a draft.', verbose_name='Published')),
                ('featured_image', filer.fields.image.FilerImageField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.FILER_IMAGE_MODEL, verbose_name='Featured Image')),
                ('main_content', cms.models.fields.PlaceholderField(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_case_study', slotname='case_study_main_content', to='cms.Placeholder', verbose_name='Case Study Main Content')),
                ('sidebar_content', cms.models.fields.PlaceholderField(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sidebar_case_study', slotname='case_study_sidebar_content', to='cms.Placeholder', verbose_name='Case Study Sidebar Content')),
                ('stats_content', cms.models.fields.PlaceholderField(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='stats_case_study', slotname='case_study_stats_content', to='cms.Placeholder', verbose_name='Case Study Stats Content')),
                ('use_cases', models.ManyToManyField(to='cms.Page')),
                ('region', models.CharField(choices=[('americas', 'The Americas and Caribbean'), ('europe-asia-c', 'Europe and Central Asia'), ('pacific-asia-e', 'East Asia and the Pacific'), ('africa-e-s', 'Eastern and Southern Africa'), ('middle-east-africa', 'Middle East and North Africa'), ('asia-s', 'South Asia'), ('africa-w-c', 'West and Central Africa')], max_length=20)),
                ('last_modified', models.DateTimeField(auto_now=True)),
                ('lead_content', cms.models.fields.PlaceholderField(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='lead_case_study', slotname='case_study_lead_content', to='cms.Placeholder', verbose_name='Case Study Lead Content')),
                ('countries', django_countries.fields.CountryField(max_length=746, multiple=True)),
            ],
            options={
                'verbose_name_plural': 'Case Studies',
            },
        ),
    ]
|
# Copyright (c) 2012-2013, <NAME> <<EMAIL>>
# All rights reserved.
#
# See LICENSE file for full license.
from __future__ import annotations
import json
from re import compile
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
List,
SupportsFloat,
SupportsInt,
TypeVar,
Union,
overload,
)
if TYPE_CHECKING:
from .. import AWSHelperFn, Tags
from ..type_defs.compat import Literal, SupportsIndex
__T = TypeVar("__T")
@overload
def boolean(x: Literal[True, 1, "true", "True"]) -> Literal[True]:
    ...


@overload
def boolean(x: Literal[False, 0, "false", "False"]) -> Literal[False]:
    ...


def boolean(x: Any) -> bool:
    """Validate a CloudFormation boolean-ish value.

    Accepts ``True/1/"1"/"true"/"True"`` -> True and
    ``False/0/"0"/"false"/"False"`` -> False.

    Raises:
        ValueError: for any other value. The message now names the
            offending value, consistent with the other validators in this
            module (the original raised a bare ``ValueError``).
    """
    if x in [True, 1, "1", "true", "True"]:
        return True
    if x in [False, 0, "0", "false", "False"]:
        return False
    raise ValueError("%r is not a valid boolean" % x)
def integer(x: Any) -> Union[str, bytes, SupportsInt, SupportsIndex]:
    """Validate that *x* is convertible to int; return it unchanged."""
    try:
        int(x)
        return x
    except (ValueError, TypeError):
        raise ValueError("%r is not a valid integer" % x)
def positive_integer(x: Any) -> Union[str, bytes, SupportsInt, SupportsIndex]:
    """Validate that *x* is an integer >= 0; return it unchanged."""
    value = integer(x)
    if int(value) < 0:
        raise ValueError("%r is not a positive integer" % x)
    return x
def integer_range(
    minimum_val: float, maximum_val: float
) -> Callable[[Any], Union[str, bytes, SupportsInt, SupportsIndex]]:
    """Build a validator accepting integers in [minimum_val, maximum_val]."""

    def checker(x: Any) -> Union[str, bytes, SupportsInt, SupportsIndex]:
        value = int(x)
        if not (minimum_val <= value <= maximum_val):
            raise ValueError(
                "Integer must be between %d and %d" % (minimum_val, maximum_val)
            )
        return x

    return checker
def integer_list_item(
    allowed_values: List[float],
) -> Callable[[Any], Union[str, bytes, SupportsInt, SupportsIndex]]:
    """Build a validator accepting only integers contained in *allowed_values*."""

    def checker(
        x: Any,
    ) -> Union[str, bytes, SupportsInt, SupportsIndex]:
        if int(x) in allowed_values:
            return x
        raise ValueError(
            "Integer must be one of following: %s"
            % ", ".join(str(j) for j in allowed_values)
        )

    return checker
def double(x: Any) -> Union[SupportsFloat, SupportsIndex, str, bytes, bytearray]:
    """Validate that *x* is convertible to float; return it unchanged."""
    try:
        float(x)
        return x
    except (ValueError, TypeError):
        raise ValueError("%r is not a valid double" % x)
def tags_or_list(x: Any) -> Union[AWSHelperFn, Tags, List[Any]]:
    """Backward-compatible validator: accept Tags, an AWSHelperFn, or a plain list."""
    from .. import AWSHelperFn, Tags

    if not isinstance(x, (AWSHelperFn, Tags, list)):
        raise ValueError(f"Value {x} of type {type(x)} must be either Tags or list")
    return x  # type: ignore
def ignore(x: __T) -> __T:
    """Validator that bypasses property validation and returns *x* unchanged."""
    return x
def defer(x: __T) -> __T:
    """Validator that defers property validation and returns *x* unchanged."""
    return x
def network_port(x: Any) -> Union[AWSHelperFn, str, bytes, SupportsInt, SupportsIndex]:
    """Validate a network port: an AWSHelperFn (e.g. Ref) or an integer in [-1, 65535].

    -1 is accepted because some AWS properties use it to mean "all ports".
    """
    from .. import AWSHelperFn

    # Network ports can be Ref items
    if isinstance(x, AWSHelperFn):
        return x
    i = integer(x)
    if int(i) < -1 or int(i) > 65535:
        # Message fixed: the check allows -1 through 65535 and the original
        # text read "must been between 0 and 65535".
        raise ValueError("network port %r must be between -1 and 65535" % i)
    return x
# Compiled once at import time instead of on every call.
_S3_IP_RE = compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
_S3_BUCKET_NAME_RE = compile(r"^[a-z\d][a-z\d\.-]{1,61}[a-z\d]$")


def s3_bucket_name(b: str) -> str:
    """Validate an S3 bucket name.

    Rejects consecutive periods, names that look like IPv4 addresses, and
    anything outside 3-63 chars of lowercase letters, digits, dots and
    dashes starting/ending with a letter or digit.
    """
    # consecutive periods not allowed
    if ".." in b:
        raise ValueError("%s is not a valid s3 bucket name" % b)
    # IP addresses not allowed
    if _S3_IP_RE.match(b):
        raise ValueError("%s is not a valid s3 bucket name" % b)
    if not _S3_BUCKET_NAME_RE.match(b):
        raise ValueError("%s is not a valid s3 bucket name" % b)
    return b
def elb_name(b: str) -> str:
    """Validate a classic ELB name: alphanumeric start/end, dashes allowed
    inside, at most 32 characters."""
    elb_name_re = compile(
        r"^[a-zA-Z0-9](?:[a-zA-Z0-9\-]{0,30}[a-zA-Z0-9]{1})?$"
    )  # noqa
    if not elb_name_re.match(b):
        raise ValueError("%s is not a valid elb name" % b)
    return b
def encoding(encoding: str) -> str:
    """Validate an encoding value: "plain" or "base64"."""
    valid_encodings = ["plain", "base64"]
    if encoding in valid_encodings:
        return encoding
    raise ValueError("Encoding needs to be one of %r" % valid_encodings)
def one_of(
    class_name: str, properties: Dict[str, Any], property: str, conditionals: List[Any]
) -> None:
    """Ensure properties[property] is one of *conditionals* (or an AWSHelperFn)."""
    from .. import AWSHelperFn

    check_property = properties.get(property)
    # Intrinsic functions resolve at deploy time, so they always pass.
    if isinstance(check_property, AWSHelperFn) or issubclass(
        type(check_property), AWSHelperFn
    ):
        return
    if check_property in conditionals:
        return
    # NOTE: the two-argument ValueError (message tuple) is kept as-is to
    # preserve the original exception contents.
    raise ValueError(
        # Ensure we handle None as a valid value
        '%s.%s must be one of: "%s"'
        % (
            class_name,
            property,
            ", ".join(condition for condition in conditionals if condition),
        ),
        "or a CFN Intrinsic function / troposphere.AWSHelperFn",
    )
def mutually_exclusive(
    class_name: str, properties: Dict[str, Any], conditionals: List[Any]
) -> int:
    """Raise if more than one of *conditionals* is set on *properties*;
    return the number of distinct ones that are set."""
    from .. import NoValue

    present = [
        c for c in conditionals if c in properties and not properties[c] == NoValue
    ]
    specified_count = len(set(present))
    if specified_count > 1:
        raise ValueError(
            ("%s: only one of the following" " can be specified: %s")
            % (class_name, ", ".join(conditionals))
        )
    return specified_count
def exactly_one(
    class_name: str, properties: Dict[str, Any], conditionals: List[Any]
) -> int:
    """Require exactly one of *conditionals* to be present in *properties*."""
    count = mutually_exclusive(class_name, properties, conditionals)
    if count == 1:
        return count
    raise ValueError(
        ("%s: one of the following" " must be specified: %s")
        % (class_name, ", ".join(conditionals))
    )
def check_required(
    class_name: str, properties: Dict[str, Any], conditionals: List[Any]
) -> None:
    """Raise ValueError naming the first required property missing from *properties*."""
    missing = [c for c in conditionals if c not in properties]
    if missing:
        raise ValueError("Resource %s required in %s" % (missing[0], class_name))
def json_checker(prop: object) -> Any:
    """Accept a JSON string (validated), a dict (serialized), or an AWSHelperFn."""
    from .. import AWSHelperFn

    if isinstance(prop, str):
        # Must parse as JSON; json.loads raises otherwise.
        json.loads(prop)
        return prop
    if isinstance(prop, dict):
        # Normalize dicts to their JSON string form.
        return json.dumps(prop)
    if isinstance(prop, AWSHelperFn):
        return prop
    raise TypeError("json object must be a str or dict")
def waf_action_type(action: object) -> Literal["ALLOW", "BLOCK", "COUNT"]:
    """Validate a WAF action: ALLOW, BLOCK, or COUNT."""
    valid_actions = ["ALLOW", "BLOCK", "COUNT"]
    if action not in valid_actions:
        raise ValueError('Type must be one of: "%s"' % (", ".join(valid_actions)))
    return action
|
<filename>code/WV.py
# https://dhhr.wv.gov/COVID-19/Pages/default.aspx
import csv
from datetime import datetime
import json
import os
from urllib.request import urlopen, Request
import requests
from selenium import webdriver
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from bs4 import BeautifulSoup
import time
def run_hist():
    """Scrape the historical-trend charts from the WV DHHR COVID dashboard.

    Work in progress: opens the embedded Power BI iframe, clicks into the
    positive-case-trends view and right-clicks the cumulative-cases chart;
    the lab-test and hospital sections are still unimplemented (see the
    trailing comments).
    """
    # Using Selenium
    # driver = webdriver.Safari()
    # NOTE(review): hard-coded Windows chromedriver path — assumes a
    # specific local dev setup; confirm before running elsewhere.
    driver = webdriver.Chrome(executable_path="andrew/ChromeDriver/chromedriver.exe")
    driver.maximize_window()
    driver.get("https://dhhr.wv.gov/COVID-19/Pages/default.aspx")
    time.sleep(7)  # allow the Power BI iframe to finish loading
    frame = driver.find_element_by_xpath('//*[@id="responsive"]/iframe')
    driver.execute_script("return arguments[0].scrollIntoView(true);", frame)
    driver.switch_to.frame(frame)
    time.sleep(2)
    out = {}
    # Click Positive Case Trends
    driver.find_element_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[17]/transform/div/div[3]/div/visual-modern/div/button').click()
    time.sleep(2)
    cum_cases_div = driver.find_element_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[4]/transform/div/div[3]/div/visual-modern/div')
    # Right-click the chart and confirm the context-menu entry via ENTER.
    actionChains = ActionChains(driver)
    actionChains.context_click(cum_cases_div).pause(3).send_keys(Keys.ENTER).perform()
    time.sleep(2)
    # Click Lab Test Trends
    # Click Hospital
def get_data(out_county, tables, county_list):
    """Parse county-level COVID rows out of the scraped dashboard tables.

    Parameters
    ----------
    out_county : list of dict
        Accumulator of per-county records; appended to in place.
    tables : iterable of (segments, label)
        Each entry pairs a list of Selenium table-segment elements with a
        label ("Table 1", ...). Only "Table 1" (the 7-column county table)
        is handled; the gender and race tables ("Table 2"/"Table 3") were
        disabled upstream and their dead commented-out code has been
        removed.
    county_list : list of str
        Counties already recorded; used to skip duplicates across segments.

    Returns
    -------
    tuple
        ``(out_county, county_list)`` with new records/counties appended.
    """
    now = str(datetime.now())
    for segments, label in tables:
        if label != "Table 1":
            continue
        for segment in segments:
            # Cell text; nodes containing newlines are container elements,
            # not values.
            vals = [x.text for x in segment.find_elements_by_xpath('.//*') if '\n' not in x.text]
            # The table is column-major with 7 columns:
            # county, active, recovered, confirmed, probable, tests, deaths.
            if len(vals) % 7 != 0:
                raise Exception("Unequal number of columns")
            num_counties = len(vals) // 7
            cols = []
            col = []
            for val in vals:
                col.append(val)
                if len(col) == num_counties:
                    cols.append(col)
                    col = []
            for col in cols:
                if len(col) != num_counties:
                    raise Exception("Uneven number of values")
            for county, active, rec, conf, prob, test, death in zip(
                    cols[0], cols[1], cols[2], cols[3], cols[4], cols[5], cols[6]):
                if county in county_list:
                    continue
                ct = {
                    "County Name": county,
                    "# Confirmatory Lab Tests": test.replace(",", ""),
                    "Total Probable Cases": prob.replace(",", ""),
                    "Total Confirmed Cases": conf.replace(",", ""),
                    "Total Active Cases": active.replace(",", ""),
                    "Total Recovered": rec.replace(",", ""),
                    # Key kept with its trailing ": " — downstream CSVs
                    # already use this header.
                    "Total Deaths: ": death.replace(",", ""),
                    "Scrape Time": now,
                }
                out_county.append(ct)
                county_list.append(county)
    return out_county, county_list
def run_WV(args):
    """Scrape county-level COVID-19 figures from the WV DHHR Power BI
    dashboard and append one row per county to the county CSV.

    :param args: unused; presumably kept so every run_<STATE> entry point
        shares a signature — TODO confirm against the other state scrapers
    """
    # run_hist()
    # exit()
    # Parameters
    raw_name = '../WV/raw'  # directory where raw screenshots are archived
    data_county = '../WV/data/data_county.csv'  # output CSV (append mode)
    now = str(datetime.now())  # scrape timestamp recorded with every row
    # Using Selenium
    # driver = webdriver.Safari()
    # NOTE(review): hard-coded, machine-specific chromedriver path — breaks
    # on any other machine; consider a config value or webdriver-manager.
    driver = webdriver.Chrome(executable_path="andrew/ChromeDriver/chromedriver.exe")
    driver.maximize_window()
    driver.get("https://dhhr.wv.gov/COVID-19/Pages/default.aspx")
    time.sleep(7)  # fixed sleeps give the embedded dashboard time to render
    # the county tables live inside an embedded Power BI iframe
    frame = driver.find_element_by_xpath('//*[@id="responsive"]/iframe')
    driver.execute_script("return arguments[0].scrollIntoView(true);", frame)
    driver.switch_to.frame(frame)
    time.sleep(2)
    out_county = []
    # Get county data
    # two clicks expand the dashboard visuals into their full data view
    driver.find_element_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-group[9]/transform/div/div[2]/visual-container-modern[2]/transform/div/div[3]/div/visual-modern/div/button').click()
    time.sleep(3)
    driver.find_element_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[11]/transform/div/div[3]/div/visual-modern/div/button').click()
    time.sleep(3)
    # (elements, label) pair consumed by get_data()
    table1_div = (driver.find_elements_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[1]/transform/div/div[3]/div/visual-modern/div/div/div[2]/div[1]/div[4]/div/*'), 'Table 1')
    # table2_div = (driver.find_elements_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[2]/transform/div/div[3]/div/visual-modern/div/div/div[2]/div[1]/div[4]/div/*'), 'Table 2')
    # table3_div = (driver.find_elements_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[3]/transform/div/div[3]/div/visual-modern/div/div/div[2]/div[1]/div[4]/div/*'), 'Table 3')
    # Raw
    driver.save_screenshot(raw_name + "/county1_" + now + ".png")
    tables = [table1_div]
    county_list = []
    out_county, county_list = get_data(out_county, tables, county_list)
    # click the table's scroll control to reveal the remaining counties,
    # then re-read the same table element
    driver.find_element_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[1]/transform/div/div[3]/div/visual-modern/div/div/div[2]/div[1]/div[2]/div/div[1]/div').click()
    time.sleep(3)
    # Raw
    driver.save_screenshot(raw_name + "/county2_" + now + ".png")
    table1_div = (driver.find_elements_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[1]/transform/div/div[3]/div/visual-modern/div/div/div[2]/div[1]/div[4]/div/*'), 'Table 1')
    # table2_div = (driver.find_elements_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[2]/transform/div/div[3]/div/visual-modern/div/div/div[2]/div[1]/div[4]/div/*'), 'Table 2')
    # table3_div = (driver.find_elements_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[3]/transform/div/div[3]/div/visual-modern/div/div/div[2]/div[1]/div[4]/div/*'), 'Table 3')
    tables = [table1_div]
    out_county, county_list = get_data(out_county, tables, county_list)
    # WV has exactly 55 counties; any other count means the scrape failed
    if len(county_list) != 55:
        raise Exception("Did not collect all counties")
    # append one row per county; the header is written only when the CSV
    # does not exist yet
    for county in out_county:
        fields = sorted([x for x in county])
        exists = os.path.exists(data_county)
        with open(data_county, "a") as fp:
            writer = csv.writer(fp)
            if not exists:
                writer.writerow(fields)
            writer.writerow([county[x] for x in fields])
    # # Get Statewide
    # out = {}
    # driver.find_element_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[4]/transform/div/div[3]/div/visual-modern/div/button').click()
    # time.sleep(5)
    # out["Total Confirmed Cases"] = (driver.find_element_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[14]/transform/div/div[3]/div/visual-modern/div/svg/g[1]/text/tspan').text).replace(",","")
    # out["Total Probable Cases"] = (driver.find_element_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[16]/transform/div/div[3]/div/visual-modern/div/svg/g[1]/text/tspan').text).replace(",","")
    # out["Total Deaths"] = (driver.find_element_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[35]/transform/div/div[3]/div/visual-modern/div/svg/g[1]/text/tspan').text).replace(",","")
    # out["Total Recovered Cases"] = (driver.find_element_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[18]/transform/div/div[3]/div/visual-modern/div/svg/g[1]/text/tspan').text).replace(",","")
    # out["Total Active Cases"] = (driver.find_element_by_xpath('//*[@id="pvExplorationHost"]/div/div/exploration/div/explore-canvas-modern/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container-modern[19]/transform/div/div[3]/div/visual-modern/div/svg/g[1]/text/tspan').text).replace(",","")
    # print(out)
    # Get Hospital (Daily confirmed hosp, confirmed icu, confirmed vent)
if __name__ == '__main__':
    # allow running the WV scraper directly as a script
    run_WV({})
|
# =================================================================
#
# Author: <NAME> <<EMAIL>>,
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
#
# Copyright (c) 2020 <NAME>
# Copyright (c) 2020 <NAME>
# Copyright (c) 2022 <NAME>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
import click
from datetime import datetime
import logging
import os
from lxml import etree
from msc_pygeoapi import cli_options
from msc_pygeoapi.env import MSC_PYGEOAPI_CACHEDIR
from msc_pygeoapi.connector.elasticsearch_ import ElasticsearchConnector
from msc_pygeoapi.loader.base import BaseLoader
from msc_pygeoapi.util import (
configure_es_connection,
check_es_indexes_to_delete,
DATETIME_RFC3339_MILLIS_FMT
)
LOGGER = logging.getLogger(__name__)

# station list published on the MSC Datamart, mirrored into the local cache
STATIONS_LIST_NAME = 'swob-xml_station_list.csv'
STATIONS_LIST_URL = 'https://dd.weather.gc.ca/observations/doc/{}'.format(
    STATIONS_LIST_NAME
)
STATIONS_CACHE = os.path.join(MSC_PYGEOAPI_CACHEDIR, STATIONS_LIST_NAME)

# cleanup settings
DAYS_TO_KEEP = 30  # default age (days) before clean_indexes removes an index

# index settings
INDEX_BASENAME = 'swob_realtime.'  # daily indexes: swob_realtime.YYYY-MM-DD

# Elasticsearch index template applied to every swob_realtime.* index
SETTINGS = {
    'order': 0,
    'version': 1,
    'index_patterns': ['{}*'.format(INDEX_BASENAME)],
    'settings': {'number_of_shards': 1, 'number_of_replicas': 0},
    'mappings': {
        'properties': {
            'geometry': {'type': 'geo_shape'},
            'properties': {
                'properties': {
                    # remark: indexed as text plus a raw keyword sub-field
                    'rmk': {
                        'type': 'text',
                        'fields': {'raw': {'type': 'keyword'}},
                    }
                }
            },
        }
    },
}
def parse_swob(swob_file):
    """
    Read swob at swob_file and return object

    :param swob_file: file path to SWOB XML
    :returns: dictionary of SWOB with 'coordinates' and 'properties' keys
    :raises RuntimeError: if the file cannot be parsed as XML
    """
    # namespace map used by every findall() below
    namespaces = {
        'gml': 'http://www.opengis.net/gml',
        'om': 'http://www.opengis.net/om/1.0',
        'xlink': 'http://www.w3.org/1999/xlink',
        'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
        'dset': 'http://dms.ec.gc.ca/schema/point-observation/2.0',
    }
    swob_values = {}
    # station position; remains '' if the element is absent from the file
    elevation = ''
    latitude = ''
    longitude = ''
    # extract the swob xml source name
    swob_name = os.path.basename(swob_file)
    # make sure the xml is parse-able
    with open(swob_file) as fh:
        try:
            xml_tree = etree.parse(fh)
        except (FileNotFoundError, etree.ParseError):
            msg = 'Error: file {} cannot be parsed as xml'.format(swob_file)
            LOGGER.debug(msg)
            raise RuntimeError(msg)
    gen_path = './/om:Observation/om:metadata/dset:set/dset:general'
    general_info_tree = xml_tree.findall(gen_path, namespaces)
    general_info_elements = list(general_info_tree[0].iter())
    properties = {}
    # extract swob dataset ('/' replaced by '-' in the dataset name)
    for element in general_info_elements:
        if 'name' in element.attrib:
            if element.tag.split('}')[1] == 'dataset':
                properties[element.tag.split('}')[1]] = element.attrib[
                    'name'
                ].replace('/', '-')
    # add swob source name to properties
    properties["id"] = swob_name
    # rebuild the public Datamart URL from the cache-relative path
    local_path = swob_file.split(MSC_PYGEOAPI_CACHEDIR)[-1]
    properties["url"] = f'https://dd.weather.gc.ca/{local_path}'
    # extract ID related properties
    id_path = (
        './/om:Observation/om:metadata/'
        + 'dset:set/dset:identification-elements'
    )
    identification_tree = xml_tree.findall(id_path, namespaces)
    identification_elements = list(identification_tree[0].iter())
    for element in identification_elements:
        element_name = ''
        if 'name' in element.attrib:
            for key in element.attrib:
                if key == 'name':
                    # station position elements are captured into the
                    # dedicated variables; 'break' skips their other attrs
                    if element.attrib[key] == 'stn_elev':
                        elevation = float(element.attrib['value'])
                        break
                    elif element.attrib[key] == 'lat':
                        latitude = float(element.attrib['value'])
                        break
                    elif element.attrib[key] == 'long':
                        longitude = float(element.attrib['value'])
                        break
                    else:
                        element_name = element.attrib[key]
                else:
                    # every other attribute becomes a "<name>-<attr>" entry
                    properties[
                        "{}-{}".format(element_name, key)
                    ] = element.attrib[key]
    # set up cords and time stamps
    swob_values['coordinates'] = [longitude, latitude, elevation]
    s_time = (
        './/om:Observation/om:samplingTime/'
        + 'gml:TimeInstant/gml:timePosition'
    )
    time_sample = list(xml_tree.findall(s_time, namespaces)[0].iter())[0]
    properties['obs_date_tm'] = time_sample.text
    r_time = (
        './/om:Observation/om:resultTime/'
        + 'gml:TimeInstant/gml:timePosition'
    )
    time_result = list(xml_tree.findall(r_time, namespaces)[0].iter())[0]
    properties['processed_date_tm'] = time_result.text
    # extract the result data from the swob
    res_path = './/om:Observation/om:result/dset:elements'
    result_tree = xml_tree.findall(res_path, namespaces)
    result_elements = list(result_tree[0].iter())
    last_element = ''
    for element in result_elements:
        nested = element.iter()
        for nest_elem in nested:
            value = ''
            uom = ''
            if 'name' in nest_elem.attrib:
                name = nest_elem.attrib['name']
                if 'value' in nest_elem.attrib:
                    value = nest_elem.attrib['value']
                    # Checks to see if value string can be cast
                    # to float/int
                    try:
                        if '.' in value:
                            value = float(value)
                        else:
                            value = int(value)
                    except ValueError:
                        msg = (
                            f'Warning the value: "{value}" could not be '
                            f'converted to a number, this can be because '
                            f'of an improperly formatted number value or '
                            f'because of an intentional string value'
                        )
                        LOGGER.debug(msg)
                        pass
                if 'uom' in nest_elem.attrib:
                    if nest_elem.attrib['uom'] != 'unitless':
                        # strip stray U+00C2 from mis-encoded unit strings
                        uom = nest_elem.attrib['uom'].replace('\u00c2', '')
                # element can be 1 of 3 things:
                # 1. a data piece
                # 2. a qa summary
                # 3. a data flag
                if all([name != 'qa_summary', name != 'data_flag']):
                    properties[name] = value
                    if uom:
                        properties["{}-{}".format(name, 'uom')] = uom
                    last_element = name
                elif name == 'qa_summary':
                    # qa summaries annotate the preceding data element
                    properties["{}-{}".format(last_element, 'qa')] = value
                elif name == 'data_flag':
                    # data flags attach uom/code-src/value to the
                    # preceding data element
                    properties[
                        "{}-{}-{}".format(last_element, 'data_flag', 'uom')
                    ] = uom
                    properties[
                        "{}-{}-{}".format(
                            last_element, 'data_flag', 'code_src'
                        )
                    ] = nest_elem.attrib['code-src']
                    properties[
                        "{}-{}-{}".format(
                            last_element, 'data_flag', 'value'
                        )
                    ] = value
    swob_values['properties'] = properties
    # normalize the 'MSNG' (missing) sentinel to None
    for k, v in swob_values['properties'].items():
        if v == 'MSNG':
            swob_values['properties'][k] = None
    return swob_values
def swob2geojson(swob_file):
    """
    Produce a GeoJSON feature from a SWOB XML file.

    :param swob_file: file path to SWOB XML
    :returns: GeoJSON feature (dict) for the observation
    :raises RuntimeError: if the parsed SWOB is empty, None, or lacks the
        'properties'/'coordinates' keys
    """
    swob_dict = parse_swob(swob_file)
    json_output = {}
    try:
        if len(swob_dict) == 0:
            msg = 'Error: dictionary passed into swob2geojson is blank'
            LOGGER.debug(msg)
            raise RuntimeError(msg)
    except TypeError:
        msg = "Error: NoneType passed in as swob dict"
        LOGGER.debug(msg)
        raise RuntimeError(msg)
    # verify dictionary contains the data we need to avoid error
    if 'properties' in swob_dict and 'coordinates' in swob_dict:
        json_output['id'] = swob_dict['properties']['id']
        json_output['type'] = 'Feature'
        json_output["geometry"] = {
            "type": "Point",
            "coordinates": swob_dict['coordinates'],
        }
        # minutely SWOBs are flagged so they can be filtered downstream
        if "minute" in swob_dict["properties"]["id"]:
            swob_dict["properties"]["_is-minutely_obs-value"] = True
        else:
            swob_dict["properties"]["_is-minutely_obs-value"] = False
        json_output["properties"] = swob_dict["properties"]
        return json_output
    else:
        # BUG FIX: msg was a tuple (trailing commas), so the RuntimeError
        # carried a tuple repr instead of a readable message
        msg = (
            'Error: dictionary passed into swob2geojson lacks'
            ' required fields'
        )
        LOGGER.debug(msg)
        raise RuntimeError(msg)
class SWOBRealtimeLoader(BaseLoader):
    """SWOB Real-time loader"""

    def __init__(self, conn_config=None):
        """
        initializer

        :param conn_config: Elasticsearch connection configuration dict
            (defaults to an empty configuration)
        """
        BaseLoader.__init__(self)
        # BUG FIX: previous signature used a mutable default argument
        # (conn_config={}); the default is now created per call
        if conn_config is None:
            conn_config = {}
        self.conn = ElasticsearchConnector(conn_config)
        self.items = []  # GeoJSON observations accumulated by this loader
        self.conn.create_template(INDEX_BASENAME, SETTINGS)

    def generate_observations(self, filepath):
        """
        Generates and yields a series of observations, one for each row in
        <filepath>. Observations are returned as Elasticsearch bulk API
        upsert actions, with documents in GeoJSON to match the Elasticsearch
        index mappings.

        :param filepath: Path to a data file of realtime SWOB
        :returns: Generator of Elasticsearch actions to upsert the observations
        """
        observation = swob2geojson(filepath)
        observation_id = observation['id']
        LOGGER.debug(
            'Observation {} created successfully'.format(observation_id)
        )
        # route the document to a daily index derived from its date_tm value
        obs_dt = datetime.strptime(
            observation['properties']['date_tm-value'],
            DATETIME_RFC3339_MILLIS_FMT,
        )
        obs_dt2 = obs_dt.strftime('%Y-%m-%d')
        es_index = '{}{}'.format(INDEX_BASENAME, obs_dt2)
        action = {
            '_id': observation_id,
            '_index': es_index,
            '_op_type': 'update',
            'doc': observation,
            'doc_as_upsert': True,
        }
        self.items.append(observation)
        yield action

    def load_data(self, filepath):
        """
        loads data from event to target

        :param filepath: filepath to data on disk
        :returns: `bool` of status result
        """
        LOGGER.debug('Received file {}'.format(filepath))
        chunk_size = 80000
        package = self.generate_observations(filepath)
        self.conn.submit_elastic_package(package, request_size=chunk_size)
        return True
@click.group()
def swob_realtime():
    """Manages SWOB realtime indices"""
    # click command group; subcommands are attached at module bottom
    pass
@click.command()
@click.pass_context
@cli_options.OPTION_FILE()
@cli_options.OPTION_DIRECTORY()
@cli_options.OPTION_ELASTICSEARCH()
@cli_options.OPTION_ES_USERNAME()
@cli_options.OPTION_ES_PASSWORD()
@cli_options.OPTION_ES_IGNORE_CERTS()
def add(ctx, file_, directory, es, username, password, ignore_certs):
    """adds data to system"""
    # at least one input source is required
    if file_ is None and directory is None:
        raise click.ClickException('Missing --file/-f or --dir/-d option')
    conn_config = configure_es_connection(es, username, password, ignore_certs)
    if file_ is not None:
        # single-file mode takes precedence
        xml_files = [file_]
    else:
        # recursively gather every .xml under the directory, oldest first
        xml_files = [
            os.path.join(root, name)
            for root, _, names in os.walk(directory)
            for name in names
            if name.endswith('.xml')
        ]
        xml_files.sort(key=os.path.getmtime)
    for xml_file in xml_files:
        loader = SWOBRealtimeLoader(conn_config)
        if not loader.load_data(xml_file):
            click.echo('features not generated')
@click.command()
@click.pass_context
@cli_options.OPTION_DAYS(
    default=DAYS_TO_KEEP,
    help='Delete indexes older than n days (default={})'.format(DAYS_TO_KEEP)
)
@cli_options.OPTION_ELASTICSEARCH()
@cli_options.OPTION_ES_USERNAME()
@cli_options.OPTION_ES_PASSWORD()
@cli_options.OPTION_ES_IGNORE_CERTS()
@cli_options.OPTION_YES(prompt='Are you sure you want to delete old indexes?')
def clean_indexes(ctx, days, es, username, password, ignore_certs):
    """Clean SWOB realtime indexes older than n number of days"""
    conn = ElasticsearchConnector(
        configure_es_connection(es, username, password, ignore_certs)
    )
    # list every daily swob_realtime.* index
    indexes = conn.get(f'{INDEX_BASENAME}*')
    click.echo(indexes)
    # keep only those older than the cutoff
    stale = check_es_indexes_to_delete(indexes, days) if indexes else []
    if stale:
        click.echo('Deleting indexes {}'.format(stale))
        conn.delete(','.join(stale))
    click.echo('Done')
@click.command()
@click.pass_context
@cli_options.OPTION_ELASTICSEARCH()
@cli_options.OPTION_ES_USERNAME()
@cli_options.OPTION_ES_PASSWORD()
@cli_options.OPTION_ES_IGNORE_CERTS()
@cli_options.OPTION_INDEX_TEMPLATE()
@cli_options.OPTION_YES(
    prompt='Are you sure you want to delete these indexes?'
)
def delete_indexes(ctx, es, username, password, ignore_certs, index_template):
    """Delete all SWOB realtime indexes"""
    conn = ElasticsearchConnector(
        configure_es_connection(es, username, password, ignore_certs)
    )
    # drop every swob_realtime.* index in one wildcard delete
    wildcard = f'{INDEX_BASENAME}*'
    click.echo('Deleting indexes {}'.format(wildcard))
    conn.delete(wildcard)
    if index_template:
        # optionally remove the index template as well
        click.echo('Deleting index template {}'.format(INDEX_BASENAME))
        conn.delete_template(INDEX_BASENAME)
    click.echo('Done')
# register subcommands on the swob_realtime group
swob_realtime.add_command(add)
swob_realtime.add_command(clean_indexes)
swob_realtime.add_command(delete_indexes)
|
<reponame>mbsantiago/irekua-marco-geoestadistico<filename>irekua_marco_geoestadistico/migrations/migrate_geostatistical_framework.py
import os
import datetime
import zipfile
import json
import logging
from tqdm import tqdm
from django.contrib.gis.gdal import DataSource
from django.contrib.gis.gdal import CoordTransform
from django.contrib.gis.gdal import OGRGeometry
from django.contrib.gis.gdal import OGRGeomType
from django.db import migrations
from django.db import connections
from django.db import router
logging.basicConfig(level=logging.INFO)

# migration data locations: zips ship next to this file, and are
# extracted into TARGET_DIR on first run
BASEDIR = os.path.dirname(os.path.abspath(__file__))
TARGET_DIR = os.path.join(BASEDIR, 'extracted_data')
INEGI_DESCRIPTION = '''El Marco Geoestadístico (MG) Integrado se conforma por información vectorial, tablas de atributos y catálogos.
Muestra la división geoestadística del territorio nacional en sucesivos niveles de desagregación. Esta división está dada por los llamados LÍMITES GEOESTADÍSTICOS, que pueden coincidir con los límites político-administrativos oficiales, los cuales tienen sustento legal; sin embargo, los que no cuentan con dicho sustento deben entenderse como límites provisionales, trazados sólo para realizar los operativos censales. Estos límites provisionales no tienen pretensión de oficialidad, dado que el Instituto Nacional de Estadística y Geografía no es el órgano facultado para definir límites político-administrativos.
El MG contiene además la cobertura de todas las localidades del territorio nacional, de manera que a cada una de las viviendas le corresponde una secuencia de claves de identificación geográfica que está dada por los sucesivos niveles de desagregación en los que se divide el territorio nacional.
'''
# base JSON Schema that each Migrator extends with its layer's key fields
BASIC_SCHEMA = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "title": "INEGI Marco Geoestadístico 2018",
    "required": [],
    "properties": {}
}
def migrate_geostatistical_framework(apps, schema_editor):
    """Load the INEGI geostatistical framework level by level: entities
    first, then municipalities, then localities — each level's lookup
    table is shared with the next via ``stores``."""
    if not data_is_unpacked():
        unpack_data()
    shared_stores = {}
    entities = EntityMigrator(apps)
    entities.migrate()
    shared_stores['entities'] = entities.localities
    municipalities = MunicipalityMigrator(apps, stores=shared_stores)
    municipalities.migrate()
    shared_stores['municipalities'] = municipalities.localities
    LocalityMigrator(apps, stores=shared_stores).migrate()
def data_is_unpacked():
    # True once the zip archives have been extracted to TARGET_DIR
    return os.path.exists(TARGET_DIR)
def unpack_data():
    """Extract the bundled locality/municipality/entity zip archives
    into TARGET_DIR."""
    logging.info('Extracting zip file with geostatistical framework')
    archives = ('l.zip', 'mun.zip', 'ent.zip')
    for archive in archives:
        archive_path = os.path.join(BASEDIR, 'data', archive)
        with zipfile.ZipFile(archive_path, 'r') as handle:
            handle.extractall(TARGET_DIR)
    logging.info('Extraction done')
class Migrator(object):
    """Base loader: reads one INEGI shapefile layer and creates Locality
    rows plus a LocalityType describing the layer.

    Subclasses set ``name``, ``file_name`` and ``attributes`` and override
    :meth:`create_locality_implications` to record lookup keys and
    parent/child relations.
    """

    name = 'Migrator'   # layer label used in logs and the type name
    file_name = ''      # shapefile inside TARGET_DIR
    attributes = []     # (shapefile field, human title) pairs for metadata

    def __init__(self, apps, stores=None):
        """
        :param apps: Django historical app registry (from RunPython)
        :param stores: shared dict of lookup tables built by earlier
            migrators (e.g. 'entities'), or None for the first level
        """
        self.logger = logging.getLogger(self.name)
        self.locality_model = apps.get_model('irekua_database.Locality')
        self.locality_type_model = apps.get_model('irekua_database.LocalityType')
        self.spatial_backend = connections[router.db_for_write(self.locality_model)].ops
        self.localities = {}  # key (e.g. CVE_ENT) -> created Locality
        self.stores = stores

    def migrate(self):
        """Load the layer: create its LocalityType, then one Locality per
        shapefile feature (reprojected to the model's SRID)."""
        self.logger.info('Migrating %s', self.name)
        shape_file = os.path.join(TARGET_DIR, self.file_name)
        source = DataSource(shape_file)
        layer = source[0]
        self.locality_type = self.create_type(layer)
        self.transform = self.get_transform(layer)
        for feature in tqdm(layer):
            self.create_locality_from_feature(feature)
        self.logger.info('Done migrating %s', self.name)

    def create_locality_from_feature(self, feature):
        """Create a single Locality row from a shapefile feature."""
        name = feature.get('NOMGEO')
        # promote plain polygons to MultiPolygon so the geometry column
        # holds a single geometry type
        if feature.geom_type == 'Polygon':
            geometry = OGRGeometry(OGRGeomType('MultiPolygon'))
            geometry.add(feature.geom)
        else:
            geometry = feature.geom
        geometry.transform(self.transform)
        metadata = self.get_feature_metadata(feature)
        locality = self.locality_model.objects.create(
            name=name,
            geometry=geometry.wkt,
            locality_type=self.locality_type,
            metadata=metadata)
        self.create_locality_implications(feature, locality)

    def create_type(self, layer):
        """Create the LocalityType row describing this layer."""
        metadata_schema = self.create_metadata_schema()
        name = '<NAME>, DICIEMBRE 2018 (%s)' % self.name
        publication_date = datetime.date(year=2018, month=12, day=1)
        source = 'https://www.inegi.org.mx/temas/mg/default.html'
        original_datum = layer.srs.wkt
        return self.locality_type_model.objects.create(
            metadata_schema=metadata_schema,
            name=name,
            publication_date=publication_date,
            source=source,
            description=INEGI_DESCRIPTION,
            original_datum=original_datum)

    def create_locality_implications(self, feature, locality):
        """Hook for subclasses: record lookup keys / parent relations."""
        pass

    def create_metadata_schema(self):
        """Return this layer's JSON Schema as a JSON string.

        BUG FIX: the previous shallow ``BASIC_SCHEMA.copy()`` shared the
        'required' list and 'properties' dict across all migrators, so each
        layer's fields leaked into (and duplicated within) the other
        layers' schemas and mutated the module-level constant. The nested
        containers are now copied before being extended.
        """
        schema = dict(BASIC_SCHEMA)
        schema['required'] = list(BASIC_SCHEMA['required'])
        schema['properties'] = dict(BASIC_SCHEMA['properties'])
        schema['title'] = self.name + ' ' + schema['title']
        for name, title in self.attributes:
            schema['required'].append(name)
            schema['properties'][name] = {
                "type": "integer",
                "title": title
            }
        return json.dumps(schema)

    def get_feature_metadata(self, feature):
        """Copy the configured attribute fields off a feature."""
        return {
            field: feature.get(field)
            for field, _ in self.attributes
        }

    def get_transform(self, layer):
        """Build the coordinate transform from the layer's SRS to the
        Locality geometry field's SRID."""
        source_srs = layer.srs
        target_srid = self.locality_model._meta.get_field('geometry').srid
        SpatialRefSys = self.spatial_backend.spatial_ref_sys()
        target_srs = SpatialRefSys.objects.get(srid=target_srid).srs
        return CoordTransform(source_srs, target_srs)
class EntityMigrator(Migrator):
    """Migrates the federal entities (states) layer."""

    name = 'Entidad'
    file_name = '01_32_ent.shp'
    attributes = [
        ('CVEGEO', 'Clave de geometria'),
        ('CVE_ENT', 'Clave de entidad')
    ]

    def create_locality_implications(self, feature, locality):
        # index by entity key so lower levels can attach themselves
        self.localities[feature.get('CVE_ENT')] = locality
class MunicipalityMigrator(Migrator):
    """Migrates the municipalities layer, linking each to its entity."""

    name = 'Municipio'
    file_name = '01_32_mun.shp'
    attributes = [
        ('CVEGEO', 'Clave de geometria'),
        ('CVE_ENT', 'Clave de entidad'),
        ('CVE_MUN', 'Clave de municipio'),
    ]

    def create_locality_implications(self, feature, locality):
        entity = self.stores['entities'][feature.get('CVE_ENT')]
        locality.is_part_of.add(entity)
        # NOTE(review): keyed by CVE_MUN alone; if municipality keys repeat
        # across entities, later entries overwrite earlier ones — confirm
        # whether a (CVE_ENT, CVE_MUN) composite key was intended.
        self.localities[feature.get('CVE_MUN')] = locality
class LocalityMigrator(Migrator):
    """Migrates the localities layer, linking each locality to both its
    entity and its municipality."""

    name = 'Localidad'
    file_name = '01_32_l.shp'
    attributes = [
        ('CVEGEO', 'Clave de geometria'),
        ('CVE_ENT', 'Clave de entidad'),
        ('CVE_MUN', 'Clave de municipio'),
        ('CVE_LOC', 'Clave de localidad'),
    ]

    def create_locality_implications(self, feature, locality):
        # NOTE(review): municipality lookup uses CVE_MUN alone — see the
        # matching note on MunicipalityMigrator about key collisions.
        entity = self.stores['entities'][feature.get('CVE_ENT')]
        municipality = self.stores['municipalities'][feature.get('CVE_MUN')]
        locality.is_part_of.add(entity, municipality)
class Migration(migrations.Migration):
    """Data migration that loads the INEGI geostatistical framework after
    the Locality/LocalityType tables exist."""

    dependencies = [
        ('database', '0008_locality_localitytype')
    ]

    operations = [
        migrations.RunPython(migrate_geostatistical_framework)
    ]
|
<gh_stars>0
from magma import *
from mantle.lattice.mantle40.MUX import Mux2
from mantle.lattice.mantle40.FF import FFs
from mantle.lattice.mantle40.register import _RegisterName, Register
# public API: shift-register generators and their instantiation helpers
__all__ = ['DefineSIPO', 'SIPO']
__all__ += ['DefineSISO', 'SISO']
__all__ += ['DefinePIPO', 'PIPO']
__all__ += ['DefinePISO', 'PISO']
def DefineSIPO(n, init=0, ce=False, r=False, s=False):
    """
    Generate Serial-In, Parallel-Out shift register.

    I : Bit -> O : Array(n, Bit)

    :param n: number of stages (parallel output width)
    :param init: initial flip-flop value(s)
    :param ce: include a clock-enable input
    :param r: include a reset input
    :param s: include a set input
    """
    class _SIPO(Circuit):
        name = _RegisterName('SIPO', n, init, ce, r, s)
        IO = ['I', In(Bit), 'O', Out(Array(n, Bit))] + ClockInterface(ce, r, s)
        @classmethod
        def definition(sipo):
            # chain n flip-flops (each stage's O feeds the next stage's I)
            # and expose every stage output in parallel on O
            ffs = FFs(n, init=init, ce=ce, r=r, s=s)
            reg = braid(ffs, scanargs={"I": "O"})
            reg(sipo.I)
            wire(reg.O, sipo.O)
            wireclock(sipo, reg)
    return _SIPO
def SIPO(n, init=0, ce=False, r=False, s=False, **kwargs):
    """Instantiate a Serial-In, Parallel-Out shift register."""
    circuit_cls = DefineSIPO(n, init=init, ce=ce, r=r, s=s)
    return circuit_cls(**kwargs)
def DefineSISO(n, init=0, ce=False, r=False, s=False):
    """
    Generate Serial-In, Serial-Out shift register.

    I : Bit -> O : Bit

    :param n: number of stages (delay in clock cycles)
    :param init: initial flip-flop value(s)
    :param ce: include a clock-enable input
    :param r: include a reset input
    :param s: include a set input
    """
    class _SISO(Circuit):
        name = _RegisterName('SISO', n, init, ce, r, s)
        IO = ['I', In(Bit), 'O', Out(Bit)] + ClockInterface(ce, r, s)
        @classmethod
        def definition(siso):
            # fold n flip-flops into a single chain; only the last stage's
            # output is exposed
            ffs = FFs(n, init=init, ce=ce, r=r, s=s)
            reg = braid(ffs, foldargs={"I": "O"})
            reg(siso.I)
            wire(reg.O, siso.O)
            wireclock(siso, reg)
    return _SISO
def SISO(n, init=0, ce=False, r=False, s=False, **kwargs):
    """Instantiate a Serial-In, Serial-Out shift register.

    BUG FIX: ``init`` was previously dropped when delegating
    (``DefineSISO(n, ce=ce, r=r, s=s)``), silently ignoring a caller's
    initial value; it is now forwarded like in SIPO/PIPO/PISO.
    """
    return DefineSISO(n, init=init, ce=ce, r=r, s=s)(**kwargs)
def DefinePIPO(n, init=0, ce=False, r=False, s=False):
    """
    Generate Parallel-In, Parallel-Out shift register.

    SI : Bit, PI : Array(n, Bit), LOAD : Bit -> O : Array(n, Bit)

    :param n: register width
    :param init: initial register value
    :param ce: include a clock-enable input
    :param r: include a reset input
    :param s: include a set input
    """
    T = Array(n, Bit)
    class _PIPO(Circuit):
        name = _RegisterName('PIPO', n, init, ce, r, s)
        IO = ['SI', In(Bit), 'PI', In(T), 'LOAD', In(Bit),
              'O', Out(T)] + ClockInterface(ce, r, s)
        @classmethod
        def definition(pipo):
            # per-bit 2:1 mux selects shift data vs parallel-load data
            def mux2(y):
                return curry(Mux2(), prefix='I')
            # one mux per bit; all muxes share the LOAD select line
            mux = braid(col(mux2, n), forkargs=['S'])
            reg = Register(n, init=init, ce=ce, r=r, s=s)
            #si = array(*[pipo.SI] + [reg.O[i] for i in range(n-1)])
            # shift chain: SI enters bit 0, each register bit feeds the next
            si = concat(array(pipo.SI), reg.O[0:n-1])
            mux(si, pipo.PI, pipo.LOAD)
            reg(mux)
            wire(reg.O, pipo.O)
            wireclock(pipo, reg)
    return _PIPO
def PIPO(n, init=0, ce=False, r=False, s=False, **kwargs):
    """Instantiate a Parallel-In, Parallel-Out shift register."""
    circuit_cls = DefinePIPO(n, init=init, ce=ce, r=r, s=s)
    return circuit_cls(**kwargs)
def DefinePISO(n, init=0, ce=False, r=False, s=False):
    """
    Generate Parallel-In, Serial-Out shift register.

    SI : Bit, PI : Array(n, Bit), LOAD : Bit -> O : Bit

    :param n: register width
    :param init: initial register value
    :param ce: include a clock-enable input
    :param r: include a reset input
    :param s: include a set input
    """
    T = Array(n, Bit)
    class _PISO(Circuit):
        name = _RegisterName('PISO', n, init, ce, r, s)
        IO = ['SI', In(Bit), 'PI', In(T), 'LOAD', In(Bit),
              'O', Out(Bit)] + ClockInterface(ce, r, s)
        @classmethod
        def definition(piso):
            # per-bit 2:1 mux selects shift data vs parallel-load data
            def mux2(y):
                return curry(Mux2(), prefix='I')
            # one mux per bit; all muxes share the LOAD select line
            mux = braid(col(mux2, n), forkargs=['S'])
            reg = Register(n, init, ce=ce, r=r, s=s)
            #si = array(*[piso.SI] + [reg.O[i] for i in range(n-1)])
            # shift chain: SI enters bit 0, each register bit feeds the next
            si = concat(array(piso.SI), reg.O[0:n-1])
            mux(si, piso.PI, piso.LOAD)
            reg(mux)
            # only the last stage is exposed as the serial output
            wire(reg.O[n-1], piso.O)
            wireclock(piso, reg)
    return _PISO
def PISO(n, init=0, ce=False, r=False, s=False, **kwargs):
    """Instantiate a Parallel-In, Serial-Out shift register."""
    circuit_cls = DefinePISO(n, init=init, ce=ce, r=r, s=s)
    return circuit_cls(**kwargs)
|
<reponame>zhangjianting/cuspatial<gh_stars>0
# Copyright (c) 2019, NVIDIA CORPORATION.
import numpy as np
import pytest
import cudf
from cudf.tests.utils import assert_eq
import cuspatial
def test_subset_id_zeros():
    """Subsetting id 0 from a single all-zero record returns that record
    with float coords, int32 ids and millisecond timestamps."""
    result = cuspatial.subset_trajectory_id(
        cudf.Series([0]),
        cudf.Series([0]),
        cudf.Series([0]),
        cudf.Series([0]),
        cudf.Series([0]),
    )
    assert_eq(
        result,
        cudf.DataFrame(
            {
                "x": [0.0],
                "y": [0.0],
                "ids": cudf.Series([0]).astype("int32"),
                "timestamp": cudf.Series([0]).astype("datetime64[ms]"),
            }
        ),
    )
def test_subset_id_ones():
    """Subsetting id 1 from a single all-one record returns that record."""
    result = cuspatial.subset_trajectory_id(
        cudf.Series([1]),
        cudf.Series([1]),
        cudf.Series([1]),
        cudf.Series([1]),
        cudf.Series([1]),
    )
    assert_eq(
        result,
        cudf.DataFrame(
            {
                "x": [1.0],
                "y": [1.0],
                "ids": cudf.Series([1]).astype("int32"),
                "timestamp": cudf.Series([1]).astype("datetime64[ms]"),
            }
        ),
    )
def test_subset_id_random():
    """With a fixed numpy seed, the subset of random int inputs matches a
    hard-coded golden result."""
    np.random.seed(0)
    result = cuspatial.subset_trajectory_id(
        cudf.Series(np.random.randint(0, 10, 10)),
        cudf.Series(np.random.randint(0, 10, 10)),
        cudf.Series(np.random.randint(0, 10, 10)),
        cudf.Series(np.random.randint(0, 10, 10)),
        cudf.Series(np.random.randint(0, 10, 10)),
    )
    assert_eq(
        result,
        cudf.DataFrame(
            {
                "x": [7.0, 6, 1, 6, 7, 7, 8],
                "y": [5.0, 9, 4, 3, 0, 3, 5],
                "ids": cudf.Series([2, 3, 3, 3, 3, 7, 0]).astype("int32"),
                "timestamp": cudf.Series([9, 9, 7, 3, 2, 7, 2]).astype(
                    "datetime64[ms]"
                ),
            }
        ),
    )
def test_spatial_bounds_zeros():
    """Bounding box of a single point at the origin is degenerate (all 0)."""
    result = cuspatial.spatial_bounds(
        cudf.Series([0]), cudf.Series([0]), cudf.Series([0]), cudf.Series([0])
    )
    assert_eq(
        result,
        cudf.DataFrame({"x1": [0.0], "y1": [0.0], "x2": [0.0], "y2": [0.0]}),
    )
def test_spatial_bounds_ones():
    """Bounding box of a single point at (1, 1) is degenerate (all 1)."""
    result = cuspatial.spatial_bounds(
        cudf.Series([1]), cudf.Series([1]), cudf.Series([1]), cudf.Series([1])
    )
    assert_eq(
        result,
        cudf.DataFrame({"x1": [1.0], "y1": [1.0], "x2": [1.0], "y2": [1.0]}),
    )
def test_spatial_bounds_zero_to_one():
    """Two points varying only in y give a box spanning y in [0, 1]."""
    result = cuspatial.spatial_bounds(
        cudf.Series([0, 0]),
        cudf.Series([0, 1]),
        cudf.Series([2]),
        cudf.Series([2]),
    )
    assert_eq(
        result,
        cudf.DataFrame({"x1": [0.0], "y1": [0.0], "x2": [0.0], "y2": [1.0]}),
    )
def test_spatial_bounds_zero_to_one_xy():
    """Points (0,0) and (1,1) give the unit-square bounding box."""
    result = cuspatial.spatial_bounds(
        cudf.Series([0, 1]),
        cudf.Series([0, 1]),
        cudf.Series([2]),
        cudf.Series([2]),
    )
    assert_eq(
        result,
        cudf.DataFrame({"x1": [0.0], "y1": [0.0], "x2": [1.0], "y2": [1.0]}),
    )
def test_spatial_bounds_subsetted():
    """Two trajectories of two points each yield one disjoint box apiece."""
    result = cuspatial.spatial_bounds(
        cudf.Series([0, 1, -1, 2]),
        cudf.Series([0, 1, -1, 2]),
        cudf.Series([2, 2]),
        cudf.Series([2, 4]),
    )
    assert_eq(
        result,
        cudf.DataFrame(
            {
                "x1": [0.0, -1.0],
                "y1": [0.0, -1.0],
                "x2": [1.0, 2.0],
                "y2": [1.0, 2.0],
            }
        ),
    )
def test_spatial_bounds_intersected():
    """Overlapping trajectories still produce independent per-id boxes."""
    result = cuspatial.spatial_bounds(
        cudf.Series([0, 2, 1, 3]),
        cudf.Series([0, 2, 1, 3]),
        cudf.Series([2, 2]),
        cudf.Series([2, 4]),
    )
    assert_eq(
        result,
        cudf.DataFrame(
            {
                "x1": [0.0, 1.0],
                "y1": [0.0, 1.0],
                "x2": [2.0, 3.0],
                "y2": [2.0, 3.0],
            }
        ),
    )
def test_spatial_bounds_two_and_three():
    """Trajectories of different lengths (2 and 3 points) are handled."""
    result = cuspatial.spatial_bounds(
        cudf.Series([0, 2, 1, 3, 2]),
        cudf.Series([0, 2, 1, 3, 2]),
        cudf.Series([2, 3]),
        cudf.Series([2, 5]),
    )
    assert_eq(
        result,
        cudf.DataFrame(
            {
                "x1": [0.0, 1.0],
                "y1": [0.0, 1.0],
                "x2": [2.0, 3.0],
                "y2": [2.0, 3.0],
            }
        ),
    )
def test_derive_trajectories_zeros():
    """A single all-zero record derives one trajectory of length 1."""
    num_trajectories = cuspatial.derive(
        cudf.Series([0]), cudf.Series([0]), cudf.Series([0]), cudf.Series([0])
    )
    assert num_trajectories[0] == 1
    assert_eq(
        num_trajectories[1],
        cudf.DataFrame(
            {
                "trajectory_id": cudf.Series([0]).astype("int32"),
                "length": cudf.Series([1]).astype("int32"),
                "position": cudf.Series([1]).astype("int32"),
            }
        ),
    )
def test_derive_trajectories_ones():
    """A single all-one record derives one trajectory of length 1."""
    num_trajectories = cuspatial.derive(
        cudf.Series([1]), cudf.Series([1]), cudf.Series([1]), cudf.Series([1])
    )
    assert num_trajectories[0] == 1
    assert_eq(
        num_trajectories[1],
        cudf.DataFrame(
            {
                "trajectory_id": cudf.Series([1]).astype("int32"),
                "length": cudf.Series([1]).astype("int32"),
                "position": cudf.Series([1]).astype("int32"),
            }
        ),
    )
def test_derive_trajectories_two():
    """Two distinct ids derive two single-point trajectories with
    cumulative end positions 1 and 2."""
    num_trajectories = cuspatial.derive(
        cudf.Series([0, 1]),
        cudf.Series([0, 1]),
        cudf.Series([0, 1]),
        cudf.Series([0, 1]),
    )
    assert num_trajectories[0] == 2
    assert_eq(
        num_trajectories[1],
        cudf.DataFrame(
            {
                "trajectory_id": cudf.Series([0, 1]).astype("int32"),
                "length": cudf.Series([1, 1]).astype("int32"),
                "position": cudf.Series([1, 2]).astype("int32"),
            }
        ),
    )
def test_derive_trajectories_many():
    """Seeded random input derives a known set of six trajectories."""
    np.random.seed(0)

    def rand_col():
        # Positional args evaluate left-to-right, so the four draws
        # happen in the same order as four inline randint calls would.
        return cudf.Series(np.random.randint(0, 10, 10))

    num_trajectories = cuspatial.derive(
        rand_col(), rand_col(), rand_col(), rand_col()
    )
    assert num_trajectories[0] == 6
    expected_cols = {
        "trajectory_id": [0, 3, 4, 5, 8, 9],
        "length": [2, 2, 1, 2, 1, 2],
        "position": [2, 4, 5, 7, 8, 10],
    }
    expected = cudf.DataFrame(
        {name: cudf.Series(vals).astype("int32")
         for name, vals in expected_cols.items()}
    )
    assert_eq(num_trajectories[1], expected)
def test_distance_and_speed_zeros():
    """Degenerate single-record (zeros) input yields -2.0 for both columns."""
    args = [cudf.Series([0]) for _ in range(5)]
    result = cuspatial.distance_and_speed(*args)
    # -2.0 appears to be the library's marker for a degenerate
    # trajectory -- TODO confirm against cuspatial docs.
    assert_eq(result["meters"], cudf.Series([-2.0]), check_names=False)
    assert_eq(result["speed"], cudf.Series([-2.0]), check_names=False)
def test_distance_and_speed_ones():
    """Degenerate single-record (ones) input yields -2.0 for both columns."""
    args = [cudf.Series([1]) for _ in range(5)]
    result = cuspatial.distance_and_speed(*args)
    assert_eq(result["meters"], cudf.Series([-2.0]), check_names=False)
    assert_eq(result["speed"], cudf.Series([-2.0]), check_names=False)
def test_one_one_meter_one_second():
    """A move of 0.001 over 1000 time units comes out as 1 m at 1 m/s."""
    px = cudf.Series([0.0, 0.001])
    py = cudf.Series([0.0, 0.0])
    timestamps = cudf.Series([0, 1000])
    result = cuspatial.distance_and_speed(
        px, py, timestamps, cudf.Series([2]), cudf.Series([2])
    )
    assert_eq(result["meters"], cudf.Series([1.0]), check_names=False)
    assert_eq(result["speed"], cudf.Series([1.0]), check_names=False)
def test_two_trajectories_one_meter_one_second():
    """Two trajectories, each moving 0.001 in 1000 ticks -> 1 m at 1 m/s."""
    result = cuspatial.distance_and_speed(
        cudf.Series([0.0, 0.001, 0.0, 0.0]),  # first moves along x
        cudf.Series([0.0, 0.0, 0.0, 0.001]),  # second moves along y
        cudf.Series([0, 1000, 0, 1000]),      # presumably ms timestamps
        cudf.Series([2, 2]),
        cudf.Series([2, 4]),
    )
    expected = cudf.Series([1.0, 1.0])
    assert_eq(result["meters"], expected, check_names=False)
    assert_eq(result["speed"], expected, check_names=False)
def test_distance_and_speed_single_trajectory():
    """Three trajectories (lengths 5/4/3) over one coordinate stream."""
    xs = [1.0, 2.0, 3.0, 5.0, 7.0, 1.0, 2.0, 3.0, 6.0, 0.0, 3.0, 6.0]
    ys = [0.0, 1.0, 2.0, 3.0, 1.0, 3.0, 5.0, 6.0, 5.0, 4.0, 7.0, 4.0]
    result = cuspatial.distance_and_speed(
        cudf.Series(xs),
        cudf.Series(ys),
        cudf.Series(list(range(1, 13))),
        cudf.Series([5, 4, 3]),
        cudf.Series([5, 9, 12]),
    )
    assert_eq(
        result["meters"],
        cudf.Series([7892.922363, 6812.55908203125, 8485.28125]),
        check_names=False,
    )
    assert_eq(
        result["speed"],
        cudf.Series([1973230.625, 2270853.0, 4242640.5]),
        check_names=False,
    )  # fast!
#########################
# Verify that distance and speed are calculated
# correctly using each of the four cudf datetime
# resolutions.
#
# Compute the distance and speed of two trajectories,
# each over 0.001 km in 1 second.
# If datetime type conversion wasn't supported, speed
# would be different for each test.
#########################
@pytest.mark.parametrize(
    "timestamp_type",
    [
        ("datetime64[ns]", 1000000000),
        ("datetime64[us]", 1000000),
        ("datetime64[ms]", 1000),
        ("datetime64[s]", 1),
    ],
)
def test_distance_and_speed_timestamp_types(timestamp_type):
    """Speed is unit-correct for every cudf datetime resolution."""
    dtype, ticks_per_second = timestamp_type
    timestamps = cudf.Series([0, ticks_per_second, 0, ticks_per_second])
    result = cuspatial.distance_and_speed(
        cudf.Series([0.0, 0.001, 0.0, 0.0]),  # 1 meter in x
        cudf.Series([0.0, 0.0, 0.0, 0.001]),  # 1 meter in y
        timestamps.astype(dtype),
        cudf.Series([2, 2]),
        cudf.Series([2, 4]),
    )
    assert_eq(
        result,
        cudf.DataFrame({"meters": [1.0, 1.0], "speed": [1.0, 1.0]}),
        check_names=False,
    )
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# test_recoverstats.py
#
# Copyright 2016 <NAME> <<EMAIL>>
#
import os
import shlex
import subprocess
import sys
sys.path.insert(0, os.path.abspath('..'))
import numpy as np
import matplotlib.pyplot as plt
from astropy.time import Time
from astropy.io import fits
from properimage import simtools
from properimage import propercoadd as pc
# =============================================================================
# PSF measure test by propercoadd
# =============================================================================
# Output directory for the simulated variable-PSF frames.
test_dir = os.path.abspath(
    '/home/bruno/Devel/zackay_code/properimage/test/test_images/varpsf_coadd')
filenames = []
# Build 10 mosaic frames; each is a 2x2 tiling of 512px sub-images whose
# PSF elongation varies with the rotation angle theta.
for k in range(10):
    frames = []
    for theta in [0, 45, 105, 150]:
        N = 512  # side
        X_FWHM = 2 + 2.5*theta/180
        Y_FWHM = 2.8
        t_exp = 1
        max_fw = max(X_FWHM, Y_FWHM)
        # 6x6 grid of point-source positions, kept 6*FWHM from the borders.
        x = np.linspace(6*max_fw, N-6*max_fw, 6)
        y = np.linspace(6*max_fw, N-6*max_fw, 6)
        xy = simtools.cartesian_product([x, y])
        SN = 30.  # S/N high enough to measure the PSF
        weights = list(np.linspace(1000, 3000, len(xy)))
        m = simtools.delta_point(N, center=False, xy=xy, weights=weights)
        im = simtools.image(m, N, t_exp, X_FWHM, Y_FWHM=Y_FWHM, theta=theta,
                            SN=SN, bkg_pdf='gaussian')
        frames.append(im)
    # Assemble the four sub-images into one 2N x 2N mosaic.
    frame = np.zeros((2*N, 2*N))
    for j in range(2):
        for i in range(2):
            frame[i*N:(i+1)*N, j*N:(j+1)*N] = frames[i+2*j]
    now = '2016-05-17T00:00:00.1234567'
    t = Time(now)
    filenames.append(
        simtools.store_fits(frame, t, t_exp=1, i=k,
                            zero=3.1415, path=test_dir))
# =============================================================================
# One psf frame only
# =============================================================================
# Build 10 single-PSF frames: one 1024px image each, no 2x2 mosaic.
for k in range(10):
    N = 1024  # side
    # BUG FIX: theta previously leaked from the last iteration of the
    # mosaic loop above (150); make it explicit.
    theta = 150
    X_FWHM = 2 + 2.5*theta/180
    Y_FWHM = 2.8
    t_exp = 1
    max_fw = max(X_FWHM, Y_FWHM)
    # 10x10 grid of point-source positions, kept 6*FWHM from the borders.
    x = np.linspace(6*max_fw, N-6*max_fw, 10)
    y = np.linspace(6*max_fw, N-6*max_fw, 10)
    xy = simtools.cartesian_product([x, y])
    SN = 30.  # S/N high enough to measure the PSF
    weights = list(np.linspace(1000, 3000, len(xy)))
    m = simtools.delta_point(N, center=False, xy=xy, weights=weights)
    im = simtools.image(m, N, t_exp, X_FWHM, Y_FWHM=Y_FWHM, theta=theta,
                        SN=SN, bkg_pdf='gaussian')
    # BUG FIX: the original re-tiled the leftover 512px `frames` from the
    # mosaic loop into a 2N x 2N canvas; with N=1024 the shapes no longer
    # match and the assignment raised ValueError.  A single-PSF frame is
    # just the simulated image itself.
    frame = im
    now = '2016-05-17T00:00:00.1234567'
    t = Time(now)
    # NOTE(review): i=k repeats the indices of the mosaic loop, so these
    # files may overwrite the mosaic frames in test_dir -- confirm intent.
    filenames.append(
        simtools.store_fits(frame, t, t_exp=1, i=k,
                            zero=3.1415, path=test_dir))
# =============================================================================
# Coadd
# =============================================================================
# Coadd the ensemble: R (and optionally S) statistic images.
#S = np.zeros((N, N))
#R = np.zeros((N, N))
ensemble = pc.ImageEnsemble(filenames)
#S = ensemble.calculate_S(n_procs=4)
R, S = ensemble.calculate_R(n_procs=4, return_S=True)
test_dir = os.path.join(test_dir, 'coadd')
# Masked pixels are filled with 1. so log10 below stays finite.
if isinstance(S, np.ma.masked_array):
    S = S.filled(1.)
if isinstance(R, np.ma.masked_array):
    R = R.real.filled(1.)
if not os.path.exists(test_dir):
    os.mkdir(test_dir)
#~ with file(os.path.join(test_dir,'S.npy'), 'w') as f:
#~     np.save(f, S)
#~ with file(os.path.join(test_dir,'R.npy'), 'w') as f:
#~     np.save(f, R)
# Quick-look log-scale previews of both statistic images.
plt.figure(figsize=(16, 16))
plt.imshow(np.log10(S), interpolation='none')
plt.colorbar(orientation='horizontal')
plt.savefig(os.path.join(test_dir, 'S.png'))
plt.close()
plt.figure(figsize=(16, 16))
plt.imshow(np.log10(R.real), interpolation='none')
plt.colorbar(orientation='horizontal')
plt.savefig(os.path.join(test_dir, 'R.png'))
plt.close()
# Persist both coadds as FITS (R is complex; only its real part is kept).
shdu = fits.PrimaryHDU(S)
shdulist = fits.HDUList([shdu])
shdulist.writeto(os.path.join(test_dir, 'S.fits'), overwrite=True)
rhdu = fits.PrimaryHDU(R.real)
rhdulist = fits.HDUList([rhdu])
rhdulist.writeto(os.path.join(test_dir, 'R.fits'), overwrite=True)
# =============================================================================
print('Individual analisis of psf decomposition')
# =============================================================================
for im in ensemble.atoms:
a_fields, psf_basis = im.get_variable_psf()
atom_dir = os.path.join(test_dir, im._attached_to[:-5])
if not os.path.exists(atom_dir):
os.makedirs(atom_dir)
plt.figure(figsize=(16, 16))
plt.imshow(np.log10(im.imagedata), interpolation='none')
plt.plot(im._best_srcs['sources']['x'],
im._best_srcs['sources']['y'],
'ro')
plt.colorbar(orientation='horizontal')
plt.savefig(os.path.join(atom_dir, 'test_frame.png'))
plt.close()
#~ # Patches are in im._best_srcs['patches']
#~ subplots = int(np.sqrt(len(im._best_srcs['patches'])) + 1)
#~ plt.figure(figsize=(20, 20))
#~ for i in range(len(im._best_srcs['patches'])):
#~ plt.subplot(subplots, subplots, i+1)
#~ plt.imshow(im._best_srcs['patches'][i], interpolation='none')
#~ # plt.colorbar(orientation='horizontal')
#~ #plt.savefig(os.path.join(test_dir, 'psf_patches.png'))
subplots = int(np.sqrt(len(psf_basis)) + 1)
plt.figure(figsize=(16, 16))
for i in range(len(psf_basis)):
plt.subplot(subplots, subplots, i+1)
plt.imshow(psf_basis[i], interpolation='none')
#plt.colorbar(orientation='horizontal')
plt.savefig(os.path.join(atom_dir, 'psf_basis.png'))
x, y = np.mgrid[:im.imagedata.shape[0], :im.imagedata.shape[1]]
plt.figure(figsize=(16, 16))
for i in range(len(a_fields)):
plt.subplot(subplots, subplots, i+1)
plt.imshow(a_fields[i](x, y))
plt.plot(im._best_srcs['sources']['x'],
im._best_srcs['sources']['y'],
'ro')
#plt.colorbar(orientation='horizontal')
plt.savefig(os.path.join(atom_dir, 'a_fields.png'))
|
<gh_stars>1-10
"""Unit test for treadmill master.
"""
# Disable C0302: Too many lines in the module
# pylint: disable=C0302
import os
import shutil
import tempfile
import time
import unittest
import kazoo
import mock
import numpy as np
import treadmill
import treadmill.exc
from treadmill import master
from treadmill import scheduler
from tests.testutils import mockzk
class MasterTest(mockzk.MockZookeeperTestCase):
    """Mock test for treadmill.master."""

    def setUp(self):
        """Creates a Master wired to a mocked Zookeeper client."""
        super(MasterTest, self).setUp()
        scheduler.DIMENSION_COUNT = 3
        self.root = tempfile.mkdtemp()
        os.environ['TREADMILL_MASTER_ROOT'] = self.root
        self.master = master.Master(kazoo.client.KazooClient(), 'test-cell')
        # NOTE(review): stale comment "Use 111 to assert on zkhandle value"
        # removed -- nothing here uses 111.
        # Disable the exit on exception hack for tests
        self.old_exit_on_unhandled = treadmill.exc.exit_on_unhandled
        treadmill.exc.exit_on_unhandled = mock.Mock(side_effect=lambda x: x)

    def tearDown(self):
        """Removes the temp root and restores exit-on-unhandled."""
        if self.root and os.path.isdir(self.root):
            shutil.rmtree(self.root)
        # Restore the exit on exception hack for tests
        treadmill.exc.exit_on_unhandled = self.old_exit_on_unhandled
        super(MasterTest, self).tearDown()

    def test_resource_parsing(self):
        """Tests parsing resources."""
        self.assertEqual([0, 0, 0], master.resources({}))
        self.assertEqual([1, 0, 0], master.resources({'memory': '1M'}))
        self.assertEqual(
            [1, 10, 1024],
            master.resources(
                {'memory': '1M',
                 'cpu': '10%',
                 'disk': '1G'}
            )
        )

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.set', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.create', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    def test_load_servers(self):
        """Tests load of server and bucket data."""
        zk_content = {
            'placement': {},
            'server.presence': {},
            'cell': {
                'pod:pod1': {},
                'pod:pod2': {},
            },
            'buckets': {
                'pod:pod1': {
                    'traits': None,
                },
                'pod:pod2': {
                    'traits': None,
                },
                'rack:1234': {
                    'traits': None,
                    'parent': 'pod:pod1',
                },
                'rack:2345': {
                    'traits': None,
                    'parent': 'pod:pod1',
                },
            },
            'servers': {
                'test.xx.com': {
                    'memory': '16G',
                    'disk': '128G',
                    'cpu': '400%',
                    'parent': 'rack:1234',
                },
            },
        }
        self.make_mock_zk(zk_content)
        self.master.load_buckets()
        self.master.load_cell()
        self.assertIn(
            'pod:pod1',
            self.master.cell.children_by_name
        )
        self.assertIn(
            'rack:1234',
            self.master.cell.children_by_name['pod:pod1'].children_by_name
        )
        self.master.load_servers()
        rack_1234 = self.master.cell \
            .children_by_name['pod:pod1'].children_by_name['rack:1234']
        self.assertIn('test.xx.com', rack_1234.children_by_name)
        self.assertIn('test.xx.com', self.master.servers)
        # Check capacity - (memory, cpu, disk) vector.
        self.assertTrue(
            np.all(np.isclose(
                [16. * 1024, 400, 128. * 1024],
                rack_1234.children_by_name['test.xx.com'].init_capacity)))
        # Modify server parent, make sure it is reloaded.
        zk_content['servers']['test.xx.com']['parent'] = 'rack:2345'
        self.master.reload_servers(['test.xx.com'])
        rack_2345 = self.master.cell \
            .children_by_name['pod:pod1'].children_by_name['rack:2345']
        self.assertNotIn('test.xx.com', rack_1234.children_by_name)
        self.assertIn('test.xx.com', rack_2345.children_by_name)
        # Modify server capacity, make sure it is refreshed.
        server_obj1 = self.master.servers['test.xx.com']
        zk_content['servers']['test.xx.com']['memory'] = '32G'
        self.master.reload_servers(['test.xx.com'])
        server_obj2 = self.master.servers['test.xx.com']
        self.assertIn('test.xx.com', rack_2345.children_by_name)
        # Reload must create a brand new server object.
        self.assertNotEquals(id(server_obj1), id(server_obj2))
        # If server is removed, make sure it is remove from the model.
        del zk_content['servers']['test.xx.com']
        self.master.reload_servers(['test.xx.com'])
        self.assertNotIn('test.xx.com', rack_2345.children_by_name)
        self.assertNotIn('test.xx.com', self.master.servers)

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.set', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.create', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
    @mock.patch('time.time', mock.Mock(return_value=0))
    def test_adjust_server_state(self):
        """Tests server state transitions driven by server.presence."""
        zk_content = {
            'placement': {},
            'server.presence': {},
            'buckets': {
                'pod:pod1': {
                    'traits': None,
                },
                'pod:pod2': {
                    'traits': None,
                },
                'rack:1234': {
                    'traits': None,
                    'parent': 'pod:pod1',
                },
            },
            'servers': {
                'test.xx.com': {
                    'memory': '16G',
                    'disk': '128G',
                    'cpu': '400%',
                    'parent': 'rack:1234',
                },
            },
        }
        time.time.return_value = 100
        self.make_mock_zk(zk_content)
        self.master.load_buckets()
        self.master.load_servers()
        # No presence node: server loads as down, stamped with load time.
        self.assertEqual(
            (scheduler.State.down, 100),
            self.master.servers['test.xx.com'].get_state()
        )
        zk_content['server.presence']['test.xx.com'] = {}
        time.time.return_value = 200
        self.master.adjust_server_state('test.xx.com')
        self.assertEqual(
            (scheduler.State.up, 200),
            self.master.servers['test.xx.com'].get_state()
        )

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    def test_load_allocations(self):
        """Tests loading allocation from serialized db data."""
        kazoo.client.KazooClient.get.return_value = ("""
---
- name: treadmill/dev
  assignments:
    - pattern: treadmlx.*
      priority: 10
    - pattern: treadmlp.test
      priority: 42
  rank: 100
  cpu: 100%
  disk: 1G
  memory: 1G
""", None)
        self.master.load_allocations()
        root = self.master.cell.partitions[None].allocation
        self.assertIn('treadmill', root.sub_allocations)
        leaf_alloc = root.get_sub_alloc('treadmill').get_sub_alloc('dev')
        self.assertEqual(100, leaf_alloc.rank)
        # Reserved vector: 1G memory, 100% cpu, 1G disk.
        self.assertEqual(1024, leaf_alloc.reserved[0])
        self.assertEqual(100, leaf_alloc.reserved[1])
        self.assertEqual(1024, leaf_alloc.reserved[2])
        assignments = self.master.assignments
        self.assertEqual(
            (10, leaf_alloc),
            assignments['treadmlx.*[#]' + '[0-9]' * 10]
        )

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    def test_load_apps(self):
        """Tests loading application data."""
        zk_content = {
            'scheduled': {
                'foo.bar#1234': {
                    'memory': '16G',
                    'disk': '128G',
                    'cpu': '400%',
                    'affinity': 'foo.bar',
                    'data_retention_timeout': None,
                },
            },
        }
        self.make_mock_zk(zk_content)
        self.master.load_apps()
        self.assertIn('foo.bar#1234', self.master.cell.apps)
        # Priority defaults to 1 when absent from app data.
        self.assertEqual(self.master.cell.apps['foo.bar#1234'].priority, 1)
        zk_content['scheduled']['foo.bar#1234']['priority'] = 5
        self.master.load_apps()
        self.assertEqual(len(self.master.cell.apps), 1)
        self.assertEqual(self.master.cell.apps['foo.bar#1234'].priority, 5)

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_deleted', mock.Mock())
    @mock.patch('treadmill.zkutils.put', mock.Mock())
    @mock.patch('treadmill.zkutils.update', mock.Mock())
    @mock.patch('time.time', mock.Mock(return_value=500))
    def test_reschedule(self):
        """Tests application placement."""
        srv_1 = scheduler.Server('1', [10, 10, 10],
                                 valid_until=1000, traits=0)
        srv_2 = scheduler.Server('2', [10, 10, 10],
                                 valid_until=1000, traits=0)
        srv_3 = scheduler.Server('3', [10, 10, 10],
                                 valid_until=1000, traits=0)
        srv_4 = scheduler.Server('4', [10, 10, 10],
                                 valid_until=1000, traits=0)
        cell = self.master.cell
        cell.add_node(srv_1)
        cell.add_node(srv_2)
        cell.add_node(srv_3)
        cell.add_node(srv_4)
        app1 = scheduler.Application('app1', 4, [1, 1, 1], 'app')
        app2 = scheduler.Application('app2', 3, [2, 2, 2], 'app')
        cell.add_app(cell.partitions[None].allocation, app1)
        cell.add_app(cell.partitions[None].allocation, app2)
        # At this point app1 is on server 1, app2 on server 2.
        self.master.reschedule()
        treadmill.zkutils.put.assert_has_calls([
            mock.call(mock.ANY, '/placement/1/app1',
                      {'expires': 500, 'identity': None}, acl=mock.ANY),
            mock.call(mock.ANY, '/placement/2/app2',
                      {'expires': 500, 'identity': None}, acl=mock.ANY),
        ])
        # Taking server 1 down must migrate app1 elsewhere.
        srv_1.state = scheduler.State.down
        self.master.reschedule()
        treadmill.zkutils.ensure_deleted.assert_has_calls([
            mock.call(mock.ANY, '/placement/1/app1'),
        ])
        treadmill.zkutils.put.assert_has_calls([
            mock.call(mock.ANY, '/placement/3/app1',
                      {'expires': 500, 'identity': None}, acl=mock.ANY),
            mock.call(mock.ANY, '/placement', mock.ANY),
        ])

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_deleted', mock.Mock())
    @mock.patch('treadmill.zkutils.put', mock.Mock())
    @mock.patch('treadmill.zkutils.update', mock.Mock())
    @mock.patch('time.time', mock.Mock(return_value=500))
    def test_reschedule_maxutil(self):
        """Tests placement when allocation has max-utilization set."""
        srv_1 = scheduler.Server('1', [10, 10, 10],
                                 valid_until=1000, traits=0)
        srv_2 = scheduler.Server('2', [10, 10, 10],
                                 valid_until=1000, traits=0)
        srv_3 = scheduler.Server('3', [10, 10, 10],
                                 valid_until=1000, traits=0)
        srv_4 = scheduler.Server('4', [10, 10, 10],
                                 valid_until=1000, traits=0)
        cell = self.master.cell
        cell.add_node(srv_1)
        cell.add_node(srv_2)
        cell.add_node(srv_3)
        cell.add_node(srv_4)
        app1 = scheduler.Application('app1', 4, [1, 1, 1], 'app')
        app2 = scheduler.Application('app2', 3, [2, 2, 2], 'app')
        cell.partitions[None].allocation.set_reserved([1, 1, 1])
        cell.partitions[None].allocation.set_max_utilization(2)
        cell.add_app(cell.partitions[None].allocation, app1)
        cell.add_app(cell.partitions[None].allocation, app2)
        # Only app1 fits under the utilization cap.
        self.master.reschedule()
        treadmill.zkutils.put.assert_has_calls([
            mock.call(mock.ANY, '/placement/1/app1',
                      {'expires': 500, 'identity': None}, acl=mock.ANY),
        ])
        # Raising app2's priority displaces app1.
        app2.priority = 5
        self.master.reschedule()
        treadmill.zkutils.ensure_deleted.assert_has_calls([
            mock.call(mock.ANY, '/placement/1/app1'),
        ])
        treadmill.zkutils.put.assert_has_calls([
            mock.call(mock.ANY, '/placement/2/app2',
                      {'expires': 500, 'identity': None}, acl=mock.ANY),
        ])

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_deleted', mock.Mock())
    @mock.patch('treadmill.zkutils.put', mock.Mock())
    @mock.patch('treadmill.zkutils.update', mock.Mock())
    @mock.patch('time.time', mock.Mock(return_value=500))
    def test_reschedule_once(self):
        """Tests that schedule-once apps are unscheduled on server down."""
        srv_1 = scheduler.Server('1', [10, 10, 10],
                                 valid_until=1000, traits=0)
        srv_2 = scheduler.Server('2', [10, 10, 10],
                                 valid_until=1000, traits=0)
        srv_3 = scheduler.Server('3', [10, 10, 10],
                                 valid_until=1000, traits=0)
        srv_4 = scheduler.Server('4', [10, 10, 10],
                                 valid_until=1000, traits=0)
        cell = self.master.cell
        cell.add_node(srv_1)
        cell.add_node(srv_2)
        cell.add_node(srv_3)
        cell.add_node(srv_4)
        app1 = scheduler.Application('app1', 4, [1, 1, 1], 'app',
                                     schedule_once=True)
        app2 = scheduler.Application('app2', 3, [2, 2, 2], 'app')
        cell.add_app(cell.partitions[None].allocation, app1)
        cell.add_app(cell.partitions[None].allocation, app2)
        # At this point app1 is on server 1, app2 on server 2.
        self.master.reschedule()
        treadmill.zkutils.put.assert_has_calls([
            mock.call(mock.ANY, '/placement/1/app1',
                      {'expires': 500, 'identity': None}, acl=mock.ANY),
            mock.call(mock.ANY, '/placement/2/app2',
                      {'expires': 500, 'identity': None}, acl=mock.ANY),
        ])
        srv_1.state = scheduler.State.down
        self.master.reschedule()
        # schedule_once: placement AND the scheduled node are deleted.
        treadmill.zkutils.ensure_deleted.assert_has_calls([
            mock.call(mock.ANY, '/placement/1/app1'),
            mock.call(mock.ANY, '/scheduled/app1'),
        ])

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_exists', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_deleted', mock.Mock())
    @mock.patch('treadmill.zkutils.put', mock.Mock())
    def test_restore_placement(self):
        """Tests restoring placement state from Zookeeper."""
        zk_content = {
            'placement': {
                'test.xx.com': {
                    '.data': """
                        state: up
                        since: 100
                    """,
                    'xxx.app1#1234': {
                        '.data': '{identity: 1}\n',
                    },
                    'xxx.app2#2345': '',
                }
            },
            'server.presence': {
                'test.xx.com': {},
            },
            'cell': {
                'pod:pod1': {},
                'pod:pod2': {},
            },
            'buckets': {
                'pod:pod1': {
                    'traits': None,
                },
                'pod:pod2': {
                    'traits': None,
                },
                'rack:1234': {
                    'traits': None,
                    'parent': 'pod:pod1',
                },
            },
            'servers': {
                'test.xx.com': {
                    'memory': '16G',
                    'disk': '128G',
                    'cpu': '400%',
                    'parent': 'rack:1234',
                },
            },
            'scheduled': {
                'xxx.app1#1234': {
                    'affinity': 'app1',
                    'memory': '1G',
                    'disk': '1G',
                    'cpu': '100%',
                    'identity_group': 'xxx.app1',
                },
                'xxx.app2#2345': {
                    'affinity': 'app2',
                    'memory': '1G',
                    'disk': '1G',
                    'cpu': '100%',
                },
            },
            'identity-groups': {
                'xxx.app1': {
                    'count': 5,
                }
            }
        }
        self.make_mock_zk(zk_content)
        self.master.load_buckets()
        self.master.load_cell()
        self.master.load_servers()
        self.master.load_apps()
        self.master.load_identity_groups()
        self.master.load_placement_data()
        self.assertTrue(
            self.master.servers['test.xx.com'].state is scheduler.State.up)
        # Reschedule should produce no events.
        treadmill.zkutils.ensure_deleted.reset_mock()
        treadmill.zkutils.ensure_exists.reset_mock()
        self.master.reschedule()
        self.assertFalse(treadmill.zkutils.ensure_deleted.called)
        self.assertFalse(treadmill.zkutils.ensure_exists.called)
        self.assertEqual(self.master.cell.apps['xxx.app1#1234'].identity, 1)
        self.assertEqual(
            self.master.cell.apps['xxx.app1#1234'].identity_group, 'xxx.app1'
        )
        # Identity 1 is taken by the restored app.
        self.assertEqual(
            self.master.cell.identity_groups['xxx.app1'].available,
            set([0, 2, 3, 4])
        )

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_exists', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_deleted', mock.Mock())
    @mock.patch('treadmill.zkutils.put', mock.Mock())
    def test_restore_with_integrity_err(self):
        """Tests restore when the same app is placed on two servers."""
        zk_content = {
            'placement': {
                'test1.xx.com': {
                    '.data': """
                        state: up
                        since: 100
                    """,
                    'xxx.app1#1234': '',
                    'xxx.app2#2345': '',
                },
                'test2.xx.com': {
                    '.data': """
                        state: up
                        since: 100
                    """,
                    'xxx.app1#1234': '',
                }
            },
            'server.presence': {
                'test1.xx.com': {},
                'test2.xx.com': {},
            },
            'cell': {
                'pod:pod1': {},
                'pod:pod2': {},
            },
            'buckets': {
                'pod:pod1': {
                    'traits': None,
                },
                'pod:pod2': {
                    'traits': None,
                },
                'rack:1234': {
                    'traits': None,
                    'parent': 'pod:pod1',
                },
            },
            'servers': {
                'test1.xx.com': {
                    'memory': '16G',
                    'disk': '128G',
                    'cpu': '400%',
                    'parent': 'rack:1234',
                },
                'test2.xx.com': {
                    'memory': '16G',
                    'disk': '128G',
                    'cpu': '400%',
                    'parent': 'rack:1234',
                },
            },
            'scheduled': {
                'xxx.app1#1234': {
                    'affinity': 'app1',
                    'memory': '1G',
                    'disk': '1G',
                    'cpu': '100%',
                },
                'xxx.app2#2345': {
                    'affinity': 'app2',
                    'memory': '1G',
                    'disk': '1G',
                    'cpu': '100%',
                },
            }
        }
        self.make_mock_zk(zk_content)
        self.master.load_buckets()
        self.master.load_cell()
        self.master.load_servers()
        self.master.load_apps()
        self.assertIn('xxx.app2#2345',
                      self.master.servers['test1.xx.com'].apps)
        # The doubly-placed app ends up unassigned.
        self.assertIsNone(self.master.cell.apps['xxx.app1#1234'].server)

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_exists', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_deleted', mock.Mock())
    @mock.patch('treadmill.zkutils.put', mock.Mock())
    @mock.patch('treadmill.master.Master.load_allocations', mock.Mock())
    @mock.patch('treadmill.master.Master.load_apps', mock.Mock())
    @mock.patch('treadmill.master.Master.load_app', mock.Mock())
    def test_process_events(self):
        """Tests processing of events from the /events queue."""
        zk_content = {
            'events': {
                '001-allocations-12345': {},
                '000-apps-12346': {
                    '.data': """
                        - xxx.app1#1234
                        - xxx.app2#2345
                    """
                },
            },
        }
        self.make_mock_zk(zk_content)
        self.master.watch('/events')
        # Drain the event queue, processing each event in order.
        while True:
            try:
                event = self.master.queue.popleft()
                self.master.process(event)
            except IndexError:
                break
        self.assertTrue(treadmill.master.Master.load_allocations.called)
        self.assertTrue(treadmill.master.Master.load_apps.called)
        treadmill.master.Master.load_app.assert_has_calls([
            mock.call('xxx.app1#1234'),
            mock.call('xxx.app2#2345'),
        ])

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.create', mock.Mock())
    @mock.patch('time.time', mock.Mock(return_value=123.34))
    @mock.patch('treadmill.sysinfo.hostname', mock.Mock(return_value='xxx'))
    def test_create_apps(self):
        """Tests app api."""
        zkclient = kazoo.client.KazooClient()
        kazoo.client.KazooClient.create.return_value = '/scheduled/foo.bar#12'
        master.create_apps(zkclient, 'foo.bar', {}, 2)
        kazoo.client.KazooClient.create.assert_has_calls(
            [
                mock.call('/scheduled/foo.bar#',
                          b'{}\n',
                          makepath=True,
                          sequence=True,
                          ephemeral=False,
                          acl=mock.ANY),
                mock.call('/trace/000C/foo.bar#12,123.34,xxx,pending,created',
                          b'',
                          makepath=True,
                          acl=mock.ANY),
                # Mock call returns same instance (#12), so same task is
                # created twice.
                mock.call('/scheduled/foo.bar#',
                          b'{}\n',
                          makepath=True,
                          sequence=True,
                          ephemeral=False,
                          acl=mock.ANY),
                mock.call('/trace/000C/foo.bar#12,123.34,xxx,pending,created',
                          b'',
                          makepath=True,
                          acl=mock.ANY)
            ],
            any_order=True
        )

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock(
        return_value=('{}', None)))
    @mock.patch('kazoo.client.KazooClient.set', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.create', mock.Mock())
    def test_update_app_priority(self):
        """Tests app api."""
        zkclient = kazoo.client.KazooClient()
        kazoo.client.KazooClient.create.return_value = '/events/001-apps-1'
        master.update_app_priorities(zkclient, {'foo.bar#1': 10,
                                                'foo.bar#2': 20})
        kazoo.client.KazooClient.set.assert_has_calls(
            [
                mock.call('/scheduled/foo.bar#1', b'{priority: 10}\n'),
                mock.call('/scheduled/foo.bar#2', b'{priority: 20}\n'),
            ],
            any_order=True
        )
        # Verify that event is placed correctly.
        kazoo.client.KazooClient.create.assert_called_with(
            '/events/001-apps-', mock.ANY,
            makepath=True, acl=mock.ANY, sequence=True, ephemeral=False
        )

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock(
        return_value=('{}', None)))
    @mock.patch('treadmill.zkutils.update', mock.Mock(return_value=None))
    @mock.patch('treadmill.master.create_event', mock.Mock(return_value=None))
    def test_update_app_priority_noop(self):
        """Tests app api."""
        zkclient = kazoo.client.KazooClient()
        # kazoo.client.KazooClient.create.return_value = '/events/001-apps-1'
        master.update_app_priorities(zkclient, {'foo.bar#1': 10,
                                                'foo.bar#2': 20})
        treadmill.zkutils.update.assert_has_calls(
            [
                mock.call(mock.ANY, '/scheduled/foo.bar#1', {'priority': 10},
                          check_content=True),
                mock.call(mock.ANY, '/scheduled/foo.bar#2', {'priority': 20},
                          check_content=True),
            ],
            any_order=True
        )
        # Verify that event is placed correctly.
        self.assertFalse(treadmill.master.create_event.called)

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock(
        return_value=('{}', None)))
    @mock.patch('kazoo.client.KazooClient.set', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.create', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.exists',
                mock.Mock(return_value=False))
    def test_cell_insert_bucket(self):
        """Tests inserting bucket into cell."""
        zkclient = kazoo.client.KazooClient()
        kazoo.client.KazooClient.create.return_value = '/events/000-cell-1'
        master.cell_insert_bucket(zkclient, 'pod:pod1')
        kazoo.client.KazooClient.create.assert_has_calls([
            mock.call('/cell/pod:pod1', b'',
                      makepath=True, acl=mock.ANY,
                      sequence=False),
            mock.call('/events/000-cell-', b'',
                      makepath=True, acl=mock.ANY,
                      sequence=True, ephemeral=False)
        ])

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_exists', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_deleted', mock.Mock())
    @mock.patch('treadmill.zkutils.put', mock.Mock())
    @mock.patch('time.time', mock.Mock())
    def test_check_reboot(self):
        """Tests reboot checks."""
        # Access to protected member warning.
        #
        # pylint: disable=W0212
        zk_content = {
            'placement': {
                'test1.xx.com': {
                    '.data': """
                        state: up
                        since: 100
                    """,
                    'xxx.app1#1234': '',
                    'xxx.app2#2345': '',
                },
                'test2.xx.com': {
                    '.data': """
                        state: up
                        since: 100
                    """,
                    'xxx.app1#1234': '',
                }
            },
            'server.presence': {
                'test1.xx.com': {},
                'test2.xx.com': {},
            },
            'cell': {
                'pod:pod1': {},
            },
            'buckets': {
                'pod:pod1': {
                    'traits': None,
                },
                'rack:1234': {
                    'traits': None,
                    'parent': 'pod:pod1',
                },
            },
            'servers': {
                'test1.xx.com': {
                    'memory': '16G',
                    'disk': '128G',
                    'cpu': '400%',
                    'parent': 'rack:1234',
                    'up_since': 100,
                },
                'test2.xx.com': {
                    'memory': '16G',
                    'disk': '128G',
                    'cpu': '400%',
                    'parent': 'rack:1234',
                    'up_since': 200,
                },
            },
            'scheduled': {
                'xxx.app1#1234': {
                    'affinity': 'app1',
                    'memory': '1G',
                    'disk': '1G',
                    'cpu': '100%',
                },
            }
        }
        time.time.return_value = 500
        self.make_mock_zk(zk_content)
        self.master.load_buckets()
        self.master.load_cell()
        self.master.load_servers()
        self.master.load_apps()
        self.master.load_placement_data()
        self.master.load_schedule()
        # Valid until is rounded to the end of day - reboot time + 21
        self.assertEqual(
            self.master.servers['test1.xx.com'].valid_until,
            self.master.servers['test2.xx.com'].valid_until
        )
        expired_at = self.master.servers['test1.xx.com'].valid_until
        time.time.return_value = expired_at - 500
        app_server = self.master.cell.apps['xxx.app1#1234'].server
        free_server = [s for s in ['test1.xx.com', 'test2.xx.com']
                       if s != app_server][0]
        # Run check before app expires.
        self.master.cell.apps['xxx.app1#1234'].placement_expiry = (
            expired_at - 500
        )
        time.time.return_value = expired_at - 600
        self.master.check_reboot()
        # Only the server without the unexpired app is rebooted.
        treadmill.zkutils.ensure_exists.assert_called_with(
            mock.ANY,
            '/reboots/' + free_server,
            acl=[master._SERVERS_ACL_DEL]
        )
        # time is beyond app expiration, expect app server to be rebooted.
        time.time.return_value = expired_at - 400
        self.master.check_reboot()
        treadmill.zkutils.ensure_exists.assert_has_calls([
            mock.call(mock.ANY, '/reboots/' + free_server, acl=mock.ANY),
            mock.call(mock.ANY, '/reboots/' + app_server, acl=mock.ANY),
        ])

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_deleted', mock.Mock())
    def test_placement_integrity(self):
        """Tests placement integrity."""
        zk_content = {
            'placement': {
                'test1.xx.com': {
                    'xxx.app1#1234': '',
                    'xxx.app2#2345': '',
                },
                'test2.xx.com': {
                    'xxx.app1#1234': '',
                }
            },
        }
        self.master.cell.apps['xxx.app1#1234'] = scheduler.Application(
            'xxx.app1#1234', 100, [1, 1, 1], 'app1')
        self.master.cell.apps['xxx.app2#2345'] = scheduler.Application(
            'xxx.app2#2345', 100, [1, 1, 1], 'app1')
        self.master.cell.apps['xxx.app1#1234'].server = 'test1.xx.com'
        self.make_mock_zk(zk_content)
        self.master.check_placement_integrity()
        # The duplicate placement on test2 must be removed.
        treadmill.zkutils.ensure_deleted.assert_called_with(
            mock.ANY,
            '/placement/test2.xx.com/xxx.app1#1234'
        )

    @mock.patch('kazoo.client.KazooClient.get', mock.Mock(
        return_value=('{}', None)))
    @mock.patch('kazoo.client.KazooClient.set', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.create', mock.Mock())
    def test_update_server_features(self):
        """Tests master.update_server_features()."""
        zkclient = kazoo.client.KazooClient()
        kazoo.client.KazooClient.create.return_value = '/events/000-servers-1'
        master.update_server_features(zkclient, 'foo.ms.com', ['test'])
        kazoo.client.KazooClient.set.assert_has_calls(
            [
                mock.call('/servers/foo.ms.com', b'features: [test]\n'),
            ],
            any_order=True
        )
        # Verify that event is placed correctly.
        kazoo.client.KazooClient.create.assert_called_with(
            '/events/000-servers-', mock.ANY,
            makepath=True, acl=mock.ANY, sequence=True, ephemeral=False
        )
# Run the test suite when this module is executed as a script.
if __name__ == '__main__':
    unittest.main()
|
<filename>utils/extractionUtils.py<gh_stars>0
import numpy as np
import json
import string
from string import punctuation
import nltk
from nltk.corpus import stopwords
import sys
import os
stopwords = stopwords.words('english')
def leaves(tree):
    '''
    Finds NP (nounphrase) leaf nodes of a chunk tree.

    Returns one leaf list per NP subtree, in traversal order.
    '''
    return [np_subtree.leaves()
            for np_subtree in tree.subtrees(filter=lambda t: t.label() == 'NP')]
def normalise(word):
    """Normalises words to lowercase; stemming/lemmatizing stay disabled."""
    # Stemming and lemmatization were deliberately commented out upstream,
    # so normalisation is just case folding.
    #word = stemmer.stem(word)
    #word = lemmatizer.lemmatize(word)
    return word.lower()
def acceptable_word(word):
    '''
    Checks conditions for acceptable word: length, stopword.

    A word qualifies when it is 2-40 characters long and its lowercase
    form is not in the module-level English ``stopwords`` list.
    '''
    return 2 <= len(word) <= 40 and word.lower() not in stopwords
def get_terms(tree):
    """Extract normalised, filtered keyphrase candidates from an NP chunk tree.

    Each NP leaf sequence is normalised word-by-word; words failing
    ``acceptable_word`` are dropped, and empty candidates are discarded.
    """
    candidates = ([normalise(word) for word, _tag in leaf if acceptable_word(word)]
                  for leaf in leaves(tree))
    return [term for term in candidates if term]
def get_kp(text):
    '''
    Extract keyphrase candidates from a document.

    Input:
        text : a list of word tokens (lower cased).
    Output:
        terms : list of candidate keyphrases, each a list of normalised words.

    NOTE(review): position finding (``find_positions``) and re-tokenization
    are commented out below, so only the candidate terms are returned.
    '''
    # Used when tokenizing words (currently unused: the regexp_tokenize call
    # below is commented out and ``text`` is assumed pre-tokenized).
    sentence_re = r'''(?x) # set flag to allow verbose regexps
        (?:[A-Z]\.)+ # abbreviations, e.g. U.S.A.
        | \w+(?:-\w+)* # words with optional internal hyphens
        | \$?\d+(?:\.\d+)?%? # currency and percentages, e.g. $12.40, 82%
        | \.\.\. # ellipsis
        | [][.,;"'?():_`-] # these are separate tokens; includes ], [
    '''
    #lemmatizer = nltk.WordNetLemmatizer()
    #stemmer = nltk.stem.porter.PorterStemmer()
    # Chunk grammar taken from the Su Nam Kim paper: NBAR is a noun/adjective
    # run terminated by a noun; NP is one NBAR or two joined by a preposition.
    grammar = r"""
        NBAR:
            {<NN.*|JJ>*<NN.*>} # Nouns and Adjectives, terminated with Nouns
            {<JJ|JJR|JJS|VBG|VBN>*<NN|NNS|NNP|NNPS|VBG>}
        NP:
            {<NBAR>}
            {<NBAR><IN><NBAR>} # Above, connected with in/of/etc...
    """
    #toks = nltk.regexp_tokenize(text, sentence_re)
    postoks = nltk.tag.pos_tag(text)       # POS-tag the pre-tokenized words
    chunker = nltk.RegexpParser(grammar)   # build the NP chunker once per call
    tree = chunker.parse(postoks)
    terms = get_terms(tree)
    #pos,pos_set = find_positions(text,terms)
    return terms  # ,pos,pos_set
def find_positions(document, bert_tocs, kps):
    '''
    Merge the same kps & keep present kps in document.

    Inputs:
        document : a word list, lower cased : ['sun', 'sunshine', ...]
        bert_tocs: BERT-tokenized word list, lower cased : ['try', 'ing', 'to', ...]
        kps : one or more keyphrases, no duplicates : [['sun'], ['key','phrase']]
    Outputs:
        pos_list : [all start positions, all end positions] : [[1,4,...],[2,6,...]]
        pos_set : set of (start, end) tuples : {(1,2),(4,6),...}
        Returns [None, None] when no keyphrase is present.

    Positions are 1-based, inclusive indices into ``bert_tocs``.
    '''
    tot_doc_char = ' '.join(document)
    positions_for_all = []
    position_start, position_end = [], []
    all_present_kps = []
    for kp in kps:
        if len(kp) < 1:
            continue
        ans_string = ' '.join(kp)
        # Cheap substring pre-filter: skip keyphrases absent from the raw text.
        if ans_string not in tot_doc_char:
            continue
        else:
            positions_for_each = []
            # find all positions for each kp: try to grow a match starting at
            # every BERT token whose text begins the keyphrase.
            for i in range(0, len(bert_tocs) - len(kp) + 1):
                found = False
                search_str = ''
                if ans_string.startswith(bert_tocs[i]):
                    found = True
                    search_str += bert_tocs[i]
                    search_idx = i
                    # Greedily append following tokens -- glued directly
                    # (WordPiece continuation) or with a joining space --
                    # while the accumulated string stays inside the keyphrase.
                    while found and search_idx < (len(bert_tocs) - 1):
                        search_idx += 1
                        if search_str + bert_tocs[search_idx] in ans_string:
                            search_str += bert_tocs[search_idx]
                        elif search_str + ' ' + bert_tocs[search_idx] in ans_string:
                            search_str += ' ' + bert_tocs[search_idx]
                        else:
                            found = False
                    # When the loop breaks, ``search_idx`` sits one past the
                    # last matched token, so (i+1, search_idx) is a 1-based
                    # inclusive span.
                    # NOTE(review): matches that end exactly at the final
                    # BERT token (including single-token matches at the last
                    # position, where i == search_idx) appear to be missed
                    # because the loop cannot advance past len(bert_tocs)-1
                    # -- confirm whether this is intended.
                    if (search_str == ans_string) and (i < search_idx):
                        positions_for_each.append((i + 1, search_idx))
                        position_start.append(i + 1)
                        position_end.append(search_idx)
            if len(positions_for_each) > 0:
                positions_for_all.extend(positions_for_each)
                all_present_kps.append(kp)
    # Every present keyphrase contributed at least one position pair.
    assert len(positions_for_all) >= len(all_present_kps)
    if len(all_present_kps) == 0:
        return [None, None]
    pos_list = [position_start, position_end]
    pos_set = set(positions_for_all)
    return pos_list, pos_set
def remove_punctuation(text):
    """Return *text* with every ASCII punctuation character removed."""
    kept_chars = (ch for ch in text if ch not in string.punctuation)
    return ''.join(kept_chars)
def remove_stopwords(text):
    """Return the words of *text* (an iterable of words) that are not stopwords.

    Bug fix: the original comprehension tested membership against an
    undefined name ``stopword``, raising NameError on every call; the
    module-level English list is ``stopwords``.
    """
    return [word for word in text if word not in stopwords]
<reponame>bopopescu/cndw<gh_stars>0
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Delete command for the Org Policy CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
from googlecloudsdk.api_lib.orgpolicy import service as org_policy_service
from googlecloudsdk.api_lib.orgpolicy import utils as org_policy_utils
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.labelmanager import arguments as label_manager_arguments
from googlecloudsdk.command_lib.org_policies import arguments
from googlecloudsdk.command_lib.org_policies import utils
from googlecloudsdk.core import log
@base.Hidden
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Delete(base.Command):
    r"""Delete an organization policy, or optionally only delete policy behavior defined under a condition.

    Deletes an organization policy, or optionally only deletes policy behavior
    defined under a condition. Specify the condition when calling the command to
    delete the policy behavior defined under that condition instead of the whole
    policy.

    ## EXAMPLES

    To delete the policy associated with the constraint 'gcp.resourceLocations'
    and the Project 'foo-project', run:

      $ {command} gcp.resourceLocations --project=foo-project

    To only delete the policy behavior defined for resources that have the
    LabelValue '2222' associated with the LabelKey '1111', run:

      $ {command} gcp.resourceLocations --project=foo-project \
          --condition='resource.matchLabels("labelKeys/1111", "labelValues/2222")'

    To delete the policy behavior for the Project 'foo-project' conditioned on
    the LabelValue 'dev' under LabelKey 'env' that lives under
    'organizations/123' run:

      $ {command} gcp.resourceLocations --project=foo-project \
          --condition='resource.matchLabels("env", "dev")' \
          --label-parent='organizations/123'
    """

    @staticmethod
    def Args(parser):
        # Register the positional constraint plus resource, condition and
        # label-parent flags on the calliope parser.
        arguments.AddConstraintArgToParser(parser)
        arguments.AddResourceFlagsToParser(parser)
        arguments.AddConditionFlagToParser(parser)
        label_manager_arguments.AddLabelParentArgToParser(
            parser, False,
            ('This flag must be specified as the parent of the LabelKey when the '
             'input for a condition expression is set as the LabelKey and '
             'LabelValue display name.')
        )

    def Run(self, args):
        """Deletes a whole policy or removes rules containing the specified condition from the policy.

        If --condition is not specified, then the policy is deleted using
        DeletePolicy.

        If --condition is specified, then the policy is fetched using GetPolicy. It
        then searches for and removes the rules that contain the specified condition
        from the policy. If the policy is empty after this operation and
        inheritFromParent is False, the policy is deleted using DeletePolicy. If
        not, the policy is updated using UpdatePolicy.

        Args:
          args: argparse.Namespace, An object that contains the values for the
            arguments specified in the Args method.

        Returns:
          If the policy is deleted, then messages.GoogleProtobufEmpty. If only
          a partial delete is issued, then the updated policy.
        """
        policy_service = org_policy_service.PolicyService()
        org_policy_messages = org_policy_service.OrgPolicyMessages()
        policy_name = utils.GetPolicyNameFromArgs(args)
        # Translate display-name label conditions into label resource names
        # before matching rules against the condition.
        if args.IsSpecified('condition') and args.IsSpecified('label_parent'):
            utils.TransformLabelDisplayNameConditionToLabelNameCondition(args)
        if args.condition is not None:
            # Partial delete: fetch the policy, drop the rules matching the
            # condition, then patch -- or fall through to a full delete when
            # nothing would remain and inheritance is off.
            get_request = org_policy_messages.OrgpolicyPoliciesGetRequest(
                name=policy_name)
            policy = policy_service.Get(get_request)
            new_policy = copy.deepcopy(policy)
            new_policy.spec.rules = org_policy_utils.GetNonMatchingRulesFromPolicy(
                policy, args.condition)
            if policy == new_policy:
                # No rule matched the condition; nothing to change.
                return policy
            if new_policy.spec.rules or new_policy.spec.inheritFromParent:
                update_request = org_policy_messages.OrgpolicyPoliciesPatchRequest(
                    name=policy_name,
                    forceUnconditionalWrite=False,
                    googleCloudOrgpolicyV2alpha1Policy=new_policy)
                update_response = policy_service.Patch(update_request)
                log.UpdatedResource(policy_name, 'policy')
                return update_response
        # Whole-policy delete (no condition, or nothing left to keep).
        delete_request = org_policy_messages.OrgpolicyPoliciesDeleteRequest(
            name=policy_name)
        delete_response = policy_service.Delete(delete_request)
        log.DeletedResource(policy_name, 'policy')
        return delete_response
|
<gh_stars>0
"""Tests for useful utilities for higher level polynomial classes."""
import pytest
from diofant import (ZZ, GeneratorsNeeded, I, Integer, Integral, Mul,
PolynomialError, Rational, cos, erf, exp, factor,
integrate, pi, sin, sqrt, symbols)
from diofant.abc import p, q, t, x, y, z
from diofant.polys.polyutils import (_nsort, _sort_factors, _sort_gens,
_unify_gens, parallel_dict_from_expr)
# This test module exports nothing.
__all__ = ()

# Non-commutative symbols used by the PolynomialError cases below.
A, B = symbols('A,B', commutative=False)
def test__nsort():
    """Numeric-key sorting of roots, including complex and symbolic entries."""
    # issue sympy/sympy#6137: radical roots must sort by numeric value.
    r = ([Rational(3, 2) + sqrt(Rational(-14, 3) - 2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3) -
          4/sqrt(Rational(-7, 3) + 61/(18*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)) +
          2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)) -
          61/(18*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)))/2 -
          sqrt(-7/3 + 61/(18*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)) +
          2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3))/2,
          Rational(3, 2) - sqrt(Rational(-7, 3) + 61/(18*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)) +
          2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3))/2 -
          sqrt(Rational(-14, 3) - 2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3) -
          4/sqrt(Rational(-7, 3) + 61/(18*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)) +
          2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)) -
          61/(18*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)))/2, Rational(3, 2) +
          sqrt(Rational(-14, 3) - 2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3) +
          4/sqrt(Rational(-7, 3) + 61/(18*(Rational(-415, 216) +
          13*I/12)**Rational(1, 3)) + 2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)) -
          61/(18*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)))/2 +
          sqrt(Rational(-7, 3) + 61/(18*(Rational(-415, 216)
          + 13*I/12)**Rational(1, 3)) + 2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3))/2,
          Rational(3, 2) + sqrt(Rational(-7, 3) + 61/(18*(Rational(-415, 216) +
          13*I/12)**Rational(1, 3)) + 2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3))/2 -
          sqrt(Rational(-14, 3) - 2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3) +
          4/sqrt(Rational(-7, 3) + 61/(18*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)) +
          2*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)) -
          61/(18*(Rational(-415, 216) + 13*I/12)**Rational(1, 3)))/2])
    ans = [r[1], r[0], r[-1], r[-2]]
    assert _nsort(r) == ans
    # separated=True splits into (real, complex); none of these are real.
    assert len(_nsort(r, separated=True)[0]) == 0
    # Tiny exponentials still sort by magnitude, not by textual form.
    b, c, a = exp(-1000), exp(-999), exp(-1001)
    assert _nsort((b, c, a)) == [a, b, c]
    d = symbols('d', extended_real=True)
    assert _nsort((d,)) == [d]
    assert _nsort((d,), separated=True) == [[d], []]
    c = symbols('c', complex=True, real=False)
    assert _nsort((c,)) == [c]
    assert _nsort((c,), separated=True) == [[], [c]]
    assert _nsort((I, Rational(1)), separated=True) == ([Rational(1)], [I])
def test__sort_gens():
    """Generator ordering: default sort plus ``wrt`` pinning and ``sort`` keys."""
    assert not _sort_gens([])
    assert _sort_gens([x]) == (x,)
    assert _sort_gens([p]) == (p,)
    assert _sort_gens([q]) == (q,)
    assert _sort_gens([x, p]) == (x, p)
    assert _sort_gens([p, x]) == (x, p)
    assert _sort_gens([q, p]) == (p, q)
    assert _sort_gens([q, p, x]) == (x, p, q)
    # ``wrt`` pins the given generator(s) to the front; it accepts symbols,
    # names, comma/space separated strings, and mixed lists.
    assert _sort_gens([x, p, q], wrt=x) == (x, p, q)
    assert _sort_gens([x, p, q], wrt=p) == (p, x, q)
    assert _sort_gens([x, p, q], wrt=q) == (q, x, p)
    assert _sort_gens([x, p, q], wrt='x') == (x, p, q)
    assert _sort_gens([x, p, q], wrt='p') == (p, x, q)
    assert _sort_gens([x, p, q], wrt='q') == (q, x, p)
    assert _sort_gens([x, p, q], wrt='x,q') == (x, q, p)
    assert _sort_gens([x, p, q], wrt='q,x') == (q, x, p)
    assert _sort_gens([x, p, q], wrt='p,q') == (p, q, x)
    assert _sort_gens([x, p, q], wrt='q,p') == (q, p, x)
    assert _sort_gens([x, p, q], wrt='x, q') == (x, q, p)
    assert _sort_gens([x, p, q], wrt='q, x') == (q, x, p)
    assert _sort_gens([x, p, q], wrt='p, q') == (p, q, x)
    assert _sort_gens([x, p, q], wrt='q, p') == (q, p, x)
    assert _sort_gens([x, p, q], wrt=[x, 'q']) == (x, q, p)
    assert _sort_gens([x, p, q], wrt=[q, 'x']) == (q, x, p)
    assert _sort_gens([x, p, q], wrt=[p, 'q']) == (p, q, x)
    assert _sort_gens([x, p, q], wrt=[q, 'p']) == (q, p, x)
    assert _sort_gens([x, p, q], wrt=['x', 'q']) == (x, q, p)
    assert _sort_gens([x, p, q], wrt=['q', 'x']) == (q, x, p)
    assert _sort_gens([x, p, q], wrt=['p', 'q']) == (p, q, x)
    assert _sort_gens([x, p, q], wrt=['q', 'p']) == (q, p, x)
    # ``sort`` gives an explicit total order as a '>'-chain.
    assert _sort_gens([x, p, q], sort='x > p > q') == (x, p, q)
    assert _sort_gens([x, p, q], sort='p > x > q') == (p, x, q)
    assert _sort_gens([x, p, q], sort='p > q > x') == (p, q, x)
    assert _sort_gens([x, p, q], wrt='x', sort='q > p') == (x, q, p)
    assert _sort_gens([x, p, q], wrt='p', sort='q > x') == (p, q, x)
    assert _sort_gens([x, p, q], wrt='q', sort='p > x') == (q, p, x)
    # Numbered symbols keep natural numeric order (x2 < x10, not lexical).
    X = symbols('x0:3 x10:13 x20:23')
    assert _sort_gens(X) == X
def test__unify_gens():
    """Merging two generator sequences preserves the relative order of both."""
    assert not _unify_gens([], [])
    assert _unify_gens([x], [x]) == (x,)
    assert _unify_gens([y], [y]) == (y,)
    assert _unify_gens([x, y], [x]) == (x, y)
    assert _unify_gens([x], [x, y]) == (x, y)
    assert _unify_gens([x, y], [x, y]) == (x, y)
    assert _unify_gens([y, x], [y, x]) == (y, x)
    assert _unify_gens([x], [y]) == (x, y)
    assert _unify_gens([y], [x]) == (y, x)
    assert _unify_gens([x], [y, x]) == (y, x)
    assert _unify_gens([y, x], [x]) == (y, x)
    assert _unify_gens([x, y, z], [x, y, z]) == (x, y, z)
    assert _unify_gens([z, y, x], [x, y, z]) == (z, y, x)
    assert _unify_gens([x, y, z], [z, y, x]) == (x, y, z)
    assert _unify_gens([z, y, x], [z, y, x]) == (z, y, x)
    assert _unify_gens([x, y, z], [t, x, p, q, z]) == (t, x, y, p, q, z)
def test__sort_factors():
    """Factor sorting; ``multiple`` toggles plain lists vs (factor, exp) pairs."""
    assert _sort_factors([], multiple=True) == []
    assert _sort_factors([], multiple=False) == []
    # Plain factor lists: shorter first, then content.
    F = [[1, 2, 3], [1, 2], [1]]
    G = [[1], [1, 2], [1, 2, 3]]
    assert _sort_factors(F, multiple=False) == G
    F = [[1, 2], [1, 2, 3], [1, 2], [1]]
    G = [[1], [1, 2], [1, 2], [1, 2, 3]]
    assert _sort_factors(F, multiple=False) == G
    F = [[2, 2], [1, 2, 3], [1, 2], [1]]
    G = [[1], [1, 2], [2, 2], [1, 2, 3]]
    assert _sort_factors(F, multiple=False) == G
    # (factor, multiplicity) pairs: multiplicity participates in the key.
    F = [([1, 2, 3], 1), ([1, 2], 1), ([1], 1)]
    G = [([1], 1), ([1, 2], 1), ([1, 2, 3], 1)]
    assert _sort_factors(F, multiple=True) == G
    F = [([1, 2], 1), ([1, 2, 3], 1), ([1, 2], 1), ([1], 1)]
    G = [([1], 1), ([1, 2], 1), ([1, 2], 1), ([1, 2, 3], 1)]
    assert _sort_factors(F, multiple=True) == G
    F = [([2, 2], 1), ([1, 2, 3], 1), ([1, 2], 1), ([1], 1)]
    G = [([1], 1), ([1, 2], 1), ([2, 2], 1), ([1, 2, 3], 1)]
    assert _sort_factors(F, multiple=True) == G
    F = [([2, 2], 1), ([1, 2, 3], 1), ([1, 2], 2), ([1], 1)]
    G = [([1], 1), ([2, 2], 1), ([1, 2], 2), ([1, 2, 3], 1)]
    assert _sort_factors(F, multiple=True) == G
def test__dict_from_expr_if_gens():
    """(monomial -> coefficient) dict construction with explicit generators."""
    # Constants: zero exponent tuple, coefficient carries the value.
    assert parallel_dict_from_expr([Integer(17)],
                                   gens=(x,)) == ([{(0,): 17}], (x,))
    assert parallel_dict_from_expr([Integer(17)],
                                   gens=(x, y)) == ([{(0, 0): 17}], (x, y))
    assert parallel_dict_from_expr([Integer(17)],
                                   gens=(x, y, z)) == ([{(0, 0, 0): 17}],
                                                       (x, y, z))
    assert parallel_dict_from_expr([Integer(-17)],
                                   gens=(x,)) == ([{(0,): -17}], (x,))
    assert parallel_dict_from_expr([Integer(-17)],
                                   gens=(x, y)) == ([{(0, 0): -17}], (x, y))
    assert parallel_dict_from_expr([Integer(-17)],
                                   gens=(x, y, z)) == ([{(0, 0, 0): -17}],
                                                       (x, y, z))
    # Monomials: exponent tuples follow the generator order.
    assert parallel_dict_from_expr([17*x], gens=(x,)) == ([{(1,): 17}], (x,))
    assert parallel_dict_from_expr([17*x],
                                   gens=(x, y)) == ([{(1, 0): 17}], (x, y))
    assert parallel_dict_from_expr([17*x],
                                   gens=(x, y, z)) == ([{(1, 0, 0): 17}],
                                                       (x, y, z))
    assert parallel_dict_from_expr([17*x**7], gens=(x,)) == ([{(7,): 17}], (x,))
    assert parallel_dict_from_expr([17*x**7*y],
                                   gens=(x, y)) == ([{(7, 1): 17}], (x, y))
    assert parallel_dict_from_expr([17*x**7*y*z**12],
                                   gens=(x, y, z)) == ([{(7, 1, 12): 17}],
                                                       (x, y, z))
    # Sums: symbols outside the generators fold into the coefficients.
    assert parallel_dict_from_expr([x + 2*y + 3*z],
                                   gens=(x,)) == ([{(1,): 1,
                                                    (0,): 2*y + 3*z}], (x,))
    assert parallel_dict_from_expr([x + 2*y + 3*z],
                                   gens=(x, y)) == ([{(1, 0): 1, (0, 1): 2,
                                                      (0, 0): 3*z}], (x, y))
    assert parallel_dict_from_expr([x + 2*y + 3*z],
                                   gens=(x, y, z)) == ([{(1, 0, 0): 1,
                                                         (0, 1, 0): 2,
                                                         (0, 0, 1): 3}],
                                                       (x, y, z))
    assert parallel_dict_from_expr([x*y + 2*x*z + 3*y*z],
                                   gens=(x,)) == ([{(1,): y + 2*z,
                                                    (0,): 3*y*z}], (x,))
    assert parallel_dict_from_expr([x*y + 2*x*z + 3*y*z],
                                   gens=(x, y)) == ([{(1, 1): 1, (1, 0): 2*z,
                                                      (0, 1): 3*z}], (x, y))
    assert parallel_dict_from_expr([x*y + 2*x*z + 3*y*z],
                                   gens=(x, y, z)) == ([{(1, 1, 0): 1,
                                                         (1, 0, 1): 2,
                                                         (0, 1, 1): 3}],
                                                       (x, y, z))
    assert parallel_dict_from_expr([2**y*x],
                                   gens=(x,)) == ([{(1,): 2**y}], (x,))
    # Unevaluated objects such as Integral may themselves become generators.
    assert parallel_dict_from_expr([Integral(x, (x, 1, 2)) +
                                    x]) == ([{(0, 1): 1, (1, 0): 1}],
                                            (x, Integral(x, (x, 1, 2))))
    # 2**y is not polynomial in y, so requesting y as a generator fails.
    pytest.raises(PolynomialError,
                  lambda: parallel_dict_from_expr([2**y*x], gens=(x, y)))
def test__dict_from_expr_no_gens():
    """Generator inference when none are supplied explicitly."""
    # Pure constants have no generators to infer.
    pytest.raises(GeneratorsNeeded,
                  lambda: parallel_dict_from_expr([Integer(17)]))
    assert parallel_dict_from_expr([x]) == ([{(1,): 1}], (x,))
    assert parallel_dict_from_expr([y]) == ([{(1,): 1}], (y,))
    assert parallel_dict_from_expr([x*y]) == ([{(1, 1): 1}], (x, y))
    assert parallel_dict_from_expr([x + y]) == ([{(1, 0): 1, (0, 1): 1}],
                                                (x, y))
    # With greedy inference even sqrt(2) can serve as a generator.
    assert parallel_dict_from_expr([sqrt(2)]) == ([{(1,): 1}], (sqrt(2),))
    pytest.raises(GeneratorsNeeded,
                  lambda: parallel_dict_from_expr([sqrt(2)], greedy=False))
    # Symbols absorbed into the coefficient domain are not generators.
    assert parallel_dict_from_expr([x*y],
                                   domain=ZZ.inject(x)) == ([{(1,): x}], (y,))
    assert parallel_dict_from_expr([x*y],
                                   domain=ZZ.inject(y)) == ([{(1,): y}], (x,))
    # Algebraic numbers stay generators unless an extension absorbs them.
    assert parallel_dict_from_expr([3*sqrt(2)*pi*x*y],
                                   extension=None) == ([{(1, 1, 1, 1): 3}],
                                                       (x, y, pi, sqrt(2)))
    assert parallel_dict_from_expr([3*sqrt(2)*pi*x*y],
                                   extension=True) == ([{(1, 1, 1): 3*sqrt(2)}],
                                                       (x, y, pi))
    # Transcendental functions become (sorted) generators too.
    f = cos(x)*sin(x) + cos(x)*sin(y) + cos(y)*sin(x) + cos(y)*sin(y)
    assert parallel_dict_from_expr([f]) == ([{(0, 1, 0, 1): 1, (0, 1, 1, 0): 1,
                                              (1, 0, 0, 1): 1,
                                              (1, 0, 1, 0): 1}],
                                            (cos(x), cos(y), sin(x), sin(y)))
def test__parallel_dict_from_expr_if_gens():
    """Parallel conversion with explicit generators; non-commutative input fails."""
    assert parallel_dict_from_expr([x + 2*y + 3*z, Integer(7)], gens=(x,)) == \
        ([{(1,): 1, (0,): 2*y + 3*z}, {(0,): 7}], (x,))
    # Unevaluated products are flattened before conversion.
    assert parallel_dict_from_expr((Mul(x, x**2, evaluate=False),), gens=(x,)) == \
        ([{(3,): 1}], (x,))
    pytest.raises(PolynomialError, lambda: parallel_dict_from_expr((A*x,), gens=(x,)))
def test__parallel_dict_from_expr_no_gens():
    """Parallel conversion infers one generator tuple shared by all inputs."""
    assert parallel_dict_from_expr([x*y, Integer(3)]) == \
        ([{(1, 1): 1}, {(0, 0): 3}], (x, y))
    assert parallel_dict_from_expr([x*y, 2*z, Integer(3)]) == \
        ([{(1, 1, 0): 1}, {(0, 0, 1): 2}, {(0, 0, 0): 3}], (x, y, z))
    # Unevaluated products are flattened before conversion.
    assert parallel_dict_from_expr((Mul(x, x**2, evaluate=False),)) == \
        ([{(3,): 1}], (x,))
def test_parallel_dict_from_expr():
    """Several expressions share one generator tuple; non-commutative input fails."""
    assert parallel_dict_from_expr([x - 1, x**2 - 2]) == ([{(0,): -1, (1,): 1},
                                                           {(0,): -2,
                                                            (2,): 1}], (x,))
    pytest.raises(PolynomialError, lambda: parallel_dict_from_expr([A*B - B*A]))
def test_dict_from_expr():
    """Single-expression conversion through the parallel helper."""
    assert parallel_dict_from_expr([x - 1]) == ([{(0,): -1, (1,): 1}], (x,))
    pytest.raises(PolynomialError, lambda: parallel_dict_from_expr([A*B - B*A]))
def test_sympyissue_7383():
    """Definite integral of an erf-based integrand (sympy issue 7383)."""
    x, z, R, a = symbols('x z R a')
    r = sqrt(x**2 + z**2)
    u = erf(a*r/sqrt(2))/r
    # Second z-derivative evaluated on the sphere of radius R.
    Ec = u.diff(z, z).subs({x: sqrt(R*R - z*z)})
    assert integrate(Ec, (z, -R, R)).simplify() == \
        -2*sqrt(2)*R*a**3*exp(-R**2*a**2/2)/(3*sqrt(pi))
def test_sympyissue_10161():
    """factor() must not change an abs()-laden expression (sympy issue 10161)."""
    x = symbols('x', real=True)
    h = (2*x*(-2*x + abs(x))*(x**2 - 1)/abs(x**2 - 1)
         + (x/abs(x) - 2)*abs(x**2 - 1))
    assert (h - factor(h)).simplify() == 0
|
<gh_stars>0
import utils
import re
from room import Room
# Pre-compiled map-parsing patterns. Fix: the first and third patterns were
# plain strings, so '\|' and '\+' trigger invalid-escape SyntaxWarnings on
# modern Python; raw strings are byte-for-byte the same regexes.
# A single vertical wall character '|'.
simpleWallPattern = re.compile(r"([\|])")
# A run of horizontal-wall characters.
# NOTE(review): the character class [\+\-*\+] matches any of '+', '-', '*';
# if a literal '+---+' span was intended the pattern would be r"\+\-*\+" --
# confirm against the map format before changing behavior.
lowerWallPattern = re.compile(r"[\+\-*\+]+")
# Two adjacent horizontal wall segments sharing a middle '+'.
multipleLowerWallPattern = re.compile(r"\+\-*\+\-*\+")
class LineParser:
    """Stateful parser that reconstructs rooms from an ASCII floor-plan.

    Vertical walls are '|' and horizontal walls are '+--+' runs. Rooms move
    from ``enqueued_rooms`` (upper wall seen, lower wall not yet confirmed)
    into ``open_rooms`` (being parsed) and ``in_house_rooms`` (all rooms).
    """

    def __init__(self):
        self.last_line = ""
        self.in_house_rooms = []
        # Only rooms being parsed
        self.open_rooms = []
        self.enqueued_rooms = []
        self.room_count = 0
        self.lineLimit = 50
        self.lines_in_file = utils.file_len("maps/rooms.txt")

    # --- trivial accessors ------------------------------------------------

    def getInHouseRooms(self):
        return self.in_house_rooms

    def getLastLine(self):
        return self.last_line

    def setLastLine(self, line):
        self.last_line = line

    def getOpenRooms(self):
        return self.open_rooms

    def getEnqueuedRooms(self):
        return self.enqueued_rooms

    def getRoomCount(self):
        return self.room_count

    def getLineLimit(self):
        return self.lineLimit

    # --- room bookkeeping -------------------------------------------------

    def add_in_house(self, room):
        self.in_house_rooms.append(room)

    def add_open(self, room):
        self.open_rooms.append(room)

    def addToQueue(self, room):
        self.enqueued_rooms.append(room)

    def resetQueue(self):
        self.enqueued_rooms = []

    def addRoomToLists(self, room):
        # A confirmed room is both recorded permanently and kept open so its
        # contents (name, chairs) can still be parsed.
        self.add_in_house(room)
        self.add_open(room)
        self.room_count += 1

    def removeFromOpenRooms(self, room):
        room.setParseFinish()
        self.open_rooms.remove(room)

    def newRoom(self, lb, rb):
        return Room(lb, rb)

    def createNewRoom(self, lb, rb):
        # NOTE(review): this calls ``Room.newRoom(lb, rb)``; it likely should
        # be ``self.newRoom(lb, rb)`` unless Room really exposes a ``newRoom``
        # factory -- confirm against the Room class.
        self.addRoomToLists(Room.newRoom(lb, rb))

    def liberateQueue(self, currentLine):
        # Decide the fate of each enqueued room from the vertical walls seen
        # on the current line, then clear the queue.
        for room in self.enqueued_rooms:
            wallPositions = []
            for match in re.finditer(simpleWallPattern, currentLine):
                wallPositions.append(match.start())
            # Case 1. Walls fit and room is created.
            if room.getLeftWall() in wallPositions and room.getRightWall() in wallPositions:
                self.addRoomToLists(room)
                # print("Created new Room: " + room.__str__())
            # Case 2. We find a matching left wall and set our right-side neighbour wall to this left wall.
            elif room.getLeftWall() in wallPositions and room.getRightWall() not in wallPositions:
                neighbour = self.findAdjacentRoom("right", room)
                if neighbour:
                    self.findAdjacentRoom("right", room).setLeftWall(room.getLeftWall())
            # Case 3. We find a matching right wall and set our left-side neighbour wall to this right wall.
            elif room.getLeftWall() not in wallPositions and room.getRightWall() in wallPositions:
                neighbour = self.findAdjacentRoom("left", room)
                if neighbour:
                    self.findAdjacentRoom("left", room).setRightWall(room.getRightWall())
        self.resetQueue()

    def getPositionOfVerticalWalls(self, line):
        # Column positions of every '|' on the line.
        # NOTE(review): local name ``list`` shadows the builtin (harmless here).
        list = []
        for match in re.finditer(simpleWallPattern, line):
            list.append(match.start())
        return list

    def getRoomFromPosition(self, pos):
        # Left wall inclusive, right wall exclusive; -1 when no room matches.
        for room in self.getOpenRooms():
            if room.getLeftWall() <= pos < room.getRightWall():
                return room
        return -1

    def findAdjacentRoom(self, direction, room):
        # Two rooms are adjacent when they share a wall column.
        for candidate in self.getOpenRooms():
            if direction == "left":
                if room.getLeftWall() == candidate.getRightWall():
                    return candidate
            if direction == "right":
                if room.getRightWall() == candidate.getLeftWall():
                    return candidate
        # print("No adjacent room found.")
        return None

    def updateNamesFromLine(self, names):
        # ``names`` holds (name, column) pairs; names arrive wrapped in
        # parentheses, e.g. "(kitchen)".
        if len(names) > 0:
            for name, index in names:
                name = name.strip("(")
                name = name.strip(")")
                this_room = self.getRoomFromPosition(index)
                if this_room != -1:
                    this_room.setRoomName(name)

    def updateChairsFromLine(self, chairs):
        # ``chairs`` holds (chair_type, column) pairs.
        if (len(chairs) > 0):
            for chair, index in chairs:
                this_room = self.getRoomFromPosition(index)
                if this_room != -1:
                    this_room.addChairToRoom(chair)

    def findLowerWallOnPosition(self, left, right, line):
        # True when a horizontal wall run spans exactly [left, right].
        # NOTE(review): ``matches`` is collected but never used, and the
        # function implicitly returns None when nothing spans the range --
        # confirm callers treat None as False.
        matches = []
        for match in re.finditer('\+', line):
            matches.append(match.start())
        for match in re.finditer(lowerWallPattern, line):
            if match.start() == left and match.end() == right + 1:
                return True

    def getChairsInHouseByType(self):
        # Totals per chair type, index order: W, P, S, C.
        count = [0, 0, 0, 0]
        for r in self.getInHouseRooms():
            count[0] += r.getChairsByType("W")
            count[1] += r.getChairsByType("P")
            count[2] += r.getChairsByType("S")
            count[3] += r.getChairsByType("C")
        return count

    def getTotalChairCount(self):
        # Grand total of chairs over all recorded rooms.
        count = 0
        for r in self.getInHouseRooms():
            count += r.getTotalChairsInRoom()
        return count
|
<filename>CouncilTag/api/views.py<gh_stars>0
from django.shortcuts import render
from rest_framework import status, generics
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.pagination import LimitOffsetPagination
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth import get_user_model
from rest_framework import serializers
from rest_framework.views import APIView
from django.contrib.auth.mixins import LoginRequiredMixin
User = get_user_model()
from CouncilTag.ingest.models import Agenda, Tag, AgendaItem, EngageUserProfile, Message, Committee, EngageUser
from CouncilTag.api.serializers import AgendaSerializer, TagSerializer, AgendaItemSerializer, UserFeedSerializer, CommitteeSerializer
from CouncilTag.api.serializers import VerifySerializer, SignupSerializer, AddMessageSerializer, LoginSerializer, ModifyTagSerializer
from django.contrib.auth import login, authenticate
from django.contrib.auth.decorators import login_required
from datetime import datetime
from CouncilTag.api.utils import verify_recaptcha, send_mail
import jwt
import json
import pytz
import calendar
import uuid
import urllib
import random
import bcrypt
import sys
from CouncilTag import settings
from psycopg2.extras import NumericRange
from drf_yasg import openapi
from drf_yasg.utils import swagger_auto_schema, no_body
class SmallResultsPagination(LimitOffsetPagination):
    """Limit/offset pagination with a small default page size (2)."""
    default_limit = 2
class MediumResultsPagination(LimitOffsetPagination):
    """Limit/offset pagination with a medium default page size (10)."""
    default_limit = 10
class AgendaView(generics.ListAPIView):
    """List all agendas, newest meeting first, two per page."""
    queryset = Agenda.objects.all().order_by('-meeting_time')
    serializer_class = AgendaSerializer
    pagination_class = SmallResultsPagination
class TagView(generics.ListAPIView):
    """List every tag (unpaginated)."""
    queryset = Tag.objects.all()
    serializer_class = TagSerializer
class UserFeed(generics.ListAPIView):
    '''
    List the agendas stored in the database with different results for logged in users
    or users who are just using the app without logging in.
    Query Parameters: begin -- start of datetime you want to query
                      end -- end of datetime you want to query
    For logged in users:
        we get their stored preferred tags from their profile and return only
        agenda items carrying one of those tags within [begin, end]
    For not logged in users:
        we get the most recent agenda items and return those
    '''
    serializer_class = UserFeedSerializer
    pagination_class = MediumResultsPagination

    def get_queryset(self):
        data = []
        now = datetime.now(pytz.UTC)
        unixnow = calendar.timegm(now.utctimetuple())
        # DRF sets request.user to AnonymousUser when nobody is logged in.
        if (not isinstance(self.request.user, AnonymousUser)):
            user = EngageUser.objects.get(user=self.request.user)
            # Bug fix: ``tag_names`` was referenced but never defined (the
            # line computing the user's tags had been commented out), so this
            # branch always raised NameError. Presumably ``user.tags`` holds
            # the preferred Tag objects -- confirm against the EngageUser
            # model.
            tag_names = [tag.name for tag in user.tags.all()]
            agenda_items = AgendaItem.objects.filter(tags__name__in=tag_names).filter(
                agenda__meeting_time__contained_by=NumericRange(
                    self.request.data['begin'], self.request.data['end']))
            if not agenda_items:
                # Nothing matched the window; previously agenda_items[0]
                # would have raised IndexError.
                return data
            meeting_held = agenda_items[0].meeting_time <= unixnow
        else:
            # return the most recent agenda items for the upcoming meeting,
            # if there is no upcoming meeting, show the last meeting instead
            last_run = Agenda.objects.order_by('-meeting_time')[0]
            meeting_held = last_run.meeting_time <= unixnow
            agenda_items = last_run.items.all()
        for ag_item in agenda_items:
            data.append({"item": ag_item, "tag": list(
                ag_item.tags.all()), "meeting_already_held": meeting_held})
        return data
def calculateTallies(messages_qs):
    """Aggregate stance and demographic tallies over verified messages.

    Messages that still carry an ``authcode`` (i.e. unverified) are skipped.
    ``pro`` is a tri-state: 0 -> con, 1 -> pro, anything else -> more_info.
    Returns a dict of counts plus the overall ``total`` of counted messages.
    """
    counts = {
        "home_owner": 0, "business_owner": 0, "resident": 0,
        "works": 0, "school": 0, "child_school": 0,
        "pro": 0, "con": 0, "more_info": 0, "total": 0,
    }
    demographic_flags = ("home_owner", "business_owner", "resident",
                         "works", "school", "child_school")
    for message in messages_qs:
        # Unverified messages (authcode still set) do not count.
        if message.authcode is not None:
            continue
        if message.pro == 0:
            counts["con"] += 1
        elif message.pro == 1:
            counts["pro"] += 1
        else:
            counts["more_info"] += 1
        for flag in demographic_flags:
            if getattr(message, flag):
                counts[flag] += 1
        counts["total"] += 1
    return counts
@api_view(['GET'])
def get_agenda(request, meeting_id):
    '''
    Returns the specified JSON serialized agenda if it exists, else 404.
    '''
    # Bug fix: QuerySet.get() raises DoesNotExist instead of returning None,
    # so the previous ``is None`` check was dead code and a missing agenda
    # produced a 500 instead of this 404.
    try:
        agenda = Agenda.objects.get(meeting_id=meeting_id)
    except Agenda.DoesNotExist:
        return Response(data={"error": "No agenda item with id:" + str(meeting_id)}, status=404)
    data = AgendaSerializer(agenda, many=False).data
    return Response(data=data, status=200)
@api_view(['GET'])
def get_agenda_item(request, agenda_item_id):
    '''
    Returns the JSON serialized agenda item if it exists, else 404.
    '''
    # Bug fix: QuerySet.get() raises DoesNotExist instead of returning None,
    # so the previous ``is None`` check was dead code and a missing item
    # produced a 500 instead of this 404.
    try:
        agenda_item = AgendaItem.objects.get(agenda_item_id=agenda_item_id)
    except AgendaItem.DoesNotExist:
        return Response(data={"error": "No agenda item with id:" + str(agenda_item_id)}, status=404)
    data = AgendaItemSerializer(agenda_item, many=False).data
    return Response(data=data, status=200)
@api_view(['GET'])
def get_agenda_item_detail(request, agenda_item_id):
    '''
    Returns a detail object for an agenda item, including
    agree/disagree/no_position tallies over its verified messages.
    '''
    # Bug fix: QuerySet.get() raises DoesNotExist instead of returning None,
    # so the previous ``is None`` check was dead code and a missing item
    # produced a 500 instead of this 404.
    try:
        agenda_item = AgendaItem.objects.get(agenda_item_id=agenda_item_id)
    except AgendaItem.DoesNotExist:
        return Response(data={"error": "No agenda item with id:" + str(agenda_item_id)}, status=404)
    messages = Message.objects.filter(agenda_item=agenda_item)
    tallyDict = calculateTallies(messages)
    return Response(data=tallyDict, status=200)
@swagger_auto_schema(request_body=LoginSerializer, method='post')
@api_view(['POST'])
def login_user(request, format=None):
    '''
    Login a current user. Expects an email address and password.
    Authentication by email works because
    'CouncilTag.api.backends.EmailPasswordBackend' is loaded.
    Accepts raw JSON or form-data encoded bodies.
    Returns {'token': <jwt>} with status 201 on success, 404 otherwise.
    '''
    data = request.data
    # Validate the payload up front (consistent with change_password below)
    # instead of letting a missing key raise KeyError -> HTTP 500.
    if 'email' not in data or 'password' not in data:
        return Response(status=404, data={"error": "Expects email and password fields"})
    email = data['email']
    password = data['password']
    user = authenticate(username=email, password=password)
    if user is not None:
        # This is where attributes to the request are stored
        login(request, user)
        token = jwt.encode({'email': user.email}, settings.SECRET_KEY)
        return Response({'token': token}, status=201)
    else:
        return Response(status=404, data={"error": "wrong username and password"})
@login_required
@api_view(['POST'])
def change_password(request, format=None):
    """Change the logged-in user's password.

    Expects ``password`` (current) and ``new_password``. Returns 200 on
    success, an error Response otherwise.
    """
    data = request.data
    if 'password' not in data or 'new_password' not in data:
        return Response(status=404, data={"error": "Expects password and new_password fields"})
    if request.user.check_password(data['password']):
        # Verified password
        request.user.set_password(data['new_password'])
        try:
            request.user.save()
            send_mail({
                "user": request.user,
                "subject": "Reset password",
                "content": "Someone has reset your password. If this was not you, please contact us at: <EMAIL>",
            })
        except Exception:
            # Narrowed from a bare ``except:`` (which also swallowed
            # KeyboardInterrupt/SystemExit).
            return Response({"error": "Could not save password"}, status=404)
        # Bug fix: the success path previously fell off the end of the
        # function and returned None, which crashes the Django view.
        return Response(status=200)
    else:
        print("Error, user %s attempted to reset password with incorrect password" % (
            request.user.username))
        # NOTE(review): this error is returned with the default 200 status;
        # clients checking only the status code will see success -- confirm
        # whether a 4xx was intended before changing it.
        return Response({"error": "Incorrect password"})
@login_required
@api_view(['POST'])
def update_profile(request, format=None):
    '''
    Update profile booleans.

    Any of the flags below may appear in the request body: present keys are
    coerced to bool and stored, absent keys are left untouched.
    '''
    data = request.data
    profile = EngageUserProfile.objects.get(user_id=request.user.id)
    # Collapse the six copy-pasted if/elif pairs into one loop; semantics are
    # identical: when the key is present, truthy -> True, falsy -> False.
    for flag in ('home_owner', 'resident', 'business_owner',
                 'works', 'school', 'child_school'):
        if flag in data:
            setattr(profile, flag, bool(data[flag]))
    try:
        profile.save()
        return Response(status=200)
    except Exception:
        # Narrowed from a bare ``except:``.
        print("Unexpected error:", sys.exc_info()[0])
        return Response(status=404)
class VerifyView(APIView):
    @swagger_auto_schema(request_body=VerifySerializer)
    def post(self, request):
        """Verify signup for user or email message for non-user.

        Expects `type` ("signup" or "email"), `code`, `email`, and — for
        type "email" — the message `id`. Clears the stored authcode once
        the supplied code matches.
        """
        data = request.data
        if 'type' not in data or 'code' not in data or 'email' not in data:
            return Response(data={"error": "Data object must contain code, email, id, and type"}, status=404)
        if data['type'] not in ("email", "signup"):
            return Response(data={"error": "Data object's type must be signup or email"}, status=404)
        if data['type'] == 'email':
            # Non-registered senders verify individual messages; no User
            # lookup is needed (and may not exist for this address).
            if 'id' not in data:
                return Response(data={"error": "Data object must contain code, email, id, and type, for email message"}, status=404)
            # BUG FIX: .get() raises DoesNotExist instead of returning None,
            # so the old None-check was dead and a bad id returned HTTP 500.
            message = Message.objects.filter(id=data['id']).first()
            if message is None:
                return Response(data={"error": "Message id: " + str(data['id']) + "was not found"}, status=404)
            authcode = message.authcode
            if authcode is None:
                return Response(data={"error": "Message has already been verified"}, status=200)
            if not check_auth_code(data['code'], authcode):
                return Response(data={"error": "Authcodes do not match for email"}, status=404)
            message.authcode = None
            message.save()
            return Response(status=200)
        # data['type'] == 'signup'
        # BUG FIX: User.objects.get() also raised DoesNotExist (HTTP 500)
        # for unknown emails — and was previously executed even for the
        # "email" branch above, crashing for non-registered addresses.
        user = User.objects.filter(email=data["email"]).first()
        if user is None:
            return Response(data={"error": "User not found"}, status=404)
        profile = EngageUserProfile.objects.get(user=user)
        authcode = profile.authcode
        if authcode is None:
            return Response(data={"error": "User has already been verified"}, status=200)
        if not check_auth_code(data['code'], authcode):
            return Response(data={"error": "Authcodes do not match for email"}, status=404)
        profile.authcode = None
        profile.save()
        return Response(status=200)
def check_auth_code(plain_code, hashed):
    """Return True when *plain_code* bcrypt-hashes (salted by *hashed*) to *hashed*."""
    rehashed = bcrypt.hashpw(plain_code.encode('utf-8'),
                             hashed.encode('utf-8')).decode('utf-8')
    return rehashed == hashed
class SignupView(APIView):
    @swagger_auto_schema(request_body=SignupSerializer)
    def post(self, request):
        '''
        Signup new user. Must be unique username and email. Will create authcode and email to user.

        Returns 201 with a JWT on success, 400 on missing fields, 404 when
        user creation fails (most commonly a duplicate username/email).
        '''
        data = request.data
        required = ('first_name', 'last_name', 'username', 'password', 'email')
        if any(field not in data for field in required):
            return Response(data={"error": "Data object must contain first_name, last_name, username, password, and email"}, status=400)
        email = data['email']
        password = data['password']
        username = data['username']
        first_name = data['first_name']
        last_name = data['last_name']
        # One-time auth code: a random 8-char slice of a uuid, stored hashed.
        # BUG FIX: authcode was previously bytes, which urlencode renders as
        # "b'xxxxxxxx'" in the confirmation link — the emailed code could
        # then never match. Keep it a str; encode only for bcrypt.
        CODE_LENGTH = 8
        rand_begin = random.randint(0, 32 - CODE_LENGTH)
        authcode = str(uuid.uuid1()).replace(
            "-", "")[rand_begin:rand_begin + CODE_LENGTH]
        authcode_hashed = bcrypt.hashpw(
            authcode.encode('utf-8'), bcrypt.gensalt()).decode('utf-8')
        # Optional profile booleans: truthy value -> True, absent/falsy -> False.
        home_owner = bool(data.get('home_owner'))
        resident = bool(data.get('resident'))
        business_owner = bool(data.get('business_owner'))
        works = bool(data.get('works'))
        school = bool(data.get('school'))
        child_school = bool(data.get('child_school'))
        try:
            user = User.objects.create_user(username, email, password)
            user.first_name = first_name
            user.last_name = last_name
            user.save()
            # Don't need to save any values from it
            EngageUserProfile.objects.create(
                user=user, home_owner=home_owner, resident=resident, business_owner=business_owner,
                works=works, school=school, child_school=child_school, authcode=authcode_hashed)
            query_parameters = urllib.parse.urlencode({
                "code": authcode,
                "email": email,
                "type": "signup",
                "id": ""
            })
            query_string = 'https://engage-santa-monica.herokuapp.com/#/emailConfirmation?' + query_parameters
            content = '<html><body><h3>Welcome to the Engage platform for Santa Monica,</h3> Please click <a href="' + \
                query_string + '">here</a> to authenticate.<br/><br/>Thank you for your interest in your local government!<br/><br/> If you are receiving this in error, please email: <a href="mailto:<EMAIL>"><EMAIL></a>.</body></html>'
            send_mail(
                {"user": user, "subject": "Please authenticate your email for the Engage platform",
                 "content": content})
            # CONSISTENCY: login_user issues tokens keyed by 'email'; the old
            # payload here used the key 'username' (with the email as value).
            token = jwt.encode({"email": user.email}, settings.SECRET_KEY)
            return Response({"token": token}, status=201)
        except Exception:
            print("Unexpected error:", sys.exc_info()[0])
            return Response(status=404)
@api_view(['GET'])
def get_agendaitem_by_tag(request, tag_name):
    '''
    Get agenda items for a specific tag name type.
    Can amend returns with offset and limit query parameters.
    '''
    items = AgendaItem.objects.filter(
        tags__name=tag_name).select_related().all()
    limit = request.GET.get('limit')
    offset = request.GET.get('offset')
    total = len(items)
    returned = total
    # Pagination only applies when an offset is supplied; a lone limit is
    # ignored, matching the documented behaviour.
    if offset is not None:
        try:
            offset = int(offset)
            end = None
            if limit is not None:
                limit = int(limit)
                end = offset + limit
            if offset > len(items):
                return Response(status=400)
            items = items[offset:end]
            returned = len(items)
        except ValueError:
            # Non-numeric limit/offset query parameters.
            return Response(status=400)
    serialized = AgendaItemSerializer(items, many=True)
    return Response(data={
        'tag': tag_name,
        'items': serialized.data,
        'limit': limit,
        'offset': offset,
        'total_items': total,
        'items_returned': returned,
    })
class UserTagView(LoginRequiredMixin, APIView):
    @swagger_auto_schema(request_body=no_body, responses={"404": "Not logged in, should be 401", "200": "OK, retrieved tags"})
    def get(self, request):
        """Return the names of every tag on the requesting user's profile."""
        profile = EngageUserProfile.objects.get(user=request.user)
        return Response(data=[tag.name for tag in profile.tags.all()])

    @swagger_auto_schema(request_body=ModifyTagSerializer, responses={"404": "Not logged in, should be 401", "200": "OK, added tags"})
    def post(self, request):
        '''
        Add new tags (array of tag names) to user's profile
        '''
        requested = request.data["tags"]
        if len(requested) == 0:
            return Response({"error": "tags were not included"}, status=400)
        profile = EngageUserProfile.objects.get(user=request.user)
        for name in requested:
            try:
                match = Tag.objects.filter(name__contains=name).first()
                if match is not None:
                    profile.tags.add(match)
            except:
                print("Could not add tag (" + name + ") to user (" + request.user.username +
                      ") since it doesn't exist in the ingest_tag table.")
        try:
            profile.save()
        except:
            return Response(status=500)
        return Response(status=200)

    @swagger_auto_schema(request_body=ModifyTagSerializer, responses={"404": "Not logged in, should be 401", "200": "OK, removed tags"})
    def delete(self, request):
        '''
        Delete array of existing tags from user
        '''
        requested = request.data["tags"]
        if len(requested) == 0:
            return Response({"error": "tags were not included"}, status=400)
        profile = EngageUserProfile.objects.get(user=request.user)
        for name in requested:
            match = Tag.objects.filter(name__contains=name).first()
            if match is not None:
                profile.tags.remove(match)
        try:
            profile.save()
        except:
            return Response(status=500)
        return Response(status=200)
class AddMessageView(APIView):
    '''Email message comments for either registered or non-registered users'''

    @swagger_auto_schema(request_body=AddMessageSerializer, responses={'404': "Either committee or ", '401': 'Recaptcha v2 was incorrect or', '400': 'Incorrect parameters', '201': 'OK, message added'})
    def post(self, request):
        '''Add a new message to list to be sent to city council.

        Anonymous senders must supply full identity fields and receive a
        verification email; logged-in, already-verified users skip the
        per-message verification step.
        '''
        now = datetime.now().timestamp()
        message_info = request.data
        if 'ag_item' not in message_info or 'committee' not in message_info or 'content' not in message_info \
                or 'token' not in message_info or 'pro' not in message_info:
            return Response(status=400, data={"error": "Missing or incorrect body parameters"})
        # BUG FIX: both error branches below referenced an undefined name
        # `data` (NameError), and .filter() returns a QuerySet that is never
        # None — .first() makes the checks real and yields a model instance
        # for the `committee=` FK instead of a QuerySet.
        committee = Committee.objects.filter(
            name__contains=message_info['committee']).first()
        if committee is None:
            return Response(data={"error": "Could not find committee matching:" + message_info['committee']}, status=404)
        # BUG FIX: .get() raised DoesNotExist (HTTP 500) for unknown items.
        agenda_item = AgendaItem.objects.filter(pk=message_info['ag_item']).first()
        if agenda_item is None:
            return Response(data={"error": "Could not find agenda item matching:" + str(message_info['ag_item'])}, status=404)
        content = message_info['content']
        pro = message_info['pro']
        if not verify_recaptcha(message_info['token']):
            return Response(status=401)
        # Defaults used when the sender is a logged-in user.
        first_name = None
        last_name = None
        zipcode = 90401
        ethnicity = None
        email = None
        user = None
        home_owner = False
        business_owner = False
        resident = False
        works = False
        school = False
        child_school = False
        # One-time code emailed to unverified senders; only the bcrypt hash
        # is stored. BUG FIX: the code was previously bytes, which urlencode
        # renders as "b'xxxxxxxx'" in the link — verification could never
        # match. Keep it a str; encode only for bcrypt.
        CODE_LENGTH = 8
        rand_begin = random.randint(0, 32 - CODE_LENGTH)
        authcode = str(uuid.uuid1()).replace(
            "-", "")[rand_begin:rand_begin + CODE_LENGTH]
        authcode_hashed = bcrypt.hashpw(
            authcode.encode('utf-8'), bcrypt.gensalt()).decode('utf-8')
        if isinstance(request.user, AnonymousUser):
            required = ('first_name', 'last_name', 'zipcode', 'email', 'home_owner',
                        'business_owner', 'resident', 'works', 'school', 'child_school')
            if any(key not in message_info or message_info[key] is None for key in required):
                return Response(status=400, data={"error": "Missing or incorrect body parameters"})
            first_name = message_info['first_name']
            last_name = message_info['last_name']
            zipcode = message_info['zipcode']
            email = message_info['email']
            home_owner = message_info['home_owner']
            business_owner = message_info['business_owner']
            resident = message_info['resident']
            works = message_info['works']
            school = message_info['school']
            child_school = message_info['child_school']
        else:
            user = request.user
            profile = EngageUserProfile.objects.get(user_id=request.user.id)
            home_owner = profile.home_owner
            business_owner = profile.business_owner
            resident = profile.resident
            works = profile.works
            school = profile.school
            child_school = profile.child_school
            if profile.authcode is None:
                # Account already verified: no per-message confirmation email.
                authcode_hashed = None
            # NOTE(review): `email` stays None for logged-in users, so an
            # unverified account would get its confirmation mail addressed to
            # None — confirm whether request.user.email should be used here.
        new_message = Message(agenda_item=agenda_item, user=user,
                              first_name=first_name, last_name=last_name,
                              zipcode=zipcode, email=email, ethnicity=ethnicity,
                              committee=committee, content=content, pro=pro, authcode=authcode_hashed,
                              date=now, sent=0, home_owner=home_owner, business_owner=business_owner,
                              resident=resident, works=works, school=school, child_school=child_school)
        new_message.save()
        if authcode_hashed is not None:
            query_parameters = urllib.parse.urlencode({
                "code": authcode,
                "email": email,
                "type": "email",
                "id": str(new_message.id)
            })
            query_string = 'https://engage-santa-monica.herokuapp.com/#/emailConfirmation?' + query_parameters
            content = '<h3>Thanks for voicing your opinion,</h3> Before we process your comment, please click <a href="' + \
                query_string + '">here</a> to authenticate.<br/><br/>If you create and authenticate an account you will never have to authenticate for messages again.<br/><br/> Thank you for your interest in your local government!<br/><br/> If you are receiving this in error, please email: <a href="mailto:<EMAIL>"><EMAIL></a>. '
            send_mail(
                {"user": {"email": email}, "subject": "Verify message regarding agenda item: " + agenda_item.agenda_item_id,
                 "content": content})
        # Default to unsent, will send on weekly basis all sent=0
        return Response(status=201)
def array_of_ordereddict_to_list_of_names(tags_ordereddict_array):
    """
    Serializers have a funny organization that isn't helpful in making further queries.
    Here we take the list of ordered dictionaries (id: x, name: y) and pull out the name only,
    returning them as a plain list of names (in the same order).
    """
    # IDIOM: direct comprehension replaces the index-based loop.
    return [entry["name"] for entry in tags_ordereddict_array]
|
# Copyright (c) Code Written and Tested by <NAME> in 21/02/2020, 17:27
from django.contrib.auth.models import User
from django.test import TestCase
from django.utils import timezone
from orders.models import OrderItemModel, Choice, OrderItemsGroupModel, OrderModel
from shops.models import ShopProfileModel, ProductModel, AddOnModel, OptionGroupModel, OptionModel
class TestOrderItemsGroup(TestCase):
    """Unittest for order items group model"""

    def setUp(self):
        """Create one shop (with owner account) and one empty order."""
        account = User.objects.create(username='username', password='password')
        shop_fields = dict(
            account=account, profile_photo='/orders/tests/sample.jpg',
            cover_photo='/orders/tests/sample.jpg', phone_number=123,
            description='text', shop_type='F', name='shop', slug='shop',
            currency='$', delivery_fee=0,
            opens_at=timezone.now() - timezone.timedelta(hours=2),
            closes_at=timezone.now() + timezone.timedelta(hours=2),
            time_to_prepare=20, vat=14)
        self.shop = ShopProfileModel.objects.create(**shop_fields)
        self.order = OrderModel.objects.create(final_price=0, subtotal=0, delivery_fee=0, vat=0)

    def test_str(self):
        """__str__ should be '<order pk>: <shop name>' only when both are set."""
        group = OrderItemsGroupModel.objects.create(order=self.order, shop=self.shop)
        self.assertEqual(str(group), '1: shop')
        empty_group = OrderItemsGroupModel.objects.create()
        self.assertNotEqual(str(empty_group), '1: shop')
class TestOrderItem(TestCase):
    """Unittest for order item model"""

    def setUp(self):
        """Create a shop, one product, two add-ons and two option groups."""
        account = User.objects.create(username='username', password='password')
        shop_fields = dict(
            account=account, profile_photo='/orders/tests/sample.jpg',
            cover_photo='/orders/tests/sample.jpg', phone_number=123,
            description='text', shop_type='F', name='shop', slug='shop',
            currency='$', delivery_fee=0,
            opens_at=timezone.now() - timezone.timedelta(hours=2),
            closes_at=timezone.now() + timezone.timedelta(hours=2),
            time_to_prepare=20, vat=14)
        shop = ShopProfileModel.objects.create(**shop_fields)
        self.product = ProductModel.objects.create(
            shop=shop, photo='/orders/tests/sample.jpg', title='product',
            slug='product', price=5, description='text')
        # Two add-ons with distinct prices, exercised by the price tests.
        self.addon1 = AddOnModel.objects.create(product=self.product, title='addon1', added_price=5)
        self.addon2 = AddOnModel.objects.create(product=self.product, title='addon2', added_price=11)
        # group1 changes the item price; group2 does not.
        self.option_group1 = OptionGroupModel.objects.create(
            product=self.product, title='group1', changes_price=True)
        self.option1 = OptionModel.objects.create(
            option_group=self.option_group1, title='option1', price=3.2)
        self.option2 = OptionModel.objects.create(
            option_group=self.option_group1, title='option2', price=5.7)
        self.option_group2 = OptionGroupModel.objects.create(
            product=self.product, title='group2', changes_price=False)
        self.option3 = OptionModel.objects.create(option_group=self.option_group2, title='option1')
        self.option4 = OptionModel.objects.create(option_group=self.option_group2, title='option2')

    def test_str(self):
        """__str__ should be the product title (and not match when product is null)."""
        blank_item = OrderItemModel.objects.create()
        self.assertNotEqual(str(blank_item), self.product.title)  # because product is null
        item = OrderItemModel.objects.create(product=self.product)
        self.assertEqual(str(item), self.product.title)

    def test_addons_total_price(self):
        """get_add_ons_price should track add/remove of add-ons."""
        item = OrderItemModel.objects.create(product=self.product)
        item.add_ons.add(self.addon1)
        self.assertEqual(item.get_add_ons_price(), 5)
        item.add_ons.add(self.addon2)
        self.assertEqual(item.get_add_ons_price(), 16)
        item.add_ons.remove(self.addon1)
        self.assertEqual(item.get_add_ons_price(), 11)

    def test_total_price(self):
        """get_item_price with price-changing and neutral option groups."""
        item = OrderItemModel.objects.create(product=self.product)
        Choice.objects.create(order_item=item, option_group=self.option_group1,
                              choosed_option=self.option1)
        Choice.objects.create(order_item=item, option_group=self.option_group2,
                              choosed_option=self.option3)
        item.add_ons.add(self.addon1)
        self.assertEqual(item.get_item_price(), 5 + 3.2)
        other = OrderItemModel.objects.create(product=self.product)
        Choice.objects.create(order_item=other, option_group=self.option_group2,
                              choosed_option=self.option3)
        other.add_ons.add(self.addon2)
        self.assertEqual(other.get_item_price(), 11 + 5)

    def test_vat(self):
        """calculate_vat applies the shop's VAT percentage to the item price."""
        item = OrderItemModel.objects.create(product=self.product)
        Choice.objects.create(order_item=item, option_group=self.option_group2,
                              choosed_option=self.option3)
        item.add_ons.add(self.addon2)
        # price without VAT = 16, shop's vat percentage = 14
        self.assertEqual(item.calculate_vat(), 2.24)
class TestOrderAddress(TestCase):
    """Unittest for orders address model"""

    def test_str(self):
        """test for string function

        this test is commented out because the
        function responsible for it in signals.py need internet connection.
        """
        # NOTE(review): kept disabled — the signal that builds the address
        # string reverse-geocodes the coordinates over the network; re-enable
        # once that lookup is mocked or stubbed.
        # address = OrderAddressModel.objects.create(area='area', type='A', street='street', building='building',
        #                                            location_longitude=30, location_latitude=30)
        # # Egypt, Matrouh generated by signals by geo-reversing location coordinates (30, 30)
        # self.assertEqual(address.__str__(), 'Egypt, Matrouh, area, street, building')
class TestOrderItemChoice(TestCase):
    """Unittest for order item choice"""

    def setUp(self):
        """Create a shop, a product and one price-changing option group."""
        account = User.objects.create(username='username', password='password')
        shop_fields = dict(
            account=account, profile_photo='/orders/tests/sample.jpg',
            cover_photo='/orders/tests/sample.jpg', phone_number=123,
            description='text', shop_type='F', name='shop', slug='shop',
            currency='$', delivery_fee=0,
            opens_at=timezone.now() - timezone.timedelta(hours=2),
            closes_at=timezone.now() + timezone.timedelta(hours=2),
            time_to_prepare=20, vat=14)
        shop = ShopProfileModel.objects.create(**shop_fields)
        product = ProductModel.objects.create(
            shop=shop, photo='/orders/tests/sample.jpg', title='product',
            slug='product', price=5, description='text')
        self.option_group1 = OptionGroupModel.objects.create(
            product=product, title='group1', changes_price=True)
        self.option1 = OptionModel.objects.create(
            option_group=self.option_group1, title='option1', price=3.2)

    def test_str(self):
        """__str__ should be '<group title>: <option title>'."""
        choice = Choice.objects.create(option_group=self.option_group1, choosed_option=self.option1)
        self.assertEqual(str(choice), 'group1: option1')
        blank_choice = Choice.objects.create()
        self.assertNotEqual(str(blank_choice), 'group1: option1')
|
# -*- coding: UTF-8 -*-
##############################################################################
# #
# Copyright (c) 2007-2010 <NAME> <<EMAIL>> #
# #
# Translation file for TorChat #
# #
##############################################################################
# Language metadata consumed by TorChat's translation loader.
LANGUAGE_CODE = u"fr"
LANGUAGE_NAME = u"Français"
LANGUAGE_NAME_ENGLISH = u"French"
TRANSLATOR_NAMES = [u"vitisch", u"<NAME>"]

# buttons
BTN_CANCEL = u"Annuler"
BTN_OK = u"Ok"
# NOTE(review): still the English source string — not yet translated.
BTN_SAVE_AS = u"Save as..."
BTN_CLOSE = u"Fermer"

# status labels
ST_AVAILABLE = u"Disponible"
ST_AWAY = u"Absent"
ST_EXTENDED_AWAY = u"Absent pour longtemps"
ST_OFFLINE = u"Déconnecté"

# taskbar menu
MTB_SHOW_HIDE_TORCHAT = u"Montrer/Cacher TorChat"
MTB_QUIT = u"Arrêtez"

# popup menu
MPOP_CHAT = u"Chat..."
MPOP_SEND_FILE = u"Envoyer un fichier..."
MPOP_EDIT_CONTACT = u"Rediger contact..."
MPOP_DELETE_CONTACT = u"Supprimer contact..."
MPOP_SHOW_OFFLINE_MESSAGES = u"Montrer les messages hors-ligne"
MPOP_CLEAR_OFFLINE_MESSAGES = u"Effacer les messages hors-ligne"
MPOP_ACTIVATE_LOG = u"Activer le fichier d'archivage"
MPOP_STOP_LOG = u"Désactiver l'archivage"
MPOP_DELETE_EXISTING_LOG = u"Supprimer le fichier d'archivage"
MPOP_DELETE_AND_STOP_LOG = u"Cesser d'archiver et supprimer le fichier"
MPOP_ADD_CONTACT = u"Ajouter un contact..."
MPOP_ABOUT = u"À propos..."
MPOP_ASK_AUTHOR = u"Demandez %s..."
MPOP_SETTINGS = u"Paramètres..."
MPOP_EDIT_MY_PROFILE = u"Modifier mon profil..."

# chat window popup menu
CPOP_COPY = u"Copier"

# confirm-delete message box
D_CONFIRM_DELETE_TITLE = u"Confirmez la supression"
D_CONFIRM_DELETE_MESSAGE = u"Êtes-vous sûr de vouloir supprimer le contact?\n(%s %s)"

# warning shown while logging to file is active
D_LOG_WARNING_TITLE = u"TorChat: Archivage est actif"
D_LOG_WARNING_MESSAGE = u"L'archivage au fichier est activé!!\n\nFicher d'archivage: %s\n\nRappelez-vous de supprimer le ficher d'archivage si vous avez fini la correction parce que le ficher d'archivage peut contenir l'information sensible."

# warning about an already-used listen port
D_WARN_USED_PORT_TITLE = u"TorChat: Port déjà occupé"
D_WARN_USED_PORT_MESSAGE = u"Quelque chose, probablement une autre instance de TorChat, écoute déjà à %s:%s. Vous devez créer un autre profil qui utilise des autres ports pour pouvoir commencer TorChat une autre fois."

# warning about unread messages on exit
D_WARN_UNREAD_TITLE = u"TorChat: Messages non lus"
D_WARN_UNREAD_MESSAGE = u"Il y a des messages non lus.\nIls seront perdus pour toujours!\n\nVoulez-vous vraiment sortir maintenant?"

# warning about an offline buddy
D_WARN_BUDDY_OFFLINE_TITLE = u"TorChat: Copain hors ligne"
D_WARN_BUDDY_OFFLINE_MESSAGE = u"Cette opération n'est pas possible quand le copain est hors ligne"

# warning about sending multiple files at once
D_WARN_FILE_ONLY_ONE_TITLE = u"TorChat: Plusieurs fichiers"
D_WARN_FILE_ONLY_ONE_MESSAGE = u"On ne peut pas transférer plusieurs fichiers en une seule opération. Commencez les transferts individualement ou envoyez un fichier zip ou tar"

# warning about a file save error
D_WARN_FILE_SAVE_ERROR_TITLE = u"TorChat: Erreur sauvant fichier"
D_WARN_FILE_SAVE_ERROR_MESSAGE = u"Le fichier '%s' ne peut pas être créé.\n\n%s"

# warning when the target file already exists
D_WARN_FILE_ALREADY_EXISTS_TITLE = u"TorChat: Fichier existe"
D_WARN_FILE_ALREADY_EXISTS_MESSAGE = u"Le fichier '%s' existe déjà.\nSurécrire?"

# dialog: add/edit contact
DEC_TITLE_ADD = u"Ajouter un nouveau contact"
DEC_TITLE_EDIT = u"Modifier le contact"
DEC_TORCHAT_ID = u"TorChat ID"
DEC_DISPLAY_NAME = u"Nom d'utilisateur"
DEC_INTRODUCTION = u"Introduction"
DEC_MSG_16_CHARACTERS = u"L'adresse doit avoir 16 caractères, pas %i."
DEC_MSG_ONLY_ALPANUM = u"L'adresse doit seulement contenir des nombres et des lettres minuscule."
DEC_MSG_ALREADY_ON_LIST = u"%s est déjà sur votre liste."

# dialog: edit my profile
DEP_TITLE = u"Modifier mon profil"
DEP_NAME = u"Nom"
DEP_TEXT = u"Texte"
# NOTE(review): the two entries below are still untranslated placeholders.
# DEP_SET_AVATAR = u"Set Avatar"
# DEP_REMOVE_AVATAR = u"Remove Avatar"
DEP_AVATAR_SELECT_PNG = u"Sélectionner fichier .PNG pour votre avatar (agrandi ou réduit à 64*64, peut contenir transparence)"
DEP_PNG_FILES = u"Fichiers PNG"
DEP_ALL_FILES = u"Tous fichiers"
DEP_WARN_TITLE = u"Sélection d'avatar impossible"
DEP_WARN_IS_ALREADY = u"C'est déjá l'avatar actuel"
DEP_WARN_MUST_BE_PNG = u"L'avatar doit être un fichier .png"

# file transfer window
DFT_FILE_OPEN_TITLE = u"Envoyer fichier à %s"
DFT_FILE_SAVE_TITLE = u"Sauver fichier de %s"
DFT_SEND = u"Envoyer %s\nà %s\n%04.1f%% (%i de %i octets)"
DFT_RECEIVE = u"Recevoir %s\nde %s\n%04.1f%% (%i de %i octets)"
DFT_WAITING = u"attendant connexion"
DFT_STARTING = u"commençant transfert"
DFT_ABORTED = u"transfert avorté"
DFT_COMPLETE = u"transfert complet"
DFT_ERROR = u"erreur"

# settings dialog
DSET_TITLE = u"Configuration de TorChat"
DSET_NET_TITLE = u"Réseau"
DSET_NET_ACTIVE = u"actif"
DSET_NET_INACTIVE = u"inactif"
DSET_NET_TOR_ADDRESS = u"Adresse de procuration pour Tor"
DSET_NET_TOR_SOCKS = u"Port de SOCKS"
DSET_NET_TOR_CONTROL = u"Port de commande"
DSET_NET_OWN_HOSTNAME = u"Mon TorChat ID"
DSET_NET_LISTEN_INTERFACE = u"Interface d'écouter"
DSET_NET_LISTEN_PORT = u"Port d'écouter"
DSET_GUI_TITLE = u"Interface d'utilisateur"
DSET_GUI_LANGUAGE = u"Langue"
DSET_GUI_OPEN_MAIN_HIDDEN = u"Commencer avec fenêtre principale minimalisée"
DSET_GUI_OPEN_CHAT_HIDDEN = u"Ne pas ouvrir automatiquement des nouvelles fenêtres"
DSET_GUI_NOTIFICATION_POPUP = u"Notification surgissante"
# NOTE(review): untranslated placeholder.
# DSET_GUI_NOTIFICATION_METHOD = u"Notification method"
DSET_GUI_FLASH_WINDOW = u"Clignoter titre de fenêtre à un nouveau message"
DSET_MISC_TITLE = u"Misc"
DSET_MISC_TEMP_IN_DATA = u"Cacher fichiers temporaires dans le directoir de données"
DSET_MISC_TEMP_CUSTOM_DIR = u"Directoire temporaire (laissez vide pour défaut de SE)"

# notices in the chat window (those in square brackets)
NOTICE_DELAYED_MSG_WAITING = u"messages retardés attendant pour être envoyé"
NOTICE_DELAYED_MSG_SENT = u"messages retardés ont été envoyés"
NOTICE_DELAYED = u"retardé"

# messagebox for offline messages
MSG_OFFLINE_TITLE = u"TorChat: messages en queue"
MSG_OFFLINE_EMPTY = u"pas de messages en queue pour %s"
MSG_OFFLINE_QUEUED = u"messages en queue pour %s hors ligne:\n\n%s"

# buddy list mouse-hover popup
BPOP_BUDDY_IS_OFFLINE = u"Copain est hors ligne"
BPOP_CONNECTED_AWAITING_RETURN_CONN = u"Connexion aller, attendant connexion retour..."
BPOP_CLIENT_SOFTWARE = u"Client: %s %s"

# logging of conversations to file
LOG_HEADER = u"Ce fichier d'archive n'es pas signé et n'a pas de cogence de preuve."
LOG_STARTED = u"Commence à archiver"
LOG_STOPPED = u"Cesse d'archiver"
LOG_DELETED = u"Supprime les fichiers d'archive"
LOG_IS_ACTIVATED = u"Active l'archive à fichier:\n%s"
LOG_IS_STOPPED_OLD_LOG_FOUND = u"Désactive l'archive mais le fichier existe encore:\n%s"

# about box
ABOUT_TITLE = u"À propos de TorChat"
ABOUT_TEXT = u"""TorChat %(version)s (svn: r%(svn)s)
%(copyright)s
Traductions:
%(translators)s
Environnement de marche:
Python: %(python)s
wx: %(wx)s
TorChat est un logiciel libre: vous pouvez le redistribuer et/ou \
modifier sous les termes de la GNU General Public \
License publié par la Free Software Foundation, \
soit version 3 de la License, ou (à votre option) \
une version postérieure.
TorChat est distribué en espérant qu'il soit utile, \
mais SANS AUCUNE GARANTIE; ni même la garantie \
implicite de MARCHANTABILITÉ or APTITUDE À PROPOS PARTICULIER. \
Voir la GNU General Public License pour plus de détails.
*
Et maintenant, une chose complètement différente:
Si par chance vous gérez une compagnie de logiciel près de Hannover, Allemagne et \
avez besoin d'un codeur, sentez-vous libre de considérer ce petit programme \
comme mes documents d'application et m'envoyer un courriel avec votre réponse.
""" |
import os
import cv2
import glob
import json
from tqdm import tqdm
import numpy as np
from pathlib import Path
from PIL import Image
import torch
from torch.utils.data import Dataset
from lib.utils.general import xyxy2xywh, xywh2xyxy
def create_dataloader(image_path, imgsz, batch_size, hyp=None, augment=False, workers=8):
    """Build a shuffling DataLoader over a directory of images.

    Args:
        image_path: directory containing the images.
        imgsz: target (letterboxed) image size.
        batch_size: requested batch size, clamped to the dataset length.
        hyp: augmentation hyperparameters forwarded to the dataset.
        augment: whether the dataset should apply augmentation.
        workers: upper bound on DataLoader worker processes.

    Returns:
        (dataloader, dataset) tuple.
    """
    # BUG FIX: hyp and augment were accepted but never forwarded, so the
    # dataset silently ignored them.
    dataset = LoadImagesAndLabels(image_path, imgsz, augment=augment, hyp=hyp)
    batch_size = min(batch_size, len(dataset))
    nw = min([batch_size if batch_size > 1 else 0, workers])  # number of workers
    dataloader = torch.utils.data.DataLoader(dataset,
                                             batch_size=batch_size,
                                             num_workers=nw,
                                             shuffle=True,
                                             pin_memory=True,
                                             collate_fn=LoadImagesAndLabels.collate_fn)
    return dataloader, dataset
class LoadImagesAndLabels(Dataset): # for training/testing
def __init__(self, path, img_size=640, augment=False, hyp=None, cache_images=False):
if os.path.isdir(path):
self.img_files = sorted(glob.glob(path + os.sep + '*.*'))
else:
raise Exception('%s does not exit' % path)
n = len(self.img_files)
assert n > 0, 'No images found in %s' % path
self.n = n # number of images
self.img_size = img_size
self.augment = augment
self.hyp = hyp
self.mosaic = self.augment # load 4 images at a time into a mosaic (only during training)
# Define labels
self.label_files = [x.replace('images', 'labels').replace(os.path.splitext(x)[-1], '.txt') for x in
self.img_files]
# Check cache
cache_path = str(Path(self.label_files[0]).parent) + '.cache' # cached labels
if os.path.isfile(cache_path):
cache = torch.load(cache_path) # load
if cache['hash'] != get_hash(self.label_files + self.img_files): # dataset changed
cache = self.cache_labels(cache_path) # re-cache
else:
cache = self.cache_labels(cache_path) # cache
# Get labels
labels, shapes = zip(*[cache[x] for x in self.img_files])
self.shapes = np.array(shapes, dtype=np.float64)
self.labels = list(labels)
# Cache labels
nm, nf, ne, ns, nd = 0, 0, 0, 0, 0 # number missing, found, empty, datasubset, duplicate
pbar = tqdm(enumerate(self.label_files))
for i, file in pbar:
l = self.labels[i] # label
if l is not None and l.shape[0]:
assert l.shape[1] == 5, '> 5 label columns: %s' % file
assert (l >= 0).all(), 'negative labels: %s' % file
# assert (l[:, 1:] <= 1).all(), 'non-normalized or out of bounds coordinate labels: %s' % file
if np.unique(l, axis=0).shape[0] < l.shape[0]: # duplicate rows
nd += 1 # print('WARNING: duplicate rows in %s' % self.label_files[i]) # duplicate rows
self.labels[i] = l
nf += 1 # file found
else:
ne += 1 # print('empty labels for image %s' % self.img_files[i]) # file empty
# os.system("rm '%s' '%s'" % (self.img_files[i], self.label_files[i])) # remove
pbar.desc = 'Scanning labels %s (%g found, %g missing, %g empty, %g duplicate, for %g images)' % (
cache_path, nf, nm, ne, nd, n)
if nf == 0:
s = 'WARNING: No labels found in %s' % (os.path.dirname(file) + os.sep)
print(s)
assert not augment, '%s. Can not train without labels.' % s
# Cache images into memory for faster training (WARNING: large datasets may exceed system RAM)
self.imgs = [None] * n
if cache_images:
gb = 0 # Gigabytes of cached images
pbar = tqdm(range(len(self.img_files)), desc='Caching images')
self.img_hw0, self.img_hw = [None] * n, [None] * n
for i in pbar: # max 10k images
self.imgs[i], self.img_hw0[i], self.img_hw[i] = load_image(self, i) # img, hw_original, hw_resized
gb += self.imgs[i].nbytes
pbar.desc = 'Caching images (%.1fGB)' % (gb / 1E9)
def cache_labels(self, path='labels.cache'):
# Cache dataset labels, check images and read shapes
x = {} # dict
pbar = tqdm(zip(self.img_files, self.label_files), desc='Scanning images', total=len(self.img_files))
for (img, label) in pbar:
try:
l = []
image = Image.open(img)
image.verify() # PIL verify
# _ = io.imread(img) # skimage verify (from skimage import io)
shape = image.size # image size (width, height)
assert (shape[0] > 9) & (shape[1] > 9), 'image size <10 pixels'
if os.path.isfile(label):
with open(label, 'r') as f:
l = np.array([x.split() for x in f.read().splitlines()], dtype=np.float32) # labels
# content = json.load(f)
# for sample in content['shapes']:
# x1, y1 = sample['points'][0]
# x2, y2 = sample['points'][1]
# if sample['group_id'] is None:
# cls = 0
# else:
# cls = int(sample['group_id'])
# l.append([cls, x1, y1, x2, y2])
# l = np.array(l, dtype=np.float32) # labels
if len(l) == 0:
l = np.zeros((0, 5), dtype=np.float32)
x[img] = [l, shape]
except Exception as e:
x[img] = [None, None]
print('WARNING: %s: %s' % (img, e))
x['hash'] = get_hash(self.label_files + self.img_files)
torch.save(x, path) # save for next time
return x
def __len__(self):
return len(self.img_files)
def __getitem__(self, index):
    """Load one sample: a letterboxed CHW RGB image tensor and its labels.

    Returns ``(img, labels)`` where ``img`` is a contiguous ``torch`` tensor
    (channels-first, RGB) and ``labels`` is an (n, 5) array of
    ``[cls, x, y, w, h]`` rows normalised to the letterboxed image size, or
    an empty list when the sample has no labels.
    """
    hyp = self.hyp  # hyperparameters (unused in this path; kept for parity)
    if self.mosaic:
        # load mosaic
        # NOTE(review): load_mosaic() is a stub returning 0, so this unpack
        # would raise — confirm mosaic is disabled in practice.
        img, labels = load_mosaic()
    else:
        # Load image
        img, (h0, w0), (h, w) = load_image(self, index)
        print('aaa', h0, w0, h, w)  # NOTE(review): leftover debug print
        # Letterbox
        shape = self.img_size  # final letterboxed shape
        img, ratio, pad = letterbox(img, shape, auto=False, scaleup=self.augment)
        print(ratio, pad)  # NOTE(review): leftover debug print
        shapes = (h0, w0), ((h / h0, w / w0), pad)  # for COCO mAP rescaling
        # Load labels
        labels = []
        x = self.labels[index]
        if x.size > 0:
            # Normalized xywh to pixel xyxy format, accounting for resize + pad.
            labels = x.copy()
            labels[:, 1] = ratio[0] * w * (x[:, 1] - x[:, 3] / 2) + pad[0]  # pad width
            labels[:, 2] = ratio[1] * h * (x[:, 2] - x[:, 4] / 2) + pad[1]  # pad height
            labels[:, 3] = ratio[0] * w * (x[:, 1] + x[:, 3] / 2) + pad[0]
            labels[:, 4] = ratio[1] * h * (x[:, 2] + x[:, 4] / 2) + pad[1]
    nL = len(labels)  # number of labels
    if nL:
        # Pixel xyxy back to xywh, re-normalised by the final image size.
        labels[:, 1:5] = xyxy2xywh(labels[:, 1:5])  # convert xyxy to xywh
        labels[:, [2, 4]] /= img.shape[0]  # normalized height 0-1
        labels[:, [1, 3]] /= img.shape[1]  # normalized width 0-1
    # Convert
    img = img[:, :, ::-1].transpose(2, 0, 1)  # BGR to RGB, to 3x416x416
    img = np.ascontiguousarray(img)
    return torch.from_numpy(img), labels
@staticmethod
def collate_fn(batch):
img, label = zip(*batch)
ls = []
for l in label:
ls.append(l)
return torch.stack(img, 0), label
# Ancillary functions --------------------------------------------------------------------------------------------------
def load_image(self, index):
    """Fetch one image by dataset index.

    Returns ``(img, (h0, w0), (h, w))`` — the image, its original
    height/width, and its height/width after resizing.
    """
    cached = self.imgs[index]
    if cached is not None:
        # Fast path: image was pre-cached, shapes were recorded alongside it.
        return cached, self.img_hw0[index], self.img_hw[index]

    path = self.img_files[index]
    img = cv2.imread(path)  # BGR
    assert img is not None, 'Image Not Found ' + path
    h0, w0 = img.shape[:2]  # original height/width
    scale = self.img_size / max(h0, w0)  # fit the longer side to img_size
    if scale != 1:
        # Shrinking without augmentation uses INTER_AREA for quality;
        # everything else uses INTER_LINEAR.
        interp = cv2.INTER_AREA if scale < 1 and not self.augment else cv2.INTER_LINEAR
        img = cv2.resize(img, (int(w0 * scale), int(h0 * scale)), interpolation=interp)
    return img, (h0, w0), img.shape[:2]
def load_mosaic():
    # Placeholder stub: mosaic augmentation is not implemented.
    # NOTE(review): __getitem__ unpacks this call as `img, labels`, which
    # would fail on the scalar 0 — confirm mosaic stays disabled.
    return 0
def letterbox(img, new_shape=(640, 640), color=(114, 114, 114), auto=True, scaleFill=False, scaleup=True):
    """Resize `img` to fit `new_shape`, preserving aspect ratio via padding.

    Resize image to a 32-pixel-multiple rectangle,
    see https://github.com/ultralytics/yolov3/issues/232

    Returns ``(padded_img, (w_ratio, h_ratio), (dw, dh))`` where dw/dh are
    the padding added on each side (half of the total).
    """
    h, w = img.shape[:2]  # current (height, width)
    if isinstance(new_shape, int):
        new_shape = (new_shape, new_shape)

    # Scale factor (new / old); optionally never upscale for better test mAP.
    r = min(new_shape[0] / h, new_shape[1] / w)
    if not scaleup:
        r = min(r, 1.0)

    ratio = (r, r)  # width, height ratios
    new_unpad = (int(round(w * r)), int(round(h * r)))
    dw = new_shape[1] - new_unpad[0]  # total width padding
    dh = new_shape[0] - new_unpad[1]  # total height padding
    if auto:
        # Minimum rectangle: pad only up to the next multiple of 64.
        dw, dh = np.mod(dw, 64), np.mod(dh, 64)
    elif scaleFill:
        # Stretch to fill exactly, no padding at all.
        dw, dh = 0.0, 0.0
        new_unpad = (new_shape[1], new_shape[0])
        ratio = (new_shape[1] / w, new_shape[0] / h)

    # Split padding evenly between the two sides.
    dw /= 2
    dh /= 2

    if (w, h) != new_unpad:
        img = cv2.resize(img, new_unpad, interpolation=cv2.INTER_LINEAR)
    top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1))
    left, right = int(round(dw - 0.1)), int(round(dw + 0.1))
    img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color)  # add border
    return img, ratio, (dw, dh)
def get_hash(files):
    """Return an aggregate fingerprint for `files`: the sum of the sizes of
    those paths that exist as regular files (missing paths contribute 0)."""
    total = 0
    for f in files:
        if os.path.isfile(f):
            total += os.path.getsize(f)
    return total
def vis_image(img, anchors, img_name=None, path='./vis'):
    """Draw each box in `anchors` (x1, y1, x2, y2) in red on a copy of `img`
    and write the result to `path`/`img_name`."""
    canvas = img.copy()
    os.makedirs(path, exist_ok=True)  # no error if the directory already exists
    for box in anchors:
        pt1 = (int(box[0]), int(box[1]))
        pt2 = (int(box[2]), int(box[3]))
        canvas = cv2.rectangle(canvas, pt1=pt1, pt2=pt2, color=(0, 0, 255), thickness=1)
    cv2.imwrite('{}/{}'.format(path, img_name), canvas)
if __name__ == '__main__':
    # Smoke test: load the training set, print its metadata, and dump a
    # visualisation of every sample's boxes to ./vis.
    # BUG FIX: removed a stray no-op expression statement (`label`) and the
    # dead commented-out manual drawing code that vis_image() superseded.
    train_path = '../../data/detection/images/train'
    dataset = LoadImagesAndLabels(path=train_path)
    print(dataset.labels, '\n', dataset.shapes)
    for i in range(len(dataset)):
        image, label = dataset[i]
        image_name = dataset.img_files[i]
        print(image_name.split('/'))
        print('image shape: ', image.shape, image_name)
        img = image.numpy().transpose(1, 2, 0)[:, :, ::-1]  # CHW RGB -> HWC BGR
        print(img.shape)
        print(label[:, 1:5])
        # De-normalise xywh back to pixel units for drawing.
        label[:, [2, 4]] *= img.shape[0]  # normalized height 0-1
        label[:, [1, 3]] *= img.shape[1]  # normalized width 0-1
        print(label[:, 1:5])
        vis_image(img, label[:, 1:5], img_name=image_name.split('/')[-1])
|
<filename>automatic_search.py
import requests, bs4, time, os
from openpyxl import Workbook
from tqdm import tqdm
def main():
    """Entry point: show the banner, collect searches interactively, write
    the XLSX report, then pause so the console window stays open."""
    header()
    WorkSheetXLSX(getResearchesList()).sheet()
    print("\nFeito!\n")
    os.system("PAUSE")
def header():
    """Print the program banner."""
    divider = "=" * 50
    print(divider)
    print(" GERADOR DE RELATÓRIO DE PESQUISA ACADÊMICA")
    print(divider)
    print()
    print("@autor: <NAME>")
    print("GitHub: https://github.com/Robsonmxms\n")
def getResearchesList():
    """Keep asking the user for searches, collecting each successful result,
    until they answer anything other than '1'."""
    collected = []
    choice = '1'
    while choice == '1':
        choice = getChoiceValue()
        result = getResearchList(choice)
        if result:  # skip the falsy empty list returned on '0'/error
            collected.append(result)
    return collected
def getChoiceValue():
    """Ask the user whether to run another search; return the raw answer."""
    time.sleep(0.5)
    rule = "-" * 50
    print(rule)
    print("\nDeseja fazer uma nova busca?")
    print("[0] NÃO")
    print("[1] SIM\n")
    answer = input()
    print()
    print(rule)
    return answer
def getResearchList(choiceValue):
    """Dispatch on the user's answer: '1' runs a search, '0' finishes,
    anything else is reported as an error.  Non-'1' answers yield []."""
    if choiceValue == '1':
        return Research.research()
    if choiceValue == '0':
        print("Pesquisas realizadas!\n")
    else:
        print("ERROR\n")
    return []
class Research:
    """A completed search: the queried phrase plus the reference URLs found."""

    def __init__(self, title, refUrlsList):
        self.title = title              # phrase that was searched
        self.refUrlsList = refUrlsList  # de-duplicated result URLs

    def getTitle(self):
        return self.title

    def getRefUrlsList(self):
        # BUG FIX: previously returned self.title instead of the URL list.
        return self.refUrlsList

    def getUrl(self):
        # Kept for backward compatibility: despite the name it has always
        # returned the URL list, and continues to do so.
        return self.refUrlsList

    def setTitle(self, title):
        self.title = title

    def setRefUrlsList(self, refUrlsList):
        self.refUrlsList = refUrlsList

    @staticmethod
    def research():
        """Run one interactive Google Scholar search and wrap the result."""
        scholarResearch = ScholarResearch.getScholarResearch()
        refUrlsList = scholarResearch.getRefUrlsList()
        research = Research(scholarResearch.title, refUrlsList)
        print("\nPesquisa concluída!")
        return research
class ScholarResearch:
    """One Google Scholar query: the searched phrase and its results-page URL.

    ``getScholarResearch`` builds an instance interactively from stdin and
    ``getRefUrlsList`` downloads the results page and extracts every link.
    """

    def __init__(self, title, url):
        self.title = title  # phrase typed by the user
        self.url = url      # full Scholar search URL for that phrase

    def getTitle(self):
        return self.title

    def getUrl(self):
        return self.url

    def setTitle(self,title):
        self.title = title

    def setUrl(self,url):
        self.url = url

    def getScholarResearch():
        # Interactively build a ScholarResearch from a phrase typed by the
        # user.  Called as ScholarResearch.getScholarResearch(); note it
        # deliberately takes no self.
        print("\nDigite algo a ser pesquisado no Google Acadêmico: ")
        title = input()
        # NOTE(review): "filetype = pdf" (with spaces) is not the Scholar
        # filetype operator — confirm whether "filetype:pdf" was intended.
        searchPhrase = title + " filetype = pdf"
        print("\nPesquisando " + title + "...")
        scholar = 'https://scholar.google.com.br/scholar?hl=pt-BR&as_sdt=0%2C5&q='
        url = scholar + searchPhrase
        scholarResearch = ScholarResearch(title, url)
        return scholarResearch

    def getRefUrlsList(self):
        # Download the results page and return its unique outgoing links.
        # Note: set() de-duplicates but discards the original ordering.
        refUrlsList = self.__getConcatenateRefUrlsList()
        refUrlsList = list(set(refUrlsList))
        return refUrlsList

    def __getConcatenateRefUrlsList(self):
        # Prefix relative hrefs (Scholar-internal links) with the site root.
        def isGoogleSearch(refUrl):
            # Relative links don't start with "http".
            return refUrl[:4] != "http"
        concatenateRefUrlsList = self.__getNoConcatenateRefUrlsList()
        scholarUrl = 'https://scholar.google.com.br'
        for i in range(len(concatenateRefUrlsList)):
            if isGoogleSearch(concatenateRefUrlsList[i]):
                concatenateRefUrlsList[i] = scholarUrl + concatenateRefUrlsList[i]
        return concatenateRefUrlsList

    def __getNoConcatenateRefUrlsList(self):
        # Collect the raw href of every <a> tag on the page.
        urlsList = self.__getSoup().find_all("a")
        noConcatenateRefUrlsList = list()
        for i in range(len(urlsList)):
            noConcatenateRefUrlsList.append(urlsList[i].get("href"))
        return noConcatenateRefUrlsList

    def __getSoup(self):
        # Fetch the page and parse it with lxml.
        res = requests.get(self.url)
        # Check for errors, stopping execution if the request failed.
        res.raise_for_status()
        soup = bs4.BeautifulSoup(res.text, features="lxml")
        return soup
class WorkSheetXLSX:
    """Writes the collected researches into 'relatorio.xlsx', one column each."""

    def __init__(self,researchesList):
        self.researchesList = researchesList  # list of Research objects

    def getResearchesList(self):
        return self.researchesList

    def setTitle(self,researchesList):
        # NOTE(review): misnamed — this sets researchesList, not a title.
        self.researchesList = researchesList

    def sheet(self):
        # Build the workbook and save it.  `ws` is deliberately global
        # because Cell.cell() (below in this file) writes into it directly.
        global ws
        workSheet = Workbook()
        ws = workSheet.active
        ws.title = 'Pesquisas'
        self.__getTable()
        workSheet.save("relatorio.xlsx")

    def __getTable(self):
        # Fill one column per research, with a progress bar over the list.
        pbar = tqdm(range(1,len(self.researchesList)+1))
        for indexColumn in pbar:
            self.__tabulating(pbar,indexColumn)

    def __tabulating(self,pbar,indexColumn):
        # Fill worksheet column `indexColumn` (1-based) with one research.
        research = self.researchesList[indexColumn-1]
        size = len(research.refUrlsList)
        Column(indexColumn,size,research).rows()
        time.sleep(0.5)  # slow down so the progress bar is visible
        pbar.set_description("Gerando relatório")
class Column:
    """One spreadsheet column: its 1-based index, its row count, and the
    Research whose title and links it displays."""

    def __init__(self, indexColumn, size, research):
        self.indexColumn = indexColumn
        self.size = size
        self.research = research

    def getIndexColumn(self):
        return self.indexColumn

    def getSize(self):
        return self.size

    def getResearch(self):
        return self.research

    def setIndexColumn(self, indexColumn):
        self.indexColumn = indexColumn

    def setSize(self, size):
        self.size = size

    def setResearch(self, research):
        self.research = research

    def rows(self):
        """Emit one Cell per row of this column (row 1 is the header)."""
        for rowIndex in range(1, self.size + 1):
            Cell(rowIndex, Column(self.indexColumn, self.size, self.research)).cell()
class Cell:
    """Writes one worksheet cell: row 1 is the research title, later rows
    are hyperlinks.  Writes into the module-global worksheet ``ws`` that
    WorkSheetXLSX.sheet() sets up."""

    def __init__(self, row, aColumn):
        self.row = row          # 1-based row index within the column
        self.aColumn = aColumn  # Column carrying index and Research data

    def getRow(self):
        return self.row

    def getAColumn(self):
        return self.aColumn

    def setRow(self,row):
        self.row = row

    def setAColumn(self,aColumn):
        self.aColumn = aColumn

    def cell(self):
        # Row 1 holds the research title; every other row holds a hyperlink.
        if self.row == 1:
            self.__doIfMenuHeader()
        else:
            self.__doIfNotMenuHeader()

    def __doIfMenuHeader(self):
        # Write the research title into the header cell.
        ws.cell(
            row = self.row,
            column = self.aColumn.indexColumn,
            value = self.aColumn.research.title
        )

    def __doIfNotMenuHeader(self):
        # NOTE(review): rows run 1..size with row 1 as header, so row-1
        # ranges over 1..size-1 and refUrlsList[0] is never written —
        # looks like an off-by-one; confirm whether that is intended.
        url = self.aColumn.research.refUrlsList[self.row-1]
        ws.cell(
            row = self.row,
            column = self.aColumn.indexColumn,
            value = self.aColumn.research
            .title+'_link'+str(self.row-1)
        ).hyperlink = url
# Script entry point: run the interactive report generator.
if __name__ == "__main__":
    main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.