| code (string, lengths 22–1.05M) | apis (list, lengths 1–3.31k) | extract_api (string, lengths 75–3.25M) |
|---|---|---|
# signals (events) and slots (methods that handle those events)
from PySide6.QtWidgets import QApplication, QMainWindow, QPushButton, QWidget, QVBoxLayout, QHBoxLayout, QLabel, QLineEdit, QMessageBox
from PySide6.QtCore import QSize
import sys


class VentanaPrincipal(QMainWindow):
    def __init__(self):
        super().__init__()
        self.setWindowTitle("Signals y Slots")
        # button
        boton = QPushButton("Click Aqui")
        # make the button checkable; the checked state defaults to False
        boton.setCheckable(True)
        # connect another slot to the checked event
        boton.clicked.connect(self._evento_chequeado)
        # connect the clicked signal to the slot _evento_click (a method)
        boton.clicked.connect(self._evento_click)
        # publish/show the button
        self.setCentralWidget(boton)

    def _evento_click(self):
        # an alert message could be shown here:
        # QMessageBox.information(self, "Evento", "Se ha hecho click")
        # read the button state to know whether it is checked or not
        print("evento click: ", self.boton_checado)
        print("evento click")

    def _evento_chequeado(self, chequeado):
        # an alert message could be shown here:
        # QMessageBox.information(self, "Evento", "Se ha chequeado")
        self.boton_checado = chequeado
        print("evento chequeado: ", self.boton_checado)
if __name__ == '__main__':
    app = QApplication(sys.argv)
    ventana = VentanaPrincipal()
    ventana.show()
    # app.exec() is the PySide6 spelling; app.exec_() is the legacy alias
    sys.exit(app.exec()) | [
"PySide6.QtWidgets.QApplication",
"PySide6.QtWidgets.QPushButton"
] | [((1467, 1489), 'PySide6.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (1479, 1489), False, 'from PySide6.QtWidgets import QApplication, QMainWindow, QPushButton, QWidget, QVBoxLayout, QHBoxLayout, QLabel, QLineEdit, QMessageBox\n'), ((412, 437), 'PySide6.QtWidgets.QPushButton', 'QPushButton', (['"""Click Aqui"""'], {}), "('Click Aqui')\n", (423, 437), False, 'from PySide6.QtWidgets import QApplication, QMainWindow, QPushButton, QWidget, QVBoxLayout, QHBoxLayout, QLabel, QLineEdit, QMessageBox\n')] |
from flask import Flask, request, jsonify
from db_user_interactions import user_respond
from pymongo import MongoClient
from datetime import date, datetime, timedelta
import re
# today's date
today_int = date.today()
print("test_app - Today's date:", today_int)
today_str = str(today_int)
# MongoDB setup
client = MongoClient('your_api_server_ip', 27017)
db = client.ukov_dev
# app is Flask
app = Flask(__name__)
'''
data_list = db.archive.find({'artist_name': '새소년'})
for data in data_list:
    print(data)
'''
@app.route('/', methods=['POST'])
def receive_message():
    # data received from KakaoTalk
    dataReceive = request.get_json()
    print(dataReceive)
    user_key = dataReceive["userRequest"]["user"]["id"]
    print(user_key)
    # json format is 'userRequest' -> 'utterance'
    if dataReceive["userRequest"]["utterance"]:
        # received name
        # name = dataReceive["userRequest"]["utterance"]
        # user request refining artist
        # db.same_name.find_one('name')
        # registered name
        artist_name = dataReceive["action"]["params"]["가수"]
        print("processing name is " + artist_name)
        # strip a trailing \r and/or \n from the text
        if re.search("(\\r|)\\n$", artist_name):
            artist_name = re.sub("(\\r|)\\n$", "", artist_name)
        data_list = db.archive.find({'artist_name': artist_name})
        print("app.py - data list from archive is:" + str(data_list))
        item_list = []
        # find all data from archive collection
        # if there is ticket information on archive
        # getting user key and registering on db
        user_respond(artist_name, today_str, user_key)
        for data in data_list:
            duration = "기간: " + data['start_date'] + " ~ " + data['end_date']
            item = {
                "title": data['title'],
                "description": duration,
                "thumbnail": {
                    "imageUrl": data['poster_png']},
                "social": {"like": "", "comment": "", "share": ""},
                "buttons":
                    [{"action": "webLink", "label": "예매하기",
                      "webLinkUrl": data['url']},
                     {"action": "share", "label": "공유하기"}]
            }
            item_list.append(item)
        if not item_list:
            dataSend = {
                "version": "2.0",
                "template": {
                    "outputs": [
                        {
                            "simpleText": {
                                "text": "ㅠㅠ " + artist_name + "의 공연이 안 잡혔나봐요 ㅠㅠ"
                            }
                        }
                    ]
                }}
            return jsonify(dataSend)
        else:
            # changed "Carousel" into "carousel"
            dataSend = \
                {"version": "2.0",
                 "template":
                     {"outputs":
                          [{"carousel":
                                {"type": "basicCard", "items": item_list}}]}}
            print(dataSend)
            return jsonify(dataSend)
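# Illustrative refactor idea (not in the original): both routes return the same
# "no results" simpleText payload, so it could live in one helper:
#   def simple_text_response(text):
#       return jsonify({"version": "2.0",
#                       "template": {"outputs": [{"simpleText": {"text": text}}]}})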
@app.route('/calendar', methods=['POST'])
def receive_for_calendar():
    # data received from KakaoTalk
    dataReceive = request.get_json()
    print(dataReceive)
    user_key = dataReceive["userRequest"]["user"]["id"]
    print(user_key)
    # json format is 'userRequest' -> 'utterance'
    if dataReceive["userRequest"]["utterance"]:
        # received name
        # name = dataReceive["userRequest"]["utterance"]
        # user request refining artist
        # db.same_name.find_one('name')
        # registered name
        artist_name = dataReceive["action"]["params"]["가수"]
        print("processing name is " + artist_name)
        # strip a trailing \r and/or \n from the text
        if re.search("(\\r|)\\n$", artist_name):
            artist_name = re.sub("(\\r|)\\n$", "", artist_name)
        public_url_data = db.calendar.find_one({'artist_name': artist_name})
        if not public_url_data:
            dataSend = {
                "version": "2.0",
                "template": {
                    "outputs": [
                        {
                            "simpleText": {
                                "text": "ㅠㅠ " + artist_name + "의 공연이 안 잡혔나봐요 ㅠㅠ"
                            }
                        }
                    ]
                }}
            return jsonify(dataSend)
        else:
            public_url = public_url_data['public_url']
            print(public_url)
            dataSend = {
                "version": "2.0",
                "template": {
                    "outputs": [
                        {
                            "basicCard": {
                                "title": "",
                                "description": "공연일정을 달력으로 보시겠습니까?",
                                "thumbnail": {},
                                "social": {
                                    "like": "",
                                    "comment": "",
                                    "share": "",
                                },
                                "buttons": [
                                    {
                                        "action": "webLink",
                                        "label": "달력으로 보기",
                                        "webLinkUrl": public_url,
                                    }
                                ],
                            },
                        }
                    ],
                },
            }
            print(dataSend)
            return jsonify(dataSend)
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=3000, debug=True)
'''
# sample from Kakao instruction
# https://i.kakao.com/docs/skill-response-format#bot
dataSend = {
    "version": "2.0",
    "template": {
        "outputs": [
            {
                "carousel": {
                    "type": "basicCard",
                    "items": [
                        {
                            "title": "보물상자",
                            "description": "보물상자 안에는 뭐가 있을까",
                            "thumbnail": {
                                "imageUrl": "http://k.kakaocdn.net/dn/83BvP/bl20duRC1Q1/lj3JUcmrzC53YIjNDkqbWK/i_6piz1p.jpg"
                            },
                            "buttons": [
                                {
                                    "action": "message",
                                    "label": "열어보기",
                                    "messageText": "짜잔! 우리가 찾던 보물입니다"
                                },
                                {
                                    "action": "webLink",
                                    "label": "구경하기",
                                    "webLinkUrl": "https://e.kakao.com/t/hello-ryan"
                                }
                            ]
                        },
                        {
                            "title": "보물상자2",
                            "description": "보물상자2 안에는 뭐가 있을까",
                            "thumbnail": {
                                "imageUrl": "http://k.kakaocdn.net/dn/83BvP/bl20duRC1Q1/lj3JUcmrzC53YIjNDkqbWK/i_6piz1p.jpg"
                            },
                            "buttons": [
                                {
                                    "action": "message",
                                    "label": "열어보기",
                                    "messageText": "짜잔! 우리가 찾던 보물입니다"
                                },
                                {
                                    "action": "webLink",
                                    "label": "구경하기",
                                    "webLinkUrl": "https://e.kakao.com/t/hello-ryan"
                                }
                            ]
                        },
                        {
                            "title": "보물상자3",
                            "description": "보물상자3 안에는 뭐가 있을까",
                            "thumbnail": {
                                "imageUrl": "http://k.kakaocdn.net/dn/83BvP/bl20duRC1Q1/lj3JUcmrzC53YIjNDkqbWK/i_6piz1p.jpg"
                            },
                            "buttons": [
                                {
                                    "action": "message",
                                    "label": "열어보기",
                                    "messageText": "짜잔! 우리가 찾던 보물입니다"
                                },
                                {
                                    "action": "webLink",
                                    "label": "구경하기",
                                    "webLinkUrl": "https://e.kakao.com/t/hello-ryan"
                                }
                            ]
                        }
                    ]
                }
            }
        ]
    }
}
'''
| [
"db_user_interactions.user_respond",
"flask.Flask",
"flask.jsonify",
"flask.request.get_json",
"re.sub",
"pymongo.MongoClient",
"datetime.date.today",
"re.search"
] | [((204, 216), 'datetime.date.today', 'date.today', ([], {}), '()\n', (214, 216), False, 'from datetime import date, datetime, timedelta\n'), ((314, 354), 'pymongo.MongoClient', 'MongoClient', (['"""your_api_server_ip"""', '(27017)'], {}), "('your_api_server_ip', 27017)\n", (325, 354), False, 'from pymongo import MongoClient\n'), ((397, 412), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (402, 412), False, 'from flask import Flask, request, jsonify\n'), ((623, 641), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (639, 641), False, 'from flask import Flask, request, jsonify\n'), ((3199, 3217), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (3215, 3217), False, 'from flask import Flask, request, jsonify\n'), ((1209, 1245), 're.search', 're.search', (['"""(\\\\r|)\\\\n$"""', 'artist_name'], {}), "('(\\\\r|)\\\\n$', artist_name)\n", (1218, 1245), False, 'import re\n'), ((1624, 1670), 'db_user_interactions.user_respond', 'user_respond', (['artist_name', 'today_str', 'user_key'], {}), '(artist_name, today_str, user_key)\n', (1636, 1670), False, 'from db_user_interactions import user_respond\n'), ((3785, 3821), 're.search', 're.search', (['"""(\\\\r|)\\\\n$"""', 'artist_name'], {}), "('(\\\\r|)\\\\n$', artist_name)\n", (3794, 3821), False, 'import re\n'), ((1272, 1309), 're.sub', 're.sub', (['"""(\\\\r|)\\\\n$"""', '""""""', 'artist_name'], {}), "('(\\\\r|)\\\\n$', '', artist_name)\n", (1278, 1309), False, 'import re\n'), ((2688, 2705), 'flask.jsonify', 'jsonify', (['dataSend'], {}), '(dataSend)\n', (2695, 2705), False, 'from flask import Flask, request, jsonify\n'), ((3054, 3071), 'flask.jsonify', 'jsonify', (['dataSend'], {}), '(dataSend)\n', (3061, 3071), False, 'from flask import Flask, request, jsonify\n'), ((3848, 3885), 're.sub', 're.sub', (['"""(\\\\r|)\\\\n$"""', '""""""', 'artist_name'], {}), "('(\\\\r|)\\\\n$', '', artist_name)\n", (3854, 3885), False, 'import re\n'), ((4316, 4333), 'flask.jsonify', 'jsonify', (['dataSend'], {}), '(dataSend)\n', (4323, 4333), False, 'from flask import Flask, request, jsonify\n'), ((5360, 5377), 'flask.jsonify', 'jsonify', (['dataSend'], {}), '(dataSend)\n', (5367, 5377), False, 'from flask import Flask, request, jsonify\n')] |
from qft import get_fft_from_counts, loadBackend, qft_framework
from fft import fft_framework
from frontend import frontend, signal, transform
from qiskit.circuit.library import QFT as qiskit_qft
# --- Standard imports
# Importing standard Qiskit libraries and configuring account
from qiskit import QuantumCircuit, execute, Aer, IBMQ
from qiskit.compiler import transpile, assemble
from qiskit.tools.jupyter import *
from qiskit.visualization import *
# Loading your IBM Q account(s)
provider = IBMQ.load_account()
# --- Imports
from qiskit import QuantumCircuit, execute, BasicAer
from qiskit.tools.monitor import job_monitor
import math
from numpy import linalg as LA
import numpy as np
#%config jupy = 'svg' # Makes the images look nice
# --- Computation of the calibration matrix
from qiskit.ignis.mitigation.measurement import (complete_meas_cal, CompleteMeasFitter)
from qiskit import *
nQubits = 4
nShots = 2048
qr = QuantumRegister(nQubits)
meas_calibs, state_labels = complete_meas_cal(qr=qr, circlabel='mcal')
_, backend = loadBackend('ibmq_quito', True)
job = execute(meas_calibs, backend=backend, shots=1000)
# job_monitor(job, interval = 3)
cal_results = job.result()
meas_fitter = CompleteMeasFitter(cal_results, state_labels, circlabel='mcal')
print(meas_fitter.cal_matrix)
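# The calibration matrix estimates P(measured bitstring | prepared bitstring)
# from the calibration circuits; meas_fitter.filter (applied further below)
# uses it to correct the raw measurement counts.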
q = QuantumRegister(4,'q')
qc = QuantumCircuit(q)
# Normalize ampl, which is required for squared sum of amps=1
ys = signal(samplingRate=1000, amplification=1, duration=0, nSamples=2**nQubits)
ys.addFrequency(125)
ys.addFrequency(250)
y = ys.sample()
plotData = ys.show(subplot=[1,4,1], title='signal')
print("Processing FFT")
fft = transform(fft_framework)
y_hat, f = fft.forward(ys)
y_hat_ideal_p, f_p = fft.postProcess(y_hat, f)
plotData = fft.show(y_hat_ideal_p, f_p, subplot=[1,4,2], title="FFT (ref)")
# y.addFrequency(250)
ampls = y / np.linalg.norm(y)
# for 2^n amplitudes, we have n qubits for initialization
# this means that the binary representation happens exactly here
qc.initialize(ampls, [q[i] for i in range(nQubits)])
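# Sanity check (illustrative, not in the original): amplitude encoding requires
# a unit-norm state vector over the 2**nQubits amplitudes:
# assert np.isclose(np.sum(np.abs(ampls) ** 2), 1.0)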
qc += qiskit_qft(num_qubits=nQubits, approximation_degree=0, do_swaps=True, inverse=False, insert_barriers=False, name='qft')
qc.measure_all()
qc = transpile(qc, backend, optimization_level=1)  # opt level 0,1..3; 3: heaviest opt
job = execute(qc, backend, shots=nShots)
#job = execute(qc, BasicAer.get_backend('qasm_simulator'), shots = shots)
result = job.result()
# print(result.get_counts())
genTransform = transform(None)
y_hat = np.array(get_fft_from_counts(result.get_counts(), nQubits))
f = genTransform.calcFreqArray(ys, y_hat)
y_hat_sim_p, f_p = genTransform.postProcess(y_hat, f)
plotData = genTransform.show(y_hat_sim_p, f_p, subplot=[1,4,3], title=f"qft_sim_n")
print(y_hat)
# Get the filter object
meas_filter = meas_fitter.filter
# Results with mitigation
mitigated_results = meas_filter.apply(result)
mitigated_counts = mitigated_results.get_counts(0)
# print(mitigated_counts)
y_hat = np.array(get_fft_from_counts(mitigated_counts, nQubits))
f = genTransform.calcFreqArray(ys, y_hat)
y_hat_sim_p, f_p = genTransform.postProcess(y_hat, f)
plotData = genTransform.show(y_hat_sim_p, f_p, subplot=[1,4,4], title=f"qft_sim_n_f")
print(y_hat)
frontend.primeTime() | [
"frontend.signal",
"qiskit.execute",
"qiskit.compiler.transpile",
"qft.loadBackend",
"qiskit.ignis.mitigation.measurement.complete_meas_cal",
"qiskit.IBMQ.load_account",
"qiskit.circuit.library.QFT",
"frontend.transform",
"numpy.linalg.norm",
"qiskit.QuantumCircuit",
"qft.get_fft_from_counts",
... | [((499, 518), 'qiskit.IBMQ.load_account', 'IBMQ.load_account', ([], {}), '()\n', (516, 518), False, 'from qiskit import QuantumCircuit, execute, Aer, IBMQ\n'), ((997, 1039), 'qiskit.ignis.mitigation.measurement.complete_meas_cal', 'complete_meas_cal', ([], {'qr': 'qr', 'circlabel': '"""mcal"""'}), "(qr=qr, circlabel='mcal')\n", (1014, 1039), False, 'from qiskit.ignis.mitigation.measurement import complete_meas_cal, CompleteMeasFitter\n'), ((1053, 1084), 'qft.loadBackend', 'loadBackend', (['"""ibmq_quito"""', '(True)'], {}), "('ibmq_quito', True)\n", (1064, 1084), False, 'from qft import get_fft_from_counts, loadBackend, qft_framework\n'), ((1091, 1140), 'qiskit.execute', 'execute', (['meas_calibs'], {'backend': 'backend', 'shots': '(1000)'}), '(meas_calibs, backend=backend, shots=1000)\n', (1098, 1140), False, 'from qiskit import QuantumCircuit, execute, BasicAer\n'), ((1216, 1279), 'qiskit.ignis.mitigation.measurement.CompleteMeasFitter', 'CompleteMeasFitter', (['cal_results', 'state_labels'], {'circlabel': '"""mcal"""'}), "(cal_results, state_labels, circlabel='mcal')\n", (1234, 1279), False, 'from qiskit.ignis.mitigation.measurement import complete_meas_cal, CompleteMeasFitter\n'), ((1350, 1367), 'qiskit.QuantumCircuit', 'QuantumCircuit', (['q'], {}), '(q)\n', (1364, 1367), False, 'from qiskit import QuantumCircuit, execute, BasicAer\n'), ((1435, 1512), 'frontend.signal', 'signal', ([], {'samplingRate': '(1000)', 'amplification': '(1)', 'duration': '(0)', 'nSamples': '(2 ** nQubits)'}), '(samplingRate=1000, amplification=1, duration=0, nSamples=2 ** nQubits)\n', (1441, 1512), False, 'from frontend import frontend, signal, transform\n'), ((1655, 1679), 'frontend.transform', 'transform', (['fft_framework'], {}), '(fft_framework)\n', (1664, 1679), False, 'from frontend import frontend, signal, transform\n'), ((2071, 2194), 'qiskit.circuit.library.QFT', 'qiskit_qft', ([], {'num_qubits': 'nQubits', 'approximation_degree': '(0)', 'do_swaps': '(True)', 'inverse': '(False)', 'insert_barriers': '(False)', 'name': '"""qft"""'}), "(num_qubits=nQubits, approximation_degree=0, do_swaps=True,\n inverse=False, insert_barriers=False, name='qft')\n", (2081, 2194), True, 'from qiskit.circuit.library import QFT as qiskit_qft\n'), ((2214, 2258), 'qiskit.compiler.transpile', 'transpile', (['qc', 'backend'], {'optimization_level': '(1)'}), '(qc, backend, optimization_level=1)\n', (2223, 2258), False, 'from qiskit.compiler import transpile, assemble\n'), ((2301, 2335), 'qiskit.execute', 'execute', (['qc', 'backend'], {'shots': 'nShots'}), '(qc, backend, shots=nShots)\n', (2308, 2335), False, 'from qiskit import QuantumCircuit, execute, BasicAer\n'), ((2484, 2499), 'frontend.transform', 'transform', (['None'], {}), '(None)\n', (2493, 2499), False, 'from frontend import frontend, signal, transform\n'), ((3250, 3270), 'frontend.frontend.primeTime', 'frontend.primeTime', ([], {}), '()\n', (3268, 3270), False, 'from frontend import frontend, signal, transform\n'), ((1868, 1885), 'numpy.linalg.norm', 'np.linalg.norm', (['y'], {}), '(y)\n', (1882, 1885), True, 'import numpy as np\n'), ((3004, 3050), 'qft.get_fft_from_counts', 'get_fft_from_counts', (['mitigated_counts', 'nQubits'], {}), '(mitigated_counts, nQubits)\n', (3023, 3050), False, 'from qft import get_fft_from_counts, loadBackend, qft_framework\n')] |
from unittest.mock import Mock
import pytest
from friendly_iter.iterator_modifiers import flatten, take, skip, step
def test_flatten():
    result = flatten([range(4), [], [4, 5]])
    assert list(result) == [0, 1, 2, 3, 4, 5]


def test_take_limits_number_of_resulting_items():
    result = take(3, range(10))
    assert list(result) == [0, 1, 2]


def test_take_works_if_iterator_is_too_short():
    result = take(10, range(3))
    assert list(result) == [0, 1, 2]


def test_skip_drops_first_n_elements():
    result = skip(2, [1, 2, 3, 4, 5])
    assert list(result) == [3, 4, 5]


def test_skipping_too_many_results_in_empty_iterator():
    result = skip(3, [1, 2])
    assert list(result) == []


def test_skip_advances_iterator_lazily():
    skip(3, FailingIter())  # should not raise


def test_refuse_stepsize_less_than_one():
    with pytest.raises(ValueError):
        step(0, [])


def test_step_size_one_is_an_identity_operation():
    it = Mock()
    result = step(1, it)
    assert result is it


def test_step_always_yields_first_element():
    result = step(2, [1])
    assert list(result) == [1]


def test_step_yields_every_nth_item():
    result = step(2, [1, 2, 3, 4])
    assert list(result) == [1, 3]


class FailingIter:
    def __iter__(self):
        return self

    def __next__(self):
        pytest.fail("Iterator was advanced")
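# Note: FailingIter is how the laziness test works: constructing
# skip(3, FailingIter()) must never call __next__, otherwise
# pytest.fail() aborts the test.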
| [
"unittest.mock.Mock",
"friendly_iter.iterator_modifiers.step",
"friendly_iter.iterator_modifiers.skip",
"pytest.fail",
"pytest.raises"
] | [((526, 550), 'friendly_iter.iterator_modifiers.skip', 'skip', (['(2)', '[1, 2, 3, 4, 5]'], {}), '(2, [1, 2, 3, 4, 5])\n', (530, 550), False, 'from friendly_iter.iterator_modifiers import flatten, take, skip, step\n'), ((659, 674), 'friendly_iter.iterator_modifiers.skip', 'skip', (['(3)', '[1, 2]'], {}), '(3, [1, 2])\n', (663, 674), False, 'from friendly_iter.iterator_modifiers import flatten, take, skip, step\n'), ((958, 964), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (962, 964), False, 'from unittest.mock import Mock\n'), ((978, 989), 'friendly_iter.iterator_modifiers.step', 'step', (['(1)', 'it'], {}), '(1, it)\n', (982, 989), False, 'from friendly_iter.iterator_modifiers import flatten, take, skip, step\n'), ((1074, 1086), 'friendly_iter.iterator_modifiers.step', 'step', (['(2)', '[1]'], {}), '(2, [1])\n', (1078, 1086), False, 'from friendly_iter.iterator_modifiers import flatten, take, skip, step\n'), ((1172, 1193), 'friendly_iter.iterator_modifiers.step', 'step', (['(2)', '[1, 2, 3, 4]'], {}), '(2, [1, 2, 3, 4])\n', (1176, 1193), False, 'from friendly_iter.iterator_modifiers import flatten, take, skip, step\n'), ((849, 874), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (862, 874), False, 'import pytest\n'), ((884, 895), 'friendly_iter.iterator_modifiers.step', 'step', (['(0)', '[]'], {}), '(0, [])\n', (888, 895), False, 'from friendly_iter.iterator_modifiers import flatten, take, skip, step\n'), ((1326, 1362), 'pytest.fail', 'pytest.fail', (['"""Iterator was advanced"""'], {}), "('Iterator was advanced')\n", (1337, 1362), False, 'import pytest\n')] |
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 9 10:56:12 2017
@author: tneises
"""
import json
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.lines as mlines
import sys
import os
absFilePath = os.path.abspath(__file__)
fileDir = os.path.dirname(os.path.abspath(__file__))
parentDir = os.path.dirname(fileDir)
newPath = os.path.join(parentDir, 'core')
sys.path.append(newPath)
import sco2_cycle_ssc as sco2_solve
import sco2_plots as cy_plt
##########################################
"Cycle design simulation with default parameters"
c_sco2 = sco2_solve.C_sco2_sim(1) # Initialize to the recompression cycle default (1)
c_sco2.solve_sco2_case() # Run design simulation
print(c_sco2.m_solve_dict)
print("\nDid the simulation code solve successfully = ",c_sco2.m_solve_success)
c_sco2.m_also_save_csv = True
c_sco2.save_m_solve_dict("design_solution__default_pars") # Save design solution dictionary
sol_dict__default_pars = c_sco2.m_solve_dict
##########################################
"Plotting a cycle design"
c_plot = cy_plt.C_sco2_TS_PH_plot(sol_dict__default_pars)
c_plot.is_save_plot = True
c_plot.file_name = "cycle_design_plots__default_pars"
c_plot.plot_new_figure()
##########################################
"Modifying the cycle design parameters"
mod_base_dict = {"T_htf_hot_des" : 620, "cycle_config" : 2}
c_sco2.overwrite_des_par_base(mod_base_dict) # Overwrite baseline design parameters
c_sco2.solve_sco2_case() # Run design simulation
print(c_sco2.m_solve_dict)
print("\nDid the simulation code with"
"modified design parameters solve successfully = ",c_sco2.m_solve_success)
c_sco2.m_also_save_csv = True
c_sco2.save_m_solve_dict("design_solution__modified_pars") # Save design solution dictionary
sol_dict__mod_pars = c_sco2.m_solve_dict
##########################################
"Comparing two cycle designs"
c_comp_plot = cy_plt.C_sco2_TS_PH_overlay_plot(sol_dict__default_pars, sol_dict__mod_pars)
c_comp_plot.is_save_plot = True
c_comp_plot.plot_new_figure()
##########################################
"Running a parametric study on one design parameter"
c_sco2.reset_des_par_base_to_default_RC()
T_HTF_in_par_list = list(np.arange(570,721,25))
T_HTF_in_par_dict_list = []
for T_HTF_in in T_HTF_in_par_list:
    T_HTF_in_par_dict_list.append({"T_htf_hot_des": T_HTF_in})
c_sco2.solve_sco2_parametric(T_HTF_in_par_dict_list)
print("\nDid the parametric analyses solve successfully = ",c_sco2.m_par_solve_success)
c_sco2.m_also_save_csv = True
c_sco2.save_m_par_solve_dict("T_HTF_parametric")
sol_dict_parametric = c_sco2.m_par_solve_dict
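# Note (inferred from the indexing in the section below): the parametric solve
# dict maps each output name to a list with one entry per run, so
# sol_dict_parametric["T_htf_hot_des"][i] is the HTF temperature of run i.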
##########################################
"Plotting a 1D parametric study"
par_plot = cy_plt.C_des_stacked_outputs_plot([sol_dict_parametric])
par_plot.x_var = "T_HTF"
par_plot.y_vars = ["eta","MC_P_in","PHX_dT"]
par_plot.is_legend = False
par_plot.max_rows = 2
par_plot.is_save = True
par_plot.file_name = "T_HTF_par_plot"
par_plot.create_plot()
##########################################
"Plotting one cycle design from a parametric solution dictionary"
i_plot = len(sol_dict_parametric["T_htf_hot_des"]) - 1
dict_i_plot = sco2_solve.get_one_des_dict_from_par_des_dict(sol_dict_parametric, "T_htf_hot_des", i_plot)
c_i_cycle_plot = cy_plt.C_sco2_TS_PH_plot(dict_i_plot)
c_i_cycle_plot.is_save_plot = True
c_i_cycle_plot.file_name = "cycle_design_plots__T_HTF_hottest"
c_i_cycle_plot.plot_new_figure()
##########################################
##########################################
##########################################
##########################################
##########################################
| [
"sco2_plots.C_sco2_TS_PH_overlay_plot",
"sco2_plots.C_sco2_TS_PH_plot",
"os.path.join",
"os.path.dirname",
"sco2_cycle_ssc.C_sco2_sim",
"sco2_plots.C_des_stacked_outputs_plot",
"sco2_cycle_ssc.get_one_des_dict_from_par_des_dict",
"os.path.abspath",
"sys.path.append",
"numpy.arange"
] | [((223, 248), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (238, 248), False, 'import os\n'), ((314, 338), 'os.path.dirname', 'os.path.dirname', (['fileDir'], {}), '(fileDir)\n', (329, 338), False, 'import os\n'), ((349, 380), 'os.path.join', 'os.path.join', (['parentDir', '"""core"""'], {}), "(parentDir, 'core')\n", (361, 380), False, 'import os\n'), ((382, 406), 'sys.path.append', 'sys.path.append', (['newPath'], {}), '(newPath)\n', (397, 406), False, 'import sys\n'), ((578, 602), 'sco2_cycle_ssc.C_sco2_sim', 'sco2_solve.C_sco2_sim', (['(1)'], {}), '(1)\n', (599, 602), True, 'import sco2_cycle_ssc as sco2_solve\n'), ((1072, 1120), 'sco2_plots.C_sco2_TS_PH_plot', 'cy_plt.C_sco2_TS_PH_plot', (['sol_dict__default_pars'], {}), '(sol_dict__default_pars)\n', (1096, 1120), True, 'import sco2_plots as cy_plt\n'), ((1920, 1996), 'sco2_plots.C_sco2_TS_PH_overlay_plot', 'cy_plt.C_sco2_TS_PH_overlay_plot', (['sol_dict__default_pars', 'sol_dict__mod_pars'], {}), '(sol_dict__default_pars, sol_dict__mod_pars)\n', (1952, 1996), True, 'import sco2_plots as cy_plt\n'), ((2727, 2783), 'sco2_plots.C_des_stacked_outputs_plot', 'cy_plt.C_des_stacked_outputs_plot', (['[sol_dict_parametric]'], {}), '([sol_dict_parametric])\n', (2760, 2783), True, 'import sco2_plots as cy_plt\n'), ((3168, 3263), 'sco2_cycle_ssc.get_one_des_dict_from_par_des_dict', 'sco2_solve.get_one_des_dict_from_par_des_dict', (['sol_dict_parametric', '"""T_htf_hot_des"""', 'i_plot'], {}), "(sol_dict_parametric,\n 'T_htf_hot_des', i_plot)\n", (3213, 3263), True, 'import sco2_cycle_ssc as sco2_solve\n'), ((3277, 3314), 'sco2_plots.C_sco2_TS_PH_plot', 'cy_plt.C_sco2_TS_PH_plot', (['dict_i_plot'], {}), '(dict_i_plot)\n', (3301, 3314), True, 'import sco2_plots as cy_plt\n'), ((275, 300), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (290, 300), False, 'import os\n'), ((2223, 2246), 'numpy.arange', 'np.arange', (['(570)', '(721)', '(25)'], {}), '(570, 721, 25)\n', (2232, 2246), True, 'import numpy as np\n')] |
from random import randint
import datetime
import pymysql
import cgi
def getConnection():
    return pymysql.connect(host='localhost',
                           user='root',
                           password='<PASSWORD>',
                           db='BookFetch')
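# Note (illustrative): the string-concatenated SQL below is vulnerable to SQL
# injection; pymysql also supports parameterized queries, e.g.:
#   cursor.execute("insert into PhoneNumbers values (%s, %s)", (id, telephone))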
def newStudent():
    fName = input("First Name:\t ")
    lName = input("Last Name:\t ")
    email = input("Email:\t ")
    telephone = input("Telephone [##########]:\t ")
    street = input("Street:\t ")
    city = input("City:\t ")
    state = input("State:\t ")
    country = input("Country:\t ")
    major = input("Major:\t ")
    status = input("Undergrad/Graduate:\t ")
    year = input("Year [#]:\t ")
    print("-----University is a foreign key constraint!-----")
    university = input("University:\t ")
    dob = input("DOB:\t ")
    id = fName[:2] + lName[:2] + "St" + str(randint(100, 999))
    id = id.upper()
    connection = getConnection()
    connection.autocommit(True)
    try:
        with connection.cursor() as cursor:
            sql = ("insert into Users values (\"" + id + "\", \"" + fName
                   + "\", \"" + lName + "\", \"" + email + "\", \"" + street + "\", \"" + city
                   + "\", \"" + state + "\", \"" + country + "\");")
            cursor.execute(sql)
            sql = ("insert into PhoneNumbers values (\"" + id + "\", " + telephone + ");")
            cursor.execute(sql)
            sql = ("insert into Students values (\"" + id + "\", \"" + major + "\", " + year + ", \""
                   + university + "\", \"" + dob + "\", \"" + status + "\");")
            cursor.execute(sql)
    finally:
        connection.close()
def newCart():
    now = datetime.datetime.now()
    currentDate = str(now.month) + "/" + str(now.day) + "/" + str(now.year)
    print("-----If the user already has a cart created then it will be an error.-----")
    firstName = input("Enter student's first name: ")
    lastName = input("Enter student's last name: ")
    bookTitle = input("Enter the book title: ")
    rentBuy = input("Enter purchase type (rent/buy): ")
    quantity = input("Enter quantity: ")
    wishlist = input("Wishlist (y/n): ")
    connection = getConnection()
    connection.autocommit(True)
    try:
        with connection.cursor() as cursor:
            sql = ("insert into Cart values(("
                   + "select ID from Users where FirstName = \""
                   + firstName + "\" and " + "LastName = \"" + lastName + "\"),"
                   + "(select ISBN from BookDetails where Title = \"" + bookTitle + "\"), \"" + rentBuy + "\", " + quantity
                   + ", \"" + wishlist + "\");")
            cursor.execute(sql)
            sql = ("insert into CartDateCreated values(("
                   + "select ID from Users where FirstName = \""
                   + firstName + "\" and " + "LastName = \"" + lastName + "\"), \""
                   + currentDate + "\");"
                   )
            cursor.execute(sql)
            sql = ("insert into CartDateUpdated values(("
                   + "select ID from Users where FirstName = \""
                   + firstName + "\" and " + "LastName = \"" + lastName + "\"), \""
                   + currentDate + "\");"
                   )
            cursor.execute(sql)
    except:
        print(cursor)
    finally:
        connection.close()
def newOrder():
    now = datetime.datetime.now()
    currentDate = str(now.month) + "/" + str(now.day) + "/" + str(now.year)
    print("-----Enter the name of the user whose cart will be turned into order.-----")
    firstName = input("Enter student's first name: ")
    lastName = input("Enter student's last name: ")
    sType = input("Shipping type: ")
    ccNumber = input("Credit card number: ")
    ccExpiration = input("Credit card expiration: ")
    ccName = input("Credit card name: ")
    ccType = input("Credit card type: ")
    connection = getConnection()
    connection.autocommit(True)
    try:
        with connection.cursor() as cursor:
            sql = ("select * from Cart where Cart.StudentID = ("
                   + "select ID from Users where FirstName = \""
                   + firstName + "\" and " + "LastName = \"" + lastName + "\");")
            cursor.execute(sql)
            commands = []
            for i in cursor:
                if (i[4] == "n"):
                    sql = ("insert into Orders values(\"" + i[0] + "\", "
                           + str(i[1]) + ", \"" + i[2] + "\", " + str(i[3])
                           + ", \"" + currentDate + "\", \"\", \""
                           + sType + "\", \"" + ccNumber + "\", \""
                           + ccExpiration + "\", \"" + ccName + "\", \"" + ccType
                           + "\", \"" + "new\");")
                    commands.append(sql)
                    sql = ("delete from cart where StudentID = \""
                           + i[0] + "\" and ISBN = " + str(i[1]) + ";")
                    commands.append(sql)
                    sql = ("update CartDateUpdated set DUpdated = \"" + currentDate
                           + "\" where StudentID"
                           + " = \"" + i[0] + "\";"
                           )
                    commands.append(sql)
            for i in commands:
                cursor.execute(i)
    finally:
        connection.close()
def newRating():
    print("-----Enter the name of the user who will rate a book.-----")
    firstName = input("Enter student's first name: ")
    lastName = input("Enter student's last name: ")
    bookTitle = input("Enter book title: ")
    bookRating = input("Your rating [0-5]: ")
    ratingTitle = input("Rating title: ")
    ratingDescription = input("Rating description: ")
    connection = getConnection()
    connection.autocommit(True)
    try:
        with connection.cursor() as cursor:
            sql = ("insert into BReviewed values(("
                   + "select ID from Users where FirstName = \""
                   + firstName + "\" and " + "LastName = \"" + lastName + "\"), \""
                   + ratingTitle + "\", " + bookRating
                   + ", (select ISBN from BookDetails where Title = \"" + bookTitle + "\"), \""
                   + ratingDescription + "\");")
            cursor.execute(sql)
    finally:
        connection.close()
def updateCart():
    print("-----Enter the name of the user whose cart you will modify.-----")
    firstName = input("Enter student's first name: ")
    lastName = input("Enter student's last name: ")
    cartOptions = (
        """Here are your options:
        1) Add to cart
        2) Delete from cart
        Enter [1-2]: """)
    userInput = int(input(cartOptions))  # cast to int so the comparisons below match
    if (userInput == 1):
        addToCart(firstName, lastName)
    elif (userInput == 2):
        deleteFromCart(firstName, lastName)
def addToCart(firstName, lastName):
    now = datetime.datetime.now()
    currentDate = str(now.month) + "/" + str(now.day) + "/" + str(now.year)
    bookTitle = input("Enter the book title: ")
    rentBuy = input("Enter purchase type (rent/buy): ")
    quantity = input("Enter quantity: ")
    wishlist = input("Wishlist (y/n): ")
    connection = getConnection()
    connection.autocommit(True)
    try:
        with connection.cursor() as cursor:
            sql = ("insert into Cart values(("
                   + "select ID from Users where FirstName = \""
                   + firstName + "\" and " + "LastName = \"" + lastName + "\"),"
                   + "(select ISBN from BookDetails where Title = \"" + bookTitle + "\"), \"" + rentBuy + "\", " + quantity
                   + ", \"" + wishlist + "\");")
            cursor.execute(sql)
            sql = ("insert into CartDateUpdated values(("
                   + "select ID from Users where FirstName = \""
                   + firstName + "\" and " + "LastName = \"" + lastName + "\"), \""
                   + currentDate + "\");"
                   )
            cursor.execute(sql)
    finally:
        connection.close()
def deleteFromCart(firstName, lastName):
    now = datetime.datetime.now()
    currentDate = str(now.month) + "/" + str(now.day) + "/" + str(now.year)
    connection = getConnection()
    connection.autocommit(True)
    try:
        with connection.cursor() as cursor:
            sql = ("select * from Cart where StudentID = ("
                   + "select ID from Users where FirstName = \""
                   + firstName + "\" and " + "LastName = \"" + lastName + "\");")
            cursor.execute(sql)
            print("\nHere is the cart of " + firstName + " " + lastName)
            counter = 0
            commands = []
            for i in cursor:
                print(str(counter) + ") " + i[0] + " " + str(i[1]) + " " + i[2] + " " + str(i[3]) + " " + i[4])
                sql = ("delete from Cart where StudentID = \"" + i[0] + "\" and ISBN = " + str(i[1]) + ";")
                commands.append(sql)
                counter = counter + 1
            deleteItem = input("Enter the number of item that you want to delete [0-"
                               + str(counter - 1) + "]: ")
            cursor.execute(commands[int(deleteItem)])
            sql = ("insert into CartDateUpdated values(("
                   + "select ID from Users where FirstName = \""
                   + firstName + "\" and " + "LastName = \"" + lastName + "\"), \""
                   + currentDate + "\");"
                   )
            cursor.execute(sql)
    finally:
        connection.close()
def cancelOrder():
    print("-----Enter the name of the user whose order you will modify.-----")
    firstName = input("Enter student's first name: ")
    lastName = input("Enter student's last name: ")
    connection = getConnection()
    connection.autocommit(True)
    try:
        with connection.cursor() as cursor:
            sql = ("select * from Orders where StudentID = ("
                   + "select ID from Users where FirstName = \""
                   + firstName + "\" and " + "LastName = \"" + lastName + "\");")
            cursor.execute(sql)
            print("\nHere are the orders of " + firstName + " " + lastName)
            counter = 0
            commands = []
            for i in cursor:
                print(str(counter) + ") " + i[0] + " " + str(i[1]) + " " + i[2] + " " + str(i[3]) + " " + i[4])
                sql = ("delete from Orders where StudentID = \"" + i[0] + "\" and BookISBN = " + str(i[1])
                       + " and RentBuy = \"" + i[2] + "\" and DateCreated = \"" + i[4] + "\";")
                commands.append(sql)
                counter = counter + 1
            deleteItem = input("Enter the number of item that you want to delete [0-"
                               + str(counter - 1) + "]: ")
            cursor.execute(commands[int(deleteItem)])
    finally:
        connection.close()
def studentModuleMain():
    welcomeMsg = ("---------------------\nStudent Module\n---------------------")
    mainOptionsMsg = (
        """Here are your options:
        1) Create a new student
        2) Create a cart for a user
        3) Create a new order based on a cart
        4) Create a new book rating
        5) Update a cart
        6) Cancel an order
        7) Return
        8) Quit
        Enter [1-8]: """)
    invalidInputMsg = "Invalid input, please enter a valid input."
    print(welcomeMsg)
    userInput = int(input(mainOptionsMsg))
    print("\n")
    while (userInput < 1 or userInput > 8):
        print(invalidInputMsg)
        userInput = int(input(mainOptionsMsg))
        print("\n")
    if (userInput == 1):
        newStudent()
    elif (userInput == 2):
        newCart()
    elif (userInput == 3):
        newOrder()
    elif (userInput == 4):
        newRating()
    elif (userInput == 5):
        updateCart()
    elif (userInput == 6):
        cancelOrder()
    elif (userInput == 7):
        return
    elif (userInput == 8):
        quit()


studentModuleMain()
| [
"datetime.datetime.now",
"pymysql.connect",
"random.randint"
] | [((104, 194), 'pymysql.connect', 'pymysql.connect', ([], {'host': '"""localhost"""', 'user': '"""root"""', 'password': '"""<PASSWORD>"""', 'db': '"""BookFetch"""'}), "(host='localhost', user='root', password='<PASSWORD>', db=\n 'BookFetch')\n", (119, 194), False, 'import pymysql\n'), ((1689, 1712), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1710, 1712), False, 'import datetime\n'), ((3390, 3413), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3411, 3413), False, 'import datetime\n'), ((6870, 6893), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6891, 6893), False, 'import datetime\n'), ((8071, 8094), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (8092, 8094), False, 'import datetime\n'), ((857, 874), 'random.randint', 'randint', (['(100)', '(999)'], {}), '(100, 999)\n', (864, 874), False, 'from random import randint\n')] |
from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, SubmitField, SelectField
from wtforms.validators import Required
class BlogForm(FlaskForm):
    blog_title = StringField("Blog title", validators=[Required()])
    blog_description = StringField("Blog description", validators=[Required()])
    story = TextAreaField("Blog content", validators=[Required()])
    category = SelectField(
        "Category",
        choices=[
            ("Gaming", "Gaming"),
            ("Career", "Career"),
            ("Technology", "Technology"),
            ("Sports", "Sports"),
            ("Fitness", "Fitness"),
        ],
        validators=[Required()],
    )
    submit = SubmitField("Post")


class CommentForm(FlaskForm):
    details = TextAreaField("Your comment", validators=[Required()])
    submit = SubmitField("Comment")


class UpdateProfile(FlaskForm):
    bio = TextAreaField("Tell us about you.", validators=[Required()])
    submit = SubmitField("Submit")


# class DeletePost(FlaskForm):
#     comment_id = StringField()
#     delete = SubmitField('Delete')
| [
"wtforms.validators.Required",
"wtforms.SubmitField"
] | [((699, 718), 'wtforms.SubmitField', 'SubmitField', (['"""Post"""'], {}), "('Post')\n", (710, 718), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n'), ((835, 857), 'wtforms.SubmitField', 'SubmitField', (['"""Comment"""'], {}), "('Comment')\n", (846, 857), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n'), ((976, 997), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (987, 997), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n'), ((1219, 1240), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (1230, 1240), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n'), ((230, 240), 'wtforms.validators.Required', 'Required', ([], {}), '()\n', (238, 240), False, 'from wtforms.validators import Required\n'), ((310, 320), 'wtforms.validators.Required', 'Required', ([], {}), '()\n', (318, 320), False, 'from wtforms.validators import Required\n'), ((377, 387), 'wtforms.validators.Required', 'Required', ([], {}), '()\n', (385, 387), False, 'from wtforms.validators import Required\n'), ((667, 677), 'wtforms.validators.Required', 'Required', ([], {}), '()\n', (675, 677), False, 'from wtforms.validators import Required\n'), ((808, 818), 'wtforms.validators.Required', 'Required', ([], {}), '()\n', (816, 818), False, 'from wtforms.validators import Required\n'), ((950, 960), 'wtforms.validators.Required', 'Required', ([], {}), '()\n', (958, 960), False, 'from wtforms.validators import Required\n'), ((1193, 1203), 'wtforms.validators.Required', 'Required', ([], {}), '()\n', (1201, 1203), False, 'from wtforms.validators import Required\n')] |
# Generated by Django 3.0.4 on 2020-03-29 14:14
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('role', '0002_auto_20200329_1412'),
    ]

    operations = [
        migrations.AlterField(
            model_name='role',
            name='name',
            field=models.CharField(default='', max_length=50, unique=True),
        ),
    ]
| [
"django.db.models.CharField"
] | [((329, 385), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(50)', 'unique': '(True)'}), "(default='', max_length=50, unique=True)\n", (345, 385), False, 'from django.db import migrations, models\n')] |
from pynamodb.attributes import UnicodeAttribute
from pynamodb.models import Model  # needed for .get(), .save() and DoesNotExist


class UserModel(Model):
    class Meta:
        table_name = "goslinks-users"
        read_capacity_units = 1
        write_capacity_units = 1

    email = UnicodeAttribute(hash_key=True)
    name = UnicodeAttribute()
    photo = UnicodeAttribute()

    @property
    def organization(self):
        _, o = self.email.split("@")
        return o

    @classmethod
    def update_or_create_user(cls, user_info):
        email = user_info["email"]
        try:
            user = cls.get(email)
        except cls.DoesNotExist:
            user = cls(email)
        user.name = user_info["name"]
        user.photo = user_info["picture"]
        user.save()
        return user


class LinkModel(Model):
    class Meta:
        table_name = "goslinks-links"
        read_capacity_units = 1
        write_capacity_units = 1

    name = UnicodeAttribute(hash_key=True)  # contains organization name and link name
    url = UnicodeAttribute()
    owner = UnicodeAttribute()

    @staticmethod
    def name_from_organization_and_slug(organization, slug):
        return f"{organization}|{slug}"

    @classmethod
    def get_from_organization_and_slug(cls, organization, slug, **kwargs):
        name = cls.name_from_organization_and_slug(organization, slug)
        return cls.get(hash_key=name, **kwargs)

    @classmethod
    def get_or_init(cls, user, slug):
        name = cls.name_from_organization_and_slug(user.organization, slug)
        try:
            link = cls.get(name)
        except cls.DoesNotExist:
            link = cls(name=name)
            link.owner = user.email
        return link

    @property
    def organization(self):
        o, _ = self.name.split("|")
        return o

    @property
    def slug(self):
        _, s = self.name.split("|")
        return s

    @property
    def owner_user(self):
        from goslinks.db.factory import get_model

        if not self.owner.endswith(self.organization):
            raise AssertionError(
                "Owner does not belong to the organization this link is contained in"
            )
        return get_model("user").get(self.owner)


MODEL_REGISTRY = {"user": UserModel, "link": LinkModel}
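# Example usage (illustrative; all field values below are hypothetical):
# user = UserModel.update_or_create_user(
#     {"email": "jane@acme.com", "name": "Jane", "picture": "photo-url"})
# link = LinkModel.get_or_init(user, "wiki")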
| [
"pynamodb.attributes.UnicodeAttribute",
"goslinks.db.factory.get_model"
] | [((200, 231), 'pynamodb.attributes.UnicodeAttribute', 'UnicodeAttribute', ([], {'hash_key': '(True)'}), '(hash_key=True)\n', (216, 231), False, 'from pynamodb.attributes import UnicodeAttribute\n'), ((243, 261), 'pynamodb.attributes.UnicodeAttribute', 'UnicodeAttribute', ([], {}), '()\n', (259, 261), False, 'from pynamodb.attributes import UnicodeAttribute\n'), ((274, 292), 'pynamodb.attributes.UnicodeAttribute', 'UnicodeAttribute', ([], {}), '()\n', (290, 292), False, 'from pynamodb.attributes import UnicodeAttribute\n'), ((871, 902), 'pynamodb.attributes.UnicodeAttribute', 'UnicodeAttribute', ([], {'hash_key': '(True)'}), '(hash_key=True)\n', (887, 902), False, 'from pynamodb.attributes import UnicodeAttribute\n'), ((957, 975), 'pynamodb.attributes.UnicodeAttribute', 'UnicodeAttribute', ([], {}), '()\n', (973, 975), False, 'from pynamodb.attributes import UnicodeAttribute\n'), ((988, 1006), 'pynamodb.attributes.UnicodeAttribute', 'UnicodeAttribute', ([], {}), '()\n', (1004, 1006), False, 'from pynamodb.attributes import UnicodeAttribute\n'), ((2118, 2135), 'goslinks.db.factory.get_model', 'get_model', (['"""user"""'], {}), "('user')\n", (2127, 2135), False, 'from goslinks.db.factory import get_model\n')] |
"""Tests for creating file structure."""
import os
from pathlib import Path
import tempfile
from unittest import TestCase
import carpyt
TEST_TEMPLATES = Path(os.path.abspath(__file__)).parent / 'test_templates'
class TestTemplateParsing(TestCase):
    """Tests that templates are parsed correctly."""

    def test_simple_template(self):
        """Tests creation of nested parse tree."""
        template_path = TEST_TEMPLATES / 'nested_simple.yml'
        file_tree = carpyt.run_template_parser(template_path)
        self.assertTrue(file_tree.name == 'nested_simple')
        self.assertTrue(file_tree[0].name == '{module}')
        self.assertTrue(file_tree[0][0].name == 'tests')
        self.assertTrue(file_tree[0][0][0].name == 'test.py')
        self.assertTrue(file_tree[0][0][0].content is None)

    def test_branched_template(self):
        """Tests creation of branched parse tree."""
        template_path = TEST_TEMPLATES / 'nested_branched.yml'
        file_tree = carpyt.run_template_parser(template_path)
        self.assertTrue(file_tree.name == 'nested_branched')
        self.assertTrue(file_tree[0].name == 'docs')
        self.assertTrue(file_tree[1].name == 'tests')
        self.assertTrue(file_tree[1][0].name == 'test_files')
        self.assertTrue(file_tree[1][0].content is None)
        self.assertTrue(file_tree[2].name == '{module}')
        self.assertTrue(file_tree[2][0].name == '__init__.py')
        self.assertTrue('content' in file_tree[2][0].content)

    def test_linked_template(self):
        """Tests creation of parse tree with linked templates."""
        template_path = TEST_TEMPLATES / 'parent.yml'
        file_tree = carpyt.run_template_parser(template_path)
        self.assertTrue(file_tree.name == 'parent')
        self.assertTrue(file_tree[0].name == '{module}')
        self.assertTrue(file_tree[0][0].name == 'child')
        self.assertTrue(file_tree[0][0][0].name == 'test_files')
        self.assertTrue(file_tree[0][0][0][0].name == 'tests.py')
        self.assertTrue(file_tree[0][0][0][0].content is None)
        self.assertTrue(file_tree[1].name == 'setup.py')
        self.assertTrue(file_tree[1].content is None)

    def test_recursive_template(self):
        """Tests error handling in recursive linked templates."""
        template_path = TEST_TEMPLATES / 'recursive.yml'
        with self.assertRaises(RecursionError):
            carpyt.run_template_parser(template_path)

    def test_reuse_template(self):
        """Tests reuse of linked templates."""
        template_path = TEST_TEMPLATES / 'reuse.yml'
        file_tree = carpyt.run_template_parser(template_path)
        self.assertTrue(len(file_tree.content) == 1)
class TestFileCreation(TestCase):
    """Tests that all required files are generated correctly."""

    def test_simple_project(self):
        """Tests the file structure of the standard python template."""
        with tempfile.TemporaryDirectory() as tempdir:
            td_path = Path(tempdir)
            template_path = carpyt.TEMPLATES / 'python_module.yml'
            file_tree = carpyt.run_template_parser(template_path)
            file_tree.make(td_path)
            top_dir = td_path / 'python_module'
            self.assertTrue(top_dir.exists())
            lib_dir = top_dir / '{module}'
            self.assertTrue((lib_dir).exists())
            self.assertTrue((lib_dir / '__init__.py').exists())
            self.assertTrue((lib_dir / 'lib.py').exists())
            tests_dir = top_dir / 'tests'
            self.assertTrue(tests_dir.exists())
            self.assertTrue((tests_dir / 'test_lib.py').exists())
            docs_dir = top_dir / 'docs'
            self.assertTrue(docs_dir.exists())
            self.assertTrue((top_dir / 'README.md').exists())
            self.assertTrue((top_dir / 'MANIFEST.in').exists())
            self.assertTrue((top_dir / 'setup.py').exists())
| [
"os.path.abspath",
"tempfile.TemporaryDirectory",
"carpyt.run_template_parser",
"pathlib.Path"
] | [((476, 517), 'carpyt.run_template_parser', 'carpyt.run_template_parser', (['template_path'], {}), '(template_path)\n', (502, 517), False, 'import carpyt\n'), ((988, 1029), 'carpyt.run_template_parser', 'carpyt.run_template_parser', (['template_path'], {}), '(template_path)\n', (1014, 1029), False, 'import carpyt\n'), ((1676, 1717), 'carpyt.run_template_parser', 'carpyt.run_template_parser', (['template_path'], {}), '(template_path)\n', (1702, 1717), False, 'import carpyt\n'), ((2610, 2651), 'carpyt.run_template_parser', 'carpyt.run_template_parser', (['template_path'], {}), '(template_path)\n', (2636, 2651), False, 'import carpyt\n'), ((162, 187), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (177, 187), False, 'import os\n'), ((2412, 2453), 'carpyt.run_template_parser', 'carpyt.run_template_parser', (['template_path'], {}), '(template_path)\n', (2438, 2453), False, 'import carpyt\n'), ((2927, 2956), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (2954, 2956), False, 'import tempfile\n'), ((2991, 3004), 'pathlib.Path', 'Path', (['tempdir'], {}), '(tempdir)\n', (2995, 3004), False, 'from pathlib import Path\n'), ((3096, 3137), 'carpyt.run_template_parser', 'carpyt.run_template_parser', (['template_path'], {}), '(template_path)\n', (3122, 3137), False, 'import carpyt\n')] |
#!/usr/bin/env python3
import pytest
import argparse
import configparser
from tools.rest_tools import *
from tools.rest_helper import *
parser = argparse.ArgumentParser(description="Single test")
parser.add_argument( '-m', '--mod_name', help="exec only one directory under tapplet/")
parser.add_argument( '-f', '--test_func', help="exec only one test")
parser.add_argument( '-V', '--verbose' , action="store_true", help="show more info, default is false")
parser.add_argument( '-v', '--pytest_verbose' , action="store_true", help="show more pytest info, default is false")
parser.add_argument( '-s', '--sw' , action="store_true", help="exit on test fail, continue from last failing test next time")
parser.add_argument( '-l', '--list' , action="store_true", help="list all test case, not execute")
parser.add_argument( '-n', '--count' , default="3", help="test counts/loops, default is 3")
parser.add_argument( '-L', '--long_time' , action="store_true", help="run long time tests")
# parser.add_argument( '-f', '--function')
args = parser.parse_args()
mod_name = args.mod_name
test_func = args.test_func
global_verbose= args.verbose
pytest_verbose= args.pytest_verbose
global_sw = args.sw
global_list = args.list
global_count = args.count
long_time_test = args.long_time
global_config = configparser.ConfigParser()
global_config.read("global.cfg", encoding = "utf-8")
host_config = global_config.get("auto_test", "host")
eth_config = global_config.get("auto_test", "eth")
dump_eth_config = global_config.get("auto_test", "dump_eth")
port1_config = global_config.get("auto_test", "port1")
port2_config = global_config.get("auto_test", "port2")
device_config = global_config.get("auto_test", "device")
device_config_list = ["VM"]
###### initial login ######
sf_helper = Sf_rest(host_config)
try_rest_login(sf_helper)
def check_device_config():
    if device_config not in device_config_list:
        print("device_config not right!")
        print("supported device:")
        print(device_config_list)
        exit(-1)
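# Note: "--sw" maps to pytest's stepwise mode (stop on the first failure and
# resume from the last failing test on the next run), matching the help text above.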
if __name__ == "__main__":
check_device_config()
addopt = []
if global_verbose is True:
addopt.append("-v")
if pytest_verbose is True and global_verbose is False:
addopt.append("-v")
if global_sw is True:
addopt.append("--sw")
if global_list is True:
addopt.append("--collect-only")
if mod_name != None:
mod_name = "tapplet/"+mod_name
addopt.append(mod_name)
pytest.main(addopt)
elif test_func != None:
addopt.append(test_func)
pytest.main(addopt)
else:
for i in range(int(global_count)):
print("############ {0} ############".format(i+1))
ret = pytest.main(addopt)
if ret != 0:
break
if global_list is True:
break
###### log out ######
try_rest_logout(sf_helper)
#pytest.main(["gtpcv1"])
| [
"configparser.ConfigParser",
"argparse.ArgumentParser",
"pytest.main"
] | [((147, 197), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Single test"""'}), "(description='Single test')\n", (170, 197), False, 'import argparse\n'), ((1297, 1324), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (1322, 1324), False, 'import configparser\n'), ((2488, 2507), 'pytest.main', 'pytest.main', (['addopt'], {}), '(addopt)\n', (2499, 2507), False, 'import pytest\n'), ((2577, 2596), 'pytest.main', 'pytest.main', (['addopt'], {}), '(addopt)\n', (2588, 2596), False, 'import pytest\n'), ((2736, 2755), 'pytest.main', 'pytest.main', (['addopt'], {}), '(addopt)\n', (2747, 2755), False, 'import pytest\n')] |
from dotenv import load_dotenv
import os
load_dotenv()
class ApplicationConfig:
    SECRET_KEY = "asdadsd"
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_ECHO = True
    SQLALCHEMY_DATABASE_URI = r"sqlite:///./db.sqlite"
    SESSION_TYPE = "filesystem"
    SESSION_PERMANENT = False
    SESSION_USE_SIGNER = True
    EMAIL_ADDRESS = "<EMAIL>"
    EMAIL_PASSWORD = "<PASSWORD>"
| [
"dotenv.load_dotenv"
] | [((42, 55), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (53, 55), False, 'from dotenv import load_dotenv\n')] |
from typing import List
import pandas as pd
from logzero import logger
from sqlalchemy.orm import Session
from covigator import SYNONYMOUS_VARIANT, MISSENSE_VARIANT
from covigator.database.model import DataSource, PrecomputedSynonymousNonSynonymousCounts, RegionType, \
    VARIANT_OBSERVATION_TABLE_NAME, SAMPLE_ENA_TABLE_NAME, SAMPLE_GISAID_TABLE_NAME, PrecomputedOccurrence
from covigator.database.queries import Queries
NUMBER_TOP_OCCURRENCES = 1000
class TopOccurrencesLoader:

    def __init__(self, session: Session):
        self.session = session
        self.queries = Queries(session=self.session)

    def load(self):
        # gets the top occurring variants for each source and overall
        top_occurring_variants_ena = None
        try:
            top_occurring_variants_ena = self.queries.get_top_occurring_variants(
                top=NUMBER_TOP_OCCURRENCES, source=DataSource.ENA.name)
        except ValueError as e:
            logger.exception(e)
            logger.error("No top occurrences for ENA data")
        top_occurring_variants_gisaid = None
        try:
            top_occurring_variants_gisaid = self.queries.get_top_occurring_variants(
                top=NUMBER_TOP_OCCURRENCES, source=DataSource.GISAID.name)
        except ValueError:
            logger.error("No top occurrences for GISAID data")
        top_occurring_variants = None
        try:
            top_occurring_variants = self.queries.get_top_occurring_variants(top=NUMBER_TOP_OCCURRENCES)
        except ValueError:
            logger.error("No top occurrences")

        # delete all rows before starting
        self.session.query(PrecomputedOccurrence).delete()
        self.session.commit()

        database_rows = []
        # stores the precomputed data
        if top_occurring_variants_ena is not None:
            for index, row in top_occurring_variants_ena.iterrows():
                # add entries per gene
                database_rows.append(self._row_to_top_occurrence(row, source=DataSource.ENA))
        if top_occurring_variants_gisaid is not None:
            for index, row in top_occurring_variants_gisaid.iterrows():
                # add entries per gene
                database_rows.append(self._row_to_top_occurrence(row, source=DataSource.GISAID))
        if top_occurring_variants is not None:
            for index, row in top_occurring_variants.iterrows():
                # add entries per gene
                database_rows.append(self._row_to_top_occurrence(row))

        if len(database_rows) > 0:
            self.session.add_all(database_rows)
            self.session.commit()
            logger.info("Added {} entries to {}".format(len(database_rows), PrecomputedOccurrence.__tablename__))

    def _row_to_top_occurrence(self, row, source=None):
        return PrecomputedOccurrence(
            total=row["total"],
            frequency=row["frequency"],
            variant_id=row["variant_id"],
            hgvs_p=row["hgvs_p"],
            gene_name=row["gene_name"],
            domain=row["pfam_name"],
            annotation=row["annotation_highest_impact"],
            source=source,
            month=row["month"],
            count=row["count"],
            frequency_by_month=row["frequency_by_month"],
        )
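# Example usage (illustrative; `session` is assumed to be an open SQLAlchemy
# Session bound to the covigator database):
# TopOccurrencesLoader(session=session).load()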
| [
"covigator.database.model.PrecomputedOccurrence",
"covigator.database.queries.Queries",
"logzero.logger.exception",
"logzero.logger.error"
] | [((584, 613), 'covigator.database.queries.Queries', 'Queries', ([], {'session': 'self.session'}), '(session=self.session)\n', (591, 613), False, 'from covigator.database.queries import Queries\n'), ((2816, 3155), 'covigator.database.model.PrecomputedOccurrence', 'PrecomputedOccurrence', ([], {'total': "row['total']", 'frequency': "row['frequency']", 'variant_id': "row['variant_id']", 'hgvs_p': "row['hgvs_p']", 'gene_name': "row['gene_name']", 'domain': "row['pfam_name']", 'annotation': "row['annotation_highest_impact']", 'source': 'source', 'month': "row['month']", 'count': "row['count']", 'frequency_by_month': "row['frequency_by_month']"}), "(total=row['total'], frequency=row['frequency'],\n variant_id=row['variant_id'], hgvs_p=row['hgvs_p'], gene_name=row[\n 'gene_name'], domain=row['pfam_name'], annotation=row[\n 'annotation_highest_impact'], source=source, month=row['month'], count=\n row['count'], frequency_by_month=row['frequency_by_month'])\n", (2837, 3155), False, 'from covigator.database.model import DataSource, PrecomputedSynonymousNonSynonymousCounts, RegionType, VARIANT_OBSERVATION_TABLE_NAME, SAMPLE_ENA_TABLE_NAME, SAMPLE_GISAID_TABLE_NAME, PrecomputedOccurrence\n'), ((959, 978), 'logzero.logger.exception', 'logger.exception', (['e'], {}), '(e)\n', (975, 978), False, 'from logzero import logger\n'), ((991, 1038), 'logzero.logger.error', 'logger.error', (['"""No top occurrences for ENA data"""'], {}), "('No top occurrences for ENA data')\n", (1003, 1038), False, 'from logzero import logger\n'), ((1297, 1347), 'logzero.logger.error', 'logger.error', (['"""No top occurrences for GISAID data"""'], {}), "('No top occurrences for GISAID data')\n", (1309, 1347), False, 'from logzero import logger\n'), ((1544, 1578), 'logzero.logger.error', 'logger.error', (['"""No top occurrences"""'], {}), "('No top occurrences')\n", (1556, 1578), False, 'from logzero import logger\n')] |
### evaluation
import numpy as np
from sklearn.linear_model import LinearRegression
class Evaluate(object):
    def __init__(self, model_names, X_train, y_preds, config, verbose=0):
        self.distance_min = config['distance_min']
        self.point_min = config['point_min']  # typical values: distance_min = 0.05, point_min = 50
        self.model_names = model_names
        self.X_train = X_train
        self.y_preds = y_preds
        self.verbose = verbose
        self.metrics = {'ratios': {}, 'slopes': {}, 'inters': {}, 'slopes_raw': {}}
        self.boundary_points = {}

    def fit(self):
        for model_name in self.model_names:
            ratios = get_ratio_range(self.X_train, self.y_preds[model_name])
            slopes, inters, slopes_raw, boundaries = get_boundary_and_slope(
                self.X_train, self.y_preds[model_name], self.distance_min, self.point_min)
            self.metrics['ratios'][model_name] = ratios
            self.metrics['slopes'][model_name] = slopes
            self.metrics['slopes_raw'][model_name] = slopes_raw
            self.metrics['inters'][model_name] = inters
            self.boundary_points[model_name] = boundaries
            if self.verbose:
                print('model_name {}, metrics ratios {}, slopes {}, inters {}'.format(
                    model_name,
                    self.metrics['ratios'][model_name], self.metrics['slopes'][model_name],
                    self.metrics['inters'][model_name]))
        return self
def get_ratio_range(X_train, y_pred):
"""
Compute range ratio index
"""
range_ratios=[]
n_components = max(y_pred)+1
for i in range(n_components):
X_train_i = X_train[y_pred==i]
T2_v = 10**(X_train_i[:,0])
T1_v = 10**(X_train_i[:,1])
range_ratio = (np.max(T1_v/T2_v)/np.min(T1_v/T2_v))
range_ratios.append(range_ratio)
return range_ratios
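# e.g. if a cluster's T1/T2 values span [2.0, 8.0], its range ratio is 4.0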
def get_boundary_from_two_clusters_(cluster_a, cluster_b, distance_min = 0.05):
# cluster_a: shape(n,2)
# cluster_b: shape(n,2)
    id_a = set()
    id_b = set()  # row ids of matched boundary pairs (i, j): i indexes cluster_a, j indexes cluster_b
for i in range(cluster_a.shape[0]):
#i = 0
        cluster_a_i = cluster_a[i, :]
        distance_list = np.sqrt((cluster_a_i[0]-cluster_b[:, 0])**2 + (cluster_a_i[1]-cluster_b[:, 1])**2)
        distance_ = np.amin(distance_list)  # minimum distance from this point to cluster_b
if distance_ < distance_min:
j = np.argmin(distance_list)
id_a.add(i)
id_b.add(j)
    if len(id_a) == 0 and len(id_b) == 0:
return []
else:
id_a = list(id_a)
id_b = list(id_b)
id_a.sort()
id_b.sort()
boundary_points = np.vstack( (cluster_a[id_a,:],cluster_b[id_b,:] ) )
return boundary_points
def get_boundary_and_slope(X_train, y_pred, distance_min=0.05, point_min = 50):
    # point_min: minimum number of boundary points required to fit a line
    # get the decision boundaries between clusters and their slopes
    boundary_list = []  # boundary points for each cluster pair
    slope_raw_list = []  # fitted boundary angle in degrees
    angle_diff_list = []  # |angle - 45 deg| for each boundary
inter_list = []
n_components = max(y_pred)+1
data_all = [X_train[y_pred==i] for i in range(n_components)] # get each cluster points
for i in range(n_components-1):
for j in range(i+1, n_components):
cluster_a = data_all[i]
cluster_b = data_all[j]
boundary_points = get_boundary_from_two_clusters_(cluster_a, cluster_b,distance_min = distance_min)
if len(boundary_points) > point_min:
boundary_list.append(boundary_points)
# linear regression
lr_ = LinearRegression()
X_ = boundary_points[:,0].reshape(-1,1)
y_ = boundary_points[:,1]
lr_.fit(X_,y_)
                # convert the fitted gradient to an angle in degrees
                slope = np.arctan(lr_.coef_[0]) / np.pi * 180
inter = lr_.intercept_
slope_raw_list.append(slope)
inter_list.append(inter)
diff_slope = abs(slope-45)
                angle_diff_list.append(diff_slope)  # deviation from the 45-degree diagonal
return angle_diff_list, inter_list, slope_raw_list, boundary_list
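# --- Illustrative usage sketch (not part of the original module) ---
# The synthetic data and config values below are made up; `y_preds` maps a
# model name to the integer cluster label of each row of `X_train`.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    X_a = rng.normal(0.0, 0.05, size=(200, 2))
    X_b = rng.normal(0.1, 0.05, size=(200, 2))
    X_train_demo = np.vstack((X_a, X_b))
    y_pred_demo = np.array([0] * 200 + [1] * 200)
    config_demo = {'distance_min': 0.05, 'point_min': 50}
    ev = Evaluate(['demo'], X_train_demo, {'demo': y_pred_demo}, config_demo, verbose=1).fit()
    print(ev.metrics['ratios']['demo'], ev.metrics['slopes']['demo'])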
| [
"numpy.sqrt",
"numpy.amin",
"numpy.max",
"numpy.vstack",
"numpy.min",
"numpy.argmin",
"sklearn.linear_model.LinearRegression"
] | [((2206, 2300), 'numpy.sqrt', 'np.sqrt', (['((clsuter_a_i[0] - cluster_b[:, 0]) ** 2 + (clsuter_a_i[1] - cluster_b[:, 1\n ]) ** 2)'], {}), '((clsuter_a_i[0] - cluster_b[:, 0]) ** 2 + (clsuter_a_i[1] -\n cluster_b[:, 1]) ** 2)\n', (2213, 2300), True, 'import numpy as np\n'), ((2308, 2330), 'numpy.amin', 'np.amin', (['distance_list'], {}), '(distance_list)\n', (2315, 2330), True, 'import numpy as np\n'), ((2661, 2712), 'numpy.vstack', 'np.vstack', (['(cluster_a[id_a, :], cluster_b[id_b, :])'], {}), '((cluster_a[id_a, :], cluster_b[id_b, :]))\n', (2670, 2712), True, 'import numpy as np\n'), ((1750, 1769), 'numpy.max', 'np.max', (['(T1_v / T2_v)'], {}), '(T1_v / T2_v)\n', (1756, 1769), True, 'import numpy as np\n'), ((1768, 1787), 'numpy.min', 'np.min', (['(T1_v / T2_v)'], {}), '(T1_v / T2_v)\n', (1774, 1787), True, 'import numpy as np\n'), ((2400, 2424), 'numpy.argmin', 'np.argmin', (['distance_list'], {}), '(distance_list)\n', (2409, 2424), True, 'import numpy as np\n'), ((3637, 3655), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (3653, 3655), False, 'from sklearn.linear_model import LinearRegression\n')] |
#!/usr/local/CyberCP/bin/python
import os, sys
sys.path.append('/usr/local/CyberCP')
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CyberCP.settings")
django.setup()
import threading as multi
from plogical.acl import ACLManager
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
class UserManager(multi.Thread):
def __init__(self, function, extraArgs):
multi.Thread.__init__(self)
self.function = function
self.extraArgs = extraArgs
def run(self):
try:
if self.function == 'controlUserState':
self.controlUserState()
except:
pass
def controlUserState(self):
try:
websites = ACLManager.findAllSites(self.extraArgs['currentACL'],self.extraArgs['user'].pk)
from websiteFunctions.website import WebsiteManager
wm = WebsiteManager()
if self.extraArgs['state'] == 'SUSPEND':
for items in websites:
data = {'websiteName': items, 'state': 'Suspend'}
wm.submitWebsiteStatus(self.extraArgs['user'].pk, data)
else:
for items in websites:
data = {'websiteName': items, 'state': 'UN-Suspend'}
wm.submitWebsiteStatus(self.extraArgs['user'].pk, data)
except BaseException as msg:
            logging.writeToFile(str(msg) + '[Error:UserManager:32]')
| [
"os.environ.setdefault",
"threading.Thread.__init__",
"django.setup",
"websiteFunctions.website.WebsiteManager",
"plogical.acl.ACLManager.findAllSites",
"sys.path.append"
] | [((48, 85), 'sys.path.append', 'sys.path.append', (['"""/usr/local/CyberCP"""'], {}), "('/usr/local/CyberCP')\n", (63, 85), False, 'import os, sys\n'), ((100, 167), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""CyberCP.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'CyberCP.settings')\n", (121, 167), False, 'import os, sys\n'), ((168, 182), 'django.setup', 'django.setup', ([], {}), '()\n', (180, 182), False, 'import django\n'), ((409, 436), 'threading.Thread.__init__', 'multi.Thread.__init__', (['self'], {}), '(self)\n', (430, 436), True, 'import threading as multi\n'), ((733, 818), 'plogical.acl.ACLManager.findAllSites', 'ACLManager.findAllSites', (["self.extraArgs['currentACL']", "self.extraArgs['user'].pk"], {}), "(self.extraArgs['currentACL'], self.extraArgs['user'].pk\n )\n", (756, 818), False, 'from plogical.acl import ACLManager\n'), ((895, 911), 'websiteFunctions.website.WebsiteManager', 'WebsiteManager', ([], {}), '()\n', (909, 911), False, 'from websiteFunctions.website import WebsiteManager\n')] |
from flask import render_template, redirect, url_for, escape, request
from app import app
from app.forms import URLForm, FilesForm
from app.utils import perform_url_request, perform_upload_request
import requests
import base64
import json
@app.route('/url-api', methods=['POST'])
def url_api():
urls = request.form['urls'].split(',')
headers = {'content-type': 'application/json'}
req = {'signature_name': 'serving_default',
'instances': []}
for url in urls:
image_bytes = base64.b64encode(requests.get(url).content).decode('utf-8')
req['instances'].append({'b64': image_bytes})
json_response = requests.post('http://rssc:8501/v1/models/rssc/versions/1:predict',
headers=headers,
data=json.dumps(req))
return json_response.json()
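# For reference, the payload POSTed to TF Serving's REST predict endpoint by
# the handlers in this module has the shape (base64 string shortened):
#   {"signature_name": "serving_default",
#    "instances": [{"b64": "<base64-encoded image bytes>"}]}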
@app.route('/multilabel-url-api', methods=['POST'])
def multilabel_url_api():
urls = request.form['urls'].split(',')
headers = {'content-type': 'application/json'}
req = {'signature_name': 'serving_default',
'instances': []}
for url in urls:
image_bytes = base64.b64encode(requests.get(url).content).decode('utf-8')
req['instances'].append({'b64': image_bytes})
json_response = requests.post('http://rssc:8501/v1/models/multilabel-rssc/versions/1:predict',
headers=headers,
data=json.dumps(req))
return json_response.json()
@app.route('/upload-api', methods=['POST'])
def classify_images():
json_response = requests.post('http://rssc:8501/v1/models/rssc/versions/1:predict',
headers=request.headers,
data=request.data)
return json_response.json()
@app.route('/multilabel-upload-api', methods=['POST'])
def classify_images_multilabel():
json_response = requests.post('http://rssc:8501/v1/models/multilabel-rssc/versions/1:predict',
headers=request.headers,
data=request.data)
return json_response.json()
@app.route('/', methods=['GET', 'POST'])
@app.route('/index', methods=['GET', 'POST'])
def index():
return redirect(url_for('url'))
@app.route('/url', methods=['GET', 'POST'])
def url():
form = URLForm()
if form.validate_on_submit():
result = perform_url_request(form.url.data, form.task.data)
return render_template('result.html', title='Results', res=result, task=form.task.data.lower())
return render_template('url.html', title='URL', form=form)
@app.route('/upload', methods=['GET', 'POST'])
def upload():
form = FilesForm()
if form.validate_on_submit():
result = perform_upload_request(form.files.data, form.task.data)
#print(form.files.data.mimetype)
return render_template('result.html', title='Results',
res=result, task=form.task.data.lower())
#print(form.files.data.read())
return render_template('files.html', title='Upload', form=form)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html', title='404 - Not Found'), 404
@app.errorhandler(500)
def internal_error(e):
return render_template('500.html', title='500 - Server Error'), 500
| [
"flask.render_template",
"app.forms.URLForm",
"requests.post",
"app.utils.perform_url_request",
"json.dumps",
"app.utils.perform_upload_request",
"flask.url_for",
"requests.get",
"app.app.errorhandler",
"app.app.route",
"app.forms.FilesForm"
] | [((242, 281), 'app.app.route', 'app.route', (['"""/url-api"""'], {'methods': "['POST']"}), "('/url-api', methods=['POST'])\n", (251, 281), False, 'from app import app\n'), ((863, 913), 'app.app.route', 'app.route', (['"""/multilabel-url-api"""'], {'methods': "['POST']"}), "('/multilabel-url-api', methods=['POST'])\n", (872, 913), False, 'from app import app\n'), ((1517, 1559), 'app.app.route', 'app.route', (['"""/upload-api"""'], {'methods': "['POST']"}), "('/upload-api', methods=['POST'])\n", (1526, 1559), False, 'from app import app\n'), ((1773, 1826), 'app.app.route', 'app.route', (['"""/multilabel-upload-api"""'], {'methods': "['POST']"}), "('/multilabel-upload-api', methods=['POST'])\n", (1782, 1826), False, 'from app import app\n'), ((2062, 2101), 'app.app.route', 'app.route', (['"""/"""'], {'methods': "['GET', 'POST']"}), "('/', methods=['GET', 'POST'])\n", (2071, 2101), False, 'from app import app\n'), ((2103, 2147), 'app.app.route', 'app.route', (['"""/index"""'], {'methods': "['GET', 'POST']"}), "('/index', methods=['GET', 'POST'])\n", (2112, 2147), False, 'from app import app\n'), ((2200, 2242), 'app.app.route', 'app.route', (['"""/url"""'], {'methods': "['GET', 'POST']"}), "('/url', methods=['GET', 'POST'])\n", (2209, 2242), False, 'from app import app\n'), ((2550, 2595), 'app.app.route', 'app.route', (['"""/upload"""'], {'methods': "['GET', 'POST']"}), "('/upload', methods=['GET', 'POST'])\n", (2559, 2595), False, 'from app import app\n'), ((3030, 3051), 'app.app.errorhandler', 'app.errorhandler', (['(404)'], {}), '(404)\n', (3046, 3051), False, 'from app import app\n'), ((3147, 3168), 'app.app.errorhandler', 'app.errorhandler', (['(500)'], {}), '(500)\n', (3163, 3168), False, 'from app import app\n'), ((1603, 1719), 'requests.post', 'requests.post', (['"""http://rssc:8501/v1/models/rssc/versions/1:predict"""'], {'headers': 'request.headers', 'data': 'request.data'}), "('http://rssc:8501/v1/models/rssc/versions/1:predict', headers\n =request.headers, data=request.data)\n", (1616, 1719), False, 'import requests\n'), ((1881, 2007), 'requests.post', 'requests.post', (['"""http://rssc:8501/v1/models/multilabel-rssc/versions/1:predict"""'], {'headers': 'request.headers', 'data': 'request.data'}), "('http://rssc:8501/v1/models/multilabel-rssc/versions/1:predict',\n headers=request.headers, data=request.data)\n", (1894, 2007), False, 'import requests\n'), ((2265, 2274), 'app.forms.URLForm', 'URLForm', ([], {}), '()\n', (2272, 2274), False, 'from app.forms import URLForm, FilesForm\n'), ((2495, 2546), 'flask.render_template', 'render_template', (['"""url.html"""'], {'title': '"""URL"""', 'form': 'form'}), "('url.html', title='URL', form=form)\n", (2510, 2546), False, 'from flask import render_template, redirect, url_for, escape, request\n'), ((2621, 2632), 'app.forms.FilesForm', 'FilesForm', ([], {}), '()\n', (2630, 2632), False, 'from app.forms import URLForm, FilesForm\n'), ((2970, 3026), 'flask.render_template', 'render_template', (['"""files.html"""'], {'title': '"""Upload"""', 'form': 'form'}), "('files.html', title='Upload', form=form)\n", (2985, 3026), False, 'from flask import render_template, redirect, url_for, escape, request\n'), ((2181, 2195), 'flask.url_for', 'url_for', (['"""url"""'], {}), "('url')\n", (2188, 2195), False, 'from flask import render_template, redirect, url_for, escape, request\n'), ((2327, 2377), 'app.utils.perform_url_request', 'perform_url_request', (['form.url.data', 'form.task.data'], {}), '(form.url.data, form.task.data)\n', (2346, 2377), False, 
'from app.utils import perform_url_request, perform_upload_request\n'), ((2685, 2740), 'app.utils.perform_upload_request', 'perform_upload_request', (['form.files.data', 'form.task.data'], {}), '(form.files.data, form.task.data)\n', (2707, 2740), False, 'from app.utils import perform_url_request, perform_upload_request\n'), ((3086, 3138), 'flask.render_template', 'render_template', (['"""404.html"""'], {'title': '"""404 - Not Found"""'}), "('404.html', title='404 - Not Found')\n", (3101, 3138), False, 'from flask import render_template, redirect, url_for, escape, request\n'), ((3203, 3258), 'flask.render_template', 'render_template', (['"""500.html"""'], {'title': '"""500 - Server Error"""'}), "('500.html', title='500 - Server Error')\n", (3218, 3258), False, 'from flask import render_template, redirect, url_for, escape, request\n'), ((812, 827), 'json.dumps', 'json.dumps', (['req'], {}), '(req)\n', (822, 827), False, 'import json\n'), ((1466, 1481), 'json.dumps', 'json.dumps', (['req'], {}), '(req)\n', (1476, 1481), False, 'import json\n'), ((527, 544), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (539, 544), False, 'import requests\n'), ((1170, 1187), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1182, 1187), False, 'import requests\n')] |
"""
orbit.py
"Frankly, a very limited and highly specific implementation of an Orbit class.
If used for applications other than the original use case, this class will
either need to be bypassed or heavily expanded upon."
@author: <NAME> (https://github.com/Hans-Bananendans/)
"""
from numpy import log
class Orbit:
"""This class stores and supplies orbital parameters for given circular
SSO orbit"""
def __init__(self,h,i,LTAN):
self.h = h #[km]
self.i = i #[deg]
self.LTAN = LTAN #0-23[h] e.g. 14 is 14:00
def period(self):
"""
Parameters
----------
h : double
Orbital altitude in [km].
Returns
-------
int
Orbital period in [s].
"""
return int(2*3.141593 * ((1000*(6371+self.h))**3/(3.986*10**14))**0.5)
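    # Worked example (illustrative): for h = 500 [km],
    # period() = int(2*pi*sqrt((6871e3)**3 / 3.986e14)) ~= 5668 [s] (~94.5 min).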
def eclipse(self):
"""
eclipse(h)
Note: Only valid between LTAN [10:00, 11:00], based on logarithmic
regression of simulated eclipse data in GMAT. For more info,
consult eclipse_predictions.xlsx.
ACCURATE TO WITHIN A FEW SECONDS
Parameters
----------
h : double
Orbital altitude in [km].
Returns
-------
double
Total eclipse duration (including penumbras) in [s].
"""
# If LTAN is 10:00
# e = -151*log(self.h) + 2965 # [s]
# If LTAN is 10:30
e = -125*log(self.h) + 2860 # [s]
# If LTAN is 11:00
# e = -109*log(self.h) + 2800 # [s]
return e
def eclipse_frac(self):
"""
        eclipse_frac(h)
Note: Only valid for LTAN 10:00, 10:30, 11:00, based on logarithmic
regression of simulated eclipse data in GMAT. For more info,
consult eclipse_predictions.xlsx.
ACCURACY TO WITHIN 0.1 OF TRUE VALUE
Parameters
----------
h : double
Orbital altitude in [km].
Returns
-------
double
            Fraction of the orbit that is in eclipse [-].
"""
        return self.eclipse()/self.period()
| [
"numpy.log"
] | [((1541, 1552), 'numpy.log', 'log', (['self.h'], {}), '(self.h)\n', (1544, 1552), False, 'from numpy import log\n')] |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compat modules."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import tensorflow as tf # TF2
_DEFAULT_TF_BEHAVIOR = 2
# Get version of tf behavior in use (valid 1 or 2).
_tf_behavior_version = _DEFAULT_TF_BEHAVIOR
def setup_tf_behavior(tf_version=_DEFAULT_TF_BEHAVIOR):
"""Setup tf behavior. It must be used before the main()."""
global _tf_behavior_version
if tf_version not in [1, 2]:
raise ValueError(
'tf_version should be in [1, 2], but got {}'.format(tf_version))
if tf_version == 1:
tf.compat.v1.logging.warn(
        'Using v1 behavior. Please note that it is mainly to run legacy models; '
        'however, v2 is preferable if it is supported.')
tf.compat.v1.disable_v2_behavior()
else:
assert tf.__version__.startswith('2')
_tf_behavior_version = tf_version
def get_tf_behavior():
"""Gets version for tf behavior.
Returns:
int, 1 or 2 indicating the behavior version.
"""
return _tf_behavior_version
def test_in_tf_1(fn):
"""Decorator to test in tf 1 behaviors."""
@functools.wraps(fn)
def decorator(*args, **kwargs):
if get_tf_behavior() != 1:
tf.compat.v1.logging.info('Skip function {} for test_in_tf_1'.format(
fn.__name__))
return
fn(*args, **kwargs)
return decorator
def test_in_tf_2(fn):
"""Decorator to test in tf 2 behaviors."""
@functools.wraps(fn)
def decorator(*args, **kwargs):
if get_tf_behavior() != 2:
tf.compat.v1.logging.info('Skip function {} for test_in_tf_2'.format(
fn.__name__))
return
fn(*args, **kwargs)
return decorator
def test_in_tf_1and2(fn):
"""Decorator to test in tf 1 and 2 behaviors."""
@functools.wraps(fn)
def decorator(*args, **kwargs):
if get_tf_behavior() not in [1, 2]:
tf.compat.v1.logging.info('Skip function {} for test_in_tf_1and2'.format(
fn.__name__))
return
fn(*args, **kwargs)
return decorator
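# Usage sketch (hypothetical test case; names are illustrative):
#
#   class MyTest(tf.test.TestCase):
#
#     @test_in_tf_1
#     def test_legacy_graph_mode(self):
#       ...  # runs only when setup_tf_behavior(1) was called
#
#     @test_in_tf_2
#     def test_eager_mode(self):
#       ...  # runs only under the default v2 behavior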
| [
"tensorflow.compat.v1.logging.warn",
"tensorflow.compat.v1.disable_v2_behavior",
"functools.wraps",
"tensorflow.__version__.startswith"
] | [((1743, 1762), 'functools.wraps', 'functools.wraps', (['fn'], {}), '(fn)\n', (1758, 1762), False, 'import functools\n'), ((2058, 2077), 'functools.wraps', 'functools.wraps', (['fn'], {}), '(fn)\n', (2073, 2077), False, 'import functools\n'), ((2383, 2402), 'functools.wraps', 'functools.wraps', (['fn'], {}), '(fn)\n', (2398, 2402), False, 'import functools\n'), ((1215, 1377), 'tensorflow.compat.v1.logging.warn', 'tf.compat.v1.logging.warn', (['"""Using v1 behavior. Please note that it is mainly to run legacy models,however v2 is more preferrable if they are supported."""'], {}), "(\n 'Using v1 behavior. Please note that it is mainly to run legacy models,however v2 is more preferrable if they are supported.'\n )\n", (1240, 1377), True, 'import tensorflow as tf\n'), ((1392, 1426), 'tensorflow.compat.v1.disable_v2_behavior', 'tf.compat.v1.disable_v2_behavior', ([], {}), '()\n', (1424, 1426), True, 'import tensorflow as tf\n'), ((1446, 1476), 'tensorflow.__version__.startswith', 'tf.__version__.startswith', (['"""2"""'], {}), "('2')\n", (1471, 1476), True, 'import tensorflow as tf\n')] |
import importlib as _importlib
import six as _six
from flytekit.common.exceptions import scopes as _exception_scopes
from flytekit.common.exceptions import user as _user_exceptions
from flytekit.configuration import sdk as _sdk_config
from flytekit.models import literals as _literal_models
class _TypeEngineLoader(object):
_LOADED_ENGINES = None
_LAST_LOADED = None
@classmethod
def _load_engines(cls):
config = _sdk_config.TYPE_ENGINES.get()
if cls._LOADED_ENGINES is None or config != cls._LAST_LOADED:
cls._LAST_LOADED = config
cls._LOADED_ENGINES = []
for fqdn in config:
split = fqdn.split(".")
module_path, attr = ".".join(split[:-1]), split[-1]
module = _exception_scopes.user_entry_point(_importlib.import_module)(module_path)
if not hasattr(module, attr):
raise _user_exceptions.FlyteValueException(
module,
"Failed to load the type engine because the attribute named '{}' could not be found"
"in the module '{}'.".format(attr, module_path),
)
engine_impl = getattr(module, attr)()
cls._LOADED_ENGINES.append(engine_impl)
from flytekit.type_engines.default.flyte import FlyteDefaultTypeEngine as _DefaultEngine
cls._LOADED_ENGINES.append(_DefaultEngine())
@classmethod
def iterate_engines_in_order(cls):
"""
:rtype: Generator[flytekit.type_engines.common.TypeEngine]
"""
cls._load_engines()
return iter(cls._LOADED_ENGINES)
def python_std_to_sdk_type(t):
"""
:param T t: User input. Should be of the form: Types.Integer, [Types.Integer], {Types.String: Types.Integer}, etc.
:rtype: flytekit.common.types.base_sdk_types.FlyteSdkType
"""
for e in _TypeEngineLoader.iterate_engines_in_order():
out = e.python_std_to_sdk_type(t)
if out is not None:
return out
raise _user_exceptions.FlyteValueException(t, "Could not resolve to an SDK type for this value.")
def get_sdk_type_from_literal_type(literal_type):
"""
:param flytekit.models.types.LiteralType literal_type:
:rtype: flytekit.common.types.base_sdk_types.FlyteSdkType
"""
for e in _TypeEngineLoader.iterate_engines_in_order():
out = e.get_sdk_type_from_literal_type(literal_type)
if out is not None:
return out
raise _user_exceptions.FlyteValueException(
        literal_type, "Could not resolve to a type implementation for this value."
)
def infer_sdk_type_from_literal(literal):
"""
:param flytekit.models.literals.Literal literal:
:rtype: flytekit.common.types.base_sdk_types.FlyteSdkType
"""
for e in _TypeEngineLoader.iterate_engines_in_order():
out = e.infer_sdk_type_from_literal(literal)
if out is not None:
return out
raise _user_exceptions.FlyteValueException(literal, "Could not resolve to a type implementation for this value.")
def get_sdk_value_from_literal(literal, sdk_type=None):
"""
:param flytekit.models.literals.Literal literal:
:param flytekit.models.types.LiteralType sdk_type:
:rtype: flytekit.common.types.base_sdk_types.FlyteSdkValue
"""
# The spec states everything must be nullable, so if we receive a null value, swap to the null type behavior.
if sdk_type is None:
sdk_type = infer_sdk_type_from_literal(literal)
return sdk_type.from_flyte_idl(literal.to_flyte_idl())
def unpack_literal_map_to_sdk_object(literal_map, type_map=None):
"""
    :param flytekit.models.literals.LiteralMap literal_map:
:param dict[Text, flytekit.common.types.base_sdk_types.FlyteSdkType] type_map: Type map directing unpacking.
:rtype: dict[Text, T]
"""
type_map = type_map or {}
return {k: get_sdk_value_from_literal(v, sdk_type=type_map.get(k, None)) for k, v in literal_map.literals.items()}
def unpack_literal_map_to_sdk_python_std(literal_map, type_map=None):
"""
:param flytekit.models.literals.LiteralMap literal_map: Literal map containing values for unpacking.
:param dict[Text, flytekit.common.types.base_sdk_types.FlyteSdkType] type_map: Type map directing unpacking.
:rtype: dict[Text, T]
"""
return {k: v.to_python_std() for k, v in unpack_literal_map_to_sdk_object(literal_map, type_map=type_map).items()}
def pack_python_std_map_to_literal_map(std_map, type_map):
"""
:param dict[Text, T] std_map:
:param dict[Text, flytekit.common.types.base_sdk_types.FlyteSdkType] type_map:
:rtype: flytekit.models.literals.LiteralMap
:raises: flytekit.common.exceptions.user.FlyteTypeException
"""
return _literal_models.LiteralMap(literals={k: v.from_python_std(std_map[k]) for k, v in _six.iteritems(type_map)})
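# Conceptual flow through the helpers above (illustrative, not a tested call):
# a Python value is converted with python_std_to_sdk_type(...).from_python_std(...),
# and a received flytekit.models.literals.Literal is decoded back via
# get_sdk_value_from_literal(...).to_python_std().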
| [
"flytekit.type_engines.default.flyte.FlyteDefaultTypeEngine",
"flytekit.common.exceptions.scopes.user_entry_point",
"flytekit.configuration.sdk.TYPE_ENGINES.get",
"flytekit.common.exceptions.user.FlyteValueException",
"six.iteritems"
] | [((2084, 2179), 'flytekit.common.exceptions.user.FlyteValueException', '_user_exceptions.FlyteValueException', (['t', '"""Could not resolve to an SDK type for this value."""'], {}), "(t,\n 'Could not resolve to an SDK type for this value.')\n", (2120, 2179), True, 'from flytekit.common.exceptions import user as _user_exceptions\n'), ((2546, 2662), 'flytekit.common.exceptions.user.FlyteValueException', '_user_exceptions.FlyteValueException', (['literal_type', '"""Could not resolve to a type implementation for this value."""'], {}), "(literal_type,\n 'Could not resolve to a type implementation for this value.')\n", (2582, 2662), True, 'from flytekit.common.exceptions import user as _user_exceptions\n'), ((3024, 3135), 'flytekit.common.exceptions.user.FlyteValueException', '_user_exceptions.FlyteValueException', (['literal', '"""Could not resolve to a type implementation for this value."""'], {}), "(literal,\n 'Could not resolve to a type implementation for this value.')\n", (3060, 3135), True, 'from flytekit.common.exceptions import user as _user_exceptions\n'), ((442, 472), 'flytekit.configuration.sdk.TYPE_ENGINES.get', '_sdk_config.TYPE_ENGINES.get', ([], {}), '()\n', (470, 472), True, 'from flytekit.configuration import sdk as _sdk_config\n'), ((1456, 1472), 'flytekit.type_engines.default.flyte.FlyteDefaultTypeEngine', '_DefaultEngine', ([], {}), '()\n', (1470, 1472), True, 'from flytekit.type_engines.default.flyte import FlyteDefaultTypeEngine as _DefaultEngine\n'), ((783, 843), 'flytekit.common.exceptions.scopes.user_entry_point', '_exception_scopes.user_entry_point', (['_importlib.import_module'], {}), '(_importlib.import_module)\n', (817, 843), True, 'from flytekit.common.exceptions import scopes as _exception_scopes\n'), ((4912, 4936), 'six.iteritems', '_six.iteritems', (['type_map'], {}), '(type_map)\n', (4926, 4936), True, 'import six as _six\n')] |
from django.test import TestCase
# Create your tests here.
from gigs.models import Venue, Event
from gigs.views import LookupView
from factory.fuzzy import BaseFuzzyAttribute
from django.contrib.gis.geos import Point
from django.utils import timezone
from django.test import RequestFactory
from django.urls import reverse
import factory.django
import random
class FuzzyPoint(BaseFuzzyAttribute):
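    # Note: GEOS Point takes (x, y) == (longitude, latitude), which is why the
    # first coordinate is drawn from +/-180 and the second from +/-90.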
def fuzz(self):
return Point(random.uniform(-180.0, 180.0),
random.uniform(-90.0, 90.0))
# Factories for tests
class VenueFactory(factory.django.DjangoModelFactory):
class Meta:
model = Venue
django_get_or_create = (
'name',
'location'
)
name = 'Wembley Arena'
location = FuzzyPoint()
class EventFactory(factory.django.DjangoModelFactory):
class Meta:
model = Event
django_get_or_create = (
'name',
'venue',
'datetime'
)
name = 'Queens of the Stone Age'
    datetime = timezone.now()  # note: evaluated once at import time, so all events share this timestamp
# Test
class VenueTest(TestCase):
def test_create_venue(self):
# Create the venue
venue = VenueFactory()
# Check we can find it
all_venues = Venue.objects.all()
self.assertEqual(len(all_venues), 1)
only_venue = all_venues[0]
self.assertEqual(only_venue, venue)
# Check attributes
self.assertEqual(only_venue.name, 'Wembley Arena')
# Check string representation
self.assertEqual(only_venue.__str__(), 'Wembley Arena')
class EventTest(TestCase):
def test_create_event(self):
# Create the venue
venue = VenueFactory()
# Create the event
event = EventFactory(venue=venue)
# Check we can find it
all_events = Event.objects.all()
self.assertEqual(len(all_events), 1)
only_event = all_events[0]
self.assertEqual(only_event, event)
# Check attributes
self.assertEqual(only_event.name, 'Queens of the Stone Age')
self.assertEqual(only_event.venue.name, 'Wembley Arena')
# Check string representation
self.assertEqual(only_event.__str__(), 'Queens of the Stone Age - Wembley Arena')
class LookupViewTest(TestCase):
"""
Test Lookup View
"""
def setUp(self):
self.factory = RequestFactory()
def test_get(self):
request = self.factory.get(reverse('gigs:lookup'))
response = LookupView.as_view()(request)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('gigs/lookup.html')
def test_post(self):
# Create venues to return
v1 = VenueFactory(name='Venue1')
v2 = VenueFactory(name='Venue2')
v3 = VenueFactory(name='Venue3')
v4 = VenueFactory(name='Venue4')
v5 = VenueFactory(name='Venue5')
v6 = VenueFactory(name='Venue6')
v7 = VenueFactory(name='Venue7')
v8 = VenueFactory(name='Venue8')
v9 = VenueFactory(name='Venue9')
v10 = VenueFactory(name='Venue10')
# Create events to return
e1 = EventFactory(name='Event1', venue=v1)
e2 = EventFactory(name='Event2', venue=v2)
e3 = EventFactory(name='Event3', venue=v3)
e4 = EventFactory(name='Event4', venue=v4)
e5 = EventFactory(name='Event5', venue=v5)
e6 = EventFactory(name='Event6', venue=v6)
e7 = EventFactory(name='Event7', venue=v7)
e8 = EventFactory(name='Event8', venue=v8)
e9 = EventFactory(name='Event9', venue=v9)
e10 = EventFactory(name='Event10', venue=v10)
# Set parameters
lat = 52.3749159
lon = 1.1067473
# Put together request
data = {
'latitude': lat,
'longitude': lon
}
request = self.factory.post(reverse('gigs:lookup'), data)
response = LookupView.as_view()(request)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('gigs/lookupresults.html') | [
"django.test.RequestFactory",
"gigs.models.Event.objects.all",
"random.uniform",
"django.utils.timezone.now",
"gigs.views.LookupView.as_view",
"django.urls.reverse",
"gigs.models.Venue.objects.all"
] | [((1032, 1046), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1044, 1046), False, 'from django.utils import timezone\n'), ((1226, 1245), 'gigs.models.Venue.objects.all', 'Venue.objects.all', ([], {}), '()\n', (1243, 1245), False, 'from gigs.models import Venue, Event\n'), ((1799, 1818), 'gigs.models.Event.objects.all', 'Event.objects.all', ([], {}), '()\n', (1816, 1818), False, 'from gigs.models import Venue, Event\n'), ((2347, 2363), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (2361, 2363), False, 'from django.test import RequestFactory\n'), ((439, 468), 'random.uniform', 'random.uniform', (['(-180.0)', '(180.0)'], {}), '(-180.0, 180.0)\n', (453, 468), False, 'import random\n'), ((491, 518), 'random.uniform', 'random.uniform', (['(-90.0)', '(90.0)'], {}), '(-90.0, 90.0)\n', (505, 518), False, 'import random\n'), ((2424, 2446), 'django.urls.reverse', 'reverse', (['"""gigs:lookup"""'], {}), "('gigs:lookup')\n", (2431, 2446), False, 'from django.urls import reverse\n'), ((2467, 2487), 'gigs.views.LookupView.as_view', 'LookupView.as_view', ([], {}), '()\n', (2485, 2487), False, 'from gigs.views import LookupView\n'), ((3849, 3871), 'django.urls.reverse', 'reverse', (['"""gigs:lookup"""'], {}), "('gigs:lookup')\n", (3856, 3871), False, 'from django.urls import reverse\n'), ((3898, 3918), 'gigs.views.LookupView.as_view', 'LookupView.as_view', ([], {}), '()\n', (3916, 3918), False, 'from gigs.views import LookupView\n')] |
from tkinter import Frame,Label,Button,Checkbutton,Scale,StringVar,IntVar,Entry,Tk
import serial
import time
import threading
import pandas as pd
import mysql.connector
class MainFrame(Frame):
cad = str()
def __init__(self, master=None):
super().__init__(master, width=420, height=270)
self.master = master
self.master.protocol('WM_DELETE_WINDOW',self.askQuit)
self.pack()
self.hilo1 = threading.Thread(target=self.getSensorValues,daemon=True)
self.arduino = serial.Serial("COM3",9600,timeout=1.0)
time.sleep(1)
self.value_temp_1 = IntVar()
self.value_temp=StringVar()
self.nombreA = StringVar()
self.apelli=StringVar()
self.age=IntVar()
self.dato=IntVar()
self.create_widgets()
self.isRun=True
self.hilo1.start()
self.enviar()
self.cad= str()
        self.cnn = mysql.connector.connect(host="localhost", user="root", passwd="", database="historial")  # connect to MySQL
print(self.cnn)
def Enviar_db(self):
        cur = self.cnn.cursor()
        # parameterized query avoids SQL injection and quoting issues
        sql = "INSERT INTO historialmedico (Nombre,Apellido,Edad,Temperatura) VALUES (%s,%s,%s,%s)"
        cur.execute(sql, (self.nombreA.get(), self.apelli.get(), self.age.get(), self.value_temp_1))
self.cnn.commit()
time.sleep(1)
cur.close()
def askQuit(self):
self.isRun=False
self.arduino.close()
self.hilo1.join(0.1)
self.master.quit()
self.master.destroy()
print("*** finalizando...")
def getSensorValues(self):
while self.isRun:
cad =self.arduino.readline().decode('ascii').strip()
self.value_temp.set(cad)
self.value_temp_1=float(cad)
def enviar(self):
        x = self.cad
        print(x)
        datos = list()  # placeholder, currently unused
def create_widgets(self):
self.labelBPM= Label(self,text = "Nombre: ", bg= "#5CFE05",fg="black", font="Helvetica 13 bold",width=9 ,justify="center")
self.labelBPM.pack()
self.labelBPM.grid(row=0,column=0, padx=5,ipady=8, pady=10)
self.label1= Entry(self, textvariable=self.nombreA, bg= "red",fg="black", font="Helvetica 13 bold",width=15 ,justify="center")
self.label1.grid(row=0,column=1, padx=5,ipady=8, pady=10)
self.labelapellido= Label(self,text = "Apellido: ", bg= "#5CFE05",fg="black", font="Helvetica 13 bold",width=9 ,justify="center")
self.labelapellido.grid(row=1,column=0, padx=5,ipady=8, pady=10)
self.label2= Entry(self,textvariable=self.apelli,bg= "red",fg="black", font="Helvetica 13 bold",width=15 ,justify="center")
self.label2.grid(row=1,column=1, padx=5,ipady=8, pady=10)
self.labeledad= Label(self,text = "Edad: ", bg= "#5CFE05",fg="black", font="Helvetica 13 bold",width=9 ,justify="center")
self.labeledad.grid(row=2,column=0, padx=5,ipady=8, pady=10)
self.label3= Entry(self,textvariable=self.age, bg= "red",fg="black", font="Helvetica 13 bold",width=15 ,justify="center")
self.label3.grid(row=2,column=1, padx=5,ipady=8, pady=10)
self.Limpiar= Button(self,command= self.Enviar_db, text= "Enviar historial ",bg="blue",fg="white", font="Helvetica 14 bold",width=20,justify="center")
self.Limpiar.pack
self.Limpiar.grid(row=3,column=0, padx=5,pady=15,columnspan=2)
self.labelT= Label(self,textvariable = self.value_temp, bg= "yellow",fg="black", font="Helvetica 13 bold",width=9 ,justify="center")
self.labelT.grid(row=0,column=2, padx=5,ipady=8, pady=10)
self.Limpiar1= Button(self,command= self.askQuit, text= "Salir ",bg="red",fg="white", font="Helvetica 14 bold",width=7,justify="center")
self.Limpiar1.pack
self.Limpiar1.grid(row=3,column=3, padx=5,pady=15,columnspan=2)
def main():
root = Tk()
    root.wm_title("Monitoreo del signo vital de la temperatura")
app = MainFrame(root)
app.mainloop()
if __name__=="__main__":
main()
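# Note: running this script assumes a serial device on COM3 that emits one
# numeric temperature reading per line, and a local MySQL database named
# `historial` with a `historialmedico` table (Nombre, Apellido, Edad, Temperatura).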
| [
"tkinter.IntVar",
"tkinter.Entry",
"time.sleep",
"tkinter.Button",
"tkinter.StringVar",
"tkinter.Tk",
"serial.Serial",
"tkinter.Label",
"threading.Thread"
] | [((4143, 4147), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (4145, 4147), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((485, 543), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.getSensorValues', 'daemon': '(True)'}), '(target=self.getSensorValues, daemon=True)\n', (501, 543), False, 'import threading\n'), ((567, 607), 'serial.Serial', 'serial.Serial', (['"""COM3"""', '(9600)'], {'timeout': '(1.0)'}), "('COM3', 9600, timeout=1.0)\n", (580, 607), False, 'import serial\n'), ((615, 628), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (625, 628), False, 'import time\n'), ((658, 666), 'tkinter.IntVar', 'IntVar', ([], {}), '()\n', (664, 666), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((692, 703), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (701, 703), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((728, 739), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (737, 739), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((761, 772), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (770, 772), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((791, 799), 'tkinter.IntVar', 'IntVar', ([], {}), '()\n', (797, 799), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((819, 827), 'tkinter.IntVar', 'IntVar', ([], {}), '()\n', (825, 827), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((1431, 1444), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1441, 1444), False, 'import time\n'), ((2057, 2169), 'tkinter.Label', 'Label', (['self'], {'text': '"""Nombre: """', 'bg': '"""#5CFE05"""', 'fg': '"""black"""', 'font': '"""Helvetica 13 bold"""', 'width': '(9)', 'justify': '"""center"""'}), "(self, text='Nombre: ', bg='#5CFE05', fg='black', font=\n 'Helvetica 13 bold', width=9, justify='center')\n", (2062, 2169), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((2296, 2415), 'tkinter.Entry', 'Entry', (['self'], {'textvariable': 'self.nombreA', 'bg': '"""red"""', 'fg': '"""black"""', 'font': '"""Helvetica 13 bold"""', 'width': '(15)', 'justify': '"""center"""'}), "(self, textvariable=self.nombreA, bg='red', fg='black', font=\n 'Helvetica 13 bold', width=15, justify='center')\n", (2301, 2415), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((2526, 2640), 'tkinter.Label', 'Label', (['self'], {'text': '"""Apellido: """', 'bg': '"""#5CFE05"""', 'fg': '"""black"""', 'font': '"""Helvetica 13 bold"""', 'width': '(9)', 'justify': '"""center"""'}), "(self, text='Apellido: ', bg='#5CFE05', fg='black', font=\n 'Helvetica 13 bold', width=9, justify='center')\n", (2531, 2640), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((2742, 2860), 'tkinter.Entry', 'Entry', (['self'], {'textvariable': 'self.apelli', 'bg': '"""red"""', 'fg': '"""black"""', 'font': '"""Helvetica 13 bold"""', 'width': '(15)', 'justify': '"""center"""'}), "(self, textvariable=self.apelli, bg='red', fg='black', font=\n 'Helvetica 13 bold', width=15, justify='center')\n", (2747, 2860), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, 
IntVar, Entry, Tk\n'), ((2963, 3073), 'tkinter.Label', 'Label', (['self'], {'text': '"""Edad: """', 'bg': '"""#5CFE05"""', 'fg': '"""black"""', 'font': '"""Helvetica 13 bold"""', 'width': '(9)', 'justify': '"""center"""'}), "(self, text='Edad: ', bg='#5CFE05', fg='black', font=\n 'Helvetica 13 bold', width=9, justify='center')\n", (2968, 3073), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((3171, 3286), 'tkinter.Entry', 'Entry', (['self'], {'textvariable': 'self.age', 'bg': '"""red"""', 'fg': '"""black"""', 'font': '"""Helvetica 13 bold"""', 'width': '(15)', 'justify': '"""center"""'}), "(self, textvariable=self.age, bg='red', fg='black', font=\n 'Helvetica 13 bold', width=15, justify='center')\n", (3176, 3286), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((3388, 3531), 'tkinter.Button', 'Button', (['self'], {'command': 'self.Enviar_db', 'text': '"""Enviar historial """', 'bg': '"""blue"""', 'fg': '"""white"""', 'font': '"""Helvetica 14 bold"""', 'width': '(20)', 'justify': '"""center"""'}), "(self, command=self.Enviar_db, text='Enviar historial ', bg='blue',\n fg='white', font='Helvetica 14 bold', width=20, justify='center')\n", (3394, 3531), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((3656, 3780), 'tkinter.Label', 'Label', (['self'], {'textvariable': 'self.value_temp', 'bg': '"""yellow"""', 'fg': '"""black"""', 'font': '"""Helvetica 13 bold"""', 'width': '(9)', 'justify': '"""center"""'}), "(self, textvariable=self.value_temp, bg='yellow', fg='black', font=\n 'Helvetica 13 bold', width=9, justify='center')\n", (3661, 3780), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n'), ((3877, 4005), 'tkinter.Button', 'Button', (['self'], {'command': 'self.askQuit', 'text': '"""Salir """', 'bg': '"""red"""', 'fg': '"""white"""', 'font': '"""Helvetica 14 bold"""', 'width': '(7)', 'justify': '"""center"""'}), "(self, command=self.askQuit, text='Salir ', bg='red', fg='white',\n font='Helvetica 14 bold', width=7, justify='center')\n", (3883, 4005), False, 'from tkinter import Frame, Label, Button, Checkbutton, Scale, StringVar, IntVar, Entry, Tk\n')] |
"""
Multivariate from independent marginals and copula
==================================================
"""
#%% md
#
# - How to define a bivariate distribution from independent marginals and change its structure based on a copula supported by UQpy
# - How to plot the pdf of the distribution
# - How to modify the parameters of the distribution
#%%
#%% md
#
# Import the necessary modules.
#%%
import numpy as np
import matplotlib.pyplot as plt
#%% md
#
# Example of a multivariate distribution from independent marginals
# ------------------------------------------------------------------------
#%%
from UQpy.distributions import Normal, JointIndependent
from UQpy.distributions import Gumbel, JointCopula
#%% md
#
# Define a Copula
# ---------------
# The definition of a bivariate distribution with a copula is similar to defining a multivariate distribution from
# independent marginals. In both cases a list of marginals needs to be defined; in the case of a copula, a copula
# object describing the dependence structure is supplied as well.
#%%
marginals = [Normal(loc=0., scale=1), Normal(loc=0., scale=1)]
copula = Gumbel(theta=3.)
# dist_1 is a multivariate normal with independent marginals
dist_1 = JointIndependent(marginals)
print('Does the distribution with independent marginals have an rvs method?')
print(hasattr(dist_1, 'rvs'))
# dist_2 exhibits dependence between the two dimensions, defined using a gumbel copula
dist_2 = JointCopula(marginals=marginals, copula=copula)
print('Does the distribution with copula have an rvs method?')
print(hasattr(dist_2, 'rvs'))
#%% md
#
# Plot the pdf of the distribution before and after the copula
# -------------------------------------------------------------
#
#%%
fig, ax = plt.subplots(ncols=2, figsize=(10, 4))
x = np.arange(-3, 3, 0.1)
y = np.arange(-3, 3, 0.1)
X, Y = np.meshgrid(x, y)
Z = dist_1.pdf(x=np.concatenate([X.reshape((-1, 1)), Y.reshape((-1, 1))], axis=1))
CS = ax[0].contour(X, Y, Z.reshape(X.shape))
ax[0].clabel(CS, inline=1, fontsize=10)
ax[0].set_title('Contour plot of pdf - independent normals')
x = np.arange(-3, 3, 0.1)
y = np.arange(-3, 3, 0.1)
X, Y = np.meshgrid(x, y)
Z = dist_2.pdf(x=np.concatenate([X.reshape((-1, 1)), Y.reshape((-1, 1))], axis=1))
CS = ax[1].contour(X, Y, Z.reshape(X.shape))
ax[1].clabel(CS, inline=1, fontsize=10)
ax[1].set_title('Contour plot of pdf - normals with Gumbel copula')
plt.show()
#%% md
#
# Modify the parameters of the multivariate copula.
# -------------------------------------------------
#
# Use the update_parameters method.
#%%
print(dist_2.copula.parameters)
dist_2.update_parameters(theta_c=2.)
print(dist_2.copula.parameters) | [
"UQpy.distributions.JointCopula",
"UQpy.distributions.JointIndependent",
"UQpy.distributions.Normal",
"UQpy.distributions.Gumbel",
"numpy.meshgrid",
"matplotlib.pyplot.subplots",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((1055, 1072), 'UQpy.distributions.Gumbel', 'Gumbel', ([], {'theta': '(3.0)'}), '(theta=3.0)\n', (1061, 1072), False, 'from UQpy.distributions import Gumbel, JointCopula\n'), ((1143, 1170), 'UQpy.distributions.JointIndependent', 'JointIndependent', (['marginals'], {}), '(marginals)\n', (1159, 1170), False, 'from UQpy.distributions import Normal, JointIndependent\n'), ((1376, 1423), 'UQpy.distributions.JointCopula', 'JointCopula', ([], {'marginals': 'marginals', 'copula': 'copula'}), '(marginals=marginals, copula=copula)\n', (1387, 1423), False, 'from UQpy.distributions import Gumbel, JointCopula\n'), ((1672, 1710), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'ncols': '(2)', 'figsize': '(10, 4)'}), '(ncols=2, figsize=(10, 4))\n', (1684, 1710), True, 'import matplotlib.pyplot as plt\n'), ((1716, 1737), 'numpy.arange', 'np.arange', (['(-3)', '(3)', '(0.1)'], {}), '(-3, 3, 0.1)\n', (1725, 1737), True, 'import numpy as np\n'), ((1742, 1763), 'numpy.arange', 'np.arange', (['(-3)', '(3)', '(0.1)'], {}), '(-3, 3, 0.1)\n', (1751, 1763), True, 'import numpy as np\n'), ((1771, 1788), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (1782, 1788), True, 'import numpy as np\n'), ((2023, 2044), 'numpy.arange', 'np.arange', (['(-3)', '(3)', '(0.1)'], {}), '(-3, 3, 0.1)\n', (2032, 2044), True, 'import numpy as np\n'), ((2049, 2070), 'numpy.arange', 'np.arange', (['(-3)', '(3)', '(0.1)'], {}), '(-3, 3, 0.1)\n', (2058, 2070), True, 'import numpy as np\n'), ((2078, 2095), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (2089, 2095), True, 'import numpy as np\n'), ((2332, 2342), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2340, 2342), True, 'import matplotlib.pyplot as plt\n'), ((996, 1020), 'UQpy.distributions.Normal', 'Normal', ([], {'loc': '(0.0)', 'scale': '(1)'}), '(loc=0.0, scale=1)\n', (1002, 1020), False, 'from UQpy.distributions import Normal, JointIndependent\n'), ((1021, 1045), 'UQpy.distributions.Normal', 'Normal', ([], {'loc': '(0.0)', 'scale': '(1)'}), '(loc=0.0, scale=1)\n', (1027, 1045), False, 'from UQpy.distributions import Normal, JointIndependent\n')] |
import logging
from dd.bdd import BDD as _BDD
from dd.bdd import preimage
from dd import autoref
from dd import bdd as _bdd
import nose.tools as nt
import networkx as nx
import networkx.algorithms.isomorphism as iso
class BDD(_BDD):
"""Disables refcount check upon shutdown.
This script tests the low-level manager, where
reference counting is not automated. For simplicity,
references are not cleared at the end of tests here.
Automated reference counting is in `dd.autoref`.
"""
def __del__(self):
pass
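# Convention used throughout these tests: nodes are signed integers,
# TRUE is 1, FALSE is -1, and -u denotes the complement (negation) of u.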
def test_add_var():
b = BDD()
#
# automated level selection
# first var
j = b.add_var('x')
assert len(b.vars) == 1, b.vars
assert 'x' in b.vars, b.vars
assert b.vars['x'] == 0, b.vars
assert j == 0, j
# second var
j = b.add_var('y')
assert len(b.vars) == 2, b.vars
assert 'y' in b.vars, b.vars
assert b.vars['y'] == 1, b.vars
assert j == 1, j
# third var
j = b.add_var('z')
assert len(b.vars) == 3, b.vars
assert 'z' in b.vars, b.vars
assert b.vars['z'] == 2, b.vars
assert j == 2, j
#
# explicit level selection
b = BDD()
j = b.add_var('x', level=35)
assert len(b.vars) == 1, b.vars
assert 'x' in b.vars, b.vars
assert b.vars['x'] == 35, b.vars
assert j == 35, j
j = b.add_var('y', level=5)
assert len(b.vars) == 2, b.vars
assert 'y' in b.vars, b.vars
assert b.vars['y'] == 5, b.vars
assert j == 5, j
# attempt to add var at an existing level
with nt.assert_raises(AssertionError):
b.add_var('z', level=35)
with nt.assert_raises(AssertionError):
b.add_var('z', level=5)
#
# mixing automated and
# explicit level selection
b = BDD()
b.add_var('x', level=2)
b.add_var('y')
assert len(b.vars) == 2, b.vars
assert 'x' in b.vars, b.vars
assert 'y' in b.vars, b.vars
assert b.vars['x'] == 2, b.vars
assert b.vars['y'] == 1, b.vars
with nt.assert_raises(AssertionError):
b.add_var('z')
    b.add_var('z', level=0)
def test_var():
b = BDD()
with nt.assert_raises(AssertionError):
b.var('x')
j = b.add_var('x')
u = b.var('x')
assert u > 0, u
level, low, high = b.succ(u)
assert level == j, (level, j)
assert low == b.false, low
assert high == b.true, high
def test_assert_consistent():
g = two_vars_xy()
assert g.assert_consistent()
g = x_or_y()
assert g.assert_consistent()
g._succ[2] = (5, 1, 2)
with nt.assert_raises(AssertionError):
g.assert_consistent()
g = x_or_y()
g.roots.add(2)
g._succ[4] = (0, 10, 1)
with nt.assert_raises(AssertionError):
g.assert_consistent()
g = x_or_y()
g.roots.add(2)
g._succ[1] = (2, None, 1)
with nt.assert_raises(AssertionError):
g.assert_consistent()
g = x_and_y()
assert g.assert_consistent()
def test_level_to_variable():
ordering = {'x': 0, 'y': 1}
g = BDD(ordering)
assert g.var_at_level(0) == 'x'
assert g.var_at_level(1) == 'y'
with nt.assert_raises(AssertionError):
g.var_at_level(10)
def test_descendants():
ordering = dict(x=0, y=1)
b = BDD(ordering)
u = b.add_expr('x /\ y')
v = b.add_expr('x \/ y')
roots = [u, v]
nodes = b.descendants(roots)
nodes_u = b.descendants([u])
nodes_v = b.descendants([v])
assert u in nodes_u, nodes_u
assert v in nodes_v, nodes_v
assert u in nodes, nodes
assert v in nodes, nodes
assert 1 in nodes_u, nodes_u
assert 1 in nodes_v, nodes_v
assert 1 in nodes, nodes
assert len(nodes_u) == 3, nodes_u
assert len(nodes_v) == 3, nodes_v
assert nodes_u != nodes_v, (nodes_u, nodes_v)
assert len(nodes) == 4, nodes
assert nodes == nodes_u.union(nodes_v), (
nodes, b._succ)
# no roots
roots = []
nodes = b.descendants(roots)
assert len(nodes) == 0, nodes
def test_is_essential():
g = two_vars_xy()
assert g.is_essential(2, 'x')
assert not g.is_essential(2, 'y')
assert g.is_essential(3, 'y')
assert not g.is_essential(3, 'x')
g = x_and_y()
assert g.is_essential(2, 'x')
assert g.is_essential(3, 'y')
assert g.is_essential(4, 'x')
assert g.is_essential(4, 'y')
assert not g.is_essential(3, 'x')
assert not g.is_essential(-1, 'x')
assert not g.is_essential(-1, 'y')
assert not g.is_essential(1, 'x')
assert not g.is_essential(1, 'y')
# variable not in the ordering
assert not g.is_essential(2, 'z')
def test_support():
g = two_vars_xy()
assert g.support(2) == {'x'}
assert g.support(3) == {'y'}
g = x_and_y()
assert g.support(4) == {'x', 'y'}
assert g.support(3) == {'y'}
g = x_or_y()
assert g.support(4) == {'x', 'y'}
assert g.support(3) == {'y'}
def test_count():
g = x_and_y()
assert g.count(4) == 1
g = x_or_y()
r = g.count(4)
assert r == 3, r
r = g.count(4, nvars=2)
assert r == 3, r
r = g.count(-4)
assert r == 1, r
r = g.count(-4, nvars=2)
assert r == 1, r
r = g.count(4, 3)
assert r == 6, r
r = g.count(-4, 3)
assert r == 2, r
with nt.assert_raises(Exception):
g.count()
r = g.count(4)
assert r == 3, r
g = _bdd.BDD()
g.add_var('x')
g.add_var('y')
u = g.add_expr('x /\ y ')
r = g.count(u)
assert r == 1, r
def test_pick_iter():
# x /\ y
g = x_and_y()
u = 4
bits = {'x', 'y'}
s = [{'x': 1, 'y': 1}]
compare_iter_to_list_of_sets(u, g, s, bits)
# care_bits == support (default)
bits = None
compare_iter_to_list_of_sets(u, g, s, bits)
#
# x \/ y
g = x_or_y()
u = 4
# support
bits = None
s = [{'x': 1, 'y': 0}, {'x': 1, 'y': 1},
{'x': 0, 'y': 1}]
compare_iter_to_list_of_sets(u, g, s, bits)
# only what appears along traversal
bits = set()
s = [{'x': 1}, {'x': 0, 'y': 1}]
compare_iter_to_list_of_sets(u, g, s, bits)
# bits < support
bits = {'x'}
s = [{'x': 1}, {'x': 0, 'y': 1}]
compare_iter_to_list_of_sets(u, g, s, bits)
bits = {'y'}
s = [{'x': 1, 'y': 0},{'x': 1, 'y': 1},
{'x': 0, 'y': 1}]
compare_iter_to_list_of_sets(u, g, s, bits)
#
# x /\ ~ y
g = x_and_not_y()
u = -2
bits = {'x', 'y'}
s = [{'x': 1, 'y': 0}]
compare_iter_to_list_of_sets(u, g, s, bits)
# gaps in order
order = {'x': 0, 'y': 1, 'z': 2}
bdd = BDD(order)
u = bdd.add_expr('x /\ z')
(m,) = bdd.pick_iter(u)
assert m == {'x': 1, 'z': 1}, m
def compare_iter_to_list_of_sets(u, g, s, care_bits):
s = list(s)
for d in g.pick_iter(u, care_bits):
assert d in s, d
s.remove(d)
assert not s, s
def test_enumerate_minterms():
# non-empty cube
cube = dict(x=False)
bits = ['x', 'y', 'z']
r = _bdd._enumerate_minterms(cube, bits)
p = set_from_generator_of_dict(r)
q = set()
for y in (False, True):
for z in (False, True):
m = (('x', False), ('y', y), ('z', z))
q.add(m)
assert p == q, (p, q)
# empty cube
cube = dict()
bits = ['x', 'y', 'z']
r = _bdd._enumerate_minterms(cube, bits)
p = set_from_generator_of_dict(r)
q = set()
for x in (False, True):
for y in (False, True):
for z in (False, True):
m = (('x', x), ('y', y), ('z', z))
q.add(m)
assert p == q, (p, q)
# fewer bits than cube
cube = dict(x=False, y=True)
bits = set()
r = _bdd._enumerate_minterms(cube, bits)
p = set_from_generator_of_dict(r)
q = {(('x', False), ('y', True))}
assert p == q, (p, q)
def set_from_generator_of_dict(gen):
r = list(gen)
p = {tuple(sorted(m.items(), key=lambda x: x[0]))
for m in r}
return p
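# e.g. _bdd._enumerate_minterms({'x': False}, ['x', 'y']) yields the two
# assignments {'x': False, 'y': False} and {'x': False, 'y': True}.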
def test_isomorphism():
ordering = {'x': 0}
g = BDD(ordering)
g.roots.update([2, 3])
g._succ[2] = (0, -1, 1)
g._succ[3] = (0, -1, 1)
h = g.reduction()
assert set(h) == {1, 2}, set(h)
assert 0 not in h
assert h._succ[1] == (1, None, None)
assert h._succ[2] == (0, -1, 1)
assert h.roots == {2}
def test_elimination():
ordering = {'x': 0, 'y': 1}
g = BDD(ordering)
g.roots.add(2)
# high == low, so node 2 is redundant
g._succ[2] = (0, 3, 3)
g._succ[3] = (1, -1, 1)
h = g.reduction()
assert set(h) == {1, 2}
def test_reduce_combined():
"""Fig.5 in 1986 Bryant TOC"""
ordering = {'x': 0, 'y': 1, 'z': 2}
g = BDD(ordering)
g.roots.add(2)
g._succ[2] = (0, 3, 4)
g._succ[3] = (1, -1, 5)
g._succ[4] = (1, 5, 6)
g._succ[5] = (2, -1, 1)
g._succ[6] = (2, -1, 1)
h = g.reduction()
assert 1 in h
assert ordering == h.vars
r = nx.MultiDiGraph()
r.add_node(1, level=3)
r.add_node(2, level=0)
r.add_node(3, level=1)
r.add_node(4, level=2)
r.add_edge(2, 3, value=False, complement=False)
r.add_edge(2, 4, value=True, complement=False)
r.add_edge(3, 4, value=True, complement=False)
r.add_edge(3, 1, value=False, complement=True)
r.add_edge(4, 1, value=False, complement=True)
r.add_edge(4, 1, value=True, complement=False)
(u, ) = h.roots
compare(u, h, r)
# r.write('r.pdf')
# h.write('h.pdf')
def test_reduction_complemented_edges():
bdd = BDD()
bdd.add_var('x', level=0)
bdd.add_var('y', level=1)
a, b = map(bdd.level_of_var, ['x', 'y'])
assert a < b, (a, b)
# complemented edge from internal node to
# non-terminal node
expr = '~ x /\ y'
_test_reduction_complemented_edges(expr, bdd)
# complemented edge from external reference to
# non-terminal node
expr = 'x /\ ~ y'
u = bdd.add_expr(expr)
assert u < 0, u
_test_reduction_complemented_edges(expr, bdd)
def _test_reduction_complemented_edges(expr, bdd):
u = bdd.add_expr(expr)
bdd.roots.add(u)
bdd_r = bdd.reduction()
v, = bdd_r.roots
v_ = bdd_r.add_expr(expr)
assert v == v_, (v, v_)
bdd_r.assert_consistent()
bdd.roots.remove(u)
def test_find_or_add():
ordering = {'x': 0, 'y': 1}
g = BDD(ordering)
# init
n = len(g)
m = g._min_free
assert n == 1, n
assert m == 2, m
# elimination rule
i = 0
v = -1
w = 1
n = len(g)
u = g.find_or_add(i, v, v)
n_ = len(g)
assert n == n_, (n, n_)
assert u == v, (u, v)
assert len(g._pred) == 1, g._pred
t = (2, None, None)
assert t in g._pred, g._pred
assert g._pred[t] == 1, g._pred
# unchanged min_free
v = 1
m = g._min_free
g.find_or_add(i, v, v)
m_ = g._min_free
assert m_ == m, (m_, m)
# add new node
g = BDD(ordering)
v = -1
w = 1
n = len(g)
m = g._min_free
assert n == 1, n
u = g.find_or_add(i, v, w)
n_ = len(g)
m_ = g._min_free
assert u != v, (u, v)
assert n_ == n + 1, (n, n_)
assert m_ == m + 1, (m, m_)
assert g._succ[u] == (i, -1, 1)
assert (i, v, w) in g._pred
assert abs(u) in g._ref
assert g._ref[abs(u)] == 0
# terminal node `v`: 2 refs + 1 ref by manager
assert g._ref[abs(v)] == 3, g._ref
# independent increase of reference counters
v = u
w = w
refv = g._ref[abs(v)]
refw = g._ref[w]
u = g.find_or_add(i, v, w)
refv_ = g._ref[abs(v)]
refw_ = g._ref[w]
assert refv + 1 == refv_, (refv, refv_)
assert refw + 1 == refw_, (refw, refw_)
# add existing
n = len(g)
m = g._min_free
refv = g._ref[abs(v)]
refw = g._ref[w]
r = g.find_or_add(i, v, w)
n_ = len(g)
m_ = g._min_free
refv_ = g._ref[abs(v)]
refw_ = g._ref[w]
assert n == n_, (n, n_)
assert m == m_, (m, m_)
assert u == r, u
assert refv == refv_, (refv, refv_)
assert refw == refw_, (refw, refw_)
# only non-terminals can be added
with nt.assert_raises(AssertionError):
g.find_or_add(2, -1, 1)
# low and high must already exist
with nt.assert_raises(AssertionError):
g.find_or_add(0, 3, 4)
# canonicity of complemented edges
# v < 0, w > 0
g = BDD(ordering)
i = 0
v = -1
w = 1
u = g.find_or_add(i, v, w)
assert u > 0, u
# v > 0, w < 0
v = 1
w = -1
u = g.find_or_add(i, v, w)
assert u < 0, u
assert abs(u) in g._succ, u
_, v, w = g._succ[abs(u)]
assert v < 0, v
assert w > 0, w
# v < 0, w < 0
v = -1
w = -2
u = g.find_or_add(i, v, w)
assert u < 0, u
_, v, w = g._succ[abs(u)]
assert v > 0, v
assert w > 0, w
def test_next_free_int():
g = BDD()
# contiguous
g._succ = {1, 2, 3}
start = 1
n = g._next_free_int(start)
_assert_smaller_are_nodes(start, g)
assert n == 4, n
start = 3
n = g._next_free_int(start)
_assert_smaller_are_nodes(start, g)
assert n == 4, n
# with blanks
g._succ = {1, 3}
start = 1
n = g._next_free_int(start)
_assert_smaller_are_nodes(start, g)
assert n == 2, n
n = g._next_free_int(start=3)
assert n == 4, n
# full
g._succ = {1, 2, 3}
g.max_nodes = 3
with nt.assert_raises(Exception):
g._next_free_int(start=1)
def _assert_smaller_are_nodes(start, bdd):
for i in range(1, start + 1):
assert i in bdd, i
def test_collect_garbage():
# all nodes are garbage
g = BDD({'x': 0, 'y': 1})
u = g.add_expr('x /\ y')
n = len(g)
assert n == 4, n
uref = g._ref[abs(u)]
assert uref == 0, uref
_, v, w = g._succ[abs(u)]
vref = g._ref[abs(v)]
wref = g._ref[w]
# terminal node `v`: 6 refs + 1 ref by manager
assert vref == 6, vref
assert wref == 1, wref
g.collect_garbage()
n = len(g)
assert n == 1, n
assert u not in g, g._succ
assert w not in g, g._succ
# some nodes not garbage
# projection of x is garbage
g = BDD({'x': 0, 'y': 1})
u = g.add_expr('x /\ y')
n = len(g)
assert n == 4, n
g._ref[abs(u)] += 1
uref = g._ref[abs(u)]
assert uref == 1, uref
g.collect_garbage()
n = len(g)
assert n == 3, n
def test_top_cofactor():
ordering = {'x': 0, 'y': 1}
g = BDD(ordering)
x = ordering['x']
y = ordering['y']
u = g.find_or_add(y, -1, 1)
assert g._top_cofactor(u, x) == (u, u)
assert g._top_cofactor(u, y) == (-1, 1)
u = g.find_or_add(x, -1, 1)
assert g._top_cofactor(u, x) == (-1, 1)
assert g._top_cofactor(-u, x) == (1, -1)
def test_ite():
ordering = {'x': 0, 'y': 1}
g = BDD(ordering)
# x
ix = ordering['x']
x = g.find_or_add(ix, -1, 1)
h = ref_var(ix)
compare(x, g, h)
# y
iy = ordering['y']
y = g.find_or_add(iy, -1, 1)
h = ref_var(iy)
compare(y, g, h)
# x and y
u = g.ite(x, y, -1)
h = ref_x_and_y()
compare(u, g, h)
# x or y
u = g.ite(x, 1, y)
h = ref_x_or_y()
compare(u, g, h)
# negation
assert g.ite(x, -1, 1) == -x, g._succ
assert g.ite(-x, -1, 1) == x, g._succ
def test_add_expr():
ordering = {'x': 0, 'y': 1}
g = BDD(ordering)
# x
ix = ordering['x']
u = g.add_expr('x')
h = ref_var(ix)
compare(u, g, h)
# x and y
u = g.add_expr('x /\ y')
h = ref_x_and_y()
compare(u, g, h)
def test_compose():
ordering = {'x': 0, 'y': 1, 'z': 2}
g = BDD(ordering)
# x /\ (x \/ z)
a = g.add_expr('x /\ y')
b = g.add_expr('x \/ z')
c = g.let({'y': b}, a)
d = g.add_expr('x /\ (x \/ z)')
assert c == d, (c, d)
# (y \/ z) /\ x
ordering = {'x': 0, 'y': 1, 'z': 2, 'w': 3}
g = BDD(ordering)
a = g.add_expr('(x /\ y) \/ z')
b = g.add_expr('(y \/ z) /\ x')
c = g.let({'z': b}, a)
assert c == b, (c, b)
# long expr
ordering = {'x': 0, 'y': 1, 'z': 2, 'w': 3}
g = BDD(ordering)
a = g.add_expr('(x /\ y) \/ (~ z \/ (w /\ y /\ x))')
b = g.add_expr('(y \/ ~ z) /\ x')
c = g.let({'y': b}, a)
d = g.add_expr(
'(x /\ ((y \/ ~ z) /\ x)) \/ '
' (~ z \/ (w /\ ((y \/ ~ z) /\ x) /\ x))')
assert c == d, (c, d)
# complemented edges
ordering = {'x': 0, 'y': 1}
g = BDD(ordering)
f = g.add_expr('x <=> y')
var = 'y'
new_level = 0
var_node = g.find_or_add(new_level, -1, 1)
u = g.let({var: var_node}, f)
assert u == 1, g.to_expr(u)
def test_cofactor():
ordering = {'x': 0, 'y': 1, 'z': 2}
g = BDD(ordering)
# u not in g
with nt.assert_raises(AssertionError):
g.let({'x': False, 'y': True, 'z': False}, 5)
# x /\ y
e = g.add_expr('x /\ y')
x = g.add_expr('x')
assert g.let({'x': False}, x) == -1
assert g.let({'x': True}, x) == 1
assert g.let({'x': False}, -x) == 1
assert g.let({'x': True}, -x) == -1
y = g.add_expr('y')
assert g.let({'x': True}, e) == y
assert g.let({'x': False}, e) == -1
assert g.let({'y': True}, e) == x
assert g.let({'y': False}, e) == -1
assert g.let({'x': False}, -e) == 1
assert g.let({'x': True}, -e) == -y
assert g.let({'y': False}, -e) == 1
assert g.let({'y': True}, -e) == -x
def test_swap():
# x, y
g = BDD({'x': 0, 'y': 1})
x = g.add_expr('x')
y = g.add_expr('y')
g.incref(x)
g.incref(y)
n = len(g)
assert n == 3, n
nold, n = g.swap('x', 'y')
assert n == 3, n
assert nold == n, nold
assert g.vars == {'y': 0, 'x': 1}, g.vars
assert g.assert_consistent()
# functions remain invariant
x_ = g.add_expr('x')
y_ = g.add_expr('y')
assert x == x_, (x, x_, g._succ)
assert y == y_, (y, y_, g._succ)
# external reference counts remain unchanged
assert g._ref[abs(x)] == 1
assert g._ref[abs(y)] == 1
# x /\ y
g = BDD({'x': 0, 'y': 1})
u = g.add_expr('x /\ y')
g.incref(u)
nold, n = g.swap('x', 'y')
assert nold == n, (nold, n)
assert g.vars == {'y': 0, 'x': 1}, g.vars
u_ = g.add_expr('x /\ y')
assert u == u_, (u, u_)
assert g.assert_consistent()
# reference counts unchanged
assert g._ref[abs(u)] == 1
# x /\ ~ y
# tests handling of complement edges
e = 'x /\ ~ y'
g = x_and_not_y()
u = g.add_expr(e)
g.incref(u)
g.collect_garbage()
n = len(g)
assert n == 3, n
nold, n = g.swap('x', 'y')
assert n == 3, n
assert nold == n, nold
assert g.vars == {'x': 1, 'y': 0}
assert g.assert_consistent()
u_ = g.add_expr(e)
# function u must have remained unaffected
assert u_ == u, (u, u_, g._succ)
# invert swap of:
# x /\ ~ y
nold, n = g.swap('x', 'y')
assert n == 3, n
assert nold == n, nold
assert g.vars == {'x': 0, 'y': 1}
assert g.assert_consistent()
u_ = g.add_expr(e)
assert u_ == u, (u, u_, g._succ)
# Figs. 6.24, 6.25 Baier 2008
g = BDD({'z1': 0, 'y1': 1, 'z2': 2,
'y2': 3, 'z3': 4, 'y3': 5})
u = g.add_expr('(z1 /\ y1) \/ (z2 /\ y2) \/ (z3 /\ y3)')
g.incref(u)
n = len(g)
assert n == 16, n
g.collect_garbage()
n = len(g)
assert n == 7, n
# sift to inefficient order
g.swap('y1', 'z2') # z1, z2, y1, y2, z3, y3
g.swap('y2', 'z3') # z1, z2, y1, z3, y2, y3
g.swap('y1', 'z3') # z1, z2, z3, y1, y2, y3
n = len(g)
assert n == 15, n
assert g.assert_consistent()
new_ordering = {
'z1': 0, 'z2': 1, 'z3': 2,
'y1': 3, 'y2': 4, 'y3': 5}
assert g.vars == new_ordering, g.vars
u_ = g.add_expr('(z1 /\ y1) \/ (z2 /\ y2) \/ (z3 /\ y3)')
assert u_ == u, (u, u_, g._succ)
# g.dump('g.pdf')
def test_sifting():
# Figs. 6.24, 6.25 Baier 2008
g = BDD({'z1': 0, 'z2': 1, 'z3': 2,
'y1': 3, 'y2': 4, 'y3': 5})
u = g.add_expr('(z1 /\ y1) \/ (z2 /\ y2) \/ (z3 /\ y3)')
g.incref(u)
g.collect_garbage()
n = len(g)
assert n == 15, n
_bdd.reorder(g)
n_ = len(g)
assert n > n_, (n, n_)
u_ = g.add_expr('(z1 /\ y1) \/ (z2 /\ y2) \/ (z3 /\ y3)')
g.incref(u)
g.collect_garbage()
g.assert_consistent()
assert u == u_, (u, u_)
def test_request_reordering():
ctx = Dummy()
# reordering off
n = ctx._last_len
assert n is None, n
_bdd._request_reordering(ctx)
# reordering on
ctx._last_len = 1
ctx.length = 3 # >= 2 = 2 * _last_len
# large growth
with nt.assert_raises(_bdd._NeedsReordering):
_bdd._request_reordering(ctx)
ctx._last_len = 2
ctx.length = 3 # < 4 = 2 * _last_len
# small growth
_bdd._request_reordering(ctx)
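# Heuristic illustrated above: reordering is only requested once the manager
# has at least doubled in size since the last reordering (len >= 2 * _last_len);
# smaller growth is absorbed without raising `_NeedsReordering`.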
def test_reordering_context():
ctx = Dummy()
# top context
ctx.assert_(False)
with _bdd._ReorderingContext(ctx):
ctx.assert_(True)
raise _bdd._NeedsReordering()
ctx.assert_(False)
# nested context
ctx._reordering_context = True
with nt.assert_raises(_bdd._NeedsReordering):
with _bdd._ReorderingContext(ctx):
ctx.assert_(True)
raise _bdd._NeedsReordering()
ctx.assert_(True)
# other exception
ctx._reordering_context = False
with nt.assert_raises(AssertionError):
with _bdd._ReorderingContext(ctx):
ctx.assert_(True)
raise AssertionError()
ctx.assert_(False)
ctx._reordering_context = True
with nt.assert_raises(Exception):
with _bdd._ReorderingContext(ctx):
raise Exception()
ctx.assert_(True)
class Dummy(object):
"""To test state machine for nesting context."""
def __init__(self):
self._reordering_context = False
self._last_len = None
self.length = 1
def __len__(self):
return self.length
def assert_(self, value):
c = self._reordering_context
assert c is value, c
def test_dynamic_reordering():
b = TrackReorderings()
[b.add_var(var) for var in ['x', 'y', 'z', 'a', 'b', 'c', 'e']]
# add expr with reordering off
assert not b.reordering_is_on()
assert b.n_swaps == 0, b.n_swaps
u = b.add_expr('x /\ y /\ z')
assert b.n_swaps == 0, b.n_swaps
b.incref(u)
n = len(b)
assert n == 7, n
# add expr with reordering on
b._last_len = 6
assert b.reordering_is_on()
v = b.add_expr('a /\ b')
assert b.reordering_is_on()
assert b.n_swaps == 0, b.n_swaps
b.incref(v)
n = len(b)
assert n == 10, n
# add an expr that triggers reordering
assert b.reordering_is_on()
w = b.add_expr('z \/ (~ a /\ x /\ ~ y)')
assert b.reordering_is_on()
n_swaps = b.n_swaps
assert n_swaps > 0, n_swaps
b.incref(w)
assert u in b, (w, b._succ)
assert v in b, (v, b._succ)
assert w in b, (w, b._succ)
# add another expr that triggers reordering
old_n_swaps = n_swaps
assert b.reordering_is_on()
r = b.add_expr('(~ z \/ (c /\ b)) /\ e /\ (a /\ (~x \/ y))')
b.add_expr('(e \/ ~ a) /\ x /\ (b \/ ~ y)')
n_swaps = b.n_swaps
assert n_swaps > old_n_swaps, (n_swaps, old_n_swaps)
assert b.reordering_is_on()
class TrackReorderings(BDD):
"""To record invocations of reordering."""
def __init__(self, *arg, **kw):
self.n_swaps = 0
super(TrackReorderings, self).__init__(*arg, **kw)
def swap(self, *arg, **kw):
self.n_swaps += 1
return super(TrackReorderings, self).swap(*arg, **kw)
def reordering_is_on(self):
d = self.configure()
r = d['reordering']
return r is True
def test_undeclare_vars():
bdd = BDD()
bdd.declare('x', 'y', 'z', 'w')
# empty arg `vrs`
u = bdd.add_expr('x /\ y /\ w')
rm_vars = bdd.undeclare_vars()
rm_vars_ = {'z'}
assert rm_vars == rm_vars_, (rm_vars, rm_vars_)
bdd_vars_ = dict(x=0, y=1, w=2)
assert bdd.vars == bdd_vars_, bdd.vars
assert bdd.assert_consistent()
# nonempty `vrs` with all empty levels
bdd = BDD()
bdd.declare('x', 'y', 'z', 'w')
u = bdd.add_expr('y /\ w')
rm_vars = bdd.undeclare_vars('x', 'z')
rm_vars_ = {'x', 'z'}
assert rm_vars == rm_vars_, (rm_vars, rm_vars_)
bdd_vars_ = dict(y=0, w=1)
assert bdd.vars == bdd_vars_, bdd.vars
assert bdd.assert_consistent()
# nonempty `vrs` without all empty levels
bdd = BDD()
bdd.declare('x', 'y', 'z', 'w')
u = bdd.add_expr('y /\ w')
rm_vars = bdd.undeclare_vars('z')
rm_vars_ = {'z'}
assert rm_vars == rm_vars_, (rm_vars, rm_vars_)
bdd_vars_ = dict(x=0, y=1, w=2)
assert bdd.vars == bdd_vars_, bdd.vars
assert bdd.assert_consistent()
# remove only unused variables
bdd = BDD()
bdd.declare('x', 'y', 'z', 'w')
u = bdd.add_expr('y /\ w')
with nt.assert_raises(AssertionError):
bdd.undeclare_vars('z', 'y')
def test_dump_load():
prefix = 'test_dump_load'
fname = prefix + '.p'
dvars = dict(x=0, y=1)
# dump
b = BDD(dvars)
e = 'x /\ ~ y'
u_dumped = b.add_expr(e)
b.dump(fname, [u_dumped])
b.dump(fname) # no roots
# load
b = BDD(dvars)
b.add_expr('x \/ y')
u_new = b.add_expr(e)
umap = b.load(fname)
u_loaded = umap[abs(u_dumped)]
if u_dumped < 0:
u_loaded = -u_loaded
assert u_loaded == u_new, (
u_dumped, u_loaded, u_new, umap)
assert b.assert_consistent()
def test_dump_load_manager():
prefix = 'test_dump_load_manager'
g = BDD({'x': 0, 'y': 1})
e = 'x /\ ~ y'
u = g.add_expr(e)
g.incref(u)
fname = prefix + '.p'
g._dump_manager(fname)
h = g._load_manager(fname)
assert g.assert_consistent()
u_ = h.add_expr(e)
assert u == u_, (u, u_)
# h.dump(prefix + '.pdf')
def test_quantify():
ordering = {'x': 0, 'y': 1, 'z': 2}
g = BDD(ordering)
# x /\ y
e = g.add_expr('x /\ ~ y')
x = g.add_expr('x')
not_y = g.add_expr('~ y')
assert g.quantify(e, {'x'}) == not_y
assert g.quantify(e, {'x'}, forall=True) == -1
assert g.quantify(e, {'y'}) == x
assert g.quantify(e, {'x'}, forall=True) == -1
# x \/ y \/ z
e = g.add_expr('x \/ y \/ z')
xy = g.add_expr('x \/ y')
yz = g.add_expr('y \/ z')
zx = g.add_expr('z \/ x')
assert g.quantify(e, {'x'})
assert g.quantify(e, {'y'})
assert g.quantify(e, {'z'})
assert g.quantify(e, {'z'}, forall=True) == xy
assert g.quantify(e, {'x'}, forall=True) == yz
assert g.quantify(e, {'y'}, forall=True) == zx
# complement edges
u = -x
v = g.quantify(u, {'y'}, forall=True)
assert v == -x, g.to_expr(v)
# multiple values: test recursion
e = g.add_expr('x /\ y /\ z')
x = g.add_expr('x')
r = g.quantify(e, {'y', 'z'})
assert r == x, r
def test_quantifier_syntax():
b = BDD()
[b.add_var(var) for var in ['x', 'y']]
# constants
u = b.add_expr('\E x: TRUE')
assert u == b.true, u
u = b.add_expr('\E x, y: TRUE')
assert u == b.true, u
u = b.add_expr('\E x: FALSE')
assert u == b.false, u
u = b.add_expr('\A x: TRUE')
assert u == b.true, u
u = b.add_expr('\A x: FALSE')
assert u == b.false, u
u = b.add_expr('\A x, y: FALSE')
assert u == b.false, u
# variables
u = b.add_expr('\E x: x')
assert u == b.true, u
u = b.add_expr('\A x: x')
assert u == b.false, u
u = b.add_expr('\E x, y: x')
assert u == b.true, u
u = b.add_expr('\E x, y: y')
assert u == b.true, u
u = b.add_expr('\A x: y')
assert u == b.var('y'), u
u = b.add_expr('\A x: ~ y')
u_ = b.apply('not', b.var('y'))
assert u == u_, (u, u_)
def test_rename():
ordering = {'x': 0, 'xp': 1}
g = BDD(ordering)
x = g.add_expr('x')
xp = g.add_expr('xp')
dvars = {'x': 'xp'}
xrenamed = g.let(dvars, x)
assert xrenamed == xp, xrenamed
ordering = {'x': 0, 'xp': 1,
'y': 2, 'yp': 3,
'z': 4, 'zp': 5}
g = BDD(ordering)
u = g.add_expr('x /\ y /\ ~ z')
dvars = {'x': 'xp', 'y': 'yp', 'z': 'zp'}
urenamed = g.let(dvars, u)
up = g.add_expr('xp /\ yp /\ ~ zp')
assert urenamed == up, urenamed
# assertion violations
# non-neighbors
dvars = {'x': 'yp'}
r = g.let(dvars, u)
r_ = g.add_expr('yp /\ y /\ ~ z')
assert r == r_, (r, r_)
# u not in bdd
dvars = {'x': 'xp'}
with nt.assert_raises(AssertionError):
g.let(dvars, 1000)
# y essential for u
dvars = {'x': 'y'}
v = g.let(dvars, u)
v_ = g.add_expr('y /\ ~ z')
assert v == v_, (v, v_)
# old and new vars intersect
dvars = {'x': 'x'}
v = g.let(dvars, u)
assert v == u, (v, u)
def test_rename_syntax():
b = BDD()
[b.add_var(var) for var in ['x', 'y', 'z', 'w']]
# single substitution
u = b.add_expr('\S y / x: TRUE')
assert u == b.true, u
u = b.add_expr('\S y / x: FALSE')
assert u == b.false, u
u = b.add_expr('\S y / x: x')
u_ = b.add_expr('y')
assert u == u_, (u, u_)
u = b.add_expr('\S y / x: z')
u_ = b.add_expr('z')
assert u == u_, (u, u_)
u = b.add_expr('\S y / x: x /\ z')
u_ = b.add_expr('y /\ z')
assert u == u_, (u, u_)
# multiple substitution
u = b.add_expr('\S y / x, w / z: x /\ z')
u_ = b.add_expr('y /\ w')
assert u == u_, (u, u_)
u = b.add_expr('\S y / x, w / z: z \/ ~ x')
u_ = b.add_expr('w \/ ~ y')
assert u == u_, (u, u_)
def test_image_rename_map_checks():
ordering = {'x': 0, 'xp': 1,
'y': 2, 'yp': 3,
'z': 4, 'zp': 5}
bdd = BDD(ordering)
# non-adjacent
rename = {0: 2, 3: 4}
qvars = set()
r = _bdd.image(1, 1, rename, qvars, bdd)
assert r == 1, r
r = _bdd.preimage(1, 1, rename, qvars, bdd)
assert r == 1, r
# overlapping keys and values
rename = {0: 1, 1: 2}
with nt.assert_raises(AssertionError):
_bdd.image(1, 1, rename, qvars, bdd)
with nt.assert_raises(AssertionError):
_bdd.preimage(1, 1, rename, qvars, bdd)
    # may be in support after quantification?
trans = bdd.add_expr('x => xp')
source = bdd.add_expr('x /\ y')
qvars = {0}
rename = {1: 0, 3: 2}
with nt.assert_raises(AssertionError):
_bdd.image(trans, source, rename, qvars, bdd)
# in support of `target` ?
qvars = set()
trans = bdd.add_expr('y')
target = bdd.add_expr('x /\ y')
rename = {0: 2}
r = _bdd.preimage(trans, target, rename, qvars, bdd)
assert r == bdd.var('y'), r
def test_preimage():
# exists: x, y
# forall: z
ordering = {'x': 0, 'xp': 1,
'y': 2, 'yp': 3,
'z': 4, 'zp': 5}
rename = {0: 1, 2: 3, 4: 5}
g = BDD(ordering)
f = g.add_expr('~ x')
t = g.add_expr('x <=> ~ xp')
qvars = {1, 3}
p = preimage(t, f, rename, qvars, g)
x = g.add_expr('x')
assert x == p, (x, p)
# a cycle
# (x /\ y) --> (~ x /\ y) -->
# (~ x /\ ~ y) --> (x /\ ~ y) --> wrap around
t = g.add_expr(
'((x /\ y) => (~ xp /\ yp)) /\ '
'((~ x /\ y) => (~ xp /\ ~ yp)) /\ '
'((~ x /\ ~ y) => (xp /\ ~ yp)) /\ '
'((x /\ ~ y) => (xp /\ yp))')
f = g.add_expr('x /\ y')
p = preimage(t, f, rename, qvars, g)
assert p == g.add_expr('x /\ ~ y')
f = g.add_expr('x /\ ~ y')
p = preimage(t, f, rename, qvars, g)
assert p == g.add_expr('~ x /\ ~ y')
# backward reachable set
f = g.add_expr('x /\ y')
oldf = None
while oldf != f:
p = preimage(t, f, rename, qvars, g)
oldf = f
f = g.apply('or', p, oldf)
assert f == 1
# go around once
f = g.add_expr('x /\ y')
start = f
for i in range(4):
f = preimage(t, f, rename, qvars, g)
end = f
assert start == end
# forall z exists x, y
t = g.add_expr(
'('
' ((x /\ y) => (zp /\ xp /\ ~ yp)) \/ '
' ((x /\ y) => (~ zp /\ ~ xp /\ yp))'
') /\ '
'(~ (x /\ y) => False)')
f = g.add_expr('x /\ ~ y')
ep = preimage(t, f, rename, qvars, g)
p = g.quantify(ep, {'zp'}, forall=True)
assert p == -1
f = g.add_expr('(x /\ ~ y) \/ (~ x /\ y)')
ep = preimage(t, f, rename, qvars, g)
p = g.quantify(ep, {'zp'}, forall=True)
assert p == g.add_expr('x /\ y')
def test_assert_valid_ordering():
ordering = {'x': 0, 'y': 1}
_bdd._assert_valid_ordering(ordering)
incorrect_ordering = {'x': 0, 'y': 2}
with nt.assert_raises(AssertionError):
_bdd._assert_valid_ordering(incorrect_ordering)
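# As the failing case shows, a valid ordering must map the declared variables
# bijectively onto the contiguous levels 0..n-1 ({'x': 0, 'y': 2} leaves a gap).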
def test_assert_refined_ordering():
ordering = {'x': 0, 'y': 1}
new_ordering = {'z': 0, 'x': 1, 'w': 2, 'y': 3}
_bdd._assert_isomorphic_orders(ordering, new_ordering, ordering)
def test_to_pydot():
def f(x):
return str(abs(x))
# with roots
g = x_and_y()
pd = _bdd.to_pydot([4, 2], g)
r = nx.drawing.nx_pydot.from_pydot(pd)
for u in g:
assert f(u) in r, (u, r.nodes())
for u in g._succ:
i, v, w = g._succ[u]
if v is None or w is None:
assert v is None, v
assert w is None, w
continue
assert r.has_edge(f(u), f(v)), (u, v)
assert r.has_edge(f(u), f(w)), (u, w)
# no roots
pd = _bdd.to_pydot(None, g)
r = nx.drawing.nx_pydot.from_pydot(pd)
assert len(r) == 8, r.nodes() # 3 hidden nodes for levels
def test_function_wrapper():
levels = dict(x=0, y=1, z=2)
bdd = autoref.BDD(levels)
u = bdd.add_expr('x /\ y')
assert u.bdd is bdd, (repr(u.bdd), repr(bdd))
assert abs(u.node) in bdd._bdd, (u.node, bdd._bdd._succ)
# operators
x = bdd.add_expr('x')
z = bdd.add_expr('z')
v = x.implies(z)
w = u & ~ v
w_ = bdd.add_expr('(x /\ y) /\ ~ ((~ x) \/ z)')
assert w_ == w, (w_, w)
r = ~ (u | v).equiv(w)
r_ = bdd.add_expr(
'( (x /\ y) \/ ((~ x) \/ z) ) ^'
'( (x /\ y) /\ ~ ((~ x) \/ z) )')
assert r_ == r, (r_, r)
p = bdd.add_expr('y')
q = p.equiv(x)
q_ = bdd.add_expr('x <=> y')
assert q_ == q, (q_, q)
# to_expr
s = q.to_expr()
assert s == 'ite(x, y, (~ y))', s
# equality
p_ = bdd.add_expr('y')
assert p_ == p, p_
# decref and collect garbage
bdd.collect_garbage()
n = len(bdd)
assert n > 1, bdd._bdd._ref
del p
del q, q_
del r, r_
bdd.collect_garbage()
m = len(bdd)
assert m > 1, bdd._bdd._ref
assert m < n, (m, n)
del u
del v
del w, w_
del x
del z
bdd.collect_garbage()
n = len(bdd)
assert n == 2, bdd._bdd._ref
del p_
bdd.collect_garbage()
n = len(bdd)
assert n == 1, bdd._bdd._ref
# properties
bdd = autoref.BDD({'x': 0, 'y': 1, 'z': 2})
u = bdd.add_expr('x \/ ~ y')
assert u.level == 0, u.level
assert u.var == 'x', u.var
y = bdd.add_expr('~ y')
assert u.low == y, (u.low.node, y.node)
assert u.high.node == 1, u.high.node
assert u.ref == 1, u.ref
def x_or_y():
g = two_vars_xy()
u = 4
t = (0, 3, 1)
assert_valid_succ_pred(u, t, g)
g._succ[u] = t
g._pred[t] = u
g._ref[u] = 1
g._min_free = u + 1
g.assert_consistent()
return g
def x_and_y():
g = two_vars_xy()
u = 4
t = (0, -1, 3)
assert_valid_succ_pred(u, t, g)
g._succ[u] = t
g._pred[t] = u
g._ref[u] = 1
g._min_free = u + 1
return g
def two_vars_xy():
ordering = {'x': 0, 'y': 1}
g = BDD(ordering)
u = 2
t = (0, -1, 1)
assert_valid_succ_pred(u, t, g)
g._succ[u] = t
g._pred[t] = u
g._ref[u] = 1
u = 3
t = (1, -1, 1)
assert_valid_succ_pred(u, t, g)
g._succ[u] = t
g._pred[t] = u
g._ref[u] = 1
g._min_free = u + 1
return g
def x_and_not_y():
# remember:
# 2 = ~ (x /\ ~ y)
# -2 = x /\ ~ y
ordering = {'x': 0, 'y': 1}
g = BDD(ordering)
u = 3
v = -1
w = 1
t = (1, v, w)
assert_valid_succ_pred(u, t, g)
g._succ[u] = t
g._pred[t] = u
g._ref[abs(v)] += 1
g._ref[abs(w)] += 1
g._ref[abs(u)] = 0
u = 2
v = 1
w = 3
t = (0, v, w)
assert_valid_succ_pred(u, t, g)
g._succ[u] = t
g._pred[t] = u
g._ref[abs(v)] += 1
g._ref[abs(w)] += 1
g._ref[abs(u)] = 0
g._min_free = 4
return g
def assert_valid_succ_pred(u, t, g):
assert u > 1, u
assert isinstance(t, tuple), t
assert len(t) == 3, t
assert t[0] >= 0, t
assert u not in g._succ, g._succ
assert t not in g._pred, g._pred
def ref_var(i):
h = nx.MultiDiGraph()
h.add_node(1, level=2)
h.add_node(2, level=i)
h.add_edge(2, 1, value=False, complement=True)
h.add_edge(2, 1, value=True, complement=False)
return h
def ref_x_and_y():
h = nx.MultiDiGraph()
h.add_node(1, level=2)
h.add_node(2, level=0)
h.add_node(3, level=1)
h.add_edge(2, 1, value=False, complement=True)
h.add_edge(2, 3, value=True, complement=False)
h.add_edge(3, 1, value=False, complement=True)
h.add_edge(3, 1, value=True, complement=False)
return h
def ref_x_or_y():
h = nx.MultiDiGraph()
h.add_node(1, level=2)
h.add_node(2, level=0)
h.add_node(3, level=1)
h.add_edge(2, 3, value=False, complement=False)
h.add_edge(2, 1, value=True, complement=False)
h.add_edge(3, 1, value=False, complement=True)
h.add_edge(3, 1, value=True, complement=False)
return h
def compare(u, bdd, h):
g = _bdd.to_nx(bdd, [u])
# nx.drawing.nx_pydot.to_pydot(g).write_pdf('g.pdf')
post = nx.descendants(g, u)
post.add(u)
r = g.subgraph(post)
# nx.drawing.nx_pydot.to_pydot(r).write_pdf('r.pdf')
# nx.drawing.nx_pydot.to_pydot(h).write_pdf('h.pdf')
gm = iso.GraphMatcher(r, h, node_match=_nm, edge_match=_em)
assert gm.is_isomorphic()
d = gm.mapping
assert d[1] == 1
def _nm(x, y):
return x['level'] == y['level']
def _em(x, y):
return (
bool(x[0]['value']) == bool(y[0]['value']) and
bool(x[0]['complement']) == bool(y[0]['complement']))
if __name__ == '__main__':
log = logging.getLogger('astutils')
log.setLevel(logging.ERROR)
log = logging.getLogger('dd.bdd')
log.setLevel(logging.INFO)
log.addHandler(logging.StreamHandler())
test_dynamic_reordering()
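# Hedged usage sketch of the public API exercised by these tests (not part of
# the original suite; assumes the `dd.bdd` interface as used above):
#
#     b = BDD()
#     b.declare('x', 'y')
#     u = b.add_expr('x /\ y')
#     b.incref(u)
#     _bdd.reorder(b)
#     assert b.assert_consistent()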
| [
"logging.getLogger",
"logging.StreamHandler",
"networkx.algorithms.isomorphism.GraphMatcher",
"dd.bdd.BDD",
"nose.tools.assert_raises",
"dd.bdd._request_reordering",
"dd.bdd._enumerate_minterms",
"dd.bdd._assert_valid_ordering",
"dd.bdd._assert_isomorphic_orders",
"dd.bdd.image",
"dd.bdd.preimag... | [((5305, 5315), 'dd.bdd.BDD', '_bdd.BDD', ([], {}), '()\n', (5313, 5315), True, 'from dd import bdd as _bdd\n'), ((6899, 6935), 'dd.bdd._enumerate_minterms', '_bdd._enumerate_minterms', (['cube', 'bits'], {}), '(cube, bits)\n', (6923, 6935), True, 'from dd import bdd as _bdd\n'), ((7216, 7252), 'dd.bdd._enumerate_minterms', '_bdd._enumerate_minterms', (['cube', 'bits'], {}), '(cube, bits)\n', (7240, 7252), True, 'from dd import bdd as _bdd\n'), ((7588, 7624), 'dd.bdd._enumerate_minterms', '_bdd._enumerate_minterms', (['cube', 'bits'], {}), '(cube, bits)\n', (7612, 7624), True, 'from dd import bdd as _bdd\n'), ((8819, 8836), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ([], {}), '()\n', (8834, 8836), True, 'import networkx as nx\n'), ((19873, 19888), 'dd.bdd.reorder', '_bdd.reorder', (['g'], {}), '(g)\n', (19885, 19888), True, 'from dd import bdd as _bdd\n'), ((20210, 20239), 'dd.bdd._request_reordering', '_bdd._request_reordering', (['ctx'], {}), '(ctx)\n', (20234, 20239), True, 'from dd import bdd as _bdd\n'), ((20519, 20548), 'dd.bdd._request_reordering', '_bdd._request_reordering', (['ctx'], {}), '(ctx)\n', (20543, 20548), True, 'from dd import bdd as _bdd\n'), ((29542, 29578), 'dd.bdd.image', '_bdd.image', (['(1)', '(1)', 'rename', 'qvars', 'bdd'], {}), '(1, 1, rename, qvars, bdd)\n', (29552, 29578), True, 'from dd import bdd as _bdd\n'), ((29608, 29647), 'dd.bdd.preimage', '_bdd.preimage', (['(1)', '(1)', 'rename', 'qvars', 'bdd'], {}), '(1, 1, rename, qvars, bdd)\n', (29621, 29647), True, 'from dd import bdd as _bdd\n'), ((30309, 30357), 'dd.bdd.preimage', '_bdd.preimage', (['trans', 'target', 'rename', 'qvars', 'bdd'], {}), '(trans, target, rename, qvars, bdd)\n', (30322, 30357), True, 'from dd import bdd as _bdd\n'), ((30687, 30719), 'dd.bdd.preimage', 'preimage', (['t', 'f', 'rename', 'qvars', 'g'], {}), '(t, f, rename, qvars, g)\n', (30695, 30719), False, 'from dd.bdd import preimage\n'), ((31094, 31126), 'dd.bdd.preimage', 'preimage', (['t', 'f', 'rename', 'qvars', 'g'], {}), '(t, f, rename, qvars, g)\n', (31102, 31126), False, 'from dd.bdd import preimage\n'), ((31205, 31237), 'dd.bdd.preimage', 'preimage', (['t', 'f', 'rename', 'qvars', 'g'], {}), '(t, f, rename, qvars, g)\n', (31213, 31237), False, 'from dd.bdd import preimage\n'), ((31905, 31937), 'dd.bdd.preimage', 'preimage', (['t', 'f', 'rename', 'qvars', 'g'], {}), '(t, f, rename, qvars, g)\n', (31913, 31937), False, 'from dd.bdd import preimage\n'), ((32057, 32089), 'dd.bdd.preimage', 'preimage', (['t', 'f', 'rename', 'qvars', 'g'], {}), '(t, f, rename, qvars, g)\n', (32065, 32089), False, 'from dd.bdd import preimage\n'), ((32243, 32280), 'dd.bdd._assert_valid_ordering', '_bdd._assert_valid_ordering', (['ordering'], {}), '(ordering)\n', (32270, 32280), True, 'from dd import bdd as _bdd\n'), ((32548, 32612), 'dd.bdd._assert_isomorphic_orders', '_bdd._assert_isomorphic_orders', (['ordering', 'new_ordering', 'ordering'], {}), '(ordering, new_ordering, ordering)\n', (32578, 32612), True, 'from dd import bdd as _bdd\n'), ((32721, 32745), 'dd.bdd.to_pydot', '_bdd.to_pydot', (['[4, 2]', 'g'], {}), '([4, 2], g)\n', (32734, 32745), True, 'from dd import bdd as _bdd\n'), ((32754, 32788), 'networkx.drawing.nx_pydot.from_pydot', 'nx.drawing.nx_pydot.from_pydot', (['pd'], {}), '(pd)\n', (32784, 32788), True, 'import networkx as nx\n'), ((33133, 33155), 'dd.bdd.to_pydot', '_bdd.to_pydot', (['None', 'g'], {}), '(None, g)\n', (33146, 33155), True, 'from dd import bdd as _bdd\n'), ((33164, 33198), 
'networkx.drawing.nx_pydot.from_pydot', 'nx.drawing.nx_pydot.from_pydot', (['pd'], {}), '(pd)\n', (33194, 33198), True, 'import networkx as nx\n'), ((33336, 33355), 'dd.autoref.BDD', 'autoref.BDD', (['levels'], {}), '(levels)\n', (33347, 33355), False, 'from dd import autoref\n'), ((34577, 34614), 'dd.autoref.BDD', 'autoref.BDD', (["{'x': 0, 'y': 1, 'z': 2}"], {}), "({'x': 0, 'y': 1, 'z': 2})\n", (34588, 34614), False, 'from dd import autoref\n'), ((36424, 36441), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ([], {}), '()\n', (36439, 36441), True, 'import networkx as nx\n'), ((36640, 36657), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ([], {}), '()\n', (36655, 36657), True, 'import networkx as nx\n'), ((36984, 37001), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ([], {}), '()\n', (36999, 37001), True, 'import networkx as nx\n'), ((37335, 37355), 'dd.bdd.to_nx', '_bdd.to_nx', (['bdd', '[u]'], {}), '(bdd, [u])\n', (37345, 37355), True, 'from dd import bdd as _bdd\n'), ((37424, 37444), 'networkx.descendants', 'nx.descendants', (['g', 'u'], {}), '(g, u)\n', (37438, 37444), True, 'import networkx as nx\n'), ((37609, 37663), 'networkx.algorithms.isomorphism.GraphMatcher', 'iso.GraphMatcher', (['r', 'h'], {'node_match': '_nm', 'edge_match': '_em'}), '(r, h, node_match=_nm, edge_match=_em)\n', (37625, 37663), True, 'import networkx.algorithms.isomorphism as iso\n'), ((37972, 38001), 'logging.getLogger', 'logging.getLogger', (['"""astutils"""'], {}), "('astutils')\n", (37989, 38001), False, 'import logging\n'), ((38044, 38071), 'logging.getLogger', 'logging.getLogger', (['"""dd.bdd"""'], {}), "('dd.bdd')\n", (38061, 38071), False, 'import logging\n'), ((1540, 1572), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (1556, 1572), True, 'import nose.tools as nt\n'), ((1616, 1648), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (1632, 1648), True, 'import nose.tools as nt\n'), ((1990, 2022), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (2006, 2022), True, 'import nose.tools as nt\n'), ((2116, 2148), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (2132, 2148), True, 'import nose.tools as nt\n'), ((2534, 2566), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (2550, 2566), True, 'import nose.tools as nt\n'), ((2671, 2703), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (2687, 2703), True, 'import nose.tools as nt\n'), ((2810, 2842), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (2826, 2842), True, 'import nose.tools as nt\n'), ((3092, 3124), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (3108, 3124), True, 'import nose.tools as nt\n'), ((5210, 5237), 'nose.tools.assert_raises', 'nt.assert_raises', (['Exception'], {}), '(Exception)\n', (5226, 5237), True, 'import nose.tools as nt\n'), ((11927, 11959), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (11943, 11959), True, 'import nose.tools as nt\n'), ((12040, 12072), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (12056, 12072), True, 'import nose.tools as nt\n'), ((13184, 13211), 'nose.tools.assert_raises', 'nt.assert_raises', (['Exception'], {}), '(Exception)\n', (13200, 
13211), True, 'import nose.tools as nt\n'), ((16496, 16528), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (16512, 16528), True, 'import nose.tools as nt\n'), ((20353, 20392), 'nose.tools.assert_raises', 'nt.assert_raises', (['_bdd._NeedsReordering'], {}), '(_bdd._NeedsReordering)\n', (20369, 20392), True, 'import nose.tools as nt\n'), ((20402, 20431), 'dd.bdd._request_reordering', '_bdd._request_reordering', (['ctx'], {}), '(ctx)\n', (20426, 20431), True, 'from dd import bdd as _bdd\n'), ((20650, 20678), 'dd.bdd._ReorderingContext', '_bdd._ReorderingContext', (['ctx'], {}), '(ctx)\n', (20673, 20678), True, 'from dd import bdd as _bdd\n'), ((20720, 20743), 'dd.bdd._NeedsReordering', '_bdd._NeedsReordering', ([], {}), '()\n', (20741, 20743), True, 'from dd import bdd as _bdd\n'), ((20832, 20871), 'nose.tools.assert_raises', 'nt.assert_raises', (['_bdd._NeedsReordering'], {}), '(_bdd._NeedsReordering)\n', (20848, 20871), True, 'import nose.tools as nt\n'), ((21077, 21109), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (21093, 21109), True, 'import nose.tools as nt\n'), ((21286, 21313), 'nose.tools.assert_raises', 'nt.assert_raises', (['Exception'], {}), '(Exception)\n', (21302, 21313), True, 'import nose.tools as nt\n'), ((24635, 24667), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (24651, 24667), True, 'import nose.tools as nt\n'), ((28243, 28275), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (28259, 28275), True, 'import nose.tools as nt\n'), ((29738, 29770), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (29754, 29770), True, 'import nose.tools as nt\n'), ((29780, 29816), 'dd.bdd.image', '_bdd.image', (['(1)', '(1)', 'rename', 'qvars', 'bdd'], {}), '(1, 1, rename, qvars, bdd)\n', (29790, 29816), True, 'from dd import bdd as _bdd\n'), ((29826, 29858), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (29842, 29858), True, 'import nose.tools as nt\n'), ((29868, 29907), 'dd.bdd.preimage', '_bdd.preimage', (['(1)', '(1)', 'rename', 'qvars', 'bdd'], {}), '(1, 1, rename, qvars, bdd)\n', (29881, 29907), True, 'from dd import bdd as _bdd\n'), ((30078, 30110), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (30094, 30110), True, 'import nose.tools as nt\n'), ((30120, 30165), 'dd.bdd.image', '_bdd.image', (['trans', 'source', 'rename', 'qvars', 'bdd'], {}), '(trans, source, rename, qvars, bdd)\n', (30130, 30165), True, 'from dd import bdd as _bdd\n'), ((31386, 31418), 'dd.bdd.preimage', 'preimage', (['t', 'f', 'rename', 'qvars', 'g'], {}), '(t, f, rename, qvars, g)\n', (31394, 31418), False, 'from dd.bdd import preimage\n'), ((31588, 31620), 'dd.bdd.preimage', 'preimage', (['t', 'f', 'rename', 'qvars', 'g'], {}), '(t, f, rename, qvars, g)\n', (31596, 31620), False, 'from dd.bdd import preimage\n'), ((32332, 32364), 'nose.tools.assert_raises', 'nt.assert_raises', (['AssertionError'], {}), '(AssertionError)\n', (32348, 32364), True, 'import nose.tools as nt\n'), ((32374, 32421), 'dd.bdd._assert_valid_ordering', '_bdd._assert_valid_ordering', (['incorrect_ordering'], {}), '(incorrect_ordering)\n', (32401, 32421), True, 'from dd import bdd as _bdd\n'), ((38122, 38145), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (38143, 38145), False, 
'import logging\n'), ((20886, 20914), 'dd.bdd._ReorderingContext', '_bdd._ReorderingContext', (['ctx'], {}), '(ctx)\n', (20909, 20914), True, 'from dd import bdd as _bdd\n'), ((20964, 20987), 'dd.bdd._NeedsReordering', '_bdd._NeedsReordering', ([], {}), '()\n', (20985, 20987), True, 'from dd import bdd as _bdd\n'), ((21124, 21152), 'dd.bdd._ReorderingContext', '_bdd._ReorderingContext', (['ctx'], {}), '(ctx)\n', (21147, 21152), True, 'from dd import bdd as _bdd\n'), ((21328, 21356), 'dd.bdd._ReorderingContext', '_bdd._ReorderingContext', (['ctx'], {}), '(ctx)\n', (21351, 21356), True, 'from dd import bdd as _bdd\n')] |
"""
Abstract analyser
"""
from functools import wraps
from inspect import getmembers, ismethod
from typing import Callable
from ..type_hints import AnalyserResults, AnalyserHelper
from ..utils import SyntaxTree, BaseViolation, ViolationResult
def register_check(error_format: str):
"""
Registers a new checker to an analyser
Args:
error_format: error format of violation
"""
def decorator(check_method: Callable):
@wraps(check_method)
def wrapper(*args, **kwargs):
analyser = args[0]
checker_name = check_method.__name__
analyser.register_checker(checker_name,
check_method.__doc__,
error_format)
result: list[ViolationResult] = check_method(*args, **kwargs)
analyser.add_violations(checker_name, result)
return wrapper
return decorator
class Analyser:
"""Abstract base analyser"""
def __init__(self, sources: dict[str, AnalyserHelper]):
"""
Constructor
Args:
tree: syntax tree
source: list of lines from source code
"""
self._check_results: AnalyserResults = {}
self._sources = sources
def register_checker(self, name: str, description: str, error_format: str):
"""
Registers a new checker to this analyser
Args:
name: name of the checker, typically the method name
description: description of this checker
error_format: format string used to display violations
"""
self._check_results[name] = BaseViolation(description, error_format, [])
def get_results(self) -> AnalyserResults:
"""
Returns results of all checkers of this analyser
"""
return self._check_results
def add_violations(self, checker_name: str,
results: list[ViolationResult]) -> None:
"""
Adds violation results to a checker
Args:
checker_name: name of the checker
results: list of violation results
"""
self._check_results[checker_name].values.extend(results)
def get_line(self, file_name: str, line_number: int) -> str:
"""Returns line given line number"""
return self._sources[file_name].source[line_number - 1].strip()
def run(self):
"""
Runs all checkers
"""
for method_name, method in getmembers(self, predicate=ismethod):
if not method_name.startswith("check_"):
continue
method()
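# Hedged usage sketch (the subclass, checker name and empty result below are
# illustrative only, not part of this module):
#
#     class LineLengthAnalyser(Analyser):
#         @register_check('line {} is longer than 79 characters')
#         def check_line_length(self):
#             """Lines should fit in 79 characters."""
#             return []  # list of ViolationResult objects
#
#     analyser = LineLengthAnalyser(sources)
#     analyser.run()
#     results = analyser.get_results()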
| [
"inspect.getmembers",
"functools.wraps"
] | [((454, 473), 'functools.wraps', 'wraps', (['check_method'], {}), '(check_method)\n', (459, 473), False, 'from functools import wraps\n'), ((2502, 2538), 'inspect.getmembers', 'getmembers', (['self'], {'predicate': 'ismethod'}), '(self, predicate=ismethod)\n', (2512, 2538), False, 'from inspect import getmembers, ismethod\n')] |
from __future__ import absolute_import, division, print_function
import numpy as np
import tensorflow as tf
# NOTE: this script targets a legacy (pre-1.0) TensorFlow API
# (tf.select, tf.initialize_all_variables, Session-based execution).
from IPython.core.debugger import Tracer; debug_here = Tracer()  # interactive debugging hook (unused below)
batch_size = 5
max_it = tf.constant(6)
char_mat_1 = [[0.0, 0.0, 0.0, 0.9, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.9, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.9, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.9, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.9, 0.0, 0.0]]
char_mat_2 = [[0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
[1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0]]
char_mat_3 = [[0.0, 0.0, 0.0, 0.1, 0.0, 0.0],
[1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0]]
char_mat_4 = [[0.0, 0.0, 0.0, 0.1, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 1.0, 0.0],
[1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
[1.0, 0.0, 0.0, 0.0, 0.0, 0.0]]
char_mat_5 = [[1.0, 0.0, 0.0, 1.0, 0.0, 0.0],
[1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[1.0, 0.0, 0.0, 1.0, 0.0, 0.0],
[1.0, 0.0, 0.0, 0.0, 0.0, 0.0]]
# expected output: [5, 2, 4, 5, 4]
char_lst = [char_mat_1, char_mat_2, char_mat_3,
char_mat_4, char_mat_5]
np_char_tensor = np.array(char_lst)
char_prob = tf.constant(np.array(np_char_tensor), tf.float64)
char_prob = tf.transpose(char_prob, [1, 0, 2])
print(tf.Tensor.get_shape(char_prob))
sequence_length_lst = [1, 1, 1, 1, 1]
sequence_length = tf.constant(sequence_length_lst)
done_mask = tf.cast(tf.zeros(batch_size), tf.bool)
for time in range(0, 5):
print(time)
current_date = char_prob[:, time, :]
max_vals = tf.argmax(current_date, 1)
mask = tf.equal(max_vals, tf.constant(0, tf.int64))
    current_mask = tf.logical_and(mask, tf.logical_not(done_mask))  # elements finishing at this step (computed but unused below)
done_mask = tf.logical_or(mask, done_mask)
time_vec = tf.ones(batch_size, tf.int32)*(time+2)
sequence_length = tf.select(done_mask, sequence_length, time_vec, name=None)
not_done_no = tf.reduce_sum(tf.cast(tf.logical_not(done_mask), tf.int32))
all_eos = tf.equal(not_done_no, tf.constant(0))
stop_loop = tf.logical_or(all_eos, tf.greater(time, max_it))
keep_working = tf.logical_not(stop_loop)
sess = tf.Session()
with sess.as_default():
tf.initialize_all_variables().run()
#print(char_prob.eval())
print(max_vals.eval())
print(mask.eval())
print(done_mask.eval())
print(sequence_length.eval())
print(keep_working.eval())
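# The loop above finds, per batch element, the first step whose argmax is
# class 0 (treated as an end-of-sequence symbol) and freezes the length at the
# value written on the last not-yet-done step. A rough NumPy restatement of
# that freezing rule (illustrative only, not part of the original script):
#
#     probs = np_char_tensor.transpose(1, 0, 2)   # batch x time x classes
#     eos = probs.argmax(-1) == 0                 # batch x time EOS hits
#     first = eos.argmax(1)                       # first EOS index per batch
#     lengths = np.where(eos.any(1), first + 1, probs.shape[1] + 1)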
| [
"tensorflow.Tensor.get_shape",
"IPython.core.debugger.Tracer",
"tensorflow.initialize_all_variables",
"tensorflow.transpose",
"tensorflow.logical_or",
"tensorflow.ones",
"tensorflow.logical_not",
"tensorflow.Session",
"numpy.array",
"tensorflow.argmax",
"tensorflow.constant",
"tensorflow.great... | [((164, 172), 'IPython.core.debugger.Tracer', 'Tracer', ([], {}), '()\n', (170, 172), False, 'from IPython.core.debugger import Tracer\n'), ((199, 213), 'tensorflow.constant', 'tf.constant', (['(6)'], {}), '(6)\n', (210, 213), True, 'import tensorflow as tf\n'), ((1507, 1525), 'numpy.array', 'np.array', (['char_lst'], {}), '(char_lst)\n', (1515, 1525), True, 'import numpy as np\n'), ((1601, 1635), 'tensorflow.transpose', 'tf.transpose', (['char_prob', '[1, 0, 2]'], {}), '(char_prob, [1, 0, 2])\n', (1613, 1635), True, 'import tensorflow as tf\n'), ((1730, 1762), 'tensorflow.constant', 'tf.constant', (['sequence_length_lst'], {}), '(sequence_length_lst)\n', (1741, 1762), True, 'import tensorflow as tf\n'), ((2495, 2507), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (2505, 2507), True, 'import tensorflow as tf\n'), ((1551, 1575), 'numpy.array', 'np.array', (['np_char_tensor'], {}), '(np_char_tensor)\n', (1559, 1575), True, 'import numpy as np\n'), ((1642, 1672), 'tensorflow.Tensor.get_shape', 'tf.Tensor.get_shape', (['char_prob'], {}), '(char_prob)\n', (1661, 1672), True, 'import tensorflow as tf\n'), ((1783, 1803), 'tensorflow.zeros', 'tf.zeros', (['batch_size'], {}), '(batch_size)\n', (1791, 1803), True, 'import tensorflow as tf\n'), ((1912, 1938), 'tensorflow.argmax', 'tf.argmax', (['current_date', '(1)'], {}), '(current_date, 1)\n', (1921, 1938), True, 'import tensorflow as tf\n'), ((2079, 2109), 'tensorflow.logical_or', 'tf.logical_or', (['mask', 'done_mask'], {}), '(mask, done_mask)\n', (2092, 2109), True, 'import tensorflow as tf\n'), ((2187, 2245), 'tensorflow.select', 'tf.select', (['done_mask', 'sequence_length', 'time_vec'], {'name': 'None'}), '(done_mask, sequence_length, time_vec, name=None)\n', (2196, 2245), True, 'import tensorflow as tf\n'), ((2461, 2486), 'tensorflow.logical_not', 'tf.logical_not', (['stop_loop'], {}), '(stop_loop)\n', (2475, 2486), True, 'import tensorflow as tf\n'), ((1969, 1993), 'tensorflow.constant', 'tf.constant', (['(0)', 'tf.int64'], {}), '(0, tf.int64)\n', (1980, 1993), True, 'import tensorflow as tf\n'), ((2036, 2061), 'tensorflow.logical_not', 'tf.logical_not', (['done_mask'], {}), '(done_mask)\n', (2050, 2061), True, 'import tensorflow as tf\n'), ((2126, 2155), 'tensorflow.ones', 'tf.ones', (['batch_size', 'tf.int32'], {}), '(batch_size, tf.int32)\n', (2133, 2155), True, 'import tensorflow as tf\n'), ((2361, 2375), 'tensorflow.constant', 'tf.constant', (['(0)'], {}), '(0)\n', (2372, 2375), True, 'import tensorflow as tf\n'), ((2416, 2440), 'tensorflow.greater', 'tf.greater', (['time', 'max_it'], {}), '(time, max_it)\n', (2426, 2440), True, 'import tensorflow as tf\n'), ((2287, 2312), 'tensorflow.logical_not', 'tf.logical_not', (['done_mask'], {}), '(done_mask)\n', (2301, 2312), True, 'import tensorflow as tf\n'), ((2536, 2565), 'tensorflow.initialize_all_variables', 'tf.initialize_all_variables', ([], {}), '()\n', (2563, 2565), True, 'import tensorflow as tf\n')] |
from typing import List, Tuple
import seaborn as sns
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import pyplot as plt
import pandas as pd
import numpy as np
"""
Plots of tensorboard results with adjusted theming for presentation
"""
label_dict = {0: 'akiec', 1: 'bcc', 2: 'bkl', 3: 'df', 4: 'mel', 5: 'nv', 6: 'vasc'}
sns.set_context(rc={'patch.linewidth': 0.0})
bg_color = '#DAEDEF'
first_color = '#ADC9C4'
second_color = '#7D918E'
def set_plot_theme(ax):
ax.set_facecolor(bg_color)
ax.spines['left'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.spines['bottom'].set_color(second_color)
ax.xaxis.label.set_color(second_color)
ax.yaxis.label.set_color(second_color)
ax.yaxis.grid(color=second_color, linewidth=.5, zorder=0)
ax.tick_params(axis='x', colors=second_color)
ax.tick_params(axis='y', colors=second_color, width=.5)
def plot_label_counts(label_counts):
series = pd.Series(label_counts, index=[label_dict[i] for i in range(7)])
fig, ax = plt.subplots(nrows=1, ncols=1, facecolor=bg_color)
ax.set_title('', color=second_color)
sns.barplot(x=series.index, y=series, ax=ax, ci=None, color=first_color, zorder=3)
set_plot_theme(ax)
fig.show()
def plot_confusion_matrix(confusion_matrix, title):
    pct_matrix = confusion_matrix / np.sum(confusion_matrix, axis=0)  # normalize each column to fractions
df_cm = pd.DataFrame(pct_matrix,
index=[label_dict[i] for i in range(7)],
columns=[label_dict[i] for i in range(7)])
# draw heatmap
fig, ax = plt.subplots(nrows=1, ncols=1, facecolor=bg_color)
cmap = sns.dark_palette("#E3F8FA", as_cmap=True)
sns.heatmap(df_cm, ax=ax, annot=True, fmt=".2f", cmap=cmap)
ax.set_title(title, color=second_color)
ax.spines['left'].set_color(second_color)
ax.spines['left'].set_visible(True)
ax.spines['right'].set_color(second_color)
ax.spines['right'].set_visible(True)
ax.spines['top'].set_color(second_color)
ax.spines['top'].set_visible(True)
ax.spines['bottom'].set_color(second_color)
ax.spines['bottom'].set_visible(True)
ax.xaxis.label.set_color(second_color)
ax.yaxis.label.set_color(second_color)
ax.tick_params(axis='x', colors=second_color, width=1.0)
ax.tick_params(axis='y', colors=second_color, width=.5)
fig.show()
def plot_performance_graphs(data: List[Tuple[str, str, str, pd.Series]]):
fig, ax = plt.subplots(nrows=1, ncols=1, facecolor=bg_color)
ax.set_ylim([0.0, 1.0])
set_plot_theme(ax)
for title, color, linestyle, series in data:
ax.plot(series.index, series, label=title, color=color, linestyle=linestyle)
#plt.axvline(x=8, color=second_color)
ax.legend()
fig.show()
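# Hedged usage sketch (random stand-in inputs; the real values come from
# exported tensorboard runs, which are not part of this module):
#
#     counts = np.random.randint(50, 1000, size=7)
#     plot_label_counts(counts)
#     cm = np.random.rand(7, 7)
#     plot_confusion_matrix(cm, 'validation confusion')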
| [
"matplotlib.use",
"seaborn.set_context",
"seaborn.heatmap",
"numpy.sum",
"seaborn.dark_palette",
"seaborn.barplot",
"matplotlib.pyplot.subplots"
] | [((72, 95), 'matplotlib.use', 'matplotlib.use', (['"""TkAgg"""'], {}), "('TkAgg')\n", (86, 95), False, 'import matplotlib\n'), ((334, 378), 'seaborn.set_context', 'sns.set_context', ([], {'rc': "{'patch.linewidth': 0.0}"}), "(rc={'patch.linewidth': 0.0})\n", (349, 378), True, 'import seaborn as sns\n'), ((1067, 1117), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(1)', 'facecolor': 'bg_color'}), '(nrows=1, ncols=1, facecolor=bg_color)\n', (1079, 1117), True, 'from matplotlib import pyplot as plt\n'), ((1163, 1249), 'seaborn.barplot', 'sns.barplot', ([], {'x': 'series.index', 'y': 'series', 'ax': 'ax', 'ci': 'None', 'color': 'first_color', 'zorder': '(3)'}), '(x=series.index, y=series, ax=ax, ci=None, color=first_color,\n zorder=3)\n', (1174, 1249), True, 'import seaborn as sns\n'), ((1611, 1661), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(1)', 'facecolor': 'bg_color'}), '(nrows=1, ncols=1, facecolor=bg_color)\n', (1623, 1661), True, 'from matplotlib import pyplot as plt\n'), ((1673, 1714), 'seaborn.dark_palette', 'sns.dark_palette', (['"""#E3F8FA"""'], {'as_cmap': '(True)'}), "('#E3F8FA', as_cmap=True)\n", (1689, 1714), True, 'import seaborn as sns\n'), ((1719, 1778), 'seaborn.heatmap', 'sns.heatmap', (['df_cm'], {'ax': 'ax', 'annot': '(True)', 'fmt': '""".2f"""', 'cmap': 'cmap'}), "(df_cm, ax=ax, annot=True, fmt='.2f', cmap=cmap)\n", (1730, 1778), True, 'import seaborn as sns\n'), ((2483, 2533), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(1)', 'facecolor': 'bg_color'}), '(nrows=1, ncols=1, facecolor=bg_color)\n', (2495, 2533), True, 'from matplotlib import pyplot as plt\n'), ((1374, 1406), 'numpy.sum', 'np.sum', (['confusion_matrix'], {'axis': '(0)'}), '(confusion_matrix, axis=0)\n', (1380, 1406), True, 'import numpy as np\n')] |
"""
A module for a mixture density network layer
(_Mixture Density Networks_ by Bishop, 1994.)
"""
import sys
import torch
import torch.tensor as ts
import torch.nn as nn
import torch.optim as optim
from torch.distributions import Categorical
import math
# Draw distributions
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.colors import LinearSegmentedColormap
'''
Process:
    Input x -> some backbone model (body) -> feature vector z
            -> MDN (head) -> probabilistic mixture parameters p (output)
'''
class MDN_Module(nn.Module):
"""
A Mixture Density Network Module
Symbols:
B - Batch size
G - Number of Gaussian components
D - Input's dimensions
F - Feature's dimensions
C - Output's dimensions (Gaussian distribution's dimensions)
Arguments:
dim_fea (int): the feature's dimensions
dim_prob (int): the output's dimenssions
num_gaus (int): the number of Gaussians per output dimension
Input:
minibatch (BxF)
Output:
(alp, mu, sigma) (BxG, BxGxC, BxGxC)
alp - (alpha) Component's weight
mu - Mean value
sigma - Standard deviation
"""
def __init__(self, dim_fea, dim_prob, num_gaus):
super(MDN_Module, self).__init__()
self.dim_fea = dim_fea
self.dim_prob = dim_prob
self.num_gaus = num_gaus
self.layer_alp = nn.Sequential(
nn.Linear(dim_fea, num_gaus),
nn.Softmax(dim=1) # If 1, go along each row
)
self.layer_mu = nn.Linear(dim_fea, dim_prob*num_gaus)
self.layer_sigma = nn.Sequential(
nn.Linear(dim_fea, dim_prob*num_gaus),
ReExp_Layer()
)
def forward(self, batch):
alp = self.layer_alp(batch)
mu = self.layer_mu(batch)
mu = mu.view(-1, self.num_gaus, self.dim_prob)
sigma = self.layer_sigma(batch)
sigma = sigma.view(-1, self.num_gaus, self.dim_prob)
return alp, mu, sigma
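# Hedged shape check (illustrative sizes only):
#
#     head = MDN_Module(dim_fea=64, dim_prob=2, num_gaus=5)
#     z = torch.randn(8, 64)      # a batch of B=8 feature vectors
#     alp, mu, sigma = head(z)    # shapes: (8, 5), (8, 5, 2), (8, 5, 2)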
class ReExp_Layer(nn.Module):
"""
A modified exponential layer.
    Only the negative part of the exponential is retained.
    The positive part is linear: y = x + 1.
"""
def __init__(self):
super().__init__()
def forward(self, x):
        l = nn.ELU()  # ELU(x) = max(0, x) + min(0, alpha * (exp(x) - 1))
        return torch.add(l(x), 1)  # shift by 1 so sigma stays strictly positive
class classic_MDN_Module(nn.Module):
def __init__(self, dim_fea, dim_prob, num_gaus):
super(classic_MDN_Module, self).__init__()
self.dim_fea = dim_fea
self.dim_prob = dim_prob
self.num_gaus = num_gaus
self.layer_alp = nn.Sequential(
nn.Linear(dim_fea, num_gaus),
nn.Softmax(dim=1) # If 1, go along each row
)
self.layer_mu = nn.Linear(dim_fea, dim_prob*num_gaus)
self.layer_sigma = nn.Sequential(
nn.Linear(dim_fea, dim_prob*num_gaus)
)
def forward(self, batch):
alp = self.layer_alp(batch)
mu = self.layer_mu(batch)
mu = mu.view(-1, self.num_gaus, self.dim_prob)
sigma = torch.exp(self.layer_sigma(batch))
sigma = sigma.view(-1, self.num_gaus, self.dim_prob)
return alp, mu, sigma
def cal_GauProb(mu, sigma, x):
"""
Return the probability of "data" given MoG parameters "mu" and "sigma".
Arguments:
mu (BxGxC) - The means of the Gaussians.
sigma (BxGxC) - The standard deviation of the Gaussians.
x (BxC) - A batch of data points.
Return:
probabilities (BxG): The probability of each point in the probability
of the distribution in the corresponding mu/sigma index.
"""
x = x.unsqueeze(1).expand_as(mu) # BxC -> Bx1xC -> BxGxC
prob = torch.rsqrt(torch.tensor(2*math.pi)) * torch.exp(-((x - mu) / sigma)**2 / 2) / sigma
return torch.prod(prob, dim=2) # overall probability for all output's dimensions in each component, BxG
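# Note: multiplying the per-dimension densities assumes each Gaussian
# component has a diagonal covariance diag(sigma^2), i.e. independent outputs.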
def cal_multiGauProb(alp, mu, sigma, x):
"""
Arguments:
alp (BxG) - (alpha) Component's weight
"""
prob = alp * cal_GauProb(mu, sigma, x) # BxG
    prob = torch.sum(prob, dim=1) # B: overall probability per batch element (summed over components)
return prob
def loss_NLL(alp, mu, sigma, data):
"""
Calculates the error, given the MoG parameters and the data.
The loss is the negative log likelihood of the data given the MoG parameters.
"""
nll = -torch.log(cal_multiGauProb(alp, mu, sigma, data))
return torch.mean(nll)
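# Hedged training-step sketch (feature extractor, optimizer and tensors below
# are illustrative, not part of this module):
#
#     alp, mu, sigma = mdn_head(features)        # features: B x F
#     loss = loss_NLL(alp, mu, sigma, targets)   # targets:  B x C
#     optimizer.zero_grad(); loss.backward(); optimizer.step()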
def loss_MaDist(alp, mu, sigma, data): # Mahalanobis distance
'''
mu (GxC) - The means of the Gaussians.
sigma (GxC) - The standard deviation of the Gaussians.
'''
md = []
alp = alp/sum(alp) #normalization
for i in range(mu.shape[0]): # do through every component
mu0 = (data-mu[i,:]).unsqueeze(0) # (x-mu)
        S_inv = ts([[1/sigma[i,0]**2,0],[0,1/sigma[i,1]**2]]) # S^-1: inverse of the covariance S = diag(sigma^2)
md0 = torch.sqrt( S_inv[0,0]*mu0[0,0]**2 + S_inv[1,1]*mu0[0,1]**2 )
md.append(md0)
return ts(md), sum(ts(md)*alp)
def loss_EMD(): pass # placeholder: earth mover's distance loss, not implemented
def sample(alp, mu, sigma):
"""
Draw samples from a MoG.
Return one sample for each batch
"""
categorical = Categorical(alp) # aka. generalized Bernoulli
try:
alps = list(categorical.sample().data) # take a sample of alpha for each batch
except:
raise Exception('Ooooops! Model collapse!')
    sample = sigma.new_empty(sigma.size(0), sigma.size(2)).normal_() # standard-normal noise, one row per batch element
for i, idx in enumerate(alps):
sample[i] = sample[i].mul(sigma[i,idx]).add(mu[i,idx])
return sample
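# Ancestral sampling as implemented above: draw a component index per batch
# element from Categorical(alp), then shift/scale standard-normal noise with
# that component's parameters (x = mu_k + sigma_k * eps).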
def take_mainCompo(alp, mu, sigma, main=3):
alp = alp[0,:]
mu = mu[0,:,:]
sigma = sigma[0,:,:]
    # clone: plain slices are views, and the loop below would otherwise
    # overwrite the very tensors it is still reading from
    main_alp = alp[:main].clone() # placeholder
    main_mu = mu[:main,:].clone() # placeholder
    main_sigma = sigma[:main,:].clone() # placeholder
_, indices = torch.sort(alp) # ascending order
for i in range(1,main+1):
idx = indices[-i].item() # largest to smallest
main_alp[i-1] = alp[idx]
main_mu[i-1,:] = mu[idx,:]
main_sigma[i-1,:] = sigma[idx,:]
return main_alp.unsqueeze(0), main_mu.unsqueeze(0), main_sigma.unsqueeze(0) # insert the "batch" dimension
def take_goodCompo(alp, mu, sigma, thre=0.1):
alp = alp[0,:]
mu = mu[0,:,:]
sigma = sigma[0,:,:]
thre = thre*max(alp)
idx = (alp>thre)
good_alp = alp[idx]
good_mu = mu[idx,:]
good_sigma = sigma[idx,:]
return good_alp.unsqueeze(0), good_mu.unsqueeze(0), good_sigma.unsqueeze(0) # insert the "batch" dimension
def sigma_limit(mu, sigma, nsigma=3):
# nsigma: 1 -> 0.6827 2 -> 0.9545 3 -> 0.9974
x_scope = [(mu-nsigma*sigma)[0,:,0], (mu+nsigma*sigma)[0,:,0]]
y_scope = [(mu-nsigma*sigma)[0,:,1], (mu+nsigma*sigma)[0,:,1]]
x_min = torch.min(x_scope[0])
x_max = torch.max(x_scope[1])
y_min = torch.min(y_scope[0])
y_max = torch.max(y_scope[1])
if x_min != torch.min(abs(x_scope[0])):
x_min = -torch.min(abs(x_scope[0]))
if x_max != torch.max(abs(x_scope[1])):
x_max = -torch.max(abs(x_scope[1]))
if y_min != torch.min(abs(y_scope[0])):
y_min = -torch.min(abs(y_scope[0]))
if y_max != torch.max(abs(y_scope[1])):
y_max = -torch.max(abs(y_scope[1]))
return [x_min, x_max], [y_min, y_max]
def cal_multiGauProbDistr(xx, yy, alp, mu, sigma):
xy = np.concatenate((xx.reshape(-1,1), yy.reshape(-1,1)), axis=1).astype(np.float32)
p = np.array([])
for i in range(xy.shape[0]):
p = np.append( p, cal_multiGauProb(alp, mu, sigma, x=ts(xy[i,:][np.newaxis,:])).detach().numpy() )
    p[np.where(p<max(p)/10)] = 0 # suppress the low-density tail (below 10% of the peak)
return p.reshape(xx.shape)
def draw_probDistribution(ax, alp, mu, sigma, main=3, nsigma=3, step=0.5, colorbar=False, toplot=True):
'''
Arguments:
ax - Axis
alp (BxG) - (alpha) Component's weight.
mu (BxGxC) - The means of the Gaussians.
sigma (BxGxC) - The standard deviation of the Gaussians.
'''
if main is not None:
alp, mu, sigma = take_mainCompo(alp, mu, sigma, main=main)
# ================= Register Colormap ================START
ncolors = 256
color_array = plt.get_cmap('gist_rainbow')(range(ncolors)) # get colormap
color_array[:,-1] = np.linspace(0,1,ncolors) # change alpha values
color_array[:,-1][:25] = 0
map_object = LinearSegmentedColormap.from_list(name='rainbow_alpha',colors=color_array) # create a colormap object
plt.register_cmap(cmap=map_object) # register this new colormap with matplotlib
# ================= Register Colormap ==================END
xlim, ylim = sigma_limit(mu, sigma, nsigma=nsigma)
x = np.arange(xlim[0].detach().numpy(), xlim[1].detach().numpy(), step=step)
y = np.arange(ylim[0].detach().numpy(), ylim[1].detach().numpy(), step=step)
xx, yy = np.meshgrid(x, y)
pp = cal_multiGauProbDistr(xx, yy, alp, mu, sigma)
if toplot:
cntr = ax.contourf(xx, yy, pp, cmap="rainbow_alpha")
if colorbar:
plt.colorbar(cntr, ax=ax)
return xx,yy,pp
def draw_GauEllipse(ax, mu, sigma, fc='b', nsigma=3, extend=False, label=None):
'''
mu (GxC) - The means of the Gaussians.
sigma (GxC) - The standard deviation of the Gaussians.
'''
for i in range(mu.shape[0]):
if i != 0:
label=None
if extend:
patch = patches.Ellipse(mu[i,:], nsigma*sigma[i,0]+8, nsigma*sigma[i,1]+8, fc=fc, label=label)
ax.add_patch(patch)
else:
patch = patches.Ellipse(mu[i,:], nsigma*sigma[i,0], nsigma*sigma[i,1], fc=fc, label=label)
            ax.add_patch(patch)
| [
"torch.distributions.Categorical",
"torch.max",
"torch.sqrt",
"torch.exp",
"torch.min",
"numpy.array",
"torch.sum",
"torch.mean",
"torch.prod",
"numpy.linspace",
"numpy.meshgrid",
"torch.sort",
"matplotlib.pyplot.register_cmap",
"matplotlib.patches.Ellipse",
"matplotlib.colors.LinearSegm... | [((4004, 4027), 'torch.prod', 'torch.prod', (['prob'], {'dim': '(2)'}), '(prob, dim=2)\n', (4014, 4027), False, 'import torch\n'), ((4285, 4307), 'torch.sum', 'torch.sum', (['prob'], {'dim': '(1)'}), '(prob, dim=1)\n', (4294, 4307), False, 'import torch\n'), ((4691, 4706), 'torch.mean', 'torch.mean', (['nll'], {}), '(nll)\n', (4701, 4706), False, 'import torch\n'), ((5430, 5446), 'torch.distributions.Categorical', 'Categorical', (['alp'], {}), '(alp)\n', (5441, 5446), False, 'from torch.distributions import Categorical\n'), ((6123, 6138), 'torch.sort', 'torch.sort', (['alp'], {}), '(alp)\n', (6133, 6138), False, 'import torch\n'), ((7046, 7067), 'torch.min', 'torch.min', (['x_scope[0]'], {}), '(x_scope[0])\n', (7055, 7067), False, 'import torch\n'), ((7080, 7101), 'torch.max', 'torch.max', (['x_scope[1]'], {}), '(x_scope[1])\n', (7089, 7101), False, 'import torch\n'), ((7114, 7135), 'torch.min', 'torch.min', (['y_scope[0]'], {}), '(y_scope[0])\n', (7123, 7135), False, 'import torch\n'), ((7148, 7169), 'torch.max', 'torch.max', (['y_scope[1]'], {}), '(y_scope[1])\n', (7157, 7169), False, 'import torch\n'), ((7717, 7729), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (7725, 7729), True, 'import numpy as np\n'), ((8546, 8572), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'ncolors'], {}), '(0, 1, ncolors)\n', (8557, 8572), True, 'import numpy as np\n'), ((8641, 8716), 'matplotlib.colors.LinearSegmentedColormap.from_list', 'LinearSegmentedColormap.from_list', ([], {'name': '"""rainbow_alpha"""', 'colors': 'color_array'}), "(name='rainbow_alpha', colors=color_array)\n", (8674, 8716), False, 'from matplotlib.colors import LinearSegmentedColormap\n'), ((8747, 8781), 'matplotlib.pyplot.register_cmap', 'plt.register_cmap', ([], {'cmap': 'map_object'}), '(cmap=map_object)\n', (8764, 8781), True, 'import matplotlib.pyplot as plt\n'), ((9122, 9139), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (9133, 9139), True, 'import numpy as np\n'), ((1673, 1712), 'torch.nn.Linear', 'nn.Linear', (['dim_fea', '(dim_prob * num_gaus)'], {}), '(dim_fea, dim_prob * num_gaus)\n', (1682, 1712), True, 'import torch.nn as nn\n'), ((2395, 2403), 'torch.nn.ELU', 'nn.ELU', ([], {}), '()\n', (2401, 2403), True, 'import torch.nn as nn\n'), ((2927, 2966), 'torch.nn.Linear', 'nn.Linear', (['dim_fea', '(dim_prob * num_gaus)'], {}), '(dim_fea, dim_prob * num_gaus)\n', (2936, 2966), True, 'import torch.nn as nn\n'), ((5071, 5119), 'torch.tensor', 'ts', (['[[1 / sigma[i, 0], 0], [0, 1 / sigma[i, 1]]]'], {}), '([[1 / sigma[i, 0], 0], [0, 1 / sigma[i, 1]]])\n', (5073, 5119), True, 'import torch.tensor as ts\n'), ((5159, 5230), 'torch.sqrt', 'torch.sqrt', (['(S_inv[0, 0] * mu0[0, 0] ** 2 + S_inv[1, 1] * mu0[0, 1] ** 2)'], {}), '(S_inv[0, 0] * mu0[0, 0] ** 2 + S_inv[1, 1] * mu0[0, 1] ** 2)\n', (5169, 5230), False, 'import torch\n'), ((5255, 5261), 'torch.tensor', 'ts', (['md'], {}), '(md)\n', (5257, 5261), True, 'import torch.tensor as ts\n'), ((8462, 8490), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gist_rainbow"""'], {}), "('gist_rainbow')\n", (8474, 8490), True, 'import matplotlib.pyplot as plt\n'), ((1550, 1578), 'torch.nn.Linear', 'nn.Linear', (['dim_fea', 'num_gaus'], {}), '(dim_fea, num_gaus)\n', (1559, 1578), True, 'import torch.nn as nn\n'), ((1592, 1609), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (1602, 1609), True, 'import torch.nn as nn\n'), ((1765, 1804), 'torch.nn.Linear', 'nn.Linear', (['dim_fea', '(dim_prob * num_gaus)'], 
{}), '(dim_fea, dim_prob * num_gaus)\n', (1774, 1804), True, 'import torch.nn as nn\n'), ((2804, 2832), 'torch.nn.Linear', 'nn.Linear', (['dim_fea', 'num_gaus'], {}), '(dim_fea, num_gaus)\n', (2813, 2832), True, 'import torch.nn as nn\n'), ((2846, 2863), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (2856, 2863), True, 'import torch.nn as nn\n'), ((3019, 3058), 'torch.nn.Linear', 'nn.Linear', (['dim_fea', '(dim_prob * num_gaus)'], {}), '(dim_fea, dim_prob * num_gaus)\n', (3028, 3058), True, 'import torch.nn as nn\n'), ((3947, 3986), 'torch.exp', 'torch.exp', (['(-((x - mu) / sigma) ** 2 / 2)'], {}), '(-((x - mu) / sigma) ** 2 / 2)\n', (3956, 3986), False, 'import torch\n'), ((9306, 9331), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['cntr'], {'ax': 'ax'}), '(cntr, ax=ax)\n', (9318, 9331), True, 'import matplotlib.pyplot as plt\n'), ((9670, 9772), 'matplotlib.patches.Ellipse', 'patches.Ellipse', (['mu[i, :]', '(nsigma * sigma[i, 0] + 8)', '(nsigma * sigma[i, 1] + 8)'], {'fc': 'fc', 'label': 'label'}), '(mu[i, :], nsigma * sigma[i, 0] + 8, nsigma * sigma[i, 1] + \n 8, fc=fc, label=label)\n', (9685, 9772), True, 'import matplotlib.patches as patches\n'), ((9823, 9916), 'matplotlib.patches.Ellipse', 'patches.Ellipse', (['mu[i, :]', '(nsigma * sigma[i, 0])', '(nsigma * sigma[i, 1])'], {'fc': 'fc', 'label': 'label'}), '(mu[i, :], nsigma * sigma[i, 0], nsigma * sigma[i, 1], fc=fc,\n label=label)\n', (9838, 9916), True, 'import matplotlib.patches as patches\n'), ((3920, 3945), 'torch.tensor', 'torch.tensor', (['(2 * math.pi)'], {}), '(2 * math.pi)\n', (3932, 3945), False, 'import torch\n'), ((5267, 5273), 'torch.tensor', 'ts', (['md'], {}), '(md)\n', (5269, 5273), True, 'import torch.tensor as ts\n'), ((7824, 7851), 'torch.tensor', 'ts', (['xy[i, :][np.newaxis, :]'], {}), '(xy[i, :][np.newaxis, :])\n', (7826, 7851), True, 'import torch.tensor as ts\n')] |
import io
from atws.wrapper import Wrapper
from django.core.management import call_command
from django.test import TestCase
from djautotask.tests import fixtures, mocks, fixture_utils
from djautotask import models
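# Helpers that build the summary strings the atsync command is expected to print.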
def sync_summary(class_name, created_count, updated_count=0):
return '{} Sync Summary - Created: {}, Updated: {}, Skipped: 0'.format(
class_name, created_count, updated_count
)
def full_sync_summary(class_name, deleted_count, updated_count=0):
return '{} Sync Summary - Created: 0, Updated: {}, Skipped: 0, ' \
'Deleted: {}'.format(class_name, updated_count, deleted_count)
def slug_to_title(slug):
return slug.title().replace('_', ' ')
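# Run the atsync management command (optionally with --full) and capture its output.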
def run_sync_command(full_option=False, command_name=None):
out = io.StringIO()
args = ['atsync']
if command_name:
args.append(command_name)
if full_option:
args.append('--full')
call_command(*args, stdout=out)
return out
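# Mixin for sync command tests that exercise the Autotask REST API mocks.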
class AbstractBaseSyncRestTest(object):
def _test_sync(self, mock_call, return_value, at_object,
full_option=False):
mock_call(return_value)
out = io.StringIO()
args = ['atsync', at_object]
if full_option:
args.append('--full')
call_command(*args, stdout=out)
return out
def _title_for_at_object(self, at_object):
return at_object.title().replace('_', ' ')
def test_sync(self):
out = self._test_sync(*self.args)
obj_title = self._title_for_at_object(self.args[-1])
self.assertIn(obj_title, out.getvalue().strip())
def test_full_sync(self):
self.test_sync()
mock_call, return_value, at_object = self.args
args = [
mock_call,
{
"items": [],
"pageDetails": fixtures.API_PAGE_DETAILS
},
at_object
]
out = self._test_sync(*args, full_option=True)
obj_label = self._title_for_at_object(at_object)
msg_tmpl = '{} Sync Summary - Created: 0, Updated: 0, Skipped: 0, ' \
'Deleted: {}'
msg = msg_tmpl.format(obj_label, len(return_value.get('items')))
self.assertEqual(msg, out.getvalue().strip())
class PicklistSyncTest(AbstractBaseSyncRestTest):
def test_full_sync(self):
self.test_sync()
mock_call, return_value, at_object = self.args
args = [
mock_call,
{
"fields": []
},
at_object
]
out = self._test_sync(*args, full_option=True)
obj_label = self._title_for_at_object(at_object)
msg_tmpl = '{} Sync Summary - Created: 0, Updated: 0, Skipped: 0, ' \
'Deleted: {}'
msg = msg_tmpl.format(
obj_label, len(return_value.get('fields')[0].get('picklistValues'))
)
self.assertEqual(msg, out.getvalue().strip())
class TestSyncContactCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_contacts_call,
fixtures.API_CONTACT,
'contact',
)
def setUp(self):
super().setUp()
fixture_utils.init_contacts()
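# Mixin for sync command tests that exercise the SOAP (atws) API mocks.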
class AbstractBaseSyncTest(object):
def setUp(self):
mocks.init_api_connection(Wrapper)
mocks.init_api_rest_connection()
def _title_for_at_object(self, at_object):
return at_object.title().replace('_', ' ')
def get_api_mock(self):
return mocks.api_query_call
def get_return_value(self, at_object, fixture_list):
return fixture_utils.generate_objects(
at_object.title().replace('_', ''), fixture_list)
def init_sync_command(self, fixture_list, at_object, full_option=False):
return_value = self.get_return_value(at_object, fixture_list)
api_call = self.get_api_mock()
api_call(return_value)
output = run_sync_command(full_option, at_object)
return output
def _test_sync(self):
out = self.init_sync_command(*self.args)
obj_title = self._title_for_at_object(self.args[-1])
self.assertIn(obj_title, out.getvalue().strip())
def test_full_sync(self):
out = self.init_sync_command(*self.args)
fixture_list, at_object = self.args
args = [
[],
at_object,
]
out = self.init_sync_command(*args, full_option=True)
obj_label = self._title_for_at_object(at_object)
msg_tmpl = '{} Sync Summary - Created: 0, Updated: 0, Skipped: 0, ' \
'Deleted: {}'
value_count = len(fixture_list)
msg = msg_tmpl.format(obj_label, value_count)
self.assertEqual(msg, out.getvalue().strip())
class AbstractPicklistSyncCommandTest(AbstractBaseSyncTest):
def get_return_value(self, at_object, fixture_list):
field_info = fixture_utils.generate_picklist_objects(
self.field_name, fixture_list)
return field_info
def get_api_mock(self):
return mocks.api_picklist_call
class TestSyncTicketCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_tickets_call,
fixtures.API_TICKET,
'ticket',
)
def setUp(self):
super().setUp()
fixture_utils.init_tickets()
class TestSyncStatusCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_ticket_picklist_call,
fixtures.API_STATUS_FIELD,
'status',
)
def setUp(self):
super().setUp()
fixture_utils.init_statuses()
class TestSyncPriorityCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_ticket_picklist_call,
fixtures.API_PRIORITY_FIELD,
'priority',
)
def setUp(self):
super().setUp()
fixture_utils.init_priorities()
class TestSyncQueueCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_ticket_picklist_call,
fixtures.API_QUEUE_FIELD,
'queue',
)
def setUp(self):
super().setUp()
fixture_utils.init_queues()
class TestSyncProjectStatusCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_project_picklist_call,
fixtures.API_PROJECT_STATUS_FIELD,
'project_status',
)
def setUp(self):
super().setUp()
fixture_utils.init_project_statuses()
class TestSyncProjectTypeCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_project_picklist_call,
fixtures.API_PROJECT_TYPE_FIELD,
'project_type',
)
def setUp(self):
super().setUp()
fixture_utils.init_project_types()
class TestSyncSourceCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_ticket_picklist_call,
fixtures.API_SOURCE_FIELD,
'source',
)
def setUp(self):
super().setUp()
fixture_utils.init_sources()
class TestSyncIssueTypeCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_ticket_picklist_call,
fixtures.API_ISSUE_TYPE_FIELD,
'issue_type',
)
def setUp(self):
super().setUp()
fixture_utils.init_issue_types()
class TestSyncSubIssueTypeCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_ticket_picklist_call,
fixtures.API_SUB_ISSUE_TYPE_FIELD,
'sub_issue_type',
)
def setUp(self):
super().setUp()
fixture_utils.init_issue_types()
fixture_utils.init_sub_issue_types()
class TestSyncTicketTypeCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_ticket_picklist_call,
fixtures.API_TICKET_TYPE_FIELD,
'ticket_type',
)
def setUp(self):
super().setUp()
fixture_utils.init_ticket_types()
class TestSyncAccountTypeCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_account_types_call,
fixtures.API_ACCOUNT_TYPE_FIELD,
'account_type',
)
def setUp(self):
super().setUp()
fixture_utils.init_account_types()
class TestSyncServiceCallStatusCommand(PicklistSyncTest,
TestCase):
args = (
mocks.service_api_get_service_call_statuses_call,
fixtures.API_SERVICE_CALL_STATUS_FIELD,
'service_call_status',
)
def setUp(self):
super().setUp()
fixture_utils.init_service_call_statuses()
class TestSyncDisplayColorCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_ticket_category_picklist_call,
fixtures.API_DISPLAY_COLOR_FIELD,
'display_color',
)
def setUp(self):
super().setUp()
fixture_utils.init_display_colors()
class TestSyncLicenseTypeCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_license_types_call,
fixtures.API_LICENSE_TYPE_FIELD,
'license_type',
)
def setUp(self):
super().setUp()
fixture_utils.init_license_types()
class TestSyncTaskTypeLinkCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_task_type_links_call,
fixtures.API_TASK_TYPE_LINK_FIELD,
'task_type_link',
)
def setUp(self):
super().setUp()
fixture_utils.init_task_type_links()
class TestSyncUseTypeCommand(PicklistSyncTest, TestCase):
args = (
mocks.service_api_get_use_types_call,
fixtures.API_USE_TYPE_FIELD,
'use_type',
)
def setUp(self):
super().setUp()
fixture_utils.init_use_types()
class TestSyncTicketCategoryCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_ticket_categories_call,
fixtures.API_TICKET_CATEGORY,
'ticket_category',
)
def setUp(self):
super().setUp()
fixture_utils.init_ticket_categories()
class TestSyncResourceCommand(AbstractBaseSyncTest, TestCase):
args = (
fixtures.API_RESOURCE_LIST,
'resource',
)
class TestSyncTicketSecondaryResourceCommand(AbstractBaseSyncTest, TestCase):
args = (
fixtures.API_SECONDARY_RESOURCE_LIST,
'ticket_secondary_resource',
)
class TestSyncAccountCommand(AbstractBaseSyncTest, TestCase):
args = (
fixtures.API_ACCOUNT_LIST,
'account',
)
class TestSyncAccountLocationCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_account_physical_locations_call,
fixtures.API_ACCOUNT_PHYSICAL_LOCATION,
'account_physical_location',
)
def setUp(self):
super().setUp()
fixture_utils.init_accounts()
fixture_utils.init_account_physical_locations()
class TestSyncProjectCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_projects_call,
fixtures.API_PROJECT,
'project',
)
def setUp(self):
super().setUp()
fixture_utils.init_projects()
class TestSyncPhaseCommand(AbstractBaseSyncTest, TestCase):
args = (
fixtures.API_PHASE_LIST,
'phase',
)
class TestSyncTaskCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_tasks_call,
fixtures.API_TASK,
'task',
)
def setUp(self):
super().setUp()
fixture_utils.init_projects()
fixture_utils.init_tasks()
class TestSyncTaskSecondaryResourceCommand(AbstractBaseSyncTest, TestCase):
args = (
fixtures.API_TASK_SECONDARY_RESOURCE_LIST,
'task_secondary_resource',
)
class TestSyncTicketNoteCommand(AbstractBaseSyncTest, TestCase):
args = (
fixtures.API_TICKET_NOTE_LIST,
'ticket_note',
)
def setUp(self):
super().setUp()
fixture_utils.init_tickets()
fixture_utils.init_ticket_notes()
class TestSyncTaskNoteCommand(AbstractBaseSyncTest, TestCase):
args = (
fixtures.API_TASK_NOTE_LIST,
'task_note',
)
def setUp(self):
super().setUp()
fixture_utils.init_projects()
fixture_utils.init_tasks()
fixture_utils.init_task_notes()
class TestSyncTimeEntryCommand(AbstractBaseSyncTest, TestCase):
args = (
fixtures.API_TIME_ENTRY_LIST,
'time_entry',
)
def setUp(self):
super().setUp()
fixture_utils.init_tickets()
class TestSyncAllocationCodeCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_allocation_codes_call,
fixtures.API_ALLOCATION_CODE,
'allocation_code',
)
def setUp(self):
super().setUp()
fixture_utils.init_allocation_codes()
class TestSyncRoleCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_roles_call,
fixtures.API_ROLE,
'role',
)
def setUp(self):
super().setUp()
fixture_utils.init_roles()
class TestSyncDepartmentCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_departments_call,
fixtures.API_DEPARTMENT,
'department',
)
def setUp(self):
super().setUp()
fixture_utils.init_departments()
class TestSyncResourceServiceDeskRoleCommand(AbstractBaseSyncRestTest,
TestCase):
args = (
mocks.service_api_get_resource_service_desk_roles_call,
fixtures.API_RESOURCE_SERVICE_DESK_ROLE,
'resource_service_desk_role',
)
def setUp(self):
super().setUp()
fixture_utils.init_roles()
fixture_utils.init_resources()
fixture_utils.init_resource_service_desk_roles()
class TestSyncResourceRoleDepartmentCommand(AbstractBaseSyncRestTest,
TestCase):
args = (
mocks.service_api_get_resource_role_departments_call,
fixtures.API_RESOURCE_ROLE_DEPARTMENT,
'resource_role_department',
)
def setUp(self):
super().setUp()
fixture_utils.init_departments()
fixture_utils.init_roles()
fixture_utils.init_resources()
fixture_utils.init_resource_role_departments()
class TestSyncContractCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_contracts_call,
fixtures.API_CONTRACT,
'contract',
)
def setUp(self):
super().setUp()
fixture_utils.init_contracts()
class TestSyncServiceCallCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_service_calls_call,
fixtures.API_SERVICE_CALL,
'service_call',
)
def setUp(self):
super().setUp()
fixture_utils.init_service_call_statuses()
fixture_utils.init_resources()
fixture_utils.init_account_types()
fixture_utils.init_accounts()
class TestSyncServiceCallTicketCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_service_call_tickets_call,
fixtures.API_SERVICE_CALL_TICKET,
'service_call_ticket',
)
def setUp(self):
super().setUp()
fixture_utils.init_service_call_statuses()
fixture_utils.init_resources()
fixture_utils.init_account_types()
fixture_utils.init_accounts()
fixture_utils.init_service_calls()
fixture_utils.init_statuses()
fixture_utils.init_tickets()
class TestSyncServiceCallTaskCommand(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_service_call_tasks_call,
fixtures.API_SERVICE_CALL_TASK,
'service_call_task',
)
def setUp(self):
super().setUp()
fixture_utils.init_service_call_statuses()
fixture_utils.init_account_types()
fixture_utils.init_accounts()
fixture_utils.init_service_calls()
fixture_utils.init_statuses()
fixture_utils.init_projects()
fixture_utils.init_tasks()
class TestSyncServiceCallTicketResourceCommand(AbstractBaseSyncRestTest,
TestCase):
args = (
mocks.service_api_get_service_call_ticket_resources_call,
fixtures.API_SERVICE_CALL_TICKET_RESOURCE,
'service_call_ticket_resource',
)
def setUp(self):
super().setUp()
fixture_utils.init_service_call_statuses()
fixture_utils.init_resources()
fixture_utils.init_account_types()
fixture_utils.init_accounts()
fixture_utils.init_service_calls()
fixture_utils.init_statuses()
fixture_utils.init_tickets()
fixture_utils.init_service_call_tickets()
class TestSyncServiceCallTaskResourceCommand(AbstractBaseSyncRestTest,
TestCase):
args = (
mocks.service_api_get_service_call_task_resources_call,
fixtures.API_SERVICE_CALL_TASK_RESOURCE,
'service_call_task_resource',
)
def setUp(self):
super().setUp()
fixture_utils.init_service_call_statuses()
fixture_utils.init_resources()
fixture_utils.init_account_types()
fixture_utils.init_accounts()
fixture_utils.init_service_calls()
fixture_utils.init_statuses()
fixture_utils.init_projects()
fixture_utils.init_tasks()
fixture_utils.init_service_call_tasks()
class TestSyncTaskPredecessor(AbstractBaseSyncRestTest, TestCase):
args = (
mocks.service_api_get_task_predecessors_call,
fixtures.API_TASK_PREDECESSOR,
'task_predecessor',
)
def setUp(self):
super().setUp()
fixture_utils.init_projects()
fixture_utils.init_tasks()
fixture_utils.init_task_predecessors()
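# End-to-end test: running atsync with no object argument syncs every entity at once.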
class TestSyncAllCommand(TestCase):
def setUp(self):
super().setUp()
mocks.init_api_connection(Wrapper)
mocks.create_mock_call(
'djautotask.sync.TicketNoteSynchronizer._get_query_conditions',
None
)
mocks.create_mock_call(
'djautotask.sync.TaskNoteSynchronizer._get_query_conditions',
None
)
fixture_utils.mock_udfs()
self._call_service_api()
# Mock API calls to return values based on what entity
# is being requested
mocks.get_field_info_api_calls(
fixture_utils.manage_sync_picklist_return_data
)
mocks.wrapper_query_api_calls(
fixture_utils.manage_full_sync_return_data
)
sync_test_cases = [
TestSyncLicenseTypeCommand,
TestSyncTaskTypeLinkCommand,
TestSyncUseTypeCommand,
TestSyncAccountTypeCommand,
TestSyncRoleCommand,
TestSyncDepartmentCommand,
TestSyncTicketCommand,
TestSyncTaskCommand,
TestSyncStatusCommand,
TestSyncResourceCommand,
TestSyncPriorityCommand,
TestSyncQueueCommand,
TestSyncAccountCommand,
TestSyncProjectCommand,
TestSyncProjectStatusCommand,
TestSyncProjectTypeCommand,
TestSyncTicketCategoryCommand,
TestSyncSourceCommand,
TestSyncIssueTypeCommand,
TestSyncSubIssueTypeCommand,
TestSyncTicketTypeCommand,
TestSyncDisplayColorCommand,
TestSyncTaskSecondaryResourceCommand,
TestSyncPhaseCommand,
TestSyncTicketNoteCommand,
TestSyncTaskNoteCommand,
TestSyncTimeEntryCommand,
TestSyncAllocationCodeCommand,
TestSyncResourceRoleDepartmentCommand,
TestSyncResourceServiceDeskRoleCommand,
TestSyncContractCommand,
TestSyncServiceCallStatusCommand,
TestSyncServiceCallCommand,
TestSyncServiceCallTicketCommand,
TestSyncServiceCallTaskCommand,
TestSyncServiceCallTicketResourceCommand,
TestSyncServiceCallTaskResourceCommand,
TestSyncAccountLocationCommand,
TestSyncTaskPredecessor,
TestSyncContactCommand,
]
self.test_args = []
for test_case in sync_test_cases:
# for REST API
if len(test_case.args) == 3:
self.test_args.append(test_case.args)
# for SOAP API
else:
new_test_case = [None, *test_case.args]
self.test_args.append(new_test_case)
def test_partial_sync(self):
"""
Test the command to run a sync of all objects without
the --full argument.
"""
output = run_sync_command()
for mock_call, fixture, at_object in self.test_args:
if mock_call:
if 'fields' in fixture:
fixture_len = \
len(fixture.get('fields')[0].get('picklistValues'))
else:
fixture_len = len(fixture.get('items'))
else:
fixture_len = len(fixture)
summary = sync_summary(slug_to_title(at_object), fixture_len)
self.assertIn(summary, output.getvalue().strip())
self.assertEqual(
models.Ticket.objects.all().count(),
len(fixtures.API_TICKET['items'])
)
def test_full_sync(self):
"""Test the command to run a full sync of all objects."""
at_object_map = {
'account_type': models.AccountType,
'role': models.Role,
'department': models.Department,
'status': models.Status,
'priority': models.Priority,
'queue': models.Queue,
'source': models.Source,
'issue_type': models.IssueType,
'display_color': models.DisplayColor,
'ticket': models.Ticket,
'resource': models.Resource,
'ticket_secondary_resource': models.TicketSecondaryResource,
'account': models.Account,
'account_physical_location': models.AccountPhysicalLocation,
'project': models.Project,
'project_status': models.ProjectStatus,
'project_type': models.ProjectType,
'ticket_category': models.TicketCategory,
'sub_issue_type': models.SubIssueType,
'ticket_type': models.TicketType,
'license_type': models.LicenseType,
'task': models.Task,
'task_secondary_resource': models.TaskSecondaryResource,
'phase': models.Phase,
'ticket_note': models.TicketNote,
'task_note': models.TaskNote,
'time_entry': models.TimeEntry,
'task_type_link': models.TaskTypeLink,
'use_type': models.UseType,
'allocation_code': models.AllocationCode,
'resource_role_department': models.ResourceRoleDepartment,
'resource_service_desk_role': models.ResourceServiceDeskRole,
'contract': models.Contract,
'service_call_status': models.ServiceCallStatus,
'service_call': models.ServiceCall,
'service_call_ticket': models.ServiceCallTicket,
'service_call_task': models.ServiceCallTask,
'service_call_ticket_resource': models.ServiceCallTicketResource,
'service_call_task_resource': models.ServiceCallTaskResource,
'task_predecessor': models.TaskPredecessor,
'contact': models.Contact,
}
run_sync_command()
pre_full_sync_counts = {}
mocks.wrapper_query_api_calls()
mocks.get_field_info_api_calls()
_, _patch = mocks.build_batch_query()
self._call_empty_service_api()
for key, model_class in at_object_map.items():
pre_full_sync_counts[key] = model_class.objects.all().count()
output = run_sync_command(full_option=True)
_patch.stop()
        # Verify the summaries for the rest of the sync classes.
for mock_call, fixture, at_object in self.test_args:
if at_object in (
'resource_role_department',
'resource_service_desk_role',
'service_call',
'service_call_ticket',
'service_call_task',
'service_call_ticket_resource',
'service_call_task_resource',
'task_predecessor',
'task'
):
                # Assert that there were objects to delete, then reset the
                # count to zero to verify that the output formats correctly.
                # We are only testing the command here; the sync tests verify
                # that the synchronizers themselves work correctly.
self.assertGreater(pre_full_sync_counts[at_object], 0)
pre_full_sync_counts[at_object] = 0
summary = full_sync_summary(
slug_to_title(at_object),
pre_full_sync_counts[at_object]
)
self.assertIn(summary, output.getvalue().strip())
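    # Register service API mocks that return the fixture payloads for every synced endpoint.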
def _call_service_api(self):
mocks.service_api_get_roles_call(fixtures.API_ROLE)
mocks.service_api_get_departments_call(fixtures.API_DEPARTMENT)
mocks.service_api_get_resource_service_desk_roles_call(
fixtures.API_RESOURCE_SERVICE_DESK_ROLE)
mocks.service_api_get_resource_role_departments_call(
fixtures.API_RESOURCE_ROLE_DEPARTMENT)
mocks.service_api_get_license_types_call(
fixtures.API_LICENSE_TYPE_FIELD)
mocks.service_api_get_use_types_call(fixtures.API_USE_TYPE_FIELD)
mocks.service_api_get_task_type_links_call(
fixtures.API_TASK_TYPE_LINK_FIELD)
mocks.service_api_get_account_types_call(
fixtures.API_ACCOUNT_TYPE_FIELD)
mocks.service_api_get_ticket_category_picklist_call(
fixtures.API_DISPLAY_COLOR_FIELD)
mocks.service_api_get_ticket_picklist_call(
fixtures.API_TICKET_PICKLIST_FIELD)
mocks.service_api_get_project_picklist_call(
fixtures.API_PROJECT_PICKLIST_FIELD)
mocks.service_api_get_service_call_statuses_call(
fixtures.API_SERVICE_CALL_STATUS_FIELD)
mocks.service_api_get_contacts_call(fixtures.API_CONTACT)
mocks.service_api_get_contracts_call(fixtures.API_CONTRACT)
mocks.service_api_get_allocation_codes_call(
fixtures.API_ALLOCATION_CODE)
mocks.service_api_get_account_physical_locations_call(
fixtures.API_ACCOUNT_PHYSICAL_LOCATION)
mocks.service_api_get_ticket_categories_call(
fixtures.API_TICKET_CATEGORY)
mocks.service_api_get_tickets_call(fixtures.API_TICKET)
mocks.service_api_get_tasks_call(fixtures.API_TASK)
mocks.service_api_get_projects_call(fixtures.API_PROJECT)
mocks.service_api_get_service_calls_call(fixtures.API_SERVICE_CALL)
mocks.service_api_get_service_call_tickets_call(
fixtures.API_SERVICE_CALL_TICKET)
mocks.service_api_get_service_call_ticket_resources_call(
fixtures.API_SERVICE_CALL_TICKET_RESOURCE)
mocks.service_api_get_service_call_tasks_call(
fixtures.API_SERVICE_CALL_TASK)
mocks.service_api_get_service_call_task_resources_call(
fixtures.API_SERVICE_CALL_TASK_RESOURCE)
mocks.service_api_get_task_predecessors_call(
fixtures.API_TASK_PREDECESSOR)
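    # Register mocks that return empty payloads so a full sync deletes previously synced records.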
def _call_empty_service_api(self):
mocks.service_api_get_contacts_call(fixtures.API_EMPTY)
mocks.service_api_get_contracts_call(fixtures.API_EMPTY)
mocks.service_api_get_allocation_codes_call(fixtures.API_EMPTY)
mocks.service_api_get_account_physical_locations_call(
fixtures.API_EMPTY)
mocks.service_api_get_tickets_call(fixtures.API_EMPTY)
mocks.service_api_get_tasks_call(fixtures.API_EMPTY)
mocks.service_api_get_projects_call(fixtures.API_EMPTY)
mocks.service_api_get_ticket_categories_call(fixtures.API_EMPTY)
mocks.service_api_get_task_predecessors_call(fixtures.API_EMPTY)
mocks.service_api_get_roles_call(fixtures.API_EMPTY)
mocks.service_api_get_departments_call(fixtures.API_EMPTY)
mocks.service_api_get_resource_service_desk_roles_call(
fixtures.API_EMPTY)
mocks.service_api_get_resource_role_departments_call(
fixtures.API_EMPTY)
mocks.service_api_get_service_calls_call(fixtures.API_EMPTY)
mocks.service_api_get_service_call_tickets_call(fixtures.API_EMPTY)
mocks.service_api_get_service_call_ticket_resources_call(
fixtures.API_EMPTY)
mocks.service_api_get_service_call_tasks_call(fixtures.API_EMPTY)
mocks.service_api_get_service_call_task_resources_call(
fixtures.API_EMPTY)
mocks.service_api_get_ticket_category_picklist_call({"fields": []})
mocks.service_api_get_ticket_picklist_call({"fields": []})
mocks.service_api_get_project_picklist_call({"fields": []})
mocks.service_api_get_license_types_call({"fields": []})
mocks.service_api_get_use_types_call({"fields": []})
mocks.service_api_get_task_type_links_call({"fields": []})
mocks.service_api_get_account_types_call({"fields": []})
mocks.service_api_get_service_call_statuses_call({"fields": []})
| [
"djautotask.tests.fixture_utils.init_service_calls",
"djautotask.tests.fixture_utils.init_account_types",
"djautotask.tests.mocks.service_api_get_use_types_call",
"djautotask.tests.fixture_utils.init_use_types",
"djautotask.tests.fixture_utils.init_accounts",
"djautotask.tests.fixture_utils.init_resource_... | [((761, 774), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (772, 774), False, 'import io\n'), ((909, 940), 'django.core.management.call_command', 'call_command', (['*args'], {'stdout': 'out'}), '(*args, stdout=out)\n', (921, 940), False, 'from django.core.management import call_command\n'), ((1146, 1159), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1157, 1159), False, 'import io\n'), ((1264, 1295), 'django.core.management.call_command', 'call_command', (['*args'], {'stdout': 'out'}), '(*args, stdout=out)\n', (1276, 1295), False, 'from django.core.management import call_command\n'), ((3176, 3205), 'djautotask.tests.fixture_utils.init_contacts', 'fixture_utils.init_contacts', ([], {}), '()\n', (3203, 3205), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((3274, 3308), 'djautotask.tests.mocks.init_api_connection', 'mocks.init_api_connection', (['Wrapper'], {}), '(Wrapper)\n', (3299, 3308), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((3317, 3349), 'djautotask.tests.mocks.init_api_rest_connection', 'mocks.init_api_rest_connection', ([], {}), '()\n', (3347, 3349), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((4891, 4961), 'djautotask.tests.fixture_utils.generate_picklist_objects', 'fixture_utils.generate_picklist_objects', (['self.field_name', 'fixture_list'], {}), '(self.field_name, fixture_list)\n', (4930, 4961), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((5301, 5329), 'djautotask.tests.fixture_utils.init_tickets', 'fixture_utils.init_tickets', ([], {}), '()\n', (5327, 5329), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((5567, 5596), 'djautotask.tests.fixture_utils.init_statuses', 'fixture_utils.init_statuses', ([], {}), '()\n', (5594, 5596), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((5840, 5871), 'djautotask.tests.fixture_utils.init_priorities', 'fixture_utils.init_priorities', ([], {}), '()\n', (5869, 5871), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((6106, 6133), 'djautotask.tests.fixture_utils.init_queues', 'fixture_utils.init_queues', ([], {}), '()\n', (6131, 6133), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((6395, 6432), 'djautotask.tests.fixture_utils.init_project_statuses', 'fixture_utils.init_project_statuses', ([], {}), '()\n', (6430, 6432), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((6688, 6722), 'djautotask.tests.fixture_utils.init_project_types', 'fixture_utils.init_project_types', ([], {}), '()\n', (6720, 6722), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((6960, 6988), 'djautotask.tests.fixture_utils.init_sources', 'fixture_utils.init_sources', ([], {}), '()\n', (6986, 6988), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((7237, 7269), 'djautotask.tests.fixture_utils.init_issue_types', 'fixture_utils.init_issue_types', ([], {}), '()\n', (7267, 7269), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((7529, 7561), 'djautotask.tests.fixture_utils.init_issue_types', 'fixture_utils.init_issue_types', ([], {}), '()\n', (7559, 7561), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((7570, 7606), 'djautotask.tests.fixture_utils.init_sub_issue_types', 'fixture_utils.init_sub_issue_types', ([], {}), '()\n', (7604, 7606), False, 'from djautotask.tests import 
fixtures, mocks, fixture_utils\n'), ((7858, 7891), 'djautotask.tests.fixture_utils.init_ticket_types', 'fixture_utils.init_ticket_types', ([], {}), '()\n', (7889, 7891), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((8144, 8178), 'djautotask.tests.fixture_utils.init_account_types', 'fixture_utils.init_account_types', ([], {}), '()\n', (8176, 8178), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((8498, 8540), 'djautotask.tests.fixture_utils.init_service_call_statuses', 'fixture_utils.init_service_call_statuses', ([], {}), '()\n', (8538, 8540), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((8807, 8842), 'djautotask.tests.fixture_utils.init_display_colors', 'fixture_utils.init_display_colors', ([], {}), '()\n', (8840, 8842), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((9095, 9129), 'djautotask.tests.fixture_utils.init_license_types', 'fixture_utils.init_license_types', ([], {}), '()\n', (9127, 9129), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((9389, 9425), 'djautotask.tests.fixture_utils.init_task_type_links', 'fixture_utils.init_task_type_links', ([], {}), '()\n', (9423, 9425), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((9662, 9692), 'djautotask.tests.fixture_utils.init_use_types', 'fixture_utils.init_use_types', ([], {}), '()\n', (9690, 9692), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((9960, 9998), 'djautotask.tests.fixture_utils.init_ticket_categories', 'fixture_utils.init_ticket_categories', ([], {}), '()\n', (9996, 9998), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((10755, 10784), 'djautotask.tests.fixture_utils.init_accounts', 'fixture_utils.init_accounts', ([], {}), '()\n', (10782, 10784), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((10793, 10840), 'djautotask.tests.fixture_utils.init_account_physical_locations', 'fixture_utils.init_account_physical_locations', ([], {}), '()\n', (10838, 10840), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((11076, 11105), 'djautotask.tests.fixture_utils.init_projects', 'fixture_utils.init_projects', ([], {}), '()\n', (11103, 11105), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((11460, 11489), 'djautotask.tests.fixture_utils.init_projects', 'fixture_utils.init_projects', ([], {}), '()\n', (11487, 11489), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((11498, 11524), 'djautotask.tests.fixture_utils.init_tasks', 'fixture_utils.init_tasks', ([], {}), '()\n', (11522, 11524), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((11910, 11938), 'djautotask.tests.fixture_utils.init_tickets', 'fixture_utils.init_tickets', ([], {}), '()\n', (11936, 11938), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((11947, 11980), 'djautotask.tests.fixture_utils.init_ticket_notes', 'fixture_utils.init_ticket_notes', ([], {}), '()\n', (11978, 11980), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((12177, 12206), 'djautotask.tests.fixture_utils.init_projects', 'fixture_utils.init_projects', ([], {}), '()\n', (12204, 12206), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((12215, 12241), 'djautotask.tests.fixture_utils.init_tasks', 'fixture_utils.init_tasks', ([], {}), '()\n', (12239, 12241), False, 'from djautotask.tests import 
fixtures, mocks, fixture_utils\n'), ((12250, 12281), 'djautotask.tests.fixture_utils.init_task_notes', 'fixture_utils.init_task_notes', ([], {}), '()\n', (12279, 12281), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((12481, 12509), 'djautotask.tests.fixture_utils.init_tickets', 'fixture_utils.init_tickets', ([], {}), '()\n', (12507, 12509), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((12776, 12813), 'djautotask.tests.fixture_utils.init_allocation_codes', 'fixture_utils.init_allocation_codes', ([], {}), '()\n', (12811, 12813), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((13037, 13063), 'djautotask.tests.fixture_utils.init_roles', 'fixture_utils.init_roles', ([], {}), '()\n', (13061, 13063), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((13311, 13343), 'djautotask.tests.fixture_utils.init_departments', 'fixture_utils.init_departments', ([], {}), '()\n', (13341, 13343), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((13697, 13723), 'djautotask.tests.fixture_utils.init_roles', 'fixture_utils.init_roles', ([], {}), '()\n', (13721, 13723), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((13732, 13762), 'djautotask.tests.fixture_utils.init_resources', 'fixture_utils.init_resources', ([], {}), '()\n', (13760, 13762), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((13771, 13819), 'djautotask.tests.fixture_utils.init_resource_service_desk_roles', 'fixture_utils.init_resource_service_desk_roles', ([], {}), '()\n', (13817, 13819), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((14165, 14197), 'djautotask.tests.fixture_utils.init_departments', 'fixture_utils.init_departments', ([], {}), '()\n', (14195, 14197), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((14206, 14232), 'djautotask.tests.fixture_utils.init_roles', 'fixture_utils.init_roles', ([], {}), '()\n', (14230, 14232), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((14241, 14271), 'djautotask.tests.fixture_utils.init_resources', 'fixture_utils.init_resources', ([], {}), '()\n', (14269, 14271), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((14280, 14326), 'djautotask.tests.fixture_utils.init_resource_role_departments', 'fixture_utils.init_resource_role_departments', ([], {}), '()\n', (14324, 14326), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((14566, 14596), 'djautotask.tests.fixture_utils.init_contracts', 'fixture_utils.init_contracts', ([], {}), '()\n', (14594, 14596), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((14851, 14893), 'djautotask.tests.fixture_utils.init_service_call_statuses', 'fixture_utils.init_service_call_statuses', ([], {}), '()\n', (14891, 14893), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((14902, 14932), 'djautotask.tests.fixture_utils.init_resources', 'fixture_utils.init_resources', ([], {}), '()\n', (14930, 14932), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((14941, 14975), 'djautotask.tests.fixture_utils.init_account_types', 'fixture_utils.init_account_types', ([], {}), '()\n', (14973, 14975), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((14984, 15013), 'djautotask.tests.fixture_utils.init_accounts', 'fixture_utils.init_accounts', ([], {}), '()\n', (15011, 15013), False, 'from 
djautotask.tests import fixtures, mocks, fixture_utils\n'), ((15295, 15337), 'djautotask.tests.fixture_utils.init_service_call_statuses', 'fixture_utils.init_service_call_statuses', ([], {}), '()\n', (15335, 15337), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((15346, 15376), 'djautotask.tests.fixture_utils.init_resources', 'fixture_utils.init_resources', ([], {}), '()\n', (15374, 15376), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((15385, 15419), 'djautotask.tests.fixture_utils.init_account_types', 'fixture_utils.init_account_types', ([], {}), '()\n', (15417, 15419), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((15428, 15457), 'djautotask.tests.fixture_utils.init_accounts', 'fixture_utils.init_accounts', ([], {}), '()\n', (15455, 15457), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((15466, 15500), 'djautotask.tests.fixture_utils.init_service_calls', 'fixture_utils.init_service_calls', ([], {}), '()\n', (15498, 15500), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((15509, 15538), 'djautotask.tests.fixture_utils.init_statuses', 'fixture_utils.init_statuses', ([], {}), '()\n', (15536, 15538), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((15547, 15575), 'djautotask.tests.fixture_utils.init_tickets', 'fixture_utils.init_tickets', ([], {}), '()\n', (15573, 15575), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((15849, 15891), 'djautotask.tests.fixture_utils.init_service_call_statuses', 'fixture_utils.init_service_call_statuses', ([], {}), '()\n', (15889, 15891), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((15900, 15934), 'djautotask.tests.fixture_utils.init_account_types', 'fixture_utils.init_account_types', ([], {}), '()\n', (15932, 15934), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((15943, 15972), 'djautotask.tests.fixture_utils.init_accounts', 'fixture_utils.init_accounts', ([], {}), '()\n', (15970, 15972), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((15981, 16015), 'djautotask.tests.fixture_utils.init_service_calls', 'fixture_utils.init_service_calls', ([], {}), '()\n', (16013, 16015), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((16024, 16053), 'djautotask.tests.fixture_utils.init_statuses', 'fixture_utils.init_statuses', ([], {}), '()\n', (16051, 16053), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((16062, 16091), 'djautotask.tests.fixture_utils.init_projects', 'fixture_utils.init_projects', ([], {}), '()\n', (16089, 16091), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((16100, 16126), 'djautotask.tests.fixture_utils.init_tasks', 'fixture_utils.init_tasks', ([], {}), '()\n', (16124, 16126), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((16490, 16532), 'djautotask.tests.fixture_utils.init_service_call_statuses', 'fixture_utils.init_service_call_statuses', ([], {}), '()\n', (16530, 16532), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((16541, 16571), 'djautotask.tests.fixture_utils.init_resources', 'fixture_utils.init_resources', ([], {}), '()\n', (16569, 16571), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((16580, 16614), 'djautotask.tests.fixture_utils.init_account_types', 'fixture_utils.init_account_types', ([], {}), '()\n', (16612, 
16614), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((16623, 16652), 'djautotask.tests.fixture_utils.init_accounts', 'fixture_utils.init_accounts', ([], {}), '()\n', (16650, 16652), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((16661, 16695), 'djautotask.tests.fixture_utils.init_service_calls', 'fixture_utils.init_service_calls', ([], {}), '()\n', (16693, 16695), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((16704, 16733), 'djautotask.tests.fixture_utils.init_statuses', 'fixture_utils.init_statuses', ([], {}), '()\n', (16731, 16733), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((16742, 16770), 'djautotask.tests.fixture_utils.init_tickets', 'fixture_utils.init_tickets', ([], {}), '()\n', (16768, 16770), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((16779, 16820), 'djautotask.tests.fixture_utils.init_service_call_tickets', 'fixture_utils.init_service_call_tickets', ([], {}), '()\n', (16818, 16820), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17174, 17216), 'djautotask.tests.fixture_utils.init_service_call_statuses', 'fixture_utils.init_service_call_statuses', ([], {}), '()\n', (17214, 17216), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17225, 17255), 'djautotask.tests.fixture_utils.init_resources', 'fixture_utils.init_resources', ([], {}), '()\n', (17253, 17255), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17264, 17298), 'djautotask.tests.fixture_utils.init_account_types', 'fixture_utils.init_account_types', ([], {}), '()\n', (17296, 17298), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17307, 17336), 'djautotask.tests.fixture_utils.init_accounts', 'fixture_utils.init_accounts', ([], {}), '()\n', (17334, 17336), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17345, 17379), 'djautotask.tests.fixture_utils.init_service_calls', 'fixture_utils.init_service_calls', ([], {}), '()\n', (17377, 17379), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17388, 17417), 'djautotask.tests.fixture_utils.init_statuses', 'fixture_utils.init_statuses', ([], {}), '()\n', (17415, 17417), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17426, 17455), 'djautotask.tests.fixture_utils.init_projects', 'fixture_utils.init_projects', ([], {}), '()\n', (17453, 17455), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17464, 17490), 'djautotask.tests.fixture_utils.init_tasks', 'fixture_utils.init_tasks', ([], {}), '()\n', (17488, 17490), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17499, 17538), 'djautotask.tests.fixture_utils.init_service_call_tasks', 'fixture_utils.init_service_call_tasks', ([], {}), '()\n', (17536, 17538), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17802, 17831), 'djautotask.tests.fixture_utils.init_projects', 'fixture_utils.init_projects', ([], {}), '()\n', (17829, 17831), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17840, 17866), 'djautotask.tests.fixture_utils.init_tasks', 'fixture_utils.init_tasks', ([], {}), '()\n', (17864, 17866), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((17875, 17913), 'djautotask.tests.fixture_utils.init_task_predecessors', 'fixture_utils.init_task_predecessors', ([], {}), '()\n', 
(17911, 17913), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((18006, 18040), 'djautotask.tests.mocks.init_api_connection', 'mocks.init_api_connection', (['Wrapper'], {}), '(Wrapper)\n', (18031, 18040), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((18049, 18146), 'djautotask.tests.mocks.create_mock_call', 'mocks.create_mock_call', (['"""djautotask.sync.TicketNoteSynchronizer._get_query_conditions"""', 'None'], {}), "(\n 'djautotask.sync.TicketNoteSynchronizer._get_query_conditions', None)\n", (18071, 18146), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((18184, 18279), 'djautotask.tests.mocks.create_mock_call', 'mocks.create_mock_call', (['"""djautotask.sync.TaskNoteSynchronizer._get_query_conditions"""', 'None'], {}), "(\n 'djautotask.sync.TaskNoteSynchronizer._get_query_conditions', None)\n", (18206, 18279), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((18317, 18342), 'djautotask.tests.fixture_utils.mock_udfs', 'fixture_utils.mock_udfs', ([], {}), '()\n', (18340, 18342), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((18477, 18555), 'djautotask.tests.mocks.get_field_info_api_calls', 'mocks.get_field_info_api_calls', (['fixture_utils.manage_sync_picklist_return_data'], {}), '(fixture_utils.manage_sync_picklist_return_data)\n', (18507, 18555), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((18586, 18659), 'djautotask.tests.mocks.wrapper_query_api_calls', 'mocks.wrapper_query_api_calls', (['fixture_utils.manage_full_sync_return_data'], {}), '(fixture_utils.manage_full_sync_return_data)\n', (18615, 18659), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((23764, 23795), 'djautotask.tests.mocks.wrapper_query_api_calls', 'mocks.wrapper_query_api_calls', ([], {}), '()\n', (23793, 23795), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((23804, 23836), 'djautotask.tests.mocks.get_field_info_api_calls', 'mocks.get_field_info_api_calls', ([], {}), '()\n', (23834, 23836), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((23857, 23882), 'djautotask.tests.mocks.build_batch_query', 'mocks.build_batch_query', ([], {}), '()\n', (23880, 23882), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((25327, 25378), 'djautotask.tests.mocks.service_api_get_roles_call', 'mocks.service_api_get_roles_call', (['fixtures.API_ROLE'], {}), '(fixtures.API_ROLE)\n', (25359, 25378), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((25387, 25450), 'djautotask.tests.mocks.service_api_get_departments_call', 'mocks.service_api_get_departments_call', (['fixtures.API_DEPARTMENT'], {}), '(fixtures.API_DEPARTMENT)\n', (25425, 25450), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((25459, 25559), 'djautotask.tests.mocks.service_api_get_resource_service_desk_roles_call', 'mocks.service_api_get_resource_service_desk_roles_call', (['fixtures.API_RESOURCE_SERVICE_DESK_ROLE'], {}), '(fixtures.\n API_RESOURCE_SERVICE_DESK_ROLE)\n', (25513, 25559), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((25576, 25672), 'djautotask.tests.mocks.service_api_get_resource_role_departments_call', 'mocks.service_api_get_resource_role_departments_call', (['fixtures.API_RESOURCE_ROLE_DEPARTMENT'], {}), '(fixtures.\n API_RESOURCE_ROLE_DEPARTMENT)\n', (25628, 25672), False, 'from djautotask.tests import fixtures, 
mocks, fixture_utils\n'), ((25689, 25762), 'djautotask.tests.mocks.service_api_get_license_types_call', 'mocks.service_api_get_license_types_call', (['fixtures.API_LICENSE_TYPE_FIELD'], {}), '(fixtures.API_LICENSE_TYPE_FIELD)\n', (25729, 25762), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((25784, 25849), 'djautotask.tests.mocks.service_api_get_use_types_call', 'mocks.service_api_get_use_types_call', (['fixtures.API_USE_TYPE_FIELD'], {}), '(fixtures.API_USE_TYPE_FIELD)\n', (25820, 25849), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((25858, 25935), 'djautotask.tests.mocks.service_api_get_task_type_links_call', 'mocks.service_api_get_task_type_links_call', (['fixtures.API_TASK_TYPE_LINK_FIELD'], {}), '(fixtures.API_TASK_TYPE_LINK_FIELD)\n', (25900, 25935), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((25957, 26030), 'djautotask.tests.mocks.service_api_get_account_types_call', 'mocks.service_api_get_account_types_call', (['fixtures.API_ACCOUNT_TYPE_FIELD'], {}), '(fixtures.API_ACCOUNT_TYPE_FIELD)\n', (25997, 26030), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((26052, 26142), 'djautotask.tests.mocks.service_api_get_ticket_category_picklist_call', 'mocks.service_api_get_ticket_category_picklist_call', (['fixtures.API_DISPLAY_COLOR_FIELD'], {}), '(fixtures.\n API_DISPLAY_COLOR_FIELD)\n', (26103, 26142), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((26159, 26237), 'djautotask.tests.mocks.service_api_get_ticket_picklist_call', 'mocks.service_api_get_ticket_picklist_call', (['fixtures.API_TICKET_PICKLIST_FIELD'], {}), '(fixtures.API_TICKET_PICKLIST_FIELD)\n', (26201, 26237), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((26259, 26344), 'djautotask.tests.mocks.service_api_get_project_picklist_call', 'mocks.service_api_get_project_picklist_call', (['fixtures.API_PROJECT_PICKLIST_FIELD'], {}), '(fixtures.API_PROJECT_PICKLIST_FIELD\n )\n', (26302, 26344), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((26361, 26454), 'djautotask.tests.mocks.service_api_get_service_call_statuses_call', 'mocks.service_api_get_service_call_statuses_call', (['fixtures.API_SERVICE_CALL_STATUS_FIELD'], {}), '(fixtures.\n API_SERVICE_CALL_STATUS_FIELD)\n', (26409, 26454), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((26471, 26528), 'djautotask.tests.mocks.service_api_get_contacts_call', 'mocks.service_api_get_contacts_call', (['fixtures.API_CONTACT'], {}), '(fixtures.API_CONTACT)\n', (26506, 26528), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((26537, 26596), 'djautotask.tests.mocks.service_api_get_contracts_call', 'mocks.service_api_get_contracts_call', (['fixtures.API_CONTRACT'], {}), '(fixtures.API_CONTRACT)\n', (26573, 26596), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((26605, 26678), 'djautotask.tests.mocks.service_api_get_allocation_codes_call', 'mocks.service_api_get_allocation_codes_call', (['fixtures.API_ALLOCATION_CODE'], {}), '(fixtures.API_ALLOCATION_CODE)\n', (26648, 26678), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((26700, 26798), 'djautotask.tests.mocks.service_api_get_account_physical_locations_call', 'mocks.service_api_get_account_physical_locations_call', (['fixtures.API_ACCOUNT_PHYSICAL_LOCATION'], {}), '(fixtures.\n API_ACCOUNT_PHYSICAL_LOCATION)\n', (26753, 26798), False, 'from 
djautotask.tests import fixtures, mocks, fixture_utils\n'), ((26815, 26889), 'djautotask.tests.mocks.service_api_get_ticket_categories_call', 'mocks.service_api_get_ticket_categories_call', (['fixtures.API_TICKET_CATEGORY'], {}), '(fixtures.API_TICKET_CATEGORY)\n', (26859, 26889), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((26911, 26966), 'djautotask.tests.mocks.service_api_get_tickets_call', 'mocks.service_api_get_tickets_call', (['fixtures.API_TICKET'], {}), '(fixtures.API_TICKET)\n', (26945, 26966), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((26975, 27026), 'djautotask.tests.mocks.service_api_get_tasks_call', 'mocks.service_api_get_tasks_call', (['fixtures.API_TASK'], {}), '(fixtures.API_TASK)\n', (27007, 27026), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((27035, 27092), 'djautotask.tests.mocks.service_api_get_projects_call', 'mocks.service_api_get_projects_call', (['fixtures.API_PROJECT'], {}), '(fixtures.API_PROJECT)\n', (27070, 27092), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((27101, 27168), 'djautotask.tests.mocks.service_api_get_service_calls_call', 'mocks.service_api_get_service_calls_call', (['fixtures.API_SERVICE_CALL'], {}), '(fixtures.API_SERVICE_CALL)\n', (27141, 27168), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((27177, 27263), 'djautotask.tests.mocks.service_api_get_service_call_tickets_call', 'mocks.service_api_get_service_call_tickets_call', (['fixtures.API_SERVICE_CALL_TICKET'], {}), '(fixtures.\n API_SERVICE_CALL_TICKET)\n', (27224, 27263), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((27280, 27384), 'djautotask.tests.mocks.service_api_get_service_call_ticket_resources_call', 'mocks.service_api_get_service_call_ticket_resources_call', (['fixtures.API_SERVICE_CALL_TICKET_RESOURCE'], {}), '(fixtures.\n API_SERVICE_CALL_TICKET_RESOURCE)\n', (27336, 27384), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((27401, 27478), 'djautotask.tests.mocks.service_api_get_service_call_tasks_call', 'mocks.service_api_get_service_call_tasks_call', (['fixtures.API_SERVICE_CALL_TASK'], {}), '(fixtures.API_SERVICE_CALL_TASK)\n', (27446, 27478), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((27500, 27600), 'djautotask.tests.mocks.service_api_get_service_call_task_resources_call', 'mocks.service_api_get_service_call_task_resources_call', (['fixtures.API_SERVICE_CALL_TASK_RESOURCE'], {}), '(fixtures.\n API_SERVICE_CALL_TASK_RESOURCE)\n', (27554, 27600), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((27617, 27692), 'djautotask.tests.mocks.service_api_get_task_predecessors_call', 'mocks.service_api_get_task_predecessors_call', (['fixtures.API_TASK_PREDECESSOR'], {}), '(fixtures.API_TASK_PREDECESSOR)\n', (27661, 27692), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((27754, 27809), 'djautotask.tests.mocks.service_api_get_contacts_call', 'mocks.service_api_get_contacts_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (27789, 27809), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((27818, 27874), 'djautotask.tests.mocks.service_api_get_contracts_call', 'mocks.service_api_get_contracts_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (27854, 27874), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((27883, 27946), 
'djautotask.tests.mocks.service_api_get_allocation_codes_call', 'mocks.service_api_get_allocation_codes_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (27926, 27946), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((27955, 28028), 'djautotask.tests.mocks.service_api_get_account_physical_locations_call', 'mocks.service_api_get_account_physical_locations_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28008, 28028), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28050, 28104), 'djautotask.tests.mocks.service_api_get_tickets_call', 'mocks.service_api_get_tickets_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28084, 28104), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28113, 28165), 'djautotask.tests.mocks.service_api_get_tasks_call', 'mocks.service_api_get_tasks_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28145, 28165), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28174, 28229), 'djautotask.tests.mocks.service_api_get_projects_call', 'mocks.service_api_get_projects_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28209, 28229), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28238, 28302), 'djautotask.tests.mocks.service_api_get_ticket_categories_call', 'mocks.service_api_get_ticket_categories_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28282, 28302), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28311, 28375), 'djautotask.tests.mocks.service_api_get_task_predecessors_call', 'mocks.service_api_get_task_predecessors_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28355, 28375), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28384, 28436), 'djautotask.tests.mocks.service_api_get_roles_call', 'mocks.service_api_get_roles_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28416, 28436), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28445, 28503), 'djautotask.tests.mocks.service_api_get_departments_call', 'mocks.service_api_get_departments_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28483, 28503), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28512, 28586), 'djautotask.tests.mocks.service_api_get_resource_service_desk_roles_call', 'mocks.service_api_get_resource_service_desk_roles_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28566, 28586), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28608, 28680), 'djautotask.tests.mocks.service_api_get_resource_role_departments_call', 'mocks.service_api_get_resource_role_departments_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28660, 28680), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28702, 28762), 'djautotask.tests.mocks.service_api_get_service_calls_call', 'mocks.service_api_get_service_calls_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28742, 28762), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28771, 28838), 'djautotask.tests.mocks.service_api_get_service_call_tickets_call', 'mocks.service_api_get_service_call_tickets_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28818, 28838), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28847, 28923), 
'djautotask.tests.mocks.service_api_get_service_call_ticket_resources_call', 'mocks.service_api_get_service_call_ticket_resources_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28903, 28923), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((28945, 29010), 'djautotask.tests.mocks.service_api_get_service_call_tasks_call', 'mocks.service_api_get_service_call_tasks_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (28990, 29010), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((29019, 29093), 'djautotask.tests.mocks.service_api_get_service_call_task_resources_call', 'mocks.service_api_get_service_call_task_resources_call', (['fixtures.API_EMPTY'], {}), '(fixtures.API_EMPTY)\n', (29073, 29093), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((29115, 29182), 'djautotask.tests.mocks.service_api_get_ticket_category_picklist_call', 'mocks.service_api_get_ticket_category_picklist_call', (["{'fields': []}"], {}), "({'fields': []})\n", (29166, 29182), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((29191, 29249), 'djautotask.tests.mocks.service_api_get_ticket_picklist_call', 'mocks.service_api_get_ticket_picklist_call', (["{'fields': []}"], {}), "({'fields': []})\n", (29233, 29249), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((29258, 29317), 'djautotask.tests.mocks.service_api_get_project_picklist_call', 'mocks.service_api_get_project_picklist_call', (["{'fields': []}"], {}), "({'fields': []})\n", (29301, 29317), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((29326, 29382), 'djautotask.tests.mocks.service_api_get_license_types_call', 'mocks.service_api_get_license_types_call', (["{'fields': []}"], {}), "({'fields': []})\n", (29366, 29382), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((29391, 29443), 'djautotask.tests.mocks.service_api_get_use_types_call', 'mocks.service_api_get_use_types_call', (["{'fields': []}"], {}), "({'fields': []})\n", (29427, 29443), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((29452, 29510), 'djautotask.tests.mocks.service_api_get_task_type_links_call', 'mocks.service_api_get_task_type_links_call', (["{'fields': []}"], {}), "({'fields': []})\n", (29494, 29510), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((29519, 29575), 'djautotask.tests.mocks.service_api_get_account_types_call', 'mocks.service_api_get_account_types_call', (["{'fields': []}"], {}), "({'fields': []})\n", (29559, 29575), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((29584, 29648), 'djautotask.tests.mocks.service_api_get_service_call_statuses_call', 'mocks.service_api_get_service_call_statuses_call', (["{'fields': []}"], {}), "({'fields': []})\n", (29632, 29648), False, 'from djautotask.tests import fixtures, mocks, fixture_utils\n'), ((21423, 21450), 'djautotask.models.Ticket.objects.all', 'models.Ticket.objects.all', ([], {}), '()\n', (21448, 21450), False, 'from djautotask import models\n')] |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# FileName adbtools.py
# Author: HeyNiu
# Created Time: 2016/9/19
"""
ADB utility class.
"""
import os
import platform
import re
import time
class AdbTools(object):
def __init__(self, device_id=''):
self.__system = platform.system()
self.__find = ''
self.__command = ''
self.__device_id = device_id
self.__get_find()
self.__check_adb()
self.__connection_devices()
    def __get_find(self):
        """
        Pick the text-search command for the platform:
        findstr on Windows, grep on other systems.
        :return:
        """
        if self.__system == "Windows":
            self.__find = "findstr"
        else:
            self.__find = "grep"
    def __check_adb(self):
        """
        Check for adb.
        Requires the ANDROID_HOME environment variable to be set.
        :return:
        """
        if "ANDROID_HOME" in os.environ:
            adb_name = "adb.exe" if self.__system == "Windows" else "adb"
            path = os.path.join(os.environ["ANDROID_HOME"], "platform-tools", adb_name)
            if os.path.exists(path):
                self.__command = path
            else:
                raise EnvironmentError(
                    "Adb not found in $ANDROID_HOME path: %s." % os.environ["ANDROID_HOME"])
        else:
            raise EnvironmentError("The ANDROID_HOME environment variable is not set.")
    def __connection_devices(self):
        """
        Target a specific device; device_id may be omitted when a single
        device is attached.
        :return:
        """
        if self.__device_id == "":
            return
        self.__device_id = "-s %s" % self.__device_id
    def adb(self, args):
        """
        Run an adb command.
        :param args: arguments
        :return:
        """
        cmd = "%s %s %s" % (self.__command, self.__device_id, str(args))
        # print(cmd)
        return os.popen(cmd)
    def shell(self, args):
        """
        Run an adb shell command.
        :param args: arguments
        :return:
        """
        cmd = "%s %s shell %s" % (self.__command, self.__device_id, str(args))
        # print(cmd)
        return os.popen(cmd)
    def mkdir(self, path):
        """
        Create a directory on the device.
        :param path: path
        :return:
        """
        return self.shell('mkdir %s' % path)
    def get_devices(self):
        """
        Get the list of attached devices.
        :return:
        """
        l = self.adb('devices').readlines()
        return (i.split()[0] for i in l if 'devices' not in i and len(i) > 5)
    def get_current_application(self):
        """
        Get info about the currently running application.
        :return:
        """
        return self.shell(r'dumpsys window w | %s \/ | %s name=' % (self.__find, self.__find)).read()
    def get_current_package(self):
        """
        Get the package name of the currently running app.
        :return:
        """
        reg = re.compile(r'name=(.+?)/')
        return re.findall(reg, self.get_current_application())[0]
    def get_current_activity(self):
        """
        Get the currently running activity.
        :return: package/activity
        """
        reg = re.compile(r'name=(.+?)\)')
        return re.findall(reg, self.get_current_application())[0]
    def __get_process(self, package_name):
        """
        Get process info for a package.
        :param package_name:
        :return:
        """
        if self.__system == "Windows":
            pid_command = self.shell("ps | %s %s$" % (self.__find, package_name)).read()
        else:
            pid_command = self.shell("ps | %s -w %s" % (self.__find, package_name)).read()
        return pid_command
    def process_exists(self, package_name):
        """
        Return whether the process exists.
        :param package_name:
        :return:
        """
        process = self.__get_process(package_name)
        return package_name in process
    def get_pid(self, package_name):
        """
        Get the pid of a package's process.
        :return:
        """
        pid_command = self.__get_process(package_name)
        if pid_command == '':
            print("The process doesn't exist.")
            return pid_command
        req = re.compile(r"\d+")
        result = str(pid_command).split()
        result.remove(result[0])
        return req.findall(" ".join(result))[0]
    def get_uid(self, pid):
        """
        Get the uid for a pid.
        :param pid:
        :return:
        """
        result = self.shell("cat /proc/%s/status" % pid).readlines()
        for i in result:
            if 'uid' in i.lower():
                return i.split()[1]
    def get_flow_data_tcp(self, uid):
        """
        Get the app's TCP traffic.
        :return: (received, sent)
        """
        tcp_rcv = self.shell("cat proc/uid_stat/%s/tcp_rcv" % uid).read().split()[0]
        tcp_snd = self.shell("cat proc/uid_stat/%s/tcp_snd" % uid).read().split()[0]
        return tcp_rcv, tcp_snd
    def get_flow_data_all(self, uid):
        """
        Get all traffic data for the app,
        covering every process of the app and all protocols (tcp, udp, ...).
        (rx_bytes, tx_bytes) >> (received, sent)
        :param uid:
        :return: list(dict)
        """
        all_data = []
        d = {}
        data = self.shell("cat /proc/net/xt_qtaguid/stats | %s %s" % (self.__find, uid)).readlines()
        for i in data:
            if not i.startswith('\n'):
                item = i.strip().split()
                d['idx'] = item[0]
                d['iface'] = item[1]
                d['acct_tag_hex'] = item[2]
                d['uid_tag_int'] = item[3]
                d['cnt_set'] = item[4]
                d['rx_bytes'] = item[5]
                d['rx_packets'] = item[6]
                d['tx_bytes'] = item[7]
                d['tx_packets'] = item[8]
                d['rx_tcp_bytes'] = item[9]
                d['rx_tcp_packets'] = item[10]
                d['rx_udp_bytes'] = item[11]
                d['rx_udp_packets'] = item[12]
                d['rx_other_bytes'] = item[13]
                d['rx_other_packets'] = item[14]
                d['tx_tcp_bytes'] = item[15]
                d['tx_tcp_packets'] = item[16]
                d['tx_udp_bytes'] = item[17]
                d['tx_udp_packets'] = item[18]
                d['tx_other_bytes'] = item[19]
                d['tx_other_packets'] = item[20]
                all_data.append(d)
                d = {}
        return all_data
    @staticmethod
    def dump_apk(path):
        """
        Dump an apk file.
        :param path: apk path
        :return:
        """
        # Check that build-tools is on PATH;
        # the aapt command inside it is required.
        # os.pathsep keeps the split portable (';' on Windows, ':' elsewhere).
        l = os.environ['PATH'].split(os.pathsep)
        build_tools = False
        for i in l:
            if 'build-tools' in i:
                build_tools = True
        if not build_tools:
            raise EnvironmentError("ANDROID_HOME BUILD-TOOLS COMMAND NOT FOUND.\nPlease set the environment variable.")
        return os.popen('aapt dump badging %s' % (path,))
    @staticmethod
    def dump_xml(path, filename):
        """
        Dump an apk's xml file.
        :return:
        """
        return os.popen('aapt dump xmlstrings %s %s' % (path, filename))
    def uiautomator_dump(self):
        """
        Dump the screen's uiautomator xml file.
        :return:
        """
        return self.shell('uiautomator dump').read().split()[-1]
    def pull(self, source, target):
        """
        Pull a file from the device to the computer.
        :return:
        """
        self.adb('pull %s %s' % (source, target))
    def push(self, source, target):
        """
        Push a file from the computer to the device.
        :param source:
        :param target:
        :return:
        """
        self.adb('push %s %s' % (source, target))
    def remove(self, path):
        """
        Delete a file on the device.
        :return:
        """
        self.shell('rm %s' % (path,))
    def clear_app_data(self, package):
        """
        Clear an application's data.
        :return:
        """
        self.shell('pm clear %s' % (package,))
    def install(self, path):
        """
        Install an apk file.
        :return:
        """
        # Common `adb install` error codes
        errors = {'INSTALL_FAILED_ALREADY_EXISTS': 'Application already exists',
                  'INSTALL_DEVICES_NOT_FOUND': 'Device not found',
                  'INSTALL_FAILED_DEVICE_OFFLINE': 'Device offline',
                  'INSTALL_FAILED_INVALID_APK': 'Invalid APK',
                  'INSTALL_FAILED_INVALID_URI': 'Invalid URI',
                  'INSTALL_FAILED_INSUFFICIENT_STORAGE': 'Insufficient storage space',
                  'INSTALL_FAILED_DUPLICATE_PACKAGE': 'A package with the same name already exists',
                  'INSTALL_FAILED_NO_SHARED_USER': 'Requested shared user does not exist',
                  'INSTALL_FAILED_UPDATE_INCOMPATIBLE': 'Versions cannot coexist',
                  'INSTALL_FAILED_SHARED_USER_INCOMPATIBLE': 'Requested shared user has a bad signature',
                  'INSTALL_FAILED_MISSING_SHARED_LIBRARY': 'Required shared library is missing',
                  'INSTALL_FAILED_REPLACE_COULDNT_DELETE': 'Required shared library is invalid',
                  'INSTALL_FAILED_DEXOPT': 'dex optimization/validation failed',
                  'INSTALL_FAILED_DEVICE_NOSPACE': 'APK copy failed: insufficient device storage',
                  'INSTALL_FAILED_DEVICE_COPY_FAILED': 'File copy failed',
                  'INSTALL_FAILED_OLDER_SDK': 'System version too old',
                  'INSTALL_FAILED_CONFLICTING_PROVIDER': 'A content provider with the same name already exists',
                  'INSTALL_FAILED_NEWER_SDK': 'System version too new',
                  'INSTALL_FAILED_TEST_ONLY': 'Test-only app; the caller is not allowed to install it',
                  'INSTALL_FAILED_CPU_ABI_INCOMPATIBLE': 'Included native code is incompatible with the CPU ABI',
                  'CPU_ABIINSTALL_FAILED_MISSING_FEATURE': 'Uses an invalid feature',
                  'INSTALL_FAILED_CONTAINER_ERROR': 'SD card access failed',
                  'INSTALL_FAILED_INVALID_INSTALL_LOCATION': 'Invalid install location',
                  'INSTALL_FAILED_MEDIA_UNAVAILABLE': 'SD card does not exist',
                  'INSTALL_FAILED_INTERNAL_ERROR': 'Install failed due to a system problem',
                  'INSTALL_PARSE_FAILED_NO_CERTIFICATES': 'File is not certified >> enable installs from unknown sources',
                  'INSTALL_PARSE_FAILED_INCONSISTENT_CERTIFICATES': 'Certificates are inconsistent >> uninstall the old version first',
                  'INSTALL_FAILED_INVALID_ZIP_FILE': 'Invalid zip file >> uninstall the old version first',
                  'INSTALL_CANCELED_BY_USER': 'Installation requires user confirmation',
                  'INSTALL_FAILED_VERIFICATION_FAILURE': 'Verification failed >> try rebooting the device',
                  'DEFAULT': 'Unknown error'
                  }
        print('Installing...')
        l = self.adb('install -r %s' % (path,)).read()
        if 'Success' in l:
            print('Install Success')
        if 'Failure' in l:
            reg = re.compile('\\[(.+?)\\]')
            key = re.findall(reg, l)[0]
            try:
                print('Install Failure >> %s' % errors[key])
            except KeyError:
                print('Install Failure >> %s' % key)
        return l
    def uninstall(self, package):
        """
        Uninstall an apk.
        :param package: package name
        :return:
        """
        print('Uninstalling...')
        l = self.adb('uninstall %s' % (package,)).read()
        print(l)
    def screenshot(self, target_path=''):
        """
        Take a screenshot on the device.
        :param target_path: destination path
        :return:
        """
        # time.strftime replaces the unavailable utils.timetools helper
        format_time = time.strftime('%Y%m%d%H%M%S')
        self.shell('screencap -p /sdcard/%s.png' % (format_time,))
        time.sleep(1)
        if target_path == '':
            self.pull('/sdcard/%s.png' % (format_time,), os.path.expanduser('~'))
        else:
            self.pull('/sdcard/%s.png' % (format_time,), target_path)
        self.remove('/sdcard/%s.png' % (format_time,))
    def get_cache_logcat(self):
        """
        Dump the buffered log.
        :return:
        """
        return self.adb('logcat -v time -d')
    def get_crash_logcat(self):
        """
        Dump the crash log.
        :return:
        """
        return self.adb('logcat -v time -d | %s AndroidRuntime' % (self.__find,))
    def clear_cache_logcat(self):
        """
        Clear the log buffer.
        :return:
        """
        self.adb('logcat -c')
    def get_device_time(self):
        """
        Get the device time.
        :return:
        """
        return self.shell('date').read().strip()
    def ls(self, command):
        """
        Shell ls command.
        :return:
        """
        return self.shell('ls %s' % (command,)).readlines()
    def file_exists(self, target):
        """
        Check whether a file exists at the target path.
        :return:
        """
        l = self.ls(target)
        for i in l:
            if i.strip() == target:
                return True
        return False
    def is_install(self, target_app):
        """
        Check whether the target app is installed on the device.
        :param target_app: target app package name
        :return: bool
        """
        return target_app in self.shell('pm list packages %s' % (target_app,)).read()
    def get_device_model(self):
        """
        Get the device model.
        :return:
        """
        return self.shell('getprop ro.product.model').read().strip()
    def get_device_id(self):
        """
        Get the device id.
        :return:
        """
        return self.adb('get-serialno').read().strip()
    def get_device_android_version(self):
        """
        Get the device's Android version.
        :return:
        """
        return self.shell('getprop ro.build.version.release').read().strip()
    def get_device_sdk_version(self):
        """
        Get the device's SDK version.
        :return:
        """
        return self.shell('getprop ro.build.version.sdk').read().strip()
    def get_device_mac_address(self):
        """
        Get the device's MAC address.
        :return:
        """
        return self.shell('cat /sys/class/net/wlan0/address').read().strip()
    def get_device_ip_address(self):
        """
        Get the device's IP address.
        Note: works for WIFI and cellular data.
        :return:
        """
        if not self.get_wifi_state() and not self.get_data_state():
            return
        l = self.shell('ip addr | %s global' % self.__find).read()
        reg = re.compile(r'\d+\.\d+\.\d+\.\d+')
        return re.findall(reg, l)[0]
    def get_device_imei(self):
        """
        Get the device IMEI.
        :return:
        """
        sdk = self.get_device_sdk_version()
        # Method for Android below 5.0
        if int(sdk) < 21:
            l = self.shell('dumpsys iphonesubinfo').read()
            reg = re.compile('[0-9]{15}')
            return re.findall(reg, l)[0]
        elif self.root():
            l = self.shell('service call iphonesubinfo 1').read()
            print(l)
            print(re.findall(re.compile("'.+?'"), l))
            imei = ''
            for i in re.findall(re.compile("'.+?'"), l):
                imei += i.replace('.', '').replace("'", '').replace(' ', '')
            return imei
        else:
            print('The device not root.')
            return ''
    def check_sim_card(self):
        """
        Check whether the device has a SIM card.
        :return:
        """
        return len(self.shell('getprop | %s gsm.operator.alpha]' % self.__find).read().strip().split()[-1]) > 2
    def get_device_operators(self):
        """
        Get the carrier/operator name.
        :return:
        """
        return self.shell('getprop | %s gsm.operator.alpha]' % self.__find).read().strip().split()[-1]
    def get_device_state(self):
        """
        Get the device state.
        :return:
        """
        return self.adb('get-state').read().strip()
    def get_display_state(self):
        """
        Get the screen state.
        :return: screen on / screen off
        """
        l = self.shell('dumpsys power').readlines()
        for i in l:
            if 'mScreenOn=' in i:
                return i.split()[-1] == 'mScreenOn=true'
            if 'Display Power' in i:
                return 'ON' in i.split('=')[-1].upper()
    def get_screen_normal_size(self):
        """
        Get the device's screen resolution >> nominal.
        :return:
        """
        return self.shell('wm size').read().strip().split()[-1].split('x')
    def get_screen_reality_size(self):
        """
        Get the device's screen resolution >> actual resolution.
        :return:
        """
        x = 0
        y = 0
        l = self.shell(r'getevent -p | %s -e "0"' % self.__find).readlines()
        for n in l:
            if len(n.split()) > 0:
                if n.split()[0] == '0035':
                    x = int(n.split()[7].split(',')[0])
                elif n.split()[0] == '0036':
                    y = int(n.split()[7].split(',')[0])
        return x, y
    def get_device_interior_sdcard(self):
        """
        Get internal SD-card space.
        :return: (path, total, used, free, block)
        """
        return self.shell(r'df | %s \/mnt\/shell\/emulated' % self.__find).read().strip().split()
    def get_device_external_sdcard(self):
        """
        Get external SD-card space.
        :return: (path, total, used, free, block)
        """
        return self.shell(r'df | %s \/storage' % self.__find).read().strip().split()
    def __fill_rom(self, path, stream, count):
        """
        Write filler data.
        :param path: destination path
        :param stream: block size per write
        :param count: number of writes
        :return:
        """
        self.shell('dd if=/dev/zero of=%s bs=%s count=%s' % (path, stream, count)).read().strip()
    def fill_interior_sdcard(self, filename, size):
        """
        Fill the internal SD card.
        :param filename: file name
        :param size: fill size in bytes
        :return:
        """
        if size > 10485760:  # 10m
            self.__fill_rom('sdcard/%s' % filename, 10485760, size / 10485760)
        else:
            self.__fill_rom('sdcard/%s' % filename, size, 1)
    def fill_external_sdcard(self, filename, size):
        """
        Fill the external SD card.
        :param filename: file name
        :param size: fill size in bytes
        :return:
        """
        path = self.get_device_external_sdcard()[0]
        if size > 10485760:  # 10m
            self.__fill_rom('%s/%s' % (path, filename), 10485760, size / 10485760)
        else:
            self.__fill_rom('%s/%s' % (path, filename), size, 1)
    def kill_process(self, pid):
        """
        Kill a process.
        Note: usually needs extra privileges; not recommended.
        :return:
        """
        return self.shell('kill %s' % pid).read().strip()
    def quit_app(self, package):
        """
        Force-stop an application.
        :return:
        """
        return self.shell('am force-stop %s' % package).read().strip()
    def reboot(self):
        """
        Reboot the device.
        :return:
        """
        self.adb('reboot')
    def recovery(self):
        """
        Reboot the device into recovery mode.
        :return:
        """
        self.adb('reboot recovery')
    def fastboot(self):
        """
        Reboot the device into fastboot mode.
        :return:
        """
        self.adb('reboot bootloader')
    def root(self):
        """
        Check root status.
        :return:
        """
        return 'not found' not in self.shell('su -c ls -l /data/').read().strip()
    def wifi(self, power):
        """
        Turn wifi on/off.
        Note: requires root.
        :return:
        """
        if not self.root():
            print('The device not root.')
            return
        if power:
            self.shell('su -c svc wifi enable').read().strip()
        else:
            self.shell('su -c svc wifi disable').read().strip()
    def data(self, power):
        """
        Turn cellular data on/off.
        Note: requires root.
        :return:
        """
        if not self.root():
            print('The device not root.')
            return
        if power:
            self.shell('su -c svc data enable').read().strip()
        else:
            self.shell('su -c svc data disable').read().strip()
    def get_wifi_state(self):
        """
        Get the WiFi connection state.
        :return:
        """
        return 'enabled' in self.shell('dumpsys wifi | %s ^Wi-Fi' % self.__find).read().strip()
    def get_data_state(self):
        """
        Get the mobile-data connection state.
        :return:
        """
        return '2' in self.shell('dumpsys telephony.registry | %s mDataConnectionState' % self.__find).read().strip()
    def get_network_state(self):
        """
        Check whether the device can reach the internet.
        :return:
        """
        return 'unknown host' not in self.shell('ping -w 1 www.baidu.com').read().strip()
    def get_wifi_password_list(self):
        """
        Get the list of saved WIFI passwords.
        :return:
        """
        if not self.root():
            print('The device not root.')
            return []
        l = re.findall(re.compile(r'ssid=".+?"\s{3}psk=".+?"'), self.shell('su -c cat /data/misc/wifi/*.conf').read())
        return [re.findall(re.compile('".+?"'), i) for i in l]
    def call(self, number):
        """
        Dial a phone number.
        :param number:
        :return:
        """
        self.shell('am start -a android.intent.action.CALL -d tel:%s' % number)
    def open_url(self, url):
        """
        Open a web page.
        :return:
        """
        self.shell('am start -a android.intent.action.VIEW -d %s' % url)
    def start_application(self, component):
        """
        Start an application.
        e.g: com.android.settings/com.android.settings.Settings
        """
        self.shell("am start -n %s" % component)
    def send_keyevent(self, keycode):
        """
        Send a key event.
        https://developer.android.com/reference/android/view/KeyEvent.html
        :return:
        """
        self.shell('input keyevent %s' % keycode)
    def rotation_screen(self, param):
        """
        Rotate the screen.
        :param param: 0 >> portrait, auto-rotate disabled; 1 >> auto-rotate
        :return:
        """
        self.shell('/system/bin/content insert --uri content://settings/system --bind '
                   'name:s:accelerometer_rotation --bind value:i:%s' % param)
    def instrument(self, command):
        """
        Launch an instrumented app.
        :param command: command
        :return:
        """
        return self.shell('am instrument %s' % command).read()
    def export_apk(self, package, target_path='', timeout=5000):
        """
        Export an apk from the device.
        :param timeout: maximum number of polling iterations
        :param target_path: directory in which to store the exported apk
        :param package: package name
        :return:
        """
        if target_path == '':
            target_path = os.path.expanduser('~')
        self.adb('pull /data/app/%s-1/base.apk %s' % (package, target_path))
        pulled = os.path.join(target_path, 'base.apk')
        num = 0
        # Poll until the pulled file shows up, then rename it. The original
        # loop never terminated, so an explicit bound and break are used here.
        while num <= timeout:
            num += 1
            if os.path.exists(pulled):
                os.rename(pulled, os.path.join(target_path, '%s.apk' % package))
                break
class KeyCode:
    KEYCODE_CALL = 5  # Call key
    KEYCODE_ENDCALL = 6  # End-call key
    KEYCODE_HOME = 3  # Home key
    KEYCODE_MENU = 82  # Menu key
    KEYCODE_BACK = 4  # Back key
    KEYCODE_SEARCH = 84  # Search key
    KEYCODE_CAMERA = 27  # Camera key
    KEYCODE_FOCUS = 80  # Focus key
    KEYCODE_POWER = 26  # Power key
    KEYCODE_NOTIFICATION = 83  # Notification key
    KEYCODE_MUTE = 91  # Microphone mute key
    KEYCODE_VOLUME_MUTE = 164  # Speaker mute key
    KEYCODE_VOLUME_UP = 24  # Volume up key
    KEYCODE_VOLUME_DOWN = 25  # Volume down key
    KEYCODE_ENTER = 66  # Enter key
    KEYCODE_ESCAPE = 111  # Esc key
    KEYCODE_DPAD_CENTER = 23  # D-pad >> center/OK key
    KEYCODE_DPAD_UP = 19  # D-pad >> up
    KEYCODE_DPAD_DOWN = 20  # D-pad >> down
    KEYCODE_DPAD_LEFT = 21  # D-pad >> left
    KEYCODE_DPAD_RIGHT = 22  # D-pad >> right
    KEYCODE_MOVE_HOME = 122  # Move cursor to start
    KEYCODE_MOVE_END = 123  # Move cursor to end
    KEYCODE_PAGE_UP = 92  # Page-up key
    KEYCODE_PAGE_DOWN = 93  # Page-down key
    KEYCODE_DEL = 67  # Backspace key
    KEYCODE_FORWARD_DEL = 112  # Delete key
    KEYCODE_INSERT = 124  # Insert key
    KEYCODE_TAB = 61  # Tab key
    KEYCODE_NUM_LOCK = 143  # Num-lock key
    KEYCODE_CAPS_LOCK = 115  # Caps-lock key
    KEYCODE_BREAK = 121  # Break / Pause key
    KEYCODE_SCROLL_LOCK = 116  # Scroll-lock key
    KEYCODE_ZOOM_IN = 168  # Zoom-in key
    KEYCODE_ZOOM_OUT = 169  # Zoom-out key
KEYCODE_0 = 7
KEYCODE_1 = 8
KEYCODE_2 = 9
KEYCODE_3 = 10
KEYCODE_4 = 11
KEYCODE_5 = 12
KEYCODE_6 = 13
KEYCODE_7 = 14
KEYCODE_8 = 15
KEYCODE_9 = 16
KEYCODE_A = 29
KEYCODE_B = 30
KEYCODE_C = 31
KEYCODE_D = 32
KEYCODE_E = 33
KEYCODE_F = 34
KEYCODE_G = 35
KEYCODE_H = 36
KEYCODE_I = 37
KEYCODE_J = 38
KEYCODE_K = 39
KEYCODE_L = 40
KEYCODE_M = 41
KEYCODE_N = 42
KEYCODE_O = 43
KEYCODE_P = 44
KEYCODE_Q = 45
KEYCODE_R = 46
KEYCODE_S = 47
KEYCODE_T = 48
KEYCODE_U = 49
KEYCODE_V = 50
KEYCODE_W = 51
KEYCODE_X = 52
KEYCODE_Y = 53
KEYCODE_Z = 54
KEYCODE_PLUS = 81 # +
KEYCODE_MINUS = 69 # -
KEYCODE_STAR = 17 # *
KEYCODE_SLASH = 76 # /
KEYCODE_EQUALS = 70 # =
KEYCODE_AT = 77 # @
KEYCODE_POUND = 18 # #
KEYCODE_APOSTROPHE = 75 # '
KEYCODE_BACKSLASH = 73 # \
KEYCODE_COMMA = 55 # ,
KEYCODE_PERIOD = 56 # .
KEYCODE_LEFT_BRACKET = 71 # [
KEYCODE_RIGHT_BRACKET = 72 # ]
KEYCODE_SEMICOLON = 74 # ;
KEYCODE_GRAVE = 68 # `
    KEYCODE_SPACE = 62  # Space key
    KEYCODE_MEDIA_PLAY = 126  # Media key >> play
    KEYCODE_MEDIA_STOP = 86  # Media key >> stop
    KEYCODE_MEDIA_PAUSE = 127  # Media key >> pause
    KEYCODE_MEDIA_PLAY_PAUSE = 85  # Media key >> play / pause
    KEYCODE_MEDIA_FAST_FORWARD = 90  # Media key >> fast forward
    KEYCODE_MEDIA_REWIND = 89  # Media key >> rewind
    KEYCODE_MEDIA_NEXT = 87  # Media key >> next track
    KEYCODE_MEDIA_PREVIOUS = 88  # Media key >> previous track
    KEYCODE_MEDIA_CLOSE = 128  # Media key >> close
    KEYCODE_MEDIA_EJECT = 129  # Media key >> eject
    KEYCODE_MEDIA_RECORD = 130  # Media key >> record
if __name__ == '__main__':
a = AdbTools()
pass | [
"os.path.exists",
"re.compile",
"os.path.join",
"time.sleep",
"platform.system",
"os.popen",
"re.findall",
"os.path.expanduser"
] | [((310, 327), 'platform.system', 'platform.system', ([], {}), '()\n', (325, 327), False, 'import platform\n'), ((2267, 2280), 'os.popen', 'os.popen', (['cmd'], {}), '(cmd)\n', (2275, 2280), False, 'import os\n'), ((2520, 2533), 'os.popen', 'os.popen', (['cmd'], {}), '(cmd)\n', (2528, 2533), False, 'import os\n'), ((3234, 3259), 're.compile', 're.compile', (['"""name=(.+?)/"""'], {}), "('name=(.+?)/')\n", (3244, 3259), False, 'import re\n'), ((3467, 3494), 're.compile', 're.compile', (['"""name=(.+?)\\\\)"""'], {}), "('name=(.+?)\\\\)')\n", (3477, 3494), False, 'import re\n'), ((4484, 4502), 're.compile', 're.compile', (['"""\\\\d+"""'], {}), "('\\\\d+')\n", (4494, 4502), False, 'import re\n'), ((7242, 7284), 'os.popen', 'os.popen', (["('aapt dump badging %s' % (path,))"], {}), "('aapt dump badging %s' % (path,))\n", (7250, 7284), False, 'import os\n'), ((7425, 7482), 'os.popen', 'os.popen', (["('aapt dump xmlstrings %s %s' % (path, filename))"], {}), "('aapt dump xmlstrings %s %s' % (path, filename))\n", (7433, 7482), False, 'import os\n'), ((11489, 11502), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (11499, 11502), False, 'import time\n'), ((14152, 14191), 're.compile', 're.compile', (['"""\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+"""'], {}), "('\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+')\n", (14162, 14191), False, 'import re\n'), ((10727, 10752), 're.compile', 're.compile', (['"""\\\\[(.+?)\\\\]"""'], {}), "('\\\\[(.+?)\\\\]')\n", (10737, 10752), False, 'import re\n'), ((14201, 14219), 're.findall', 're.findall', (['reg', 'l'], {}), '(reg, l)\n', (14211, 14219), False, 'import re\n'), ((14497, 14520), 're.compile', 're.compile', (['"""[0-9]{15}"""'], {}), "('[0-9]{15}')\n", (14507, 14520), False, 'import re\n'), ((20632, 20671), 're.compile', 're.compile', (['"""ssid=".+?"\\\\s{3}psk=".+?\\""""'], {}), '(\'ssid=".+?"\\\\s{3}psk=".+?"\')\n', (20642, 20671), False, 'import re\n'), ((992, 1061), 'os.path.join', 'os.path.join', (["os.environ['ANDROID_HOME']", '"""platform-tools"""', '"""adb.exe"""'], {}), "(os.environ['ANDROID_HOME'], 'platform-tools', 'adb.exe')\n", (1004, 1061), False, 'import os\n'), ((1082, 1102), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (1096, 1102), False, 'import os\n'), ((1356, 1421), 'os.path.join', 'os.path.join', (["os.environ['ANDROID_HOME']", '"""platform-tools"""', '"""adb"""'], {}), "(os.environ['ANDROID_HOME'], 'platform-tools', 'adb')\n", (1368, 1421), False, 'import os\n'), ((1442, 1462), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (1456, 1462), False, 'import os\n'), ((10772, 10790), 're.findall', 're.findall', (['reg', 'l'], {}), '(reg, l)\n', (10782, 10790), False, 'import re\n'), ((11592, 11615), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (11610, 11615), False, 'import os\n'), ((14541, 14559), 're.findall', 're.findall', (['reg', 'l'], {}), '(reg, l)\n', (14551, 14559), False, 'import re\n'), ((20755, 20774), 're.compile', 're.compile', (['"""".+?\\""""'], {}), '(\'".+?"\')\n', (20765, 20774), False, 'import re\n'), ((14790, 14809), 're.compile', 're.compile', (['"""\'.+?\'"""'], {}), '("\'.+?\'")\n', (14800, 14809), False, 'import re\n'), ((14709, 14728), 're.compile', 're.compile', (['"""\'.+?\'"""'], {}), '("\'.+?\'")\n', (14719, 14728), False, 'import re\n'), ((22442, 22465), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (22460, 22465), False, 'import os\n'), ((22604, 22627), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), 
"('~')\n", (22622, 22627), False, 'import os\n'), ((23058, 23081), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (23076, 23081), False, 'import os\n'), ((22691, 22714), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (22709, 22714), False, 'import os\n'), ((22777, 22800), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (22795, 22800), False, 'import os\n'), ((23145, 23168), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (23163, 23168), False, 'import os\n'), ((23231, 23254), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (23249, 23254), False, 'import os\n')] |
# use sys._getframe() -- it returns a frame object, whose attribute
# f_code is a code object, whose attribute co_name is the name:
import sys
this_function_name = sys._getframe().f_code.co_name
# the frame and code objects also offer other useful information:
this_line_number = sys._getframe().f_lineno
this_filename = sys._getframe().f_code.co_filename
# also, by calling sys._getframe(1), you can get this information
# for the *caller* of the current function. So you can package
# this functionality up into your own handy functions:
def whoami():
import sys
return sys._getframe(1).f_code.co_name
me = whoami()
# this uses argument 1, because the call to whoami is now frame 0.
# and similarly:
def callersname():
import sys
return sys._getframe(2).f_code.co_name
him = callersname()
| [
"sys._getframe"
] | [((281, 296), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (294, 296), False, 'import sys\n'), ((164, 179), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (177, 179), False, 'import sys\n'), ((322, 337), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (335, 337), False, 'import sys\n'), ((583, 599), 'sys._getframe', 'sys._getframe', (['(1)'], {}), '(1)\n', (596, 599), False, 'import sys\n'), ((761, 777), 'sys._getframe', 'sys._getframe', (['(2)'], {}), '(2)\n', (774, 777), False, 'import sys\n')] |
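A hedged companion sketch: on Python 3.5+ the documented `inspect` module exposes the same frame data as `sys._getframe` without relying on an underscore-prefixed API.
import inspect
def whoami_inspect():
    # stack()[0] is this function's own frame; .function is its name
    return inspect.stack()[0].function
def callersname_inspect():
    # stack()[1] is the frame of whoever called this function
    return inspect.stack()[1].function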
from PIL import Image
import operator
import os
# the unused defaultdict import was dropped; ensure the output directory
# used by the save() calls below exists
os.makedirs("out", exist_ok=True)
image = Image.open("images\\test.png")
pixel_map = image.load()
initial_coordinate = tuple(int(x.strip())
for x in input("Initial coordinates: ").split(','))
pixel_list = []
directions = [(1, 0), (0, -1), (1, -1), (-1, 0), (-1, 1), (0, 1)]
def store_pixel(current_pixel):
if(
current_pixel[0] == image.size[0] or
current_pixel[1] == image.size[1] or
current_pixel[0] < 0 or
current_pixel[1] < 0 or
current_pixel in pixel_list or
pixel_map[current_pixel][3] == 0
):
return
pixel_list.append(current_pixel)
for direction in directions:
store_pixel(tuple(map(operator.add, current_pixel, direction)))
store_pixel(initial_coordinate)
print(pixel_list)
object_image = Image.new('RGBA', image.size, (0, 0, 0, 0))
object_image_pixel_map = object_image.load()
line_image = Image.new('RGBA', (1, len(pixel_list)), (0, 0, 0, 0))
line_image_pixel_map = line_image.load()
for index, pixel in enumerate(pixel_list):
object_image_pixel_map[pixel] = pixel_map[pixel]
line_image_pixel_map[0, index] = pixel_map[pixel]
object_image.save(f"out/{index}.png")
line_image.save(f"out/line.png")
| [
"PIL.Image.new",
"PIL.Image.open"
] | [((83, 113), 'PIL.Image.open', 'Image.open', (['"""images\\\\test.png"""'], {}), "('images\\\\test.png')\n", (93, 113), False, 'from PIL import Image\n'), ((857, 900), 'PIL.Image.new', 'Image.new', (['"""RGBA"""', 'image.size', '(0, 0, 0, 0)'], {}), "('RGBA', image.size, (0, 0, 0, 0))\n", (866, 900), False, 'from PIL import Image\n')] |
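One caveat with the recursive store_pixel above: a large connected region can exceed Python's default recursion limit (about 1000 frames). A sketch of the same traversal with an explicit stack, reusing `image`, `pixel_map`, `pixel_list` and `directions` exactly as defined in the file:
def store_pixel_iterative(start):
    stack = [start]
    seen = set()                          # O(1) membership, unlike the list scan
    while stack:
        x, y = stack.pop()
        if (x, y) in seen:
            continue
        if not (0 <= x < image.size[0] and 0 <= y < image.size[1]):
            continue
        if pixel_map[x, y][3] == 0:       # skip fully transparent pixels
            continue
        seen.add((x, y))
        pixel_list.append((x, y))
        for dx, dy in directions:
            stack.append((x + dx, y + dy))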
import logging
import os
from figcli.config.style.color import Color
from figcli.io.input import Input
from figcli.svcs.config_manager import ConfigManager
from figcli.config.aws import *
from figcli.config.constants import *
log = logging.getLogger(__name__)
class AWSConfig:
"""
Utility methods for interacting with AWSCLI resources, such as the ~/.aws/credentials and ~/.aws/config files
"""
def __init__(self, color: Color = Color(False)):
self.init_files()
self.c = color
self._config = ConfigManager(AWS_CONFIG_FILE_PATH)
self._creds = ConfigManager(AWS_CREDENTIALS_FILE_PATH)
@staticmethod
def init_files():
os.makedirs(os.path.dirname(AWS_CREDENTIALS_FILE_PATH), exist_ok=True)
if not os.path.exists(AWS_CREDENTIALS_FILE_PATH):
with open(AWS_CREDENTIALS_FILE_PATH, "w+") as file:
file.write("")
if not os.path.exists(AWS_CONFIG_FILE_PATH):
with open(AWS_CONFIG_FILE_PATH, "w+") as file:
file.write("")
def _is_temporary_session(self, profile_name: str):
if self._creds.has_section(profile_name):
return self._creds.has_option(profile_name, AWS_CFG_TOKEN)
return False
def _backup_section(self, section: str):
backup_name, backup_profile = f'{section}-figgy-backup', f'profile {section}-figgy-backup'
profile_name = f'profile {section}'
if self._creds.has_section(section):
for opt in self._creds.options(section):
self._creds.set_config(backup_name, opt, self._creds.get_option(section, opt))
if self._config.has_section(profile_name):
for opt in self._config.options(profile_name):
self._config.set_config(backup_profile, opt, self._config.get_option(profile_name, opt))
    def restore(self, profile_name: str):
        """
        Restore credentials previously backed up by Figgy.
        """
config_profile = f'profile {profile_name}'
backup_name, backup_profile = f'{profile_name}-figgy-backup', f'profile {profile_name}-figgy-backup'
creds_restored, config_restored = False, False
if self._creds.has_section(backup_name):
for opt in self._creds.options(backup_name):
self._creds.set_config(profile_name, opt, self._creds.get_option(backup_name, opt))
creds_restored = True
if self._config.has_section(backup_profile):
for opt in self._config.options(backup_profile):
self._config.set_config(config_profile, opt, self._config.get_option(backup_profile, opt))
config_restored = True
self._creds.delete(profile_name, AWS_CFG_TOKEN)
self._creds.save()
self._config.save()
if creds_restored and config_restored:
print(f"\n{self.c.fg_gr}Restoration successful!{self.c.rs}")
else:
print(f"\n{self.c.fg_yl}Unable to restore credentials. Profile: "
f"{self.c.fg_bl}[{backup_name}]{self.c.rs}{self.c.fg_yl} was not found in either the "
f"~/.aws/credentials or ~/.aws/config files.{self.c.rs}")
def write_credentials(self, access_key: str, secret_key: str, token: str, region: str,
profile_name: str = 'default') -> None:
"""
Overwrite credentials stored in the [default] profile in both ~/.aws/config and ~/.aws/credentials file
with the provided temporary credentials. This method also CREATES these files if they do not already exist.
"""
if not self._is_temporary_session(profile_name):
print(f"\n{self.c.fg_yl}Existing AWS Profile {self.c.fg_bl}[{profile_name}]{self.c.rs}{self.c.fg_yl} "
f"was found with long-lived access keys "
f"in file: {self.c.fg_bl}~/.aws/credentials{self.c.rs}{self.c.fg_yl}.\n"
f"To avoid overwriting these keys, they will be moved under profile: "
f"{self.c.rs}{self.c.fg_bl}[{profile_name}-figgy-backup]{self.c.rs}{self.c.fg_yl}.{self.c.rs}\n\n"
f"These old keys may be restored with: {self.c.fg_bl}`"
f"{CLI_NAME} iam restore`{self.c.rs}.")
self._backup_section(profile_name)
self._creds.set_config(profile_name, AWS_CFG_ACCESS_KEY_ID, access_key)
self._creds.set_config(profile_name, AWS_CFG_SECRET_KEY, secret_key)
self._creds.set_config(profile_name, AWS_CFG_TOKEN, token)
config_section = f'profile {profile_name}'
self._config.set_config(config_section, AWS_CFG_REGION, region)
self._config.set_config(config_section, AWS_CFG_OUTPUT, 'json')
print(f"\n\n{self.c.fg_gr}Successfully updated: {AWS_CREDENTIALS_FILE_PATH}{self.c.rs}")
print(f"{self.c.fg_gr}Successfully updated: {AWS_CONFIG_FILE_PATH}{self.c.rs}")
| [
"logging.getLogger",
"os.path.exists",
"figcli.svcs.config_manager.ConfigManager",
"os.path.dirname",
"figcli.config.style.color.Color"
] | [((234, 261), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (251, 261), False, 'import logging\n'), ((450, 462), 'figcli.config.style.color.Color', 'Color', (['(False)'], {}), '(False)\n', (455, 462), False, 'from figcli.config.style.color import Color\n'), ((537, 572), 'figcli.svcs.config_manager.ConfigManager', 'ConfigManager', (['AWS_CONFIG_FILE_PATH'], {}), '(AWS_CONFIG_FILE_PATH)\n', (550, 572), False, 'from figcli.svcs.config_manager import ConfigManager\n'), ((595, 635), 'figcli.svcs.config_manager.ConfigManager', 'ConfigManager', (['AWS_CREDENTIALS_FILE_PATH'], {}), '(AWS_CREDENTIALS_FILE_PATH)\n', (608, 635), False, 'from figcli.svcs.config_manager import ConfigManager\n'), ((697, 739), 'os.path.dirname', 'os.path.dirname', (['AWS_CREDENTIALS_FILE_PATH'], {}), '(AWS_CREDENTIALS_FILE_PATH)\n', (712, 739), False, 'import os\n'), ((772, 813), 'os.path.exists', 'os.path.exists', (['AWS_CREDENTIALS_FILE_PATH'], {}), '(AWS_CREDENTIALS_FILE_PATH)\n', (786, 813), False, 'import os\n'), ((926, 962), 'os.path.exists', 'os.path.exists', (['AWS_CONFIG_FILE_PATH'], {}), '(AWS_CONFIG_FILE_PATH)\n', (940, 962), False, 'import os\n')] |
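A usage sketch (an editor's addition): writing a temporary STS session into the default profile. The key material below is placeholder text, not real credentials, and AWSConfig is assumed importable from the module above.
aws_config = AWSConfig()
aws_config.write_credentials(
    access_key='AKIAEXAMPLEKEY',        # placeholder
    secret_key='exampleSecretKey',      # placeholder
    token='exampleSessionToken',        # placeholder
    region='us-east-1',
    profile_name='default',
)
aws_config.restore('default')           # later: put any backed-up keys back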
import itertools as it
import os
import pathlib
from collections.abc import MutableMapping
from typing import TYPE_CHECKING, Any, Iterator, Optional, Union
import xarray as xr
from netCDF4 import Dataset, Group, Variable
from eopf.exceptions import StoreNotOpenError
from eopf.product.store import EOProductStore
from eopf.product.utils import conv, decode_attrs, reverse_conv
if TYPE_CHECKING: # pragma: no cover
from eopf.product.core.eo_object import EOObject
class EONetCDFStore(EOProductStore):
"""
Store representation to access NetCDF format of the given URL
Parameters
----------
url: str
path url or the target store
Attributes
----------
url: str
path url or the target store
zlib: bool
enable/disable compression
complevel: int [1-9]
level of the compression
shuffle: bool
enable/disable hdf5 shuffle
"""
RESTRICTED_ATTR_KEY = ("_FillValue",)
# docstr-coverage: inherited
def __init__(self, url: str) -> None:
url = os.path.expanduser(url)
super().__init__(url)
self._root: Optional[Dataset] = None
self.zlib: bool = True
self.complevel: int = 4
self.shuffle: bool = True
def __getitem__(self, key: str) -> "EOObject":
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
from eopf.product.core import EOGroup, EOVariable
try:
obj = self._select_node(key)
except IndexError as e: # if key is invalid, netcdf4 raise IndexError ...
raise KeyError(e)
if self.is_group(key):
return EOGroup(attrs=decode_attrs(obj.__dict__))
return EOVariable(data=obj, attrs=decode_attrs(obj.__dict__), dims=obj.dimensions)
def __iter__(self) -> Iterator[str]:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
return it.chain(iter(self._root.groups), iter(self._root.variables))
def __len__(self) -> int:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
return len(self._root.groups) + len(self._root.variables)
def __setitem__(self, key: str, value: "EOObject") -> None:
from eopf.product.core import EOGroup, EOVariable
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
if isinstance(value, EOGroup):
self._root.createGroup(key)
self.write_attrs(key, value.attrs)
elif isinstance(value, EOVariable):
# Recover / create dimensions from target product
for idx, dim in enumerate(value.dims):
if dim not in self._root.dimensions:
self._root.createDimension(dim, size=value._data.shape[idx])
if len(self._root.dimensions[dim]) != value._data.shape[idx]:
raise ValueError(
"Netdf4 format does not support mutiples dimensions with the same name and different size.",
)
# Create and write EOVariable
variable = self._root.createVariable(
key,
value._data.values[:].dtype,
dimensions=value.dims,
zlib=self.zlib,
complevel=self.complevel,
shuffle=self.shuffle,
)
self.write_attrs(key, value.attrs, value._data.values[:].dtype)
variable[:] = value._data.values
else:
raise TypeError("Only EOGroup and EOVariable can be set")
# docstr-coverage: inherited
def close(self) -> None:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
super().close()
self._root.close()
self._root = None
# docstr-coverage: inherited
@staticmethod
def guess_can_read(file_path: str) -> bool:
"""
Determines if a given file path can be read with the current store
Parameters
----------
file_path: str
Path to netCDF4 file
Return
------
Boolean
"""
return pathlib.Path(file_path).suffix in [".nc"]
# docstr-coverage: inherited
def is_group(self, path: str) -> bool:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
current_node = self._select_node(path)
return isinstance(current_node, (Group, Dataset))
# docstr-coverage: inherited
def is_variable(self, path: str) -> bool:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
current_node = self._select_node(path)
return isinstance(current_node, Variable)
# docstr-coverage: inherited
def iter(self, path: str) -> Iterator[str]:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
current_node = self._select_node(path)
return it.chain(iter(current_node.groups), iter(current_node.variables))
# docstr-coverage: inherited
def open(self, mode: str = "r", **kwargs: Any) -> None:
super().open()
# Overwrite compression / scale parameters if given by user
if "zlib" in kwargs:
self.zlib = bool(kwargs.get("zlib"))
kwargs.pop("zlib")
if "complevel" in kwargs:
self.complevel = int(str(kwargs.get("complevel")))
kwargs.pop("complevel")
if "shuffle" in kwargs:
self.shuffle = bool(kwargs.get("shuffle"))
kwargs.pop("shuffle")
self._root = Dataset(self.url, mode, **kwargs)
def write_attrs(self, group_path: str, attrs: MutableMapping[str, Any] = {}, data_type: Any = int) -> None:
"""
This method is used to update attributes in the store
Raises
------
StoreNotOpenError
If the store is closed
"""
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
current_node = self._select_node(group_path)
from json import dumps
from numbers import Number
conv_attr: MutableMapping[str, Any] = {}
for attr, value in attrs.items():
if attr not in self.RESTRICTED_ATTR_KEY:
if isinstance(value, Number):
conv_attr[attr] = value
else:
conv_attr[attr] = dumps(conv(value))
else:
if type(value) is not data_type:
conv_attr[attr] = reverse_conv(data_type, value)
else:
conv_attr[attr] = value
current_node.setncatts(conv_attr)
def _select_node(self, key: str) -> Union[Dataset, Group, Variable]:
"""Retrieve and return the netcdf4 object corresponding to the node at the given path
Returns
----------
Union of Dataset, Group, Variable
Raises
------
StoreNotOpenError
If the store is closed
"""
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
if key in ["/", ""]:
return self._root
return self._root[key]
class EONetcdfStringToTimeAccessor(EOProductStore):
"""
Store representation to access NetCDF date time format of the given URL
Parameters
----------
url: str
path url or the target store
"""
# docstr-coverage: inherited
def __init__(self, url: str) -> None:
url = os.path.expanduser(url)
super().__init__(url)
self._root = None
def __getitem__(self, key: str) -> "EOObject":
import pandas as pd
from eopf.product.core import EOVariable
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
# convert unix start time to date time format
time_da = self._root.get(key)
start = pd.to_datetime("1970-1-1T0:0:0.000000Z")
end = pd.to_datetime(time_da)
# compute and convert the time difference into microseconds
time_delta = (end - start) // pd.Timedelta("1microsecond")
# create coresponding attributes
attributes = {}
attributes["unit"] = "microseconds since 1970-1-1T0:0:0.000000Z"
attributes["standard_name"] = "time"
if key == "ANX_time":
attributes["long_name"] = "Time of ascending node crossing in UTC"
elif key == "calibration_time":
attributes["long_name"] = "Time of calibration in UTC"
# create an EOVariable and return it
eov: EOVariable = EOVariable(data=time_delta, attrs=attributes)
return eov
def __iter__(self) -> Iterator[str]:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
yield from ()
def __len__(self) -> int:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
return 1
def __setitem__(self, key: str, value: "EOObject") -> None:
from eopf.product.core import EOVariable
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
# set the data
if not isinstance(value, EOVariable):
raise TypeError(f"The value {key} must be an EOVariable")
self._check_node(key)
self._root[key] = value._data
# set the attrs of the value
self.write_attrs(key, value.attrs)
# write to netcdf
self._root.to_netcdf(self.url)
# docstr-coverage: inherited
def close(self) -> None:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
super().close()
self._root.close()
self._root = None
# docstr-coverage: inherited
def is_group(self, path: str) -> bool:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
return True
# docstr-coverage: inherited
def is_variable(self, path: str) -> bool:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
return False
# docstr-coverage: inherited
def iter(self, path: str) -> Iterator[str]:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
self._check_node(path)
return iter([])
# docstr-coverage: inherited
def open(self, mode: str = "r", **kwargs: Any) -> None:
super().open()
self._root = xr.open_dataset(self.url, mode=mode)
# docstr-coverage: inherited
def write_attrs(self, group_path: str, attrs: MutableMapping[str, Any] = {}) -> None:
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
self._check_node(group_path)
self._root.attrs.update(attrs)
def _check_node(self, key: str) -> Union[Dataset, Group, Variable]:
"""Check if the key exists, only top level is used
Returns
----------
Union of Dataset, Group, Variable
Raises
------
StoreNotOpenError
If the store is closed
KeyError
If the key does not exist
"""
if self._root is None:
raise StoreNotOpenError("Store must be open before access to it")
if key not in ["/", ""]:
raise KeyError(f"{key} does not exist")
| [
"os.path.expanduser",
"eopf.product.utils.decode_attrs",
"pathlib.Path",
"pandas.Timedelta",
"netCDF4.Dataset",
"eopf.product.utils.reverse_conv",
"eopf.product.core.EOVariable",
"eopf.product.utils.conv",
"xarray.open_dataset",
"pandas.to_datetime",
"eopf.exceptions.StoreNotOpenError"
] | [((1049, 1072), 'os.path.expanduser', 'os.path.expanduser', (['url'], {}), '(url)\n', (1067, 1072), False, 'import os\n'), ((5806, 5839), 'netCDF4.Dataset', 'Dataset', (['self.url', 'mode'], {}), '(self.url, mode, **kwargs)\n', (5813, 5839), False, 'from netCDF4 import Dataset, Group, Variable\n'), ((7784, 7807), 'os.path.expanduser', 'os.path.expanduser', (['url'], {}), '(url)\n', (7802, 7807), False, 'import os\n'), ((8213, 8253), 'pandas.to_datetime', 'pd.to_datetime', (['"""1970-1-1T0:0:0.000000Z"""'], {}), "('1970-1-1T0:0:0.000000Z')\n", (8227, 8253), True, 'import pandas as pd\n'), ((8268, 8291), 'pandas.to_datetime', 'pd.to_datetime', (['time_da'], {}), '(time_da)\n', (8282, 8291), True, 'import pandas as pd\n'), ((8899, 8944), 'eopf.product.core.EOVariable', 'EOVariable', ([], {'data': 'time_delta', 'attrs': 'attributes'}), '(data=time_delta, attrs=attributes)\n', (8909, 8944), False, 'from eopf.product.core import EOVariable\n'), ((10922, 10958), 'xarray.open_dataset', 'xr.open_dataset', (['self.url'], {'mode': 'mode'}), '(self.url, mode=mode)\n', (10937, 10958), True, 'import xarray as xr\n'), ((1347, 1406), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (1364, 1406), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((1909, 1968), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (1926, 1968), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((2127, 2186), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (2144, 2186), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((2427, 2486), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (2444, 2486), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((3800, 3859), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (3817, 3859), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((4464, 4523), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (4481, 4523), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((4759, 4818), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (4776, 4818), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((5048, 5107), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (5065, 5107), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((6180, 6239), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (6197, 6239), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((7317, 7376), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", 
(7334, 7376), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((8044, 8103), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (8061, 8103), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((8398, 8426), 'pandas.Timedelta', 'pd.Timedelta', (['"""1microsecond"""'], {}), "('1microsecond')\n", (8410, 8426), True, 'import pandas as pd\n'), ((9055, 9114), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (9072, 9114), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((9217, 9276), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (9234, 9276), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((9458, 9517), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (9475, 9517), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((9983, 10042), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (10000, 10042), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((10246, 10305), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (10263, 10305), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((10456, 10515), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (10473, 10515), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((10669, 10728), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (10686, 10728), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((11132, 11191), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (11149, 11191), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((11686, 11745), 'eopf.exceptions.StoreNotOpenError', 'StoreNotOpenError', (['"""Store must be open before access to it"""'], {}), "('Store must be open before access to it')\n", (11703, 11745), False, 'from eopf.exceptions import StoreNotOpenError\n'), ((1768, 1794), 'eopf.product.utils.decode_attrs', 'decode_attrs', (['obj.__dict__'], {}), '(obj.__dict__)\n', (1780, 1794), False, 'from eopf.product.utils import conv, decode_attrs, reverse_conv\n'), ((4295, 4318), 'pathlib.Path', 'pathlib.Path', (['file_path'], {}), '(file_path)\n', (4307, 4318), False, 'import pathlib\n'), ((1698, 1724), 'eopf.product.utils.decode_attrs', 'decode_attrs', (['obj.__dict__'], {}), '(obj.__dict__)\n', (1710, 1724), False, 'from eopf.product.utils import conv, decode_attrs, reverse_conv\n'), ((6778, 6808), 'eopf.product.utils.reverse_conv', 'reverse_conv', (['data_type', 'value'], {}), '(data_type, value)\n', (6790, 6808), False, 'from eopf.product.utils import conv, decode_attrs, reverse_conv\n'), ((6660, 6671), 'eopf.product.utils.conv', 'conv', (['value'], {}), 
'(value)\n', (6664, 6671), False, 'from eopf.product.utils import conv, decode_attrs, reverse_conv\n')] |
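A hedged lifecycle sketch for EONetCDFStore, using only methods shown above: 'measurements.nc' is a hypothetical file, and guess_can_read only inspects the suffix, so open() still fails if the file does not exist.
store = EONetCDFStore('measurements.nc')
if EONetCDFStore.guess_can_read('measurements.nc'):
    store.open(mode='r')
    for name in store:                  # top-level groups and variables
        kind = 'group' if store.is_group(name) else 'variable'
        print(name, kind)
    store.close()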
'''
This module contains a function that lists both
directories and files as trees: its branches have
lines, and indentation showing the depth of each
directory under a given root folder.
'''
# only arvore may be imported from outside:
__all__ = ['arvore']
# ********* libraries **********
from os import listdir, chdir, system
from random import randint
import string
from sys import platform
# ************* data ***********
# Temp-file location, chosen according to the running platform.
# (This block was previously disabled because its prints produced unwanted
# output; it is restored here without the prints, since the functions
# below need arqBuffer.)
buffer = 'temporario.txt'
if platform == 'win32':
    # on Windows.
    caminho = 'C:\\Users\\SAVIO\\AppData\\Local\\Temp'
    arqBuffer = caminho + '\\' + buffer
elif platform == 'linux':
    # on Linux.
    caminho = '/tmp'
    arqBuffer = caminho + '/' + buffer
# create the buffer file if it does not exist yet.
try:
    open(arqBuffer, mode='x').close()
except FileExistsError:
    pass
# ************ functions ****************
# Builds a tree by printing each directory to the buffer file
# with a given indentation, simulating a visual hierarchy.
def trilha_dirs(caminho):
    # list of directories under this root.
    dirs = []
    #print(listdir(path=caminho))
    for pasta in listdir(path = caminho):
        # try to access the entry as a directory; if that
        # fails in any way, conclude it is not a directory.
        try:
            if platform == 'win32':
                listdir(caminho+'\\'+pasta)
            elif platform == 'linux':
                listdir(caminho+'/'+pasta)
            dirs.append(pasta)
        except: pass
        #print('"%s" is not a directory, nor an empty one.' % pasta)
    # size of the indentation step.
    espacos = 2
    # 0x20 is the space character in hexadecimal.
    recuo = (chr(0x20) * espacos) * trilha_dirs.profundidade
    # list each folder as a branch.
    for d in dirs:
        # depth limit, set to 8.
        if trilha_dirs.profundidade > 8:
            trilha_dirs.profundidade = 0
            continue
        # if there are subdirectories, recurse into them.
        if platform == 'win32':
            novo_caminho = caminho + '\\' + d
        elif platform == 'linux':
            novo_caminho = caminho + '/' + d
        # truncated label.
        texto_limitado = '%s|__ "%s [...]"' % (recuo, d[0:20])
        # full label.
        texto = '%s|__ "%s"' % (recuo, d)
        if len(listdir(novo_caminho)) > 0:
            if len(d) > 20: print(texto_limitado,file=open(arqBuffer,'a'))
            else: print(texto,file=open(arqBuffer,'a'))
            # one indentation step further for each depth level.
            trilha_dirs.profundidade += 1
            trilha_dirs(novo_caminho)
        else:
            if len(d) > 20: print(texto_limitado, file=open(arqBuffer,'a'))
            else: print(texto,file=open(arqBuffer, 'a'))
    # return to the formatting of the first directory:
    # reset the indentation to zero.
    trilha_dirs.profundidade = 0
# generates random strings with the ".tmp" extension.
def gera_str_aleatoria(comprimento):
    x, Str = 1,''
    while x <= comprimento:
        opcoes = [string.ascii_uppercase, string.ascii_lowercase,
                string.digits]
        escolha = opcoes[randint(0,len(opcoes)-1)]
        Str += escolha[randint(0,len(escolha)-1)]
        x+=1
    return 'temporario_' + Str + '.tmp'
# returns a string with the whole directory tree,
# down to a certain depth, rendered as a tree.
def arvore(caminho, mostra_arquivos=False):
    # when showing files is disabled,
    # which is the default.
    if not mostra_arquivos:
        # store the root in the temp file as well.
        print(caminho,file=open(arqBuffer,'a'))
        # set the depth to one, since
        # the root path is depth zero.
        trilha_dirs.profundidade = 1
        # run the procedure, building
        # the tree of paths.
        trilha_dirs(caminho)
    # filter the file's contents.
    conteudo = open(arqBuffer, 'r').read()
    # drop the trailing blank line.
    conteudo = conteudo.split('\n')
    conteudo.pop(-1)
    conteudo = '\n'.join(conteudo[0:])
    # give the temp file a new name so it does
    # not interfere with the next run.
    if platform == 'win32':
        nome_antigo = arqBuffer.split('\\')[-1]
    elif platform == 'linux':
        nome_antigo = arqBuffer.split('/')[-1]
    novo_nome = gera_str_aleatoria(21)
    #print(nome_antigo, ' ==> ', novo_nome)
    if platform == 'win32':
        system('ren %s %s' % (arqBuffer, novo_nome))
    elif platform == 'linux':
        system('mv %s /tmp/%s' % (arqBuffer, novo_nome))
    # return the string with the printed tree.
    return ' ᐅ ' + conteudo
# turns a string into a matrix, according to
# its formatting.
def matriciar_str(_str):
    # all lines.
    linhas = _str.split('\n')
    # length of the longest line.
    n = max(len(s) for s in linhas)
    # Build the matrix. For uniformity, the blank
    # remainder of strings that do not fill the
    # row is padded with diaereses.
    matriz = [list(s + '¨' * (n-len(s))) for s in linhas]
    # also fill in the remaining blank
    # spaces.
    for i in range(len(matriz)):
        for j in range(len(matriz[0])):
            if matriz[i][j].isspace():
                matriz[i][j] = '¨'
    return matriz
# prints the matrix, to visualize what is going on.
def imprime_matriz(matriz):
    print("showing how the matrix is coming along:")
    m, n = len(matriz), len(matriz[1])
    for i in range(m):
        for j in range(n):
            print(matriz[i][j],end='')
        print('')
    print('\t\t---- ---- END --- ----')
# está função conserta os demais galhos.
def conserta(_str):
matriz = matriciar_str(_str)
# dimensões da matriz.
(m,n) = len(matriz), len(matriz[0])
# marcando colunas contendo mais de três
# barras verticais.
mais = {j:0 for j in range(n)}
for j in range(n):
for i in range(m):
if matriz[i][j] == '|':
mais[j] += 1
for coluna in mais.keys():
def posicao_valida(i, j):
palavra_ij = 0
for j in range(n):
if matriz[i][j].isascii():
palavra_ij = j
if mais[coluna] >= 2:
for i in range(m):
if matriz[i][coluna] == '¨':
matriz[i][coluna] = '|'
    # cleanup pass: turn the padding back into spaces.
for i in range(m):
for j in range(n):
if matriz[i][j] == '¨':
matriz[i][j] = ' '
return matriz
# execution:
if __name__ == '__main__':
#print(gera_str_aleatoria(15))
caminho = "/home/savio/Documents"
str_arv = arvore(caminho)
print(str_arv)
imprime_matriz(matriciar_str(str_arv))
imprime_matriz(conserta(str_arv))
imprime_matriz(conserta(arvore('/etc')))
#print(arvore('/etc')) | [
"os.system",
"os.listdir"
] | [((1489, 1510), 'os.listdir', 'listdir', ([], {'path': 'caminho'}), '(path=caminho)\n', (1496, 1510), False, 'from os import listdir, chdir, system\n'), ((4922, 4966), 'os.system', 'system', (["('ren %s %s' % (arqBuffer, novo_nome))"], {}), "('ren %s %s' % (arqBuffer, novo_nome))\n", (4928, 4966), False, 'from os import listdir, chdir, system\n'), ((1713, 1744), 'os.listdir', 'listdir', (["(caminho + '\\\\' + pasta)"], {}), "(caminho + '\\\\' + pasta)\n", (1720, 1744), False, 'from os import listdir, chdir, system\n'), ((2727, 2748), 'os.listdir', 'listdir', (['novo_caminho'], {}), '(novo_caminho)\n', (2734, 2748), False, 'from os import listdir, chdir, system\n'), ((5009, 5057), 'os.system', 'system', (["('mv %s /tmp/%s' % (arqBuffer, novo_nome))"], {}), "('mv %s /tmp/%s' % (arqBuffer, novo_nome))\n", (5015, 5057), False, 'from os import listdir, chdir, system\n'), ((1795, 1825), 'os.listdir', 'listdir', (["(caminho + '/' + pasta)"], {}), "(caminho + '/' + pasta)\n", (1802, 1825), False, 'from os import listdir, chdir, system\n')] |
from __future__ import absolute_import
from contextlib import contextmanager
from multiprocessing import TimeoutError
import signal
import datetime
import os
import subprocess
import time
import urllib
import zipfile
import shutil
import pytest
from .adb import ADB
from ..logger import Logger
def get_center(bounds):
"""
Returns given element center coords::
from magneto.utils import get_center
element = self.magneto(text='Foo')
(x, y) = get_center(element.info['bounds'])
:param dict bounds: Element position coordinates (top, right, bottom, left)
:return: x and y coordinates of element center
"""
x = bounds['right'] - ((bounds['right'] - bounds['left']) / 2)
y = bounds['bottom'] - ((bounds['bottom'] - bounds['top']) / 2)
return x, y
def get_config(attr, default=None):
"""
Allows access to config parameters::
from magneto.utils import get_config
package = get_config('--app-package')
:param str attr: Command line argument
:return: Requested config value
"""
# must have this check to avoid sphinx-autodoc exception
    if getattr(pytest, 'config', None) is not None:
return pytest.config.getoption(attr) or default
else:
return default
@contextmanager
def timewarp(timedelta_):
now = datetime.datetime.now()
future = now + timedelta_
ADB.set_datetime(future)
try:
yield
finally:
now = datetime.datetime.now()
ADB.set_datetime(now)
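# Usage sketch (assumed caller code): run a block with the device clock
# shifted into the future, restoring the real time afterwards:
#
#     with timewarp(datetime.timedelta(days=2)):
#         ...  # assertions that depend on the future time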
class Timeout():
"""
Allows polling a function till success or timeout::
import time
from magneto.utils import Timeout
result = False
with Timeout(seconds=5):
while not result:
result = some_function()
time.sleep(0.5)
:param integer seconds: Timeout value in seconds. Defaults to 1.
:param str error_message: Error message to display when timeout occurs. Defaults to 'Timeout'.
"""
def __init__(self, seconds=1, error_message='Timeout'):
self.seconds = seconds or 1
self.error_message = error_message
def handle_timeout(self, signum, frame):
Logger.debug('Timeout reached {} seconds limit'.format(self.seconds))
raise TimeoutError(self.error_message)
def __enter__(self):
Logger.debug('Timeout started for {} seconds'.format(self.seconds))
signal.signal(signal.SIGALRM, self.handle_timeout)
signal.alarm(self.seconds)
def __exit__(self, type, value, traceback):
Logger.debug('Timeout stopped.')
signal.alarm(0)
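# Note: Timeout relies on signal.SIGALRM, which exists only on Unix and only
# works in the main thread, so this helper will not function on Windows.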
def unlock_device():
"""
Powers on device and unlocks it.
"""
# read device screen state
p = ADB.exec_cmd("shell 'if [ -z $(dumpsys power | grep mScreenOn=true) ]; then echo off; else echo on;fi'",
stdout=subprocess.PIPE)
device_screen = p.stdout.readline().strip('\r\n')
if device_screen == 'off':
# power on device
ADB.exec_cmd('shell input keyevent 26').wait()
# unlock device
ADB.exec_cmd('shell input keyevent 82').wait()
def wait_for_device():
"""
Wait for device to boot. 1 minute timeout.
"""
wait_for_device_cmd = 'wait-for-device shell getprop sys.boot_completed'
p = ADB.exec_cmd(wait_for_device_cmd, stdout=subprocess.PIPE)
boot_completed = p.stdout.readline().strip('\r\n')
try:
with Timeout(seconds=60):
while boot_completed != '1':
time.sleep(1)
p = ADB.exec_cmd(wait_for_device_cmd, stdout=subprocess.PIPE)
boot_completed = p.stdout.readline().strip('\r\n')
Logger.debug('Waiting for device to finish booting (adb shell getprop sys.boot_completed)')
except TimeoutError:
Logger.debug('Timed out while waiting for sys.boot_completed, there might not be a default launcher set, trying to run anyway')
pass
class Bootstrap(object):
_map = {
'no_app': 'https://github.com/EverythingMe/magneto-init/archive/master.zip',
'calc': 'https://github.com/EverythingMe/magneto-demo-calc/archive/master.zip'
}
def __init__(self, name):
if name not in self._map:
raise Exception('{} not recognized'.format(name))
filename, headers = urllib.urlretrieve(self._map[name])
with zipfile.ZipFile(filename) as zip_file:
rootdir = zip_file.namelist()[0]
for member in zip_file.namelist()[1:]:
if not os.path.basename(member):
# create dir from zipfile
os.mkdir(os.path.join(os.path.curdir, member.replace(rootdir, '')))
else:
# copy file (taken from zipfile's extract)
source = zip_file.open(member)
target = file(os.path.join(os.path.curdir, member.replace(rootdir, '')), "wb")
with source, target:
shutil.copyfileobj(source, target)
| [
"multiprocessing.TimeoutError",
"signal.signal",
"shutil.copyfileobj",
"zipfile.ZipFile",
"urllib.urlretrieve",
"pytest.config.getoption",
"time.sleep",
"datetime.datetime.now",
"os.path.basename",
"signal.alarm"
] | [((1327, 1350), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1348, 1350), False, 'import datetime\n'), ((1461, 1484), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1482, 1484), False, 'import datetime\n'), ((2278, 2310), 'multiprocessing.TimeoutError', 'TimeoutError', (['self.error_message'], {}), '(self.error_message)\n', (2290, 2310), False, 'from multiprocessing import TimeoutError\n'), ((2421, 2471), 'signal.signal', 'signal.signal', (['signal.SIGALRM', 'self.handle_timeout'], {}), '(signal.SIGALRM, self.handle_timeout)\n', (2434, 2471), False, 'import signal\n'), ((2480, 2506), 'signal.alarm', 'signal.alarm', (['self.seconds'], {}), '(self.seconds)\n', (2492, 2506), False, 'import signal\n'), ((2605, 2620), 'signal.alarm', 'signal.alarm', (['(0)'], {}), '(0)\n', (2617, 2620), False, 'import signal\n'), ((4329, 4364), 'urllib.urlretrieve', 'urllib.urlretrieve', (['self._map[name]'], {}), '(self._map[name])\n', (4347, 4364), False, 'import urllib\n'), ((1199, 1228), 'pytest.config.getoption', 'pytest.config.getoption', (['attr'], {}), '(attr)\n', (1222, 1228), False, 'import pytest\n'), ((4379, 4404), 'zipfile.ZipFile', 'zipfile.ZipFile', (['filename'], {}), '(filename)\n', (4394, 4404), False, 'import zipfile\n'), ((3514, 3527), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3524, 3527), False, 'import time\n'), ((4537, 4561), 'os.path.basename', 'os.path.basename', (['member'], {}), '(member)\n', (4553, 4561), False, 'import os\n'), ((4997, 5031), 'shutil.copyfileobj', 'shutil.copyfileobj', (['source', 'target'], {}), '(source, target)\n', (5015, 5031), False, 'import shutil\n')] |
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import os
import sys
import errno
import logging
import platform
from mbed_lstools.main import create
from mbed_lstools.main import mbed_os_support
from mbed_lstools.main import mbed_lstools_os_info
class DetectOSTestCase(unittest.TestCase):
""" Test cases for host OS related functionality. Helpful during porting
"""
def setUp(self):
pass
def tearDown(self):
pass
def test_porting_mbed_lstools_os_info(self):
self.assertNotEqual(None, mbed_lstools_os_info())
def test_porting_mbed_os_support(self):
self.assertNotEqual(None, mbed_os_support())
def test_porting_create(self):
self.assertNotEqual(None, create())
def test_supported_os_name(self):
os_names = ['Windows7', 'Ubuntu', 'LinuxGeneric', 'Darwin']
self.assertIn(mbed_os_support(), os_names)
def test_detect_os_support_ext(self):
os_info = (os.name,
platform.system(),
platform.release(),
platform.version(),
sys.platform)
self.assertEqual(os_info, mbed_lstools_os_info())
if __name__ == '__main__':
unittest.main()
| [
"platform.version",
"platform.release",
"platform.system",
"mbed_lstools.main.mbed_lstools_os_info",
"mbed_lstools.main.mbed_os_support",
"mbed_lstools.main.create",
"unittest.main"
] | [((1776, 1791), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1789, 1791), False, 'import unittest\n'), ((1104, 1126), 'mbed_lstools.main.mbed_lstools_os_info', 'mbed_lstools_os_info', ([], {}), '()\n', (1124, 1126), False, 'from mbed_lstools.main import mbed_lstools_os_info\n'), ((1207, 1224), 'mbed_lstools.main.mbed_os_support', 'mbed_os_support', ([], {}), '()\n', (1222, 1224), False, 'from mbed_lstools.main import mbed_os_support\n'), ((1296, 1304), 'mbed_lstools.main.create', 'create', ([], {}), '()\n', (1302, 1304), False, 'from mbed_lstools.main import create\n'), ((1435, 1452), 'mbed_lstools.main.mbed_os_support', 'mbed_os_support', ([], {}), '()\n', (1450, 1452), False, 'from mbed_lstools.main import mbed_os_support\n'), ((1554, 1571), 'platform.system', 'platform.system', ([], {}), '()\n', (1569, 1571), False, 'import platform\n'), ((1592, 1610), 'platform.release', 'platform.release', ([], {}), '()\n', (1608, 1610), False, 'import platform\n'), ((1631, 1649), 'platform.version', 'platform.version', ([], {}), '()\n', (1647, 1649), False, 'import platform\n'), ((1719, 1741), 'mbed_lstools.main.mbed_lstools_os_info', 'mbed_lstools_os_info', ([], {}), '()\n', (1739, 1741), False, 'from mbed_lstools.main import mbed_lstools_os_info\n')] |
from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King
class TestPiece:
def test_sum(self):
groups = ((Pawn(), Knight(), Bishop()),
(Knight(), Bishop(), Queen()),
(Pawn(), Pawn(), Pawn(), Pawn()))
actual_sums = tuple(map(sum, groups))
expected_sums = (7, 15, 4)
for actual_sum, expected_sum in zip(actual_sums, expected_sums):
assert actual_sum == expected_sum
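    # The expected sums pin down conventional piece values: Pawn=1 and
    # Knight=Bishop=3 give 7 and 4 for the outer groups, and with Queen=9
    # the middle group totals 15.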
def test_compare(self):
assert Knight() == Knight()
assert Bishop() == Knight()
assert King() <= King()
assert Bishop() <= Rook()
assert King() > Queen()
assert Pawn() < Knight()
assert Queen() != Rook()
| [
"chess.pieces.Bishop",
"chess.pieces.Rook",
"chess.pieces.Queen",
"chess.pieces.Knight",
"chess.pieces.Pawn",
"chess.pieces.King"
] | [((515, 523), 'chess.pieces.Knight', 'Knight', ([], {}), '()\n', (521, 523), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((527, 535), 'chess.pieces.Knight', 'Knight', ([], {}), '()\n', (533, 535), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((551, 559), 'chess.pieces.Bishop', 'Bishop', ([], {}), '()\n', (557, 559), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((563, 571), 'chess.pieces.Knight', 'Knight', ([], {}), '()\n', (569, 571), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((587, 593), 'chess.pieces.King', 'King', ([], {}), '()\n', (591, 593), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((597, 603), 'chess.pieces.King', 'King', ([], {}), '()\n', (601, 603), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((619, 627), 'chess.pieces.Bishop', 'Bishop', ([], {}), '()\n', (625, 627), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((631, 637), 'chess.pieces.Rook', 'Rook', ([], {}), '()\n', (635, 637), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((653, 659), 'chess.pieces.King', 'King', ([], {}), '()\n', (657, 659), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((662, 669), 'chess.pieces.Queen', 'Queen', ([], {}), '()\n', (667, 669), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((685, 691), 'chess.pieces.Pawn', 'Pawn', ([], {}), '()\n', (689, 691), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((694, 702), 'chess.pieces.Knight', 'Knight', ([], {}), '()\n', (700, 702), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((718, 725), 'chess.pieces.Queen', 'Queen', ([], {}), '()\n', (723, 725), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((729, 735), 'chess.pieces.Rook', 'Rook', ([], {}), '()\n', (733, 735), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((127, 133), 'chess.pieces.Pawn', 'Pawn', ([], {}), '()\n', (131, 133), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((135, 143), 'chess.pieces.Knight', 'Knight', ([], {}), '()\n', (141, 143), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((145, 153), 'chess.pieces.Bishop', 'Bishop', ([], {}), '()\n', (151, 153), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((175, 183), 'chess.pieces.Knight', 'Knight', ([], {}), '()\n', (181, 183), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((185, 193), 'chess.pieces.Bishop', 'Bishop', ([], {}), '()\n', (191, 193), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((195, 202), 'chess.pieces.Queen', 'Queen', ([], {}), '()\n', (200, 202), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((224, 230), 'chess.pieces.Pawn', 'Pawn', ([], {}), '()\n', (228, 230), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((232, 238), 'chess.pieces.Pawn', 'Pawn', ([], {}), '()\n', (236, 238), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((240, 246), 'chess.pieces.Pawn', 'Pawn', ([], {}), '()\n', (244, 246), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n'), ((248, 254), 'chess.pieces.Pawn', 'Pawn', ([], {}), '()\n', (252, 254), False, 'from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King\n')] |
# Purpose: Standard definitions
# Created: 08.07.2015
# Copyright (c) 2015-2020, <NAME>
# License: MIT License
# pattern type: predefined (1)
from ezdxf.math import Vec2
PATTERN_NEW = {
"ANSI31": [[45.0, (0.0, 0.0), (-2.2627, 2.2627), []]],
"ANSI32": [
[45.0, (0.0, 0.0), (-6.7882, 6.7882), []],
[45.0, (4.5255, 0.0), (-6.7882, 6.7882), []],
],
"ANSI33": [
[45.0, (512.0, 0.0), (-4.5255, 4.5255), []],
[45.0, (516.5255, 0.0), (-4.5255, 4.5255), [3.2, -1.6]],
],
"ANSI34": [
[45.0, (0.0, 0.0), (-13.5765, 13.5765), []],
[45.0, (4.5255, 0.0), (-13.5765, 13.5765), []],
[45.0, (9.051, 0.0), (-13.5765, 13.5765), []],
[45.0, (13.5765, 0.0), (-13.5765, 13.5765), []],
],
"ANSI35": [
[45.0, (-1024.0, -256.0), (-4.5255, 4.5255), []],
[45.0, (-1019.4745, -256.0), (-4.5255, 4.5255), [8.0, -1.6, 0.0, -1.6]],
],
"ANSI36": [[45.0, (-1024.0, -256.0), (1.6971, 6.2225), [8.0, -1.6, 0.0, -1.6]]],
"ANSI37": [
[45.0, (0.0, 0.0), (-2.2627, 2.2627), []],
[135.0, (0.0, 0.0), (-2.2627, -2.2627), []],
],
"ANSI38": [
[45.0, (0.0, 0.0), (-2.2627, 2.2627), []],
[135.0, (0.0, 0.0), (-6.7882, 2.2627), [8.0, -4.8]],
],
"ACAD_ISO02W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68]]],
"ACAD_ISO03W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -46.08]]],
"ACAD_ISO04W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [61.44, -7.68, 1.28, -7.68]]],
"ACAD_ISO05W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [61.44, -7.68, 1.28, -7.68, 1.28, -7.68]]
],
"ACAD_ISO06W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [61.44, -7.68, 1.28, -7.68, 1.28, -16.64]],
[0.0, (0.0, 0.0), (0.0, 12.8), [-87.04, 1.28, -7.68]],
],
"ACAD_ISO07W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [1.28, -7.68]]],
"ACAD_ISO08W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [61.44, -7.68, 15.36, -7.68]]],
"ACAD_ISO09W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [61.44, -7.68, 15.36, -7.68, 15.36, -7.68]]
],
"ACAD_ISO10W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 1.28, -7.68]]],
"ACAD_ISO11W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 30.72, -7.68, 1.28, -7.68]]
],
"ACAD_ISO12W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 1.28, -7.68, 1.28, -7.68]]
],
"ACAD_ISO13W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 30.72, -7.68, 1.28, -16.64]],
[0.0, (0.0, 0.0), (0.0, 12.8), [-85.76, 1.28, -7.68]],
],
"ACAD_ISO14W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 1.28, -7.68, 1.28, -16.64]],
[0.0, (0.0, 0.0), (0.0, 12.8), [-56.32, 1.28, -7.68]],
],
"ACAD_ISO15W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 30.72, -7.68, 1.28, -25.6]],
[0.0, (0.0, 0.0), (0.0, 12.8), [-85.76, 1.28, -7.68, 1.28, -7.68]],
],
"ANGLE": [
[0.0, (0.0, 0.0), (0.0, 7.04), [5.12, -1.92]],
[90.0, (0.0, 0.0), (-7.04, 0.0), [5.12, -1.92]],
],
"AR-B816": [
[0.0, (0.0, 0.0), (0.0, 20.48), []],
[90.0, (0.0, 0.0), (-20.48, 20.48), [20.48, -20.48]],
],
"AR-B816C": [
[0.0, (0.0, 0.0), (20.48, 20.48), [40.0, -0.96]],
[0.0, (-20.48, 0.96), (20.48, 20.48), [40.0, -0.96]],
[90.0, (0.0, 0.0), (-20.48, 20.48), [-21.44, 19.52]],
[90.0, (-0.96, 0.0), (-20.48, 20.48), [-21.44, 19.52]],
],
"AR-B88": [
[0.0, (0.0, 0.0), (0.0, 20.48), []],
[90.0, (0.0, 0.0), (-10.24, 20.48), [20.48, -20.48]],
],
"AR-BRELM": [
[0.0, (0.0, 0.0), (0.0, 68.2752), [97.6, -4.8]],
[0.0, (0.0, 28.8), (0.0, 68.2752), [97.6, -4.8]],
[0.0, (25.6, 34.1376), (0.0, 68.2752), [46.4, -4.8]],
[0.0, (25.6, 62.9376), (0.0, 68.2752), [46.4, -4.8]],
[90.0, (0.0, 0.0), (-102.4, 0.0), [28.8, -39.4752]],
[90.0, (-4.8, 0.0), (-102.4, 0.0), [28.8, -39.4752]],
[90.0, (25.6, 34.1376), (-51.2, 0.0), [28.8, -39.4752]],
[90.0, (20.8, 34.1376), (-51.2, 0.0), [28.8, -39.4752]],
],
"AR-BRSTD": [
[0.0, (0.0, 0.0), (0.0, 68.2752), []],
[90.0, (0.0, 0.0), (-102.4, 68.2752), [68.2752, -68.2752]],
],
"AR-CONC": [
[50.0, (0.0, 0.0), (36.7237, -3.2129), [3.84, -42.24]],
[355.0, (0.0, 0.0), (-7.1041, 38.5122), [3.072, -33.792]],
[100.4514, (3.0603, -0.2677), (29.6197, 35.2993), [3.2635, -35.8985]],
[46.1842, (0.0, 10.24), (54.6428, -8.4746), [5.76, -63.36]],
[96.6356, (4.5536, 9.5338), (47.8547, 49.8749), [4.8952, -53.8477]],
[351.1842, (0.0, 10.24), (47.8547, 49.8749), [4.608, -50.688]],
[21.0, (5.12, 7.68), (30.5616, -20.6141), [3.84, -42.24]],
[326.0, (5.12, 7.68), (12.4577, 37.1277), [3.072, -33.792]],
[71.4514, (7.6668, 5.9622), (43.0194, 16.5136), [3.2635, -35.8985]],
[
37.5,
(0.0, 0.0),
(0.6226, 17.0442),
[0.0, -33.3824, 0.0, -34.304, 0.0, -33.92],
],
[
7.5,
(0.0, 0.0),
(13.4692, 20.1939),
[0.0, -19.5584, 0.0, -32.6144, 0.0, -12.928],
],
[
327.5,
(-11.4176, 0.0),
(27.3317, -1.1548),
[0.0, -12.8, 0.0, -39.936, 0.0, -52.992],
],
[
317.5,
(-16.5376, 0.0),
(29.8591, 5.1254),
[0.0, -16.64, 0.0, -26.5216, 0.0, -37.632],
],
],
"AR-HBONE": [
[45.0, (0.0, 0.0), (0.0, 28.9631), [61.44, -20.48]],
[135.0, (14.4815, 14.4815), (0.0, 28.9631), [61.44, -20.48]],
],
"AR-PARQ1": [
[90.0, (0.0, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (10.24, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (20.48, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (30.72, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (40.96, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (51.2, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (61.44, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[0.0, (0.0, 61.44), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 71.68), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 81.92), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 92.16), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 102.4), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 112.64), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 122.88), (61.44, -61.44), [61.44, -61.44]],
],
"AR-RROOF": [
[0.0, (0.0, 0.0), (56.32, 25.6), [384.0, -51.2, 128.0, -25.6]],
[0.0, (34.048, 12.8), (-25.6, 34.048), [76.8, -8.448, 153.6, -19.2]],
[0.0, (12.8, 21.76), (133.12, 17.152), [204.8, -35.84, 102.4, -25.6]],
],
"AR-RSHKE": [
[0.0, (0.0, 0.0), (65.28, 30.72), [15.36, -12.8, 17.92, -7.68, 23.04, -10.24]],
[0.0, (15.36, 1.28), (65.28, 30.72), [12.8, -48.64, 10.24, -15.36]],
[0.0, (46.08, -1.92), (65.28, 30.72), [7.68, -79.36]],
[90.0, (0.0, 0.0), (-21.76, 30.72), [29.44, -93.44]],
[90.0, (15.36, 0.0), (-21.76, 30.72), [28.8, -94.08]],
[90.0, (28.16, 0.0), (-21.76, 30.72), [26.88, -96.0]],
[90.0, (46.08, -1.92), (-21.76, 30.72), [29.44, -93.44]],
[90.0, (53.76, -1.92), (-21.76, 30.72), [29.44, -93.44]],
[90.0, (76.8, 0.0), (-21.76, 30.72), [28.16, -94.72]],
],
"AR-SAND": [
[37.5, (0.0, 0.0), (-1.6126, 49.3267), [0.0, -38.912, 0.0, -43.52, 0.0, -41.6]],
[
7.5,
(0.0, 0.0),
(45.3063, 72.2469),
[0.0, -20.992, 0.0, -35.072, 0.0, -13.44],
],
[
327.5,
(-31.488, 0.0),
(79.722, 0.1449),
[0.0, -12.8, 0.0, -46.08, 0.0, -60.16],
],
[
317.5,
(-31.488, 0.0),
(76.9568, 22.4685),
[0.0, -6.4, 0.0, -30.208, 0.0, -34.56],
],
],
"BOX": [
[90.0, (0.0, 0.0), (-25.6, 0.0), []],
[90.0, (6.4, 0.0), (-25.6, 0.0), []],
[0.0, (0.0, 0.0), (0.0, 25.6), [-6.4, 6.4]],
[0.0, (0.0, 6.4), (0.0, 25.6), [-6.4, 6.4]],
[0.0, (0.0, 12.8), (0.0, 25.6), [6.4, -6.4]],
[0.0, (0.0, 19.2), (0.0, 25.6), [6.4, -6.4]],
[90.0, (12.8, 0.0), (-25.6, 0.0), [6.4, -6.4]],
[90.0, (19.2, 0.0), (-25.6, 0.0), [6.4, -6.4]],
],
"BRASS": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[0.0, (0.0, 3.2), (0.0, 6.4), [3.2, -1.6]],
],
"BRICK": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[90.0, (0.0, 0.0), (-12.8, 0.0), [6.4, -6.4]],
[90.0, (6.4, 0.0), (-12.8, 0.0), [-6.4, 6.4]],
],
"BRSTONE": [
[0.0, (0.0, 0.0), (0.0, 8.448), []],
[90.0, (23.04, 0.0), (-12.8, 8.448), [8.448, -8.448]],
[90.0, (20.48, 0.0), (-12.8, 8.448), [8.448, -8.448]],
[0.0, (23.04, 1.408), (12.8, 8.448), [-23.04, 2.56]],
[0.0, (23.04, 2.816), (12.8, 8.448), [-23.04, 2.56]],
[0.0, (23.04, 4.224), (12.8, 8.448), [-23.04, 2.56]],
[0.0, (23.04, 5.632), (12.8, 8.448), [-23.04, 2.56]],
[0.0, (23.04, 7.04), (12.8, 8.448), [-23.04, 2.56]],
],
"CLAY": [
[0.0, (0.0, 0.0), (0.0, 4.8), []],
[0.0, (0.0, 0.8), (0.0, 4.8), []],
[0.0, (0.0, 1.6), (0.0, 4.8), []],
[0.0, (0.0, 3.2), (0.0, 4.8), [4.8, -3.2]],
],
"CORK": [
[0.0, (0.0, 0.0), (0.0, 3.2), []],
[135.0, (1.6, -1.6), (-6.4, -6.4), [4.5255, -4.5255]],
[135.0, (2.4, -1.6), (-6.4, -6.4), [4.5255, -4.5255]],
[135.0, (3.2, -1.6), (-6.4, -6.4), [4.5255, -4.5255]],
],
"CROSS": [
[0.0, (0.0, 0.0), (6.4, 6.4), [3.2, -9.6]],
[90.0, (1.6, -1.6), (-6.4, 6.4), [3.2, -9.6]],
],
"DASH": [[0.0, (0.0, 0.0), (3.2, 3.2), [3.2, -3.2]]],
"DOLMIT": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[45.0, (0.0, 0.0), (-12.8, 12.8), [9.051, -18.1019]],
],
"DOTS": [[0.0, (0.0, 0.0), (0.8, 1.6), [0.0, -1.6]]],
"EARTH": [
[0.0, (0.0, 0.0), (6.4, 6.4), [6.4, -6.4]],
[0.0, (0.0, 2.4), (6.4, 6.4), [6.4, -6.4]],
[0.0, (0.0, 4.8), (6.4, 6.4), [6.4, -6.4]],
[90.0, (0.8, 5.6), (-6.4, 6.4), [6.4, -6.4]],
[90.0, (3.2, 5.6), (-6.4, 6.4), [6.4, -6.4]],
[90.0, (5.6, 5.6), (-6.4, 6.4), [6.4, -6.4]],
],
"ESCHER": [
[60.0, (0.0, 0.0), (-30.72, -0.0), [28.16, -2.56]],
[180.0, (0.0, 0.0), (15.36, -26.6043), [28.16, -2.56]],
[300.0, (0.0, 0.0), (30.72, -0.0), [28.16, -2.56]],
[60.0, (2.56, 0.0), (-30.72, -0.0), [5.12, -25.6]],
[300.0, (2.56, 0.0), (30.72, -0.0), [5.12, -25.6]],
[60.0, (-1.28, 2.217), (-30.72, -0.0), [5.12, -25.6]],
[180.0, (-1.28, 2.217), (15.36, -26.6043), [5.12, -25.6]],
[300.0, (-1.28, -2.217), (30.72, -0.0), [5.12, -25.6]],
[180.0, (-1.28, -2.217), (15.36, -26.6043), [5.12, -25.6]],
[60.0, (-10.24, 0.0), (-30.72, -0.0), [5.12, -25.6]],
[300.0, (-10.24, 0.0), (30.72, -0.0), [5.12, -25.6]],
[60.0, (5.12, -8.8681), (-30.72, -0.0), [5.12, -25.6]],
[180.0, (5.12, -8.8681), (15.36, -26.6043), [5.12, -25.6]],
[300.0, (5.12, 8.8681), (30.72, -0.0), [5.12, -25.6]],
[180.0, (5.12, 8.8681), (15.36, -26.6043), [5.12, -25.6]],
[0.0, (5.12, 4.4341), (-15.36, 26.6043), [17.92, -12.8]],
[0.0, (5.12, -4.4341), (-15.36, 26.6043), [17.92, -12.8]],
[120.0, (1.28, 6.6511), (-30.72, 0.0), [17.92, -12.8]],
[120.0, (-6.4, 2.217), (-30.72, 0.0), [17.92, -12.8]],
[240.0, (-6.4, -2.217), (15.36, -26.6043), [17.92, -12.8]],
[240.0, (1.28, -6.6511), (15.36, -26.6043), [17.92, -12.8]],
],
"FLEX": [
[0.0, (0.0, 0.0), (0.0, 6.4), [6.4, -6.4]],
[45.0, (6.4, 0.0), (0.0, 6.4), [1.6, -5.851, 1.6, -9.051]],
],
"GOST_GLASS": [
[45.0, (0.0, 0.0), (21.7223, -0.0), [12.8, -17.92]],
[45.0, (5.4306, 0.0), (21.7223, -0.0), [5.12, -25.6]],
[45.0, (0.0, 5.4306), (21.7223, -0.0), [5.12, -25.6]],
],
"GOST_WOOD": [
[90.0, (0.0, 0.0), (30.72, -0.0), [51.2, -10.24]],
[90.0, (10.24, -10.24), (30.72, -0.0), [30.72, -7.68, 15.36, -7.68]],
[90.0, (20.48, -25.6), (30.72, -0.0), [51.2, -10.24]],
],
"GOST_GROUND": [
[45.0, (0.0, 0.0), (72.4077, -0.0), [102.4]],
[45.0, (15.36, 0.0), (72.4077, -0.0), [102.4]],
[45.0, (30.72, 0.0), (72.4077, -0.0), [102.4]],
],
"GRASS": [
[90.0, (0.0, 0.0), (-18.1019, 18.1019), [4.8, -31.4039]],
[45.0, (0.0, 0.0), (-18.1019, 18.1019), [4.8, -20.8]],
[135.0, (0.0, 0.0), (-18.1019, -18.1019), [4.8, -20.8]],
],
"GRATE": [[0.0, (0.0, 0.0), (0.0, 0.8), []], [90.0, (0.0, 0.0), (-3.2, 0.0), []]],
"GRAVEL": [
[228.0128, (18.432, 25.6), (-204.8, -230.4), [3.4441, -340.9687]],
[184.9697, (16.128, 23.04), (307.2, 25.6), [5.9102, -585.1117]],
[132.5104, (10.24, 22.528), (256.0, -281.6), [4.1674, -412.5704]],
[267.2737, (0.256, 16.128), (25.6, 512.0), [5.3821, -532.8271]],
[292.8337, (0.0, 10.752), (-128.0, 307.2), [5.2776, -522.48]],
[357.2737, (2.048, 5.888), (-512.0, 25.6), [5.3821, -532.8271]],
[37.6942, (7.424, 5.632), (-332.8, -256.0), [7.1175, -704.6361]],
[72.2553, (13.056, 9.984), (179.2, 563.2), [6.7197, -665.2498]],
[121.4296, (15.104, 16.384), (-204.8, 332.8), [5.4003, -534.6323]],
[175.2364, (12.288, 20.992), (281.6, -25.6), [6.1653, -302.0995]],
[222.3974, (6.144, 21.504), (-307.2, -281.6), [7.9731, -789.3344]],
[138.8141, (25.6, 15.872), (-179.2, 153.6), [2.7213, -269.4104]],
[171.4692, (23.552, 17.664), (332.8, -51.2), [5.1773, -512.5507]],
[225.0, (18.432, 18.432), (-0.0, -25.6), [3.6204, -32.5835]],
[203.1986, (16.64, 21.504), (128.0, 51.2), [1.9496, -193.0141]],
[291.8014, (14.848, 20.736), (-25.6, 76.8), [2.7572, -135.103]],
[30.9638, (15.872, 18.176), (76.8, 51.2), [4.4782, -144.7942]],
[161.5651, (19.712, 20.48), (51.2, -25.6), [3.2382, -77.7161]],
[16.3895, (0.0, 20.736), (256.0, 76.8), [4.5363, -449.0968]],
[70.3462, (4.352, 22.016), (-102.4, -281.6), [3.8057, -376.7656]],
[293.1986, (19.712, 25.6), (-51.2, 128.0), [3.8993, -191.0645]],
[343.6105, (21.248, 22.016), (-256.0, 76.8), [4.5363, -449.0968]],
[339.444, (0.0, 4.864), (-128.0, 51.2), [4.3745, -214.352]],
[294.7751, (4.096, 3.328), (-128.0, 281.6), [3.6654, -362.8709]],
[66.8014, (19.968, 0.0), (51.2, 128.0), [3.8993, -191.0645]],
[17.354, (21.504, 3.584), (-332.8, -102.4), [4.2914, -424.8428]],
[69.444, (7.424, 0.0), (-51.2, -128.0), [2.1873, -216.5392]],
[101.3099, (18.432, 0.0), (-25.6, 102.4), [1.3053, -129.2296]],
[165.9638, (18.176, 1.28), (76.8, -25.6), [5.2776, -100.2739]],
[186.009, (13.056, 2.56), (256.0, 25.6), [4.8909, -484.1964]],
[303.6901, (15.872, 15.872), (-25.6, 51.2), [3.6921, -88.61]],
[353.1572, (17.92, 12.8), (435.2, -51.2), [6.4459, -638.1456]],
[60.9454, (24.32, 12.032), (-102.4, -179.2), [2.6357, -260.9325]],
[90.0, (25.6, 14.336), (-25.6, 25.6), [1.536, -24.064]],
[120.2564, (12.544, 3.328), (102.4, -179.2), [3.5565, -352.0901]],
[48.0128, (10.752, 6.4), (204.8, 230.4), [6.8882, -337.5245]],
[0.0, (15.36, 11.52), (25.6, 25.6), [6.656, -18.944]],
[325.3048, (22.016, 11.52), (256.0, -179.2), [4.0477, -400.7238]],
[254.0546, (25.344, 9.216), (-25.6, -102.4), [3.7274, -182.6434]],
[207.646, (24.32, 5.632), (-486.4, -256.0), [6.0689, -600.8185]],
[175.4261, (18.944, 2.816), (-332.8, 25.6), [6.4205, -635.6243]],
],
"HEX": [
[0.0, (0.0, 0.0), (0.0, 5.5426), [3.2, -6.4]],
[120.0, (0.0, 0.0), (-4.8, -2.7713), [3.2, -6.4]],
[60.0, (3.2, 0.0), (-4.8, 2.7713), [3.2, -6.4]],
],
"HONEY": [
[0.0, (0.0, 0.0), (4.8, 2.7713), [3.2, -6.4]],
[120.0, (0.0, 0.0), (-4.8, 2.7713), [3.2, -6.4]],
[60.0, (0.0, 0.0), (0.0, 5.5426), [-6.4, 3.2]],
],
"HOUND": [
[0.0, (0.0, 0.0), (6.4, 1.6), [25.6, -12.8]],
[90.0, (0.0, 0.0), (-1.6, -6.4), [25.6, -12.8]],
],
"INSUL": [
[0.0, (0.0, 0.0), (0.0, 9.6), []],
[0.0, (0.0, 3.2), (0.0, 9.6), [3.2, -3.2]],
[0.0, (0.0, 6.4), (0.0, 9.6), [3.2, -3.2]],
],
"LINE": [[0.0, (0.0, 0.0), (0.0, 3.2), []]],
"MUDST": [[0.0, (0.0, 0.0), (12.8, 6.4), [6.4, -6.4, 0.0, -6.4, 0.0, -6.4]]],
"NET": [[0.0, (0.0, 0.0), (0.0, 3.2), []], [90.0, (0.0, 0.0), (-3.2, 0.0), []]],
"NET3": [
[0.0, (0.0, 0.0), (0.0, 3.2), []],
[60.0, (0.0, 0.0), (-2.7713, 1.6), []],
[120.0, (0.0, 0.0), (-2.7713, -1.6), []],
],
"PLAST": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[0.0, (0.0, 0.8), (0.0, 6.4), []],
[0.0, (0.0, 1.6), (0.0, 6.4), []],
],
"PLASTI": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[0.0, (0.0, 0.8), (0.0, 6.4), []],
[0.0, (0.0, 1.6), (0.0, 6.4), []],
[0.0, (0.0, 4.0), (0.0, 6.4), []],
],
"SACNCR": [
[45.0, (0.0, 0.0), (-1.6971, 1.6971), []],
[45.0, (1.6971, 0.0), (-1.6971, 1.6971), [0.0, -2.4]],
],
"SQUARE": [
[0.0, (0.0, 0.0), (0.0, 3.2), [3.2, -3.2]],
[90.0, (0.0, 0.0), (-3.2, 0.0), [3.2, -3.2]],
],
"STARS": [
[0.0, (0.0, 0.0), (0.0, 5.5426), [3.2, -3.2]],
[60.0, (0.0, 0.0), (-4.8, 2.7713), [3.2, -3.2]],
[120.0, (1.6, 2.7713), (-4.8, -2.7713), [3.2, -3.2]],
],
"STEEL": [
[45.0, (0.0, 0.0), (-2.2627, 2.2627), []],
[45.0, (0.0, 1.6), (-2.2627, 2.2627), []],
],
"SWAMP": [
[0.0, (0.0, 0.0), (12.8, 22.1703), [3.2, -22.4]],
[90.0, (1.6, 0.0), (-12.8, 22.1703), [1.6, -42.7405]],
[90.0, (2.0, 0.0), (-12.8, 22.1703), [1.28, -43.0605]],
[90.0, (1.2, 0.0), (-12.8, 22.1703), [1.28, -43.0605]],
[60.0, (2.4, 0.0), (-12.8, 22.1703), [1.024, -24.576]],
[120.0, (0.8, 0.0), (-25.6, 0.0), [1.024, -24.576]],
],
"TRANS": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[0.0, (0.0, 3.2), (0.0, 6.4), [3.2, -3.2]],
],
"TRIANG": [
[60.0, (0.0, 0.0), (-4.8, 8.3138), [4.8, -4.8]],
[120.0, (0.0, 0.0), (-9.6, 0.0), [4.8, -4.8]],
[0.0, (-2.4, 4.1569), (4.8, 8.3138), [4.8, -4.8]],
],
"ZIGZAG": [
[0.0, (0.0, 0.0), (3.2, 3.2), [3.2, -3.2]],
[90.0, (3.2, 0.0), (-3.2, 3.2), [3.2, -3.2]],
],
}
def load(old_pattern=None):
from ezdxf.options import options
if old_pattern is not None:
use_old = bool(old_pattern)
options.use_old_predefined_pattern_scaling = use_old
else:
use_old = options.use_old_predefined_pattern_scaling
return PATTERN_OLD if use_old else PATTERN_NEW
def scale_pattern(pattern, factor: float = 1, angle: float = 0, ndigits: int = 4):
def _scale(iterable):
return [round(i * factor, ndigits) for i in iterable]
def _scale_line(line):
angle0, base_point, offset, dash_length_items = line
if angle:
base_point = Vec2(base_point).rotate_deg(angle)
offset = Vec2(offset).rotate_deg(angle)
angle0 = (angle0 + angle) % 360.0
return [
round(angle0, ndigits),
tuple(_scale(base_point)),
tuple(_scale(offset)),
_scale(dash_length_items)
]
return [_scale_line(line) for line in pattern]
def scale_all(pattern: dict, factor: float = 1, angle: float = 0, ndigits: int = 4):
return {name: scale_pattern(p, factor, angle, ndigits) for name, p in pattern.items()}
PATTERN_OLD = scale_all(PATTERN_NEW, factor=0.03906836964688205)
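# The factor above is roughly 1/25.6, mapping the new pattern coordinates back
# onto the old predefined scale. Usage sketch (hypothetical call): render
# ANSI31 at half size, rotated 30 degrees:
#     scaled = scale_pattern(PATTERN_NEW["ANSI31"], factor=0.5, angle=30)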
| [
"ezdxf.math.Vec2"
] | [((19347, 19363), 'ezdxf.math.Vec2', 'Vec2', (['base_point'], {}), '(base_point)\n', (19351, 19363), False, 'from ezdxf.math import Vec2\n'), ((19403, 19415), 'ezdxf.math.Vec2', 'Vec2', (['offset'], {}), '(offset)\n', (19407, 19415), False, 'from ezdxf.math import Vec2\n')] |
#!/usr/bin/env python3.3
import requests
def grab_website_data():
'''Get raw data as HTML string from the NOAA website.'''
url = 'http://www.nws.noaa.gov/mdl/gfslamp/docs/stations_info.shtml'
page = requests.get(url)
return page.text
def extract_section(text):
'''Find Illinois data segment (in a PRE tag).
We know (from examination) that inside of the PRE block containing ' IL '
(with whitespace and case matching) we can find the IL station data.
This solution isn't robust, but it's good enough for practical cases.'''
il_start = text.find(' IL ')
tag_start = text.rfind('PRE', il_start-200, il_start) # look backwards
tag_end = text.find('PRE', il_start)
return text[tag_start+4:tag_end-2]
def parse_station_line(line):
'''Extract latitude and longitude of stations. We know the columns are fixed
(which is both inconvenient and convenient). In this case, we will simply
set the limits of the relevant columns by counting the number of columns
over we need to go.'''
#print(line)
r_stn = (5, 9) #remember that the last index is an exclusive bound
r_name = (10, 31)
r_lat = (36, 41) #we don't need the N/W designation; we know where we are
r_lon = (46, 51)
stn = line[r_stn[0]:r_stn[1]]
name = line[r_name[0]:r_name[1]]
lat = float(line[r_lat[0]:r_lat[1]])
lon = -float(line[r_lon[0]:r_lon[1]])
return stn, lat, lon
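# Column sketch (0-based, end-exclusive slices as defined above): 5-9 station
# id, 10-31 station name, 36-41 latitude, 46-51 longitude. The hemisphere
# letters are skipped because Illinois is always N latitude / W longitude.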
if __name__ == '__main__':
text = grab_website_data()
data = extract_section(text)
for line in data.splitlines():
try:
stn, lat, lon = parse_station_line(line)
print('%s\t%f\t%f'%(stn,lon,lat))
        except Exception:
pass
#print('Could not parse line\n\t%s'%line)
| [
"requests.get"
] | [((212, 229), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (224, 229), False, 'import requests\n')] |
import datetime
import os
from dataclasses import dataclass, field
from operator import attrgetter
from typing import List, Dict, Optional, cast, Set
from tarpn.ax25 import AX25Call
from tarpn.netrom import NetRomPacket, NetRomNodes, NodeDestination
from tarpn.network import L3RoutingTable, L3Address
import tarpn.network.netrom_l3 as l3
from tarpn.util import json_dump, json_load
@dataclass
class Neighbor:
call: AX25Call
port: int
quality: int
def __hash__(self):
return hash(self.call)
def to_safe_dict(self):
return {
"call": str(self.call),
"port": self.port,
"quality": self.quality
}
@classmethod
def from_safe_dict(cls, d):
return cls(call=AX25Call.parse(d["call"]), port=d["port"], quality=d["quality"])
@dataclass
class Route:
neighbor: AX25Call
dest: AX25Call
next_hop: AX25Call
quality: int
obsolescence: int
def to_safe_dict(self):
return {
"neighbor": str(self.neighbor),
"destination": str(self.dest),
"next_hop": str(self.next_hop),
"quality": self.quality,
"obsolescence": self.obsolescence
}
@classmethod
def from_safe_dict(cls, d):
return cls(neighbor=AX25Call.parse(d["neighbor"]), dest=AX25Call.parse(d["destination"]),
next_hop=AX25Call.parse(d["next_hop"]), quality=d["quality"], obsolescence=d["obsolescence"])
def __hash__(self):
return hash((self.neighbor, self.dest))
@dataclass
class Destination:
node_call: AX25Call
node_alias: str
neighbor_map: Dict[str, Route] = field(default_factory=dict, compare=False, hash=False)
freeze: bool = False
def __hash__(self):
return hash((self.node_call, self.node_alias))
def to_safe_dict(self):
return {
"call": str(self.node_call),
"alias": self.node_alias,
"freeze": self.freeze,
"routes": [route.to_safe_dict() for route in self.neighbor_map.values()]
}
@classmethod
def from_safe_dict(cls, d):
instance = cls(node_call=AX25Call.parse(d["call"]), node_alias=d["alias"], freeze=d["freeze"])
instance.neighbor_map = {
route_dict["neighbor"]: Route.from_safe_dict(route_dict) for route_dict in d["routes"]
}
return instance
def sorted_neighbors(self):
return sorted(self.neighbor_map.values(), key=attrgetter("quality"), reverse=True)
@dataclass
class NetRomRoutingTable(L3RoutingTable):
node_alias: str
updated_at: datetime.datetime = field(default_factory=datetime.datetime.now)
our_calls: Set[AX25Call] = field(default_factory=set, compare=False, hash=False)
# Neighbors is a map of direct neighbors we have, i.e., who we have heard NODES from
neighbors: Dict[str, Neighbor] = field(default_factory=dict, compare=False, hash=False)
# Destinations is the content of the NODES table, what routes exist to other nodes through which neighbors
destinations: Dict[str, Destination] = field(default_factory=dict, compare=False, hash=False)
# TODO config all these
default_obs: int = 100
default_quality: int = 255
min_quality: int = 50
min_obs: int = 4
def __repr__(self):
s = "Neighbors:\n"
for neighbor in self.neighbors.values():
s += f"\t{neighbor}\n"
s += "Destinations:\n"
for dest in self.destinations.values():
s += f"\t{dest}\n"
return s.strip()
def __hash__(self):
return hash((self.node_alias, self.updated_at))
def save(self, filename: str):
d = {
"node_alias": self.node_alias,
"updated_at": self.updated_at.isoformat(),
"our_calls": [str(call) for call in self.our_calls],
"neighbors": [n.to_safe_dict() for n in self.neighbors.values()],
"destinations": [d.to_safe_dict() for d in self.destinations.values()]
}
json_dump(filename, d)
@classmethod
def load(cls, filename: str, node_alias: str):
if not os.path.exists(filename):
return NetRomRoutingTable(node_alias=node_alias, updated_at=datetime.datetime.now())
d = json_load(filename)
return NetRomRoutingTable(node_alias=d["node_alias"],
updated_at=datetime.datetime.fromisoformat(d["updated_at"]),
our_calls={AX25Call.parse(call) for call in d["our_calls"]},
neighbors={n_dict["call"]: Neighbor.from_safe_dict(n_dict) for n_dict in d["neighbors"]},
destinations={d_dict["call"]: Destination.from_safe_dict(d_dict) for d_dict in d["destinations"]})
def route(self, packet: NetRomPacket) -> List[AX25Call]:
"""
If a packet's destination is a known neighbor, route to it. Otherwise look up the route with the highest
quality and send the packet to the neighbor which provided that route
:param packet:
:return: list of neighbor callsign's in sorted order of route quality
"""
if packet.dest in self.neighbors:
return [packet.dest]
else:
dest = self.destinations.get(str(packet.dest))
if dest:
return [n.neighbor for n in dest.sorted_neighbors()]
else:
return []
def route1(self, destination: L3Address) -> Optional[int]:
if not isinstance(destination, l3.NetRomAddress):
print(f"Wrong address family, expected NET/ROM got {destination.__class__}")
return None
netrom_dest = cast(l3.NetRomAddress, destination)
packet_dest = AX25Call(netrom_dest.callsign, netrom_dest.ssid)
# TODO handle alias here
if packet_dest in self.neighbors:
return self.neighbors.get(str(packet_dest)).port
else:
dest = self.destinations.get(str(packet_dest))
if dest:
neighbors = dest.sorted_neighbors()
if len(neighbors) > 0:
return self.neighbors.get(str(neighbors[0].neighbor)).port
else:
return None
else:
return None
def listen_for_address(self, app_call: AX25Call, app_alias: str):
app_routes = {}
for our_call in self.our_calls:
app_routes[str(our_call)] = Route(our_call, app_call, our_call, 95, 100)
self.destinations[str(app_call)] = Destination(app_call, app_alias, app_routes, True)
def refresh_route(self, heard_from: str, node: str):
"""
Refresh the obsolescence for a route
"""
if node in self.destinations:
route = self.destinations[node].neighbor_map.get(heard_from)
if route is not None:
route.obsolescence = self.default_obs
else:
print(f"Cannot refresh route to {node} via {heard_from}. {heard_from} is not in our neighbor map.")
else:
print(f"Cannot refresh route to {node}. It is not in our destination map.")
def update_routes(self, heard_from: AX25Call, heard_on_port: int, nodes: NetRomNodes):
"""
Update the routing table with a NODES broadcast.
This method is not thread-safe.
"""
# Get or create the neighbor and destination
neighbor = self.neighbors.get(str(heard_from), Neighbor(heard_from, heard_on_port, self.default_quality))
self.neighbors[str(heard_from)] = neighbor
# Add direct route to whoever sent the NODES
dest = self.destinations.get(str(heard_from), Destination(heard_from, nodes.sending_alias))
dest.neighbor_map[str(heard_from)] = Route(heard_from, heard_from, heard_from,
self.default_quality, self.default_obs)
self.destinations[str(heard_from)] = dest
for destination in nodes.destinations:
# Filter out ourselves
route_quality = 0
if destination.best_neighbor in self.our_calls:
# Best neighbor is us, this is a "trivial loop", quality is zero
continue
else:
# Otherwise compute this route's quality based on the NET/ROM spec
route_quality = (destination.quality * neighbor.quality + 128.) / 256.
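                # worked example: with both qualities at the default 255,
                # (255 * 255 + 128) / 256 = 254.50..., so quality decays
                # slightly with every hop away from the destination.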
# Only add routes which are above the minimum quality to begin with TODO check this logic
if route_quality > self.min_quality:
new_dest = self.destinations.get(str(destination.dest_node),
Destination(destination.dest_node, destination.dest_alias))
new_route = new_dest.neighbor_map.get(
str(neighbor.call), Route(neighbor.call, destination.dest_node, destination.best_neighbor,
int(route_quality), self.default_obs))
                new_route.quality = int(route_quality)  # Route.quality is declared as int
new_route.obsolescence = self.default_obs
new_dest.neighbor_map[str(neighbor.call)] = new_route
self.destinations[str(destination.dest_node)] = new_dest
else:
# print(f"Saw new route for {destination}, but quality was too low")
pass
self.updated_at = datetime.datetime.now()
def prune_routes(self) -> None:
"""
Prune any routes which we haven't heard about in a while.
This method is not thread-safe.
"""
# print("Pruning routes")
for call, destination in list(self.destinations.items()):
if destination.freeze:
# Don't prune frozen routes
continue
for neighbor, route in list(destination.neighbor_map.items()):
route.obsolescence -= 1
if route.obsolescence <= 0:
# print(f"Removing {neighbor} from {destination} neighbor's list")
del destination.neighbor_map[neighbor]
if len(destination.neighbor_map.keys()) == 0:
# print(f"No more routes to {call}, removing from routing table")
del self.destinations[call]
if call in self.neighbors.keys():
del self.neighbors[call]
self.updated_at = datetime.datetime.now()
def clear_routes(self) -> None:
self.destinations.clear()
self.neighbors.clear()
self.updated_at = datetime.datetime.now()
def get_nodes(self) -> NetRomNodes:
node_destinations = []
for destination in self.destinations.values():
# Otherwise find best neighbor route
best_neighbor = None
for neighbor in destination.sorted_neighbors():
if neighbor.obsolescence >= self.min_obs:
best_neighbor = neighbor
break
else:
# print(f"Not including {neighbor} in NODES, obsolescence below threshold")
pass
if best_neighbor:
node_destinations.append(NodeDestination(destination.node_call, destination.node_alias,
best_neighbor.next_hop, best_neighbor.quality))
else:
# print(f"No good neighbor was found for {destination}")
pass
return NetRomNodes(self.node_alias, node_destinations)
| [
"operator.attrgetter",
"os.path.exists",
"tarpn.netrom.NetRomNodes",
"tarpn.netrom.NodeDestination",
"tarpn.util.json_dump",
"typing.cast",
"datetime.datetime.now",
"tarpn.ax25.AX25Call.parse",
"datetime.datetime.fromisoformat",
"tarpn.ax25.AX25Call",
"tarpn.util.json_load",
"dataclasses.field... | [((1665, 1719), 'dataclasses.field', 'field', ([], {'default_factory': 'dict', 'compare': '(False)', 'hash': '(False)'}), '(default_factory=dict, compare=False, hash=False)\n', (1670, 1719), False, 'from dataclasses import dataclass, field\n'), ((2635, 2679), 'dataclasses.field', 'field', ([], {'default_factory': 'datetime.datetime.now'}), '(default_factory=datetime.datetime.now)\n', (2640, 2679), False, 'from dataclasses import dataclass, field\n'), ((2711, 2764), 'dataclasses.field', 'field', ([], {'default_factory': 'set', 'compare': '(False)', 'hash': '(False)'}), '(default_factory=set, compare=False, hash=False)\n', (2716, 2764), False, 'from dataclasses import dataclass, field\n'), ((2892, 2946), 'dataclasses.field', 'field', ([], {'default_factory': 'dict', 'compare': '(False)', 'hash': '(False)'}), '(default_factory=dict, compare=False, hash=False)\n', (2897, 2946), False, 'from dataclasses import dataclass, field\n'), ((3102, 3156), 'dataclasses.field', 'field', ([], {'default_factory': 'dict', 'compare': '(False)', 'hash': '(False)'}), '(default_factory=dict, compare=False, hash=False)\n', (3107, 3156), False, 'from dataclasses import dataclass, field\n'), ((4035, 4057), 'tarpn.util.json_dump', 'json_dump', (['filename', 'd'], {}), '(filename, d)\n', (4044, 4057), False, 'from tarpn.util import json_dump, json_load\n'), ((4277, 4296), 'tarpn.util.json_load', 'json_load', (['filename'], {}), '(filename)\n', (4286, 4296), False, 'from tarpn.util import json_dump, json_load\n'), ((5723, 5758), 'typing.cast', 'cast', (['l3.NetRomAddress', 'destination'], {}), '(l3.NetRomAddress, destination)\n', (5727, 5758), False, 'from typing import List, Dict, Optional, cast, Set\n'), ((5781, 5829), 'tarpn.ax25.AX25Call', 'AX25Call', (['netrom_dest.callsign', 'netrom_dest.ssid'], {}), '(netrom_dest.callsign, netrom_dest.ssid)\n', (5789, 5829), False, 'from tarpn.ax25 import AX25Call\n'), ((9477, 9500), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (9498, 9500), False, 'import datetime\n'), ((10483, 10506), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (10504, 10506), False, 'import datetime\n'), ((10635, 10658), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (10656, 10658), False, 'import datetime\n'), ((11567, 11614), 'tarpn.netrom.NetRomNodes', 'NetRomNodes', (['self.node_alias', 'node_destinations'], {}), '(self.node_alias, node_destinations)\n', (11578, 11614), False, 'from tarpn.netrom import NetRomPacket, NetRomNodes, NodeDestination\n'), ((4142, 4166), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (4156, 4166), False, 'import os\n'), ((753, 778), 'tarpn.ax25.AX25Call.parse', 'AX25Call.parse', (["d['call']"], {}), "(d['call'])\n", (767, 778), False, 'from tarpn.ax25 import AX25Call\n'), ((1296, 1325), 'tarpn.ax25.AX25Call.parse', 'AX25Call.parse', (["d['neighbor']"], {}), "(d['neighbor'])\n", (1310, 1325), False, 'from tarpn.ax25 import AX25Call\n'), ((1332, 1364), 'tarpn.ax25.AX25Call.parse', 'AX25Call.parse', (["d['destination']"], {}), "(d['destination'])\n", (1346, 1364), False, 'from tarpn.ax25 import AX25Call\n'), ((1394, 1423), 'tarpn.ax25.AX25Call.parse', 'AX25Call.parse', (["d['next_hop']"], {}), "(d['next_hop'])\n", (1408, 1423), False, 'from tarpn.ax25 import AX25Call\n'), ((2163, 2188), 'tarpn.ax25.AX25Call.parse', 'AX25Call.parse', (["d['call']"], {}), "(d['call'])\n", (2177, 2188), False, 'from tarpn.ax25 import AX25Call\n'), ((2487, 2508), 'operator.attrgetter', 
'attrgetter', (['"""quality"""'], {}), "('quality')\n", (2497, 2508), False, 'from operator import attrgetter\n'), ((4404, 4452), 'datetime.datetime.fromisoformat', 'datetime.datetime.fromisoformat', (["d['updated_at']"], {}), "(d['updated_at'])\n", (4435, 4452), False, 'import datetime\n'), ((4240, 4263), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4261, 4263), False, 'import datetime\n'), ((4499, 4519), 'tarpn.ax25.AX25Call.parse', 'AX25Call.parse', (['call'], {}), '(call)\n', (4513, 4519), False, 'from tarpn.ax25 import AX25Call\n'), ((11271, 11384), 'tarpn.netrom.NodeDestination', 'NodeDestination', (['destination.node_call', 'destination.node_alias', 'best_neighbor.next_hop', 'best_neighbor.quality'], {}), '(destination.node_call, destination.node_alias,\n best_neighbor.next_hop, best_neighbor.quality)\n', (11286, 11384), False, 'from tarpn.netrom import NetRomPacket, NetRomNodes, NodeDestination\n')] |
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.humidifiers_and_dehumidifiers import HumidifierSteamGas
log = logging.getLogger(__name__)
class TestHumidifierSteamGas(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_humidifiersteamgas(self):
pyidf.validation_level = ValidationLevel.error
obj = HumidifierSteamGas()
# alpha
var_name = "Name"
obj.name = var_name
# object-list
var_availability_schedule_name = "object-list|Availability Schedule Name"
obj.availability_schedule_name = var_availability_schedule_name
# real
var_rated_capacity = 0.0
obj.rated_capacity = var_rated_capacity
# real
var_rated_gas_use_rate = 0.0
obj.rated_gas_use_rate = var_rated_gas_use_rate
# real
var_thermal_efficiency = 0.50005
obj.thermal_efficiency = var_thermal_efficiency
# object-list
var_thermal_efficiency_modifier_curve_name = "object-list|Thermal Efficiency Modifier Curve Name"
obj.thermal_efficiency_modifier_curve_name = var_thermal_efficiency_modifier_curve_name
# real
var_rated_fan_power = 0.0
obj.rated_fan_power = var_rated_fan_power
# real
var_auxiliary_electric_power = 0.0
obj.auxiliary_electric_power = var_auxiliary_electric_power
# node
var_air_inlet_node_name = "node|Air Inlet Node Name"
obj.air_inlet_node_name = var_air_inlet_node_name
# node
var_air_outlet_node_name = "node|Air Outlet Node Name"
obj.air_outlet_node_name = var_air_outlet_node_name
# object-list
var_water_storage_tank_name = "object-list|Water Storage Tank Name"
obj.water_storage_tank_name = var_water_storage_tank_name
# alpha
var_inlet_water_temperature_option = "FixedInletWaterTemperature"
obj.inlet_water_temperature_option = var_inlet_water_temperature_option
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.humidifiersteamgass[0].name, var_name)
self.assertEqual(idf2.humidifiersteamgass[0].availability_schedule_name, var_availability_schedule_name)
self.assertAlmostEqual(idf2.humidifiersteamgass[0].rated_capacity, var_rated_capacity)
self.assertAlmostEqual(idf2.humidifiersteamgass[0].rated_gas_use_rate, var_rated_gas_use_rate)
self.assertAlmostEqual(idf2.humidifiersteamgass[0].thermal_efficiency, var_thermal_efficiency)
self.assertEqual(idf2.humidifiersteamgass[0].thermal_efficiency_modifier_curve_name, var_thermal_efficiency_modifier_curve_name)
self.assertAlmostEqual(idf2.humidifiersteamgass[0].rated_fan_power, var_rated_fan_power)
self.assertAlmostEqual(idf2.humidifiersteamgass[0].auxiliary_electric_power, var_auxiliary_electric_power)
self.assertEqual(idf2.humidifiersteamgass[0].air_inlet_node_name, var_air_inlet_node_name)
self.assertEqual(idf2.humidifiersteamgass[0].air_outlet_node_name, var_air_outlet_node_name)
self.assertEqual(idf2.humidifiersteamgass[0].water_storage_tank_name, var_water_storage_tank_name)
self.assertEqual(idf2.humidifiersteamgass[0].inlet_water_temperature_option, var_inlet_water_temperature_option) | [
"logging.getLogger",
"pyidf.humidifiers_and_dehumidifiers.HumidifierSteamGas",
"pyidf.idf.IDF",
"tempfile.mkstemp",
"os.remove"
] | [((204, 231), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (221, 231), False, 'import logging\n'), ((333, 351), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (349, 351), False, 'import tempfile\n'), ((385, 405), 'os.remove', 'os.remove', (['self.path'], {}), '(self.path)\n', (394, 405), False, 'import os\n'), ((524, 544), 'pyidf.humidifiers_and_dehumidifiers.HumidifierSteamGas', 'HumidifierSteamGas', ([], {}), '()\n', (542, 544), False, 'from pyidf.humidifiers_and_dehumidifiers import HumidifierSteamGas\n'), ((2177, 2182), 'pyidf.idf.IDF', 'IDF', ([], {}), '()\n', (2180, 2182), False, 'from pyidf.idf import IDF\n'), ((2374, 2388), 'pyidf.idf.IDF', 'IDF', (['self.path'], {}), '(self.path)\n', (2377, 2388), False, 'from pyidf.idf import IDF\n')] |
"""
This example shows how to interact with the Determined PyTorch Lightning Adapter
interface to build a basic MNIST network. LightningAdapter utilizes the provided
LightningModule with Determined's PyTorch control loop.
"""
from determined.pytorch import PyTorchTrialContext, DataLoader
from determined.pytorch.lightning import LightningAdapter
import data
import mnist
class MNISTTrial(LightningAdapter):
def __init__(self, context: PyTorchTrialContext, *args, **kwargs) -> None:
lm = mnist.LitMNIST(
hidden_size=context.get_hparam('hidden_size'),
learning_rate=context.get_hparam('learning_rate'),
)
data_dir = f"/tmp/data-rank{context.distributed.get_rank()}"
self.dm = data.MNISTDataModule(
data_url=context.get_data_config()["url"],
data_dir=data_dir,
batch_size=context.get_per_slot_batch_size(),
)
super().__init__(context, lightning_module=lm, *args, **kwargs)
self.dm.prepare_data()
def build_training_data_loader(self) -> DataLoader:
self.dm.setup()
dl = self.dm.train_dataloader()
return DataLoader(dl.dataset, batch_size=dl.batch_size, num_workers=dl.num_workers)
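    # Re-wrapping Lightning's loader in Determined's DataLoader (same dataset,
    # batch size and workers) hands batching control to Determined -- presumably
    # so it can shard data across slots in distributed runs.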
def build_validation_data_loader(self) -> DataLoader:
self.dm.setup()
dl = self.dm.val_dataloader()
return DataLoader(dl.dataset, batch_size=dl.batch_size, num_workers=dl.num_workers)
| [
"determined.pytorch.DataLoader"
] | [((1155, 1231), 'determined.pytorch.DataLoader', 'DataLoader', (['dl.dataset'], {'batch_size': 'dl.batch_size', 'num_workers': 'dl.num_workers'}), '(dl.dataset, batch_size=dl.batch_size, num_workers=dl.num_workers)\n', (1165, 1231), False, 'from determined.pytorch import PyTorchTrialContext, DataLoader\n'), ((1368, 1444), 'determined.pytorch.DataLoader', 'DataLoader', (['dl.dataset'], {'batch_size': 'dl.batch_size', 'num_workers': 'dl.num_workers'}), '(dl.dataset, batch_size=dl.batch_size, num_workers=dl.num_workers)\n', (1378, 1444), False, 'from determined.pytorch import PyTorchTrialContext, DataLoader\n')] |
# Live-updating Bokeh data table example (Bokeh server / client session).
from datetime import date
import numpy as np
from bokeh.client import push_session
from bokeh.io import output_server, show, vform
from bokeh.palettes import RdYlBu3
from bokeh.plotting import figure, curdoc, vplot
from bokeh.models import ColumnDataSource
from bokeh.models.widgets import DataTable, DateFormatter, TableColumn
from random import randint
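# Note: this example targets the legacy Bokeh client/server API
# (output_server / push_session), which later Bokeh releases removed in
# favor of `bokeh serve` applications.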
# create a plot and style its properties
p = figure(x_range=(0, 100), y_range=(0, 100))
p.border_fill_color = 'black'
p.background_fill_color = 'black'
p.outline_line_color = None
p.grid.grid_line_color = None
# add a text renderer to out plot (no data yet)
r = p.text(x=[], y=[], text=[], text_color=[], text_font_size="20pt",
text_baseline="middle", text_align="center")
session = push_session(curdoc())
data = dict(
dates=[date(2014, 3, i+1) for i in range(10)],
downloads=[randint(0, 100) for i in range(10)],
)
source = ColumnDataSource(data)
columns = [
TableColumn(field="dates", title="Date", formatter=DateFormatter()),
TableColumn(field="downloads", title="Downloads"),
]
data_table = DataTable(source=source, columns=columns, width=400, height=280)
curdoc().add_root(vform(data_table))
session.show()
| [
"bokeh.models.widgets.DateFormatter",
"bokeh.plotting.figure",
"bokeh.models.widgets.DataTable",
"bokeh.models.ColumnDataSource",
"bokeh.models.widgets.TableColumn",
"bokeh.io.vform",
"datetime.date",
"bokeh.plotting.curdoc",
"random.randint"
] | [((444, 486), 'bokeh.plotting.figure', 'figure', ([], {'x_range': '(0, 100)', 'y_range': '(0, 100)'}), '(x_range=(0, 100), y_range=(0, 100))\n', (450, 486), False, 'from bokeh.plotting import figure, curdoc, vplot, output_server\n'), ((976, 998), 'bokeh.models.ColumnDataSource', 'ColumnDataSource', (['data'], {}), '(data)\n', (992, 998), False, 'from bokeh.models import ColumnDataSource\n'), ((1173, 1237), 'bokeh.models.widgets.DataTable', 'DataTable', ([], {'source': 'source', 'columns': 'columns', 'width': '(400)', 'height': '(280)'}), '(source=source, columns=columns, width=400, height=280)\n', (1182, 1237), False, 'from bokeh.models.widgets import DataTable, DateFormatter, TableColumn\n'), ((818, 826), 'bokeh.plotting.curdoc', 'curdoc', ([], {}), '()\n', (824, 826), False, 'from bokeh.plotting import figure, curdoc, vplot, output_server\n'), ((1101, 1150), 'bokeh.models.widgets.TableColumn', 'TableColumn', ([], {'field': '"""downloads"""', 'title': '"""Downloads"""'}), "(field='downloads', title='Downloads')\n", (1112, 1150), False, 'from bokeh.models.widgets import DataTable, DateFormatter, TableColumn\n'), ((1259, 1276), 'bokeh.io.vform', 'vform', (['data_table'], {}), '(data_table)\n', (1264, 1276), False, 'from bokeh.io import output_server, show, vform\n'), ((1241, 1249), 'bokeh.plotting.curdoc', 'curdoc', ([], {}), '()\n', (1247, 1249), False, 'from bokeh.plotting import figure, curdoc, vplot, output_server\n'), ((862, 882), 'datetime.date', 'date', (['(2014)', '(3)', '(i + 1)'], {}), '(2014, 3, i + 1)\n', (866, 882), False, 'from datetime import date\n'), ((922, 937), 'random.randint', 'randint', (['(0)', '(100)'], {}), '(0, 100)\n', (929, 937), False, 'from random import randint\n'), ((1074, 1089), 'bokeh.models.widgets.DateFormatter', 'DateFormatter', ([], {}), '()\n', (1087, 1089), False, 'from bokeh.models.widgets import DataTable, DateFormatter, TableColumn\n')] |
from unittest import TestCase
from decimal import Decimal, ROUND_UP
from monon.currency import DefaultCurrenciesProvider
class DefaultCurrenciesProviderTestCase(TestCase):
def setUp(self):
self.provider = DefaultCurrenciesProvider()
self.isocode = 'USD'
def test_decimal_places(self):
self.assertEqual(2, self.provider.get_decimal_places(self.isocode))
def test_symbol(self):
self.assertEqual('$', self.provider.get_symbol(self.isocode))
def test_validate_currency(self):
self.assertIsNone(self.provider.validate_currency(self.isocode))
def test_format_positive_amount(self):
amount = Decimal('43321.123')
expected = '$43321.123'
self.assertEqual(expected, self.provider.format_amount(self.isocode, amount))
def test_format_negative_amount(self):
amount = Decimal('-1234.123')
expected = '-$1234.123'
self.assertEqual(expected, self.provider.format_amount(self.isocode, amount))
def test_rounding(self):
self.assertEqual(ROUND_UP, self.provider.get_rounding(self.isocode))
| [
"monon.currency.DefaultCurrenciesProvider",
"decimal.Decimal"
] | [((221, 248), 'monon.currency.DefaultCurrenciesProvider', 'DefaultCurrenciesProvider', ([], {}), '()\n', (246, 248), False, 'from monon.currency import DefaultCurrenciesProvider\n'), ((661, 681), 'decimal.Decimal', 'Decimal', (['"""43321.123"""'], {}), "('43321.123')\n", (668, 681), False, 'from decimal import Decimal, ROUND_UP\n'), ((862, 882), 'decimal.Decimal', 'Decimal', (['"""-1234.123"""'], {}), "('-1234.123')\n", (869, 882), False, 'from decimal import Decimal, ROUND_UP\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: <NAME> (1459333)
"""
from os import path
from . import model as md
from tensorflow import keras
class Exporter:
def __init__(self, verbosity = False):
self.verbosity = verbosity
def load(self, base_dir = 'storage/models/', model_name = 'model'):
model_path = base_dir+model_name+".hdf5"
if path.exists(model_path):
if(self.verbosity) :
print("Loading saved model "+model_path+"...")
model = keras.models.load_model(model_path)
if(self.verbosity) :
print("Loaded with success")
return model
else:
return False
def export(self, model, accuracy_filter = 98,
test = None, test_labels = None,
base_dir = 'storage/models/', model_name = 'model'):
model_path = base_dir+model_name+".hdf5"
if(accuracy_filter > 0):
m = md.Model()
score_acc_val = m.getAccuracy(model, test, test_labels)
if self.verbosity :
print("Model accuracy: " + str(score_acc_val))
if score_acc_val >= accuracy_filter :
model.save(model_path)
if self.verbosity:
print("Model " + model_path + " exported with success.")
elif (self.verbosity) :
print("Model not exported. Accuracy ("+str(score_acc_val)+"%) lower than " +
str(accuracy_filter) + "%.")
else:
model.save(model_path)
if self.verbosity:
print("Model " + model_path + " exported with success.")
def exportBestOf(self, train, train_labels, test, test_labels, params,
base_dir = 'storage/models/', model_name = 'model',
accuracy_filter = 98,
num_test = 10
):
if num_test > 1 :
print("")
print("================================================================")
print("Saving the best model in " + str(num_test) + " runs...")
print("================================================================")
m = md.Model()
model = self.load(base_dir, model_name)
            if model is False:
(h, model) = m.fit(train, train_labels, test, test_labels,params)
self.export(model,-1, None, None, base_dir, model_name)
score_acc_val = m.getAccuracy(model, test, test_labels)
print("Model accuracy: " + str(score_acc_val))
for i in range(num_test):
step = i+1
if self.verbosity:
print("")
print("Step " + str(step) + "/" + str(num_test) +
" (" +str((step*100)//num_test) + "%" +")" )
print("")
(h,model) = m.fit(train, train_labels, test, test_labels,params)
saved_model = self.load(base_dir, model_name)
saved_score_acc_val = m.getAccuracy(saved_model,test, test_labels)
self.export(model, saved_score_acc_val, test, test_labels, base_dir, model_name)
print("")
print("================================================================")
print("Process completed !")
print("================================================================")
else:
print("Error. Use Exporter.export instead.")
| [
"os.path.exists",
"tensorflow.keras.models.load_model"
] | [((414, 437), 'os.path.exists', 'path.exists', (['model_path'], {}), '(model_path)\n', (425, 437), False, 'from os import path\n'), ((585, 620), 'tensorflow.keras.models.load_model', 'keras.models.load_model', (['model_path'], {}), '(model_path)\n', (608, 620), False, 'from tensorflow import keras\n')] |
#!/usr/bin/env python
# encoding: utf-8
import os
import re
import sys
import argparse
import glob
import logging
from numpy import nan as NaN
import pandas as pd
import shutil
import zipfile
def get_arguments():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description="",
epilog="""
Convert behavioural data from cimaq to bids format
Input: Folder with zip files
""")
parser.add_argument(
"-d", "--idir",
required=True, nargs="+",
help="Folder to be sorted")
parser.add_argument(
"-o", "--odir",
required=True, nargs="+",
help="Output folder - if doesn\'t exist it will be created.")
parser.add_argument(
'--log_level', default='INFO',
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
help='Log level of the logging class.')
args = parser.parse_args()
if len(sys.argv) == 1:
parser.print_help()
sys.exit()
else:
return args
def get_all_ids(iFolder):
""" List all ZipFile and get all IDs
Parameters:
----------
iFolder: string (input folder)
Return:
----------
ids: list of tuple (behavioral ID, IRM ID)
"""
if not os.path.exists(iFolder):
sys.exit('This folder doesn\'t exist: {}'.format(iFolder))
return
ids = []
allZipFiles = glob.glob(os.path.join(iFolder, '*.zip'))
for currZipFile in allZipFiles:
currZipFile = os.path.basename(currZipFile)
ids.append((currZipFile.split('_')[0], currZipFile.split('_')[1]))
if not ids:
sys.exit('This folder doesn\'t contain any zip files')
return
else:
return ids
def set_subject_data(bID, iFolder, oFolder):
"""
Parameters:
----------
bID: string (PSCID used to identify participants during data collection)
datadir: string (input folder)
oFolder: string (output folder)
Return:
----------
sub_files: list (three input files)
"""
logging.debug('Subject PSCID": {}'.format(bID))
prefix = ['Output-Responses-Encoding_CIMAQ_*',
'Onset-Event-Encoding_CIMAQ_*',
'Output_Retrieval_CIMAQ_*']
sub_files = []
s_dir = glob.glob(os.path.join(iFolder, bID+'*IRM.zip'))
if len(s_dir) != 1:
logging.error(' Multiple directories match \
this subject PSCID: {}'.format(bID))
else:
s_path = os.path.join(oFolder, bID+'*')
s_out = glob.glob(s_path)
if not s_out:
z_ref = zipfile.ZipFile(s_dir[0], 'r')
z_ref.extractall(oFolder)
z_ref.close()
s_out = glob.glob(s_path)
if len(s_out) == 1:
s_out = s_out[0]
for nPrefix in prefix:
file = glob.glob(os.path.join(s_out, nPrefix))
if len(file) == 1:
sub_files.append(file[0])
else:
                    logging.error('Multiple files found for subject {}'.format(bID))
        else:
            logging.error('Multiple folders found for subject {}'.format(bID))
return sub_files
def cleanMain(mainFile):
"""
Parameters:
----------
mainFile: pandas object
Return:
----------
mainFile: pandas object
"""
# remove first three junk rows (blank trials): CTL0, Enc00 and ENc000
mainFile.drop([0, 1, 2], axis=0, inplace=True)
# re-label columns
mainFile.rename(columns={'TrialNumber': 'trial_number',
'Category': 'trial_type',
'OldNumber': 'stim_id',
'CorrectSource': 'position_correct',
'Stim_RESP': 'response',
'Stim_RT': 'response_time'}, inplace=True)
# remove redundant columns
mainFile.drop(['TrialCode', 'Stim_ACC'], axis=1, inplace=True)
# re-order columns
cols = ['trial_number', 'trial_type', 'response', 'response_time',
'stim_id', 'position_correct']
mainFile = mainFile[cols]
# change in-scan reaction time from ms to s
mainFile['response_time'] = mainFile['response_time'].astype('float64',
copy=False)
mainFile['response_time'] = mainFile['response_time'].div(1000)
# insert new columns
colNames = ['onset', 'duration', 'offset', 'stim_file', 'stim_category',
'stim_name', 'recognition_accuracy',
'recognition_responsetime', 'position_response',
'position_accuracy', 'position_responsetime']
dtype = [NaN, NaN, NaN, 'None', 'None', 'None', -1, NaN, -1, -1, NaN]
colIndex = [0, 1, 2, 8, 9, 10, 11, 12, 14, 15, 16]
for i in range(0, 11):
mainFile.insert(loc=colIndex[i],
column=colNames[i],
value=dtype[i],
allow_duplicates=True)
return mainFile # modified in-place
def cleanOnsets(onsets):
"""
Description:
Label columns and remove first six junk rows
(3 junk trials; 2 rows per trial).
Parameters:
----------
onsets: pandas object
Return:
----------
onsets: pandas object
"""
# add column headers
onsets.columns = ["TrialNum", "Condition", "TrialNum_perCondi",
"ImageID", "Trial_part", "onsetSec", "durationSec"]
onsets.drop([0, 1, 2, 3, 4, 5], axis=0, inplace=True)
return onsets
def cleanRetriev(ret):
"""
Parameters:
----------
ret: pandas object
Return:
----------
ret: pandas object
"""
# Change column headers
ret.rename(columns={'category': 'old_new',
'Stim': 'stim_file',
'OldNumber': 'stim_id',
'Recognition_ACC': 'recognition_accuracy',
'Recognition_RESP': 'recognition_response',
'Recognition_RT': 'recognition_responsetime',
'Spatial_RESP': 'position_response',
'Spatial_RT': 'position_responsetime',
'Spatial_ACC(à corriger voir output-encodage)': 'position_accuracy'},
inplace=True)
# re-order columns
cols = ['old_new', 'stim_file', 'stim_id', 'recognition_response',
'recognition_accuracy', 'recognition_responsetime',
'position_response', 'position_accuracy', 'position_responsetime']
ret = ret[cols]
# Transform reaction time columns from ms to s
ret[['recognition_responsetime']] = ret[['recognition_responsetime']].astype('float64', copy=False) # string is object in pandas, str in Python
ret[['position_responsetime']] = ret[['position_responsetime']].astype('float64', copy=False)
ret['recognition_responsetime'] = ret['recognition_responsetime'].div(1000)
ret['position_responsetime'] = ret['position_responsetime'].div(1000)
# Clean up eprime programming mistake: replace position_response and position_responsetime values
# with NaN if subject perceived image as 'new' (the image was not probed for position).
# There should be no response or RT value there, values were carried over from previous trial (not reset in eprime)
# CONFIRMED w Isabel: subject must give a position answer when probed (image considered OLD) before eprime moves to the next trial.
i = ret[ret['recognition_response'] == 2].index
ret.loc[i, 'position_responsetime'] = NaN
ret.loc[i, 'position_response'] = -1
# clean up eprime mistake (change Old67 condition ('old_new') from New to OLD)
q = ret[ret['stim_id'] == 'Old67'].index
ret.loc[q, 'old_new'] = 'OLD'
# insert new columns
colNames = ['trial_number', 'stim_category', 'stim_name',
'recognition_performance', 'position_correct']
dtype = [-1, 'None', 'None', 'None', -1]
colIndex = [0, 4, 5, 9, 10]
for j in range(0, 5):
ret.insert(loc=colIndex[j], column=colNames[j], value=dtype[j],
allow_duplicates=True)
# Extract info and fill trial_number, stim_category and stim_name columns
k = ret.index
ret.loc[k, 'trial_number'] = k+1
# format: category_imageName.bmp w some space, _ and - in image names
stimInfo = ret.loc[k, 'stim_file']
for s in k:
ret.loc[s, 'stim_category'] = re.findall('(.+?)_', stimInfo[s])[0]
ret.loc[s, 'stim_name'] = re.findall('_(.+?)[.]', stimInfo[s])[0]
# Fill recognition_performance column based on actual and perceived novelty
m = ret[ret['old_new'] == 'OLD'].index.intersection(ret[ret['recognition_accuracy'] == 1].index)
ret.loc[m, 'recognition_performance'] = 'Hit'
n = ret[ret['old_new'] == 'OLD'].index.intersection(ret[ret['recognition_accuracy'] == 0].index)
ret.loc[n, 'recognition_performance'] = 'Miss'
o = ret[ret['old_new'] == 'New'].index.intersection(ret[ret['recognition_accuracy'] == 1].index)
ret.loc[o, 'recognition_performance'] = 'CR'
p = ret[ret['old_new'] == 'New'].index.intersection(ret[ret['recognition_accuracy'] == 0].index)
ret.loc[p, 'recognition_performance'] = 'FA'
# return cleaned up input Dataframe
return ret
def addOnsets(main, enc):
"""
Parameters:
----------
main:
enc: pandas objects
Return:
----------
main: pandas object
"""
# make main file indexable by trial number:
main.set_index('trial_number', inplace=True)
# copy trial onset and offset times from enc into main
# note: fixation's onset time is the trial task's offset time
for i in enc.index:
trialNum = enc.loc[i, 'TrialNum']
if enc.loc[i, 'Trial_part'] == 'Fixation':
main.loc[trialNum, 'offset'] = enc.loc[i, 'onsetSec']
else:
main.loc[trialNum, 'onset'] = enc.loc[i, 'onsetSec']
# Calculate trial duration time from onset and offset times
main['duration'] = main['offset']-main['onset']
# reset main's searchable index to default
main.reset_index(level=None, drop=False, inplace=True)
return main
def addPostScan(main, ret):
"""
Parameters:
----------
main: panda object
ret: panda object
Return:
----------
mainMerged: pandas object
"""
# split main's rows (trials) into sublist based on Condition
mainEnc = main[main['trial_type'] == 'Enc'].copy()
mainCTL = main[main['trial_type'] == 'CTL'].copy()
# make mainEnc indexable by picture id
mainEnc.set_index('stim_id', inplace=True)
# import post-scan data from ret into mainEnc
for i in ret[ret['old_new'] == 'OLD'].index:
stimID = ret.loc[i, 'stim_id']
mainEnc.loc[stimID, 'stim_category'] = ret.loc[i, 'stim_category']
mainEnc.loc[stimID, 'stim_name'] = ret.loc[i, 'stim_name']
mainEnc.loc[stimID, 'recognition_accuracy'] = ret.loc[i, 'recognition_accuracy']
mainEnc.loc[stimID, 'recognition_responsetime'] = ret.loc[i, 'recognition_responsetime']
mainEnc.loc[stimID, 'position_response'] = ret.loc[i, 'position_response']
mainEnc.loc[stimID, 'position_responsetime'] = ret.loc[i, 'position_responsetime']
# calculate post-scan source (position) accuracy;
# -1 = control task; 0 = missed trial; 1 = wrong source (image recognized but wrong quadrant remembered);
# 2 = image recognized with correct source
mainEnc['position_accuracy'] = 0
for j in mainEnc[mainEnc['recognition_accuracy'] == 1].index:
if mainEnc.loc[j, 'position_correct'] == mainEnc.loc[j, 'position_response']:
mainEnc.loc[j, 'position_accuracy'] = 2
else:
mainEnc.loc[j, 'position_accuracy'] = 1
# import source accuracy info from mainEnc into ret (in-place)
for i in ret[ret['old_new'] == 'OLD'].index:
picID = ret.loc[i, 'stim_id']
ret.loc[i, 'position_correct'] = mainEnc.loc[picID, 'position_correct']
ret.loc[i, 'position_accuracy'] = mainEnc.loc[picID,
'position_accuracy']
# reset mainEnc searchable index to default
# and re-order columns to match order in mainCTL
mainEnc.reset_index(level=None, drop=False, inplace=True)
cols = ['trial_number', 'onset', 'duration', 'offset', 'trial_type',
'response', 'response_time', 'stim_id', 'stim_file',
'stim_category', 'stim_name', 'recognition_accuracy',
'recognition_responsetime', 'position_correct',
'position_response', 'position_accuracy', 'position_responsetime']
mainEnc = mainEnc[cols]
# Re-merge mainEnc and mainCTL and re-order by trial number
mainMerged = mainEnc.append(mainCTL, ignore_index=True)
mainMerged.sort_values('trial_number', axis=0, ascending=True,
inplace=True)
return mainMerged
def extract_taskFile(bID, sID, file_list, output):
"""
Parameters:
----------
bID: string (subject PSCID, id used during data collection)
sID: string (subject DCCID, id used in Loris)
file_list: list (three input files)
output: string (output Folder)
Return:
----------
None
"""
# import data from three text files into pandas DataFrames
encMain = pd.read_csv(file_list[0], sep='\t')
manualEdits = ['3303819', '5477234', '6417837', '7674650']
if bID in manualEdits:
encOnsets = pd.read_csv(file_list[1], sep='\t', header=None)
else:
encOnsets = pd.read_fwf(file_list[1], infer_nrows=210,
delim_whitespace=True,
header=None)
retriev = pd.read_csv(file_list[2], sep='\t', encoding='ISO-8859-1')
# clean up each file
encMain = cleanMain(encMain)
encOnsets = cleanOnsets(encOnsets)
retriev = cleanRetriev(retriev)
# import onset times from encOnset into encMain
encMain = addOnsets(encMain, encOnsets)
# import post-scan performance data from retriev into encMain
encMain = addPostScan(encMain, retriev)
# export encMain and retriev into tsv files (output directorty)
encMain.to_csv(output+'/sub-'+sID+'_ses-4_task-memory_events.tsv',
sep='\t', header=True, index=False)
retriev.to_csv(output+'/PostScanBehav_pscid'+bID+'_dccid'+sID+'.tsv',
sep='\t', header=True, index=False)
def main():
args = get_arguments()
logging.basicConfig(level=args.log_level)
oFolder = args.odir[0]
iFolder = args.idir[0]
# Create oFolder if not exists
if not os.path.exists(oFolder):
os.mkdir(oFolder)
all_ids = get_all_ids(iFolder)
# Create tmp folder to temporaly store unziped files
tmpFolder = os.path.join(oFolder, 'tmp')
if not os.path.exists(tmpFolder):
os.mkdir(tmpFolder)
# Create taskFiles folder where all output files will be saved
fileFolder = os.path.join(oFolder, 'taskfiles')
if not os.path.exists(fileFolder):
os.mkdir(fileFolder)
# loop over zip files
for (idBEH, idMRI) in all_ids:
s_files = set_subject_data(idBEH, iFolder, tmpFolder)
if(len(s_files) == 3):
extract_taskFile(idBEH, idMRI, s_files, fileFolder)
shutil.rmtree(tmpFolder, ignore_errors=True)
else:
logging.info('missing files for subject ({},{})'.format(idBEH,
idMRI))
#
if __name__ == '__main__':
sys.exit(main())
| [
"logging.basicConfig",
"os.path.exists",
"argparse.ArgumentParser",
"pandas.read_csv",
"zipfile.ZipFile",
"os.path.join",
"shutil.rmtree",
"os.path.basename",
"os.mkdir",
"sys.exit",
"re.findall",
"pandas.read_fwf",
"glob.glob"
] | [((230, 458), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.RawDescriptionHelpFormatter', 'description': '""""""', 'epilog': '"""\n Convert behavioural data from cimaq to bids format\n Input: Folder with zip files\n """'}), '(formatter_class=argparse.\n RawDescriptionHelpFormatter, description=\'\', epilog=\n """\n Convert behavioural data from cimaq to bids format\n Input: Folder with zip files\n """\n )\n', (253, 458), False, 'import argparse\n'), ((13313, 13348), 'pandas.read_csv', 'pd.read_csv', (['file_list[0]'], {'sep': '"""\t"""'}), "(file_list[0], sep='\\t')\n", (13324, 13348), True, 'import pandas as pd\n'), ((13695, 13753), 'pandas.read_csv', 'pd.read_csv', (['file_list[2]'], {'sep': '"""\t"""', 'encoding': '"""ISO-8859-1"""'}), "(file_list[2], sep='\\t', encoding='ISO-8859-1')\n", (13706, 13753), True, 'import pandas as pd\n'), ((14461, 14502), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'args.log_level'}), '(level=args.log_level)\n', (14480, 14502), False, 'import logging\n'), ((14764, 14792), 'os.path.join', 'os.path.join', (['oFolder', '"""tmp"""'], {}), "(oFolder, 'tmp')\n", (14776, 14792), False, 'import os\n'), ((14944, 14978), 'os.path.join', 'os.path.join', (['oFolder', '"""taskfiles"""'], {}), "(oFolder, 'taskfiles')\n", (14956, 14978), False, 'import os\n'), ((1006, 1016), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1014, 1016), False, 'import sys\n'), ((1276, 1299), 'os.path.exists', 'os.path.exists', (['iFolder'], {}), '(iFolder)\n', (1290, 1299), False, 'import os\n'), ((1424, 1454), 'os.path.join', 'os.path.join', (['iFolder', '"""*.zip"""'], {}), "(iFolder, '*.zip')\n", (1436, 1454), False, 'import os\n'), ((1514, 1543), 'os.path.basename', 'os.path.basename', (['currZipFile'], {}), '(currZipFile)\n', (1530, 1543), False, 'import os\n'), ((1644, 1697), 'sys.exit', 'sys.exit', (['"""This folder doesn\'t contain any zip files"""'], {}), '("This folder doesn\'t contain any zip files")\n', (1652, 1697), False, 'import sys\n'), ((2287, 2326), 'os.path.join', 'os.path.join', (['iFolder', "(bID + '*IRM.zip')"], {}), "(iFolder, bID + '*IRM.zip')\n", (2299, 2326), False, 'import os\n'), ((2491, 2523), 'os.path.join', 'os.path.join', (['oFolder', "(bID + '*')"], {}), "(oFolder, bID + '*')\n", (2503, 2523), False, 'import os\n'), ((2538, 2555), 'glob.glob', 'glob.glob', (['s_path'], {}), '(s_path)\n', (2547, 2555), False, 'import glob\n'), ((13459, 13507), 'pandas.read_csv', 'pd.read_csv', (['file_list[1]'], {'sep': '"""\t"""', 'header': 'None'}), "(file_list[1], sep='\\t', header=None)\n", (13470, 13507), True, 'import pandas as pd\n'), ((13538, 13616), 'pandas.read_fwf', 'pd.read_fwf', (['file_list[1]'], {'infer_nrows': '(210)', 'delim_whitespace': '(True)', 'header': 'None'}), '(file_list[1], infer_nrows=210, delim_whitespace=True, header=None)\n', (13549, 13616), True, 'import pandas as pd\n'), ((14604, 14627), 'os.path.exists', 'os.path.exists', (['oFolder'], {}), '(oFolder)\n', (14618, 14627), False, 'import os\n'), ((14637, 14654), 'os.mkdir', 'os.mkdir', (['oFolder'], {}), '(oFolder)\n', (14645, 14654), False, 'import os\n'), ((14804, 14829), 'os.path.exists', 'os.path.exists', (['tmpFolder'], {}), '(tmpFolder)\n', (14818, 14829), False, 'import os\n'), ((14839, 14858), 'os.mkdir', 'os.mkdir', (['tmpFolder'], {}), '(tmpFolder)\n', (14847, 14858), False, 'import os\n'), ((14990, 15016), 'os.path.exists', 'os.path.exists', (['fileFolder'], {}), '(fileFolder)\n', (15004, 15016), False, 'import os\n'), 
((15026, 15046), 'os.mkdir', 'os.mkdir', (['fileFolder'], {}), '(fileFolder)\n', (15034, 15046), False, 'import os\n'), ((2598, 2628), 'zipfile.ZipFile', 'zipfile.ZipFile', (['s_dir[0]', '"""r"""'], {}), "(s_dir[0], 'r')\n", (2613, 2628), False, 'import zipfile\n'), ((2713, 2730), 'glob.glob', 'glob.glob', (['s_path'], {}), '(s_path)\n', (2722, 2730), False, 'import glob\n'), ((8415, 8448), 're.findall', 're.findall', (['"""(.+?)_"""', 'stimInfo[s]'], {}), "('(.+?)_', stimInfo[s])\n", (8425, 8448), False, 'import re\n'), ((8486, 8522), 're.findall', 're.findall', (['"""_(.+?)[.]"""', 'stimInfo[s]'], {}), "('_(.+?)[.]', stimInfo[s])\n", (8496, 8522), False, 'import re\n'), ((15278, 15322), 'shutil.rmtree', 'shutil.rmtree', (['tmpFolder'], {'ignore_errors': '(True)'}), '(tmpFolder, ignore_errors=True)\n', (15291, 15322), False, 'import shutil\n'), ((2857, 2885), 'os.path.join', 'os.path.join', (['s_out', 'nPrefix'], {}), '(s_out, nPrefix)\n', (2869, 2885), False, 'import os\n')] |
import numpy as np
import pytest
from opytimizer.math import hypercomplex
def test_norm():
array = np.array([[1, 1]])
norm_array = hypercomplex.norm(array)
assert norm_array > 0
def test_span():
array = np.array([[0.5, 0.75, 0.5, 0.9]])
lb = [0]
ub = [10]
span_array = hypercomplex.span(array, lb, ub)
assert span_array > 0
| [
"numpy.array",
"opytimizer.math.hypercomplex.span",
"opytimizer.math.hypercomplex.norm"
] | [((106, 124), 'numpy.array', 'np.array', (['[[1, 1]]'], {}), '([[1, 1]])\n', (114, 124), True, 'import numpy as np\n'), ((143, 167), 'opytimizer.math.hypercomplex.norm', 'hypercomplex.norm', (['array'], {}), '(array)\n', (160, 167), False, 'from opytimizer.math import hypercomplex\n'), ((226, 259), 'numpy.array', 'np.array', (['[[0.5, 0.75, 0.5, 0.9]]'], {}), '([[0.5, 0.75, 0.5, 0.9]])\n', (234, 259), True, 'import numpy as np\n'), ((307, 339), 'opytimizer.math.hypercomplex.span', 'hypercomplex.span', (['array', 'lb', 'ub'], {}), '(array, lb, ub)\n', (324, 339), False, 'from opytimizer.math import hypercomplex\n')] |
import datetime
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
class Collection(models.Model):
'''
A collection of cards.
'''
name = models.CharField(max_length=250)
class Card(models.Model):
first_name = models.CharField(max_length=150)
last_name = models.CharField(max_length=150)
team = models.CharField(max_length=100)
year = models.PositiveIntegerField(
validators=[
MinValueValidator(1887), # first produced baseball card (source?)
MaxValueValidator(datetime.datetime.now().year),
],
help_text='Use the following format: <YYYY>',
)
company = models.CharField(max_length=250)
value = models.DecimalField(
decimal_places=2,
max_digits=50,
default=0,
)
collection = models.ManyToManyField(Collection)
| [
"django.core.validators.MinValueValidator",
"django.db.models.ManyToManyField",
"datetime.datetime.now",
"django.db.models.DecimalField",
"django.db.models.CharField"
] | [((206, 238), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)'}), '(max_length=250)\n', (222, 238), False, 'from django.db import models\n'), ((284, 316), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)'}), '(max_length=150)\n', (300, 316), False, 'from django.db import models\n'), ((333, 365), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)'}), '(max_length=150)\n', (349, 365), False, 'from django.db import models\n'), ((377, 409), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (393, 409), False, 'from django.db import models\n'), ((696, 728), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)'}), '(max_length=250)\n', (712, 728), False, 'from django.db import models\n'), ((741, 804), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(50)', 'default': '(0)'}), '(decimal_places=2, max_digits=50, default=0)\n', (760, 804), False, 'from django.db import models\n'), ((854, 888), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Collection'], {}), '(Collection)\n', (876, 888), False, 'from django.db import models\n'), ((483, 506), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(1887)'], {}), '(1887)\n', (500, 506), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((580, 603), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (601, 603), False, 'import datetime\n')] |
import torch
from scipy.sparse.linalg import LinearOperator, cg
from typing import Callable, Optional
from torch import Tensor
import numpy as np
import time
class CG(torch.autograd.Function):
@staticmethod
def forward(ctx, z: Tensor, AcquisitionModel, beta: Tensor, y, G: Callable, GH: Callable, GHG: Optional[Callable]=None,x0:Optional[Tensor]=None) -> Tensor:
tmp = AcquisitionModel.adjoint(y)
if GHG is None:
GHG = lambda x: GH(G(x))
b = tmp.as_array().ravel() + (beta * GH(z)).numpy().ravel()
if x0 is not None:
x0 = x0.numpy().ravel()
def AHA(x):
tmp.fill(x)
return AcquisitionModel.adjoint(AcquisitionModel.direct(tmp)).as_array().ravel()
H = LinearOperator(
shape=(np.prod(b.shape), np.prod(b.shape)),
dtype=np.complex64,
matvec=lambda x: AHA(x)+(beta * GHG(torch.from_numpy(x).reshape(tmp.shape).unsqueeze(0))).numpy().ravel()
)
sol = cg(H, b,tol=1e-3,x0=x0)
xprime = sol[0].reshape(tmp.shape)
ctx.H = H
ctx.G = G
ctx.GH = GH
xprime_tensor = torch.from_numpy(xprime)
ctx.save_for_backward(beta, xprime_tensor, z)
return xprime_tensor
@staticmethod
def backward(ctx, grad_output):
beta, xprime, z = ctx.saved_tensors
b = grad_output.unsqueeze(0).numpy().ravel()
old=time.time()
grad = torch.from_numpy(cg(ctx.H, b,tol=1e-3, x0=b)[0]).reshape(grad_output.shape)
print('backward cg',time.time()-old)
gz = gbeta = None
if ctx.needs_input_grad[0]:
gz = beta * ctx.G(grad.unsqueeze(0))
if ctx.needs_input_grad[2]:
gbeta = (-ctx.GH(ctx.G(xprime.unsqueeze(0)) - z.unsqueeze(0)) * grad).sum().real
return gz, None, gbeta, None, None, None, None, None
| [
"numpy.prod",
"scipy.sparse.linalg.cg",
"time.time",
"torch.from_numpy"
] | [((1002, 1028), 'scipy.sparse.linalg.cg', 'cg', (['H', 'b'], {'tol': '(0.001)', 'x0': 'x0'}), '(H, b, tol=0.001, x0=x0)\n', (1004, 1028), False, 'from scipy.sparse.linalg import LinearOperator, cg\n'), ((1149, 1173), 'torch.from_numpy', 'torch.from_numpy', (['xprime'], {}), '(xprime)\n', (1165, 1173), False, 'import torch\n'), ((1421, 1432), 'time.time', 'time.time', ([], {}), '()\n', (1430, 1432), False, 'import time\n'), ((1552, 1563), 'time.time', 'time.time', ([], {}), '()\n', (1561, 1563), False, 'import time\n'), ((791, 807), 'numpy.prod', 'np.prod', (['b.shape'], {}), '(b.shape)\n', (798, 807), True, 'import numpy as np\n'), ((809, 825), 'numpy.prod', 'np.prod', (['b.shape'], {}), '(b.shape)\n', (816, 825), True, 'import numpy as np\n'), ((1465, 1494), 'scipy.sparse.linalg.cg', 'cg', (['ctx.H', 'b'], {'tol': '(0.001)', 'x0': 'b'}), '(ctx.H, b, tol=0.001, x0=b)\n', (1467, 1494), False, 'from scipy.sparse.linalg import LinearOperator, cg\n'), ((908, 927), 'torch.from_numpy', 'torch.from_numpy', (['x'], {}), '(x)\n', (924, 927), False, 'import torch\n')] |
#! /usr/bin/env python
# Copyright (c) 2018 - 2019 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from __future__ import print_function
import os
import cv2
import numpy as np
from pythutils.fileutils import get_ext
from pythutils.mathutils import closenr, sort_points
def check_media(source, internal=False):
"""Runs some basic checks on a mediafile or stream"""
ext = get_ext(str(source))
ftype = None
if ext in [".mov",".mp4",".avi"]:
ftype = "vid"
if ext in [".jpg", ".png", ".jpeg", ".bmp"]:
ftype = "img"
if type(source) == int:
ftype = "stream"
    if ftype is None:
print("File neither video or image file..")
return False
if ftype == "img" or ftype == "vid":
filedir = os.path.dirname(source)
if filedir != "":
if not os.path.isdir(filedir):
print("File directory does not exist..")
return False
if not os.path.isfile(source):
print("File does not exist..")
return False
if ftype == "vid" or ftype == "stream":
cap = cv2.VideoCapture(source)
flag, frame = cap.read()
if not flag:
print("Video source opened but failed to read images..")
return False
if not internal:
print("Mediafile okay.. ", end = "")
return True
def getimg(mediafile):
"""Acquires a numpy array from a video or image"""
try:
cap = cv2.VideoCapture(mediafile)
_, img = cap.read()
except:
img = cv2.imread(mediafile)
return img
def get_vid_params(mediafile):
"""Gets video parameters from file or video instance"""
if type(mediafile) is str:
if get_ext(mediafile) not in [".mov",".mp4",".avi"]:
raise TypeError("File not a video..")
mediafile = cv2.VideoCapture(mediafile)
if not mediafile.read()[0]:
raise RuntimeError("Video could not be read..")
fps = int(mediafile.get(cv2.CAP_PROP_FPS))
width = int(mediafile.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(mediafile.get(cv2.CAP_PROP_FRAME_HEIGHT))
fcount = int(mediafile.get(cv2.CAP_PROP_FRAME_COUNT))
return fps, width, height, fcount
def videowriter(filein, w, h, fps, resizeval = 1):
"""Creates a vidout instance using the opencv VideoWriter class"""
ext = get_ext(filein)
fileout = filein[:-len(ext)]+".mp4" if ext!="" else filein+".mp4"
viddims = (w, h) if resizeval == 1 else (int(w*resizeval), int(h*resizeval))
fourcc = cv2.VideoWriter_fourcc(*"mp4v")
vidout = cv2.VideoWriter(fileout, fourcc, fps, viddims)
return vidout
def safe_framecount(vidfile):
"""Saves video frame counter that counts frame-by-frame"""
cap = cv2.VideoCapture(vidfile)
vidlength = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
count = 0
while True:
ret, frame = cap.read()
if not ret:
break
count += 1
print("video had", vidlength-count, "non-existing frames.. ", end = "")
return count
def crop(image, pt1, pt2=None):
"""Crops image based on based on top left and bottom right corner"""
    if pt2 is None:
pt2 = pt1[1]
pt1 = pt1[0]
cropped = image[pt1[1]:pt2[1], pt1[0]:pt2[0]]
return cropped
def fourpt_transform(image, pts):
"""
Perspective transform a section of an image based on four coordinates
to obtain a top-down view
"""
rect = sort_points(pts)
(tl, tr, br, bl) = rect
widthA = np.sqrt(((br[0] - bl[0]) ** 2) + ((br[1] - bl[1]) ** 2))
widthB = np.sqrt(((tr[0] - tl[0]) ** 2) + ((tr[1] - tl[1]) ** 2))
maxWidth = max(int(widthA), int(widthB))
heightA = np.sqrt(((tr[0] - br[0]) ** 2) + ((tr[1] - br[1]) ** 2))
heightB = np.sqrt(((tl[0] - bl[0]) ** 2) + ((tl[1] - bl[1]) ** 2))
maxHeight = max(int(heightA), int(heightB))
dst = np.array([[0, 0], [maxWidth - 1, 0],
[maxWidth - 1, maxHeight - 1],
[0, maxHeight - 1]], dtype = "float32")
M = cv2.getPerspectiveTransform(rect, dst)
warped = cv2.warpPerspective(image, M, (maxWidth, maxHeight))
return warped
def checkroi(roi, resolution):
"""Make sure roi coordinates are within resolution"""
x1 = max(roi[0][0],1)
y1 = max(roi[0][1],1)
x2 = min(roi[1][0],resolution[0])
y2 = min(roi[1][1],resolution[1])
return ((x1,y1),(x2,y2))
def zoom_to_roi(zoom, resolution):
"""Gets region of interest coordinates from x,y,w,h zoom parameters"""
x1 = int(zoom[0] * resolution[0])
x2 = int((zoom[0]+zoom[2]) * resolution[0])
y1 = int(zoom[1] * resolution[1])
y2 = int((zoom[1]+zoom[3]) * resolution[1])
return ((x1,y1),(x2,y2))
def roi_to_zoom(roi, resolution):
"""Gets x,y,w,h zoom parameters from region of interest coordinates"""
((x1,y1),(x2,y2)) = roi
z0 = round(x1 / resolution[0],2)
z1 = round(y1 / resolution[1],2)
z2 = round((x2-x1) / resolution[0],2)
z3 = round((y2-y1) / resolution[1],2)
return (z0, z1, z2, z3)
def picamconv(resolution, maxres = (1632, 1232)):
"""Adapts video resolution to work with raspberry pi camera"""
width = min(closenr(resolution[0],32), maxres[0])
height = min(closenr(resolution[1],16), maxres[1])
return (width, height)
def fix_vidshape(res1,res2):
"""Compares two resolutions and get missing x and y coords"""
xmin,ymin = 0,0
xmult = (res2[0]/res1[0])
ymult = (res2[1]/res1[1])
if xmult > ymult:
xmin = int((res2[0]-(res1[0]*ymult))/2)
if ymult > xmult:
        ymin = int((res2[1]-(res1[1]*xmult))/2)
return xmin, ymin
def newdims(img = None, resize = 1, dims = None):
"""Returns new dimensions of an image array based on resize value"""
if dims is None:
if img is None:
print("No img or dims provided..")
return
else:
dims = (img.shape[1],img.shape[0])
width = int(dims[0] * resize)
height = int(dims[1] * resize)
return (width, height)
def imgresize(img, resize = 1, dims = None, back = False):
"""
Returns resized image based on resizevalue or provided dimensions
Parameters
----------
img : numpy array
resize : float, default = 1
Multiplier for image size
dims : tuple, default = None
Dimensions of the to-be returned image
back : bool, default = False
If the inverse of the resize value should be used
"""
if dims is None:
resize = 1/resize if back else resize
dims = newdims(img, resize)
interpol = cv2.INTER_CUBIC if resize > 1 else cv2.INTER_AREA
img = cv2.resize(img, dims, interpolation = interpol)
return img
def add_transimg(bgimg, transimg, offsets):
"""
Adds a semi-transparent (4-channel) image to a 3-channel background
image. Images need to be arrays.
"""
h, w, c = transimg.shape
fix = np.zeros((h, w, 3), np.uint8)
a = transimg[:, :, 3] / 255 #alpha
o = offsets
fix[:,:,0] = (1.-a)*bgimg[o[1]:o[1]+h, o[0]:o[0]+w, 0]+a*transimg[:,:,0]
fix[:,:,1] = (1.-a)*bgimg[o[1]:o[1]+h, o[0]:o[0]+w, 1]+a*transimg[:,:,1]
fix[:,:,2] = (1.-a)*bgimg[o[1]:o[1]+h, o[0]:o[0]+w, 2]+a*transimg[:,:,2]
bgimg[o[1]:o[1]+h, o[0]:o[0]+w] = fix
return bgimg
| [
"pythutils.fileutils.get_ext",
"pythutils.mathutils.sort_points",
"numpy.sqrt",
"cv2.getPerspectiveTransform",
"cv2.VideoWriter",
"os.path.isfile",
"numpy.array",
"cv2.warpPerspective",
"numpy.zeros",
"os.path.dirname",
"cv2.VideoCapture",
"cv2.VideoWriter_fourcc",
"os.path.isdir",
"cv2.re... | [((2904, 2919), 'pythutils.fileutils.get_ext', 'get_ext', (['filein'], {}), '(filein)\n', (2911, 2919), False, 'from pythutils.fileutils import get_ext\n'), ((3084, 3115), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'mp4v'"], {}), "(*'mp4v')\n", (3106, 3115), False, 'import cv2\n'), ((3129, 3175), 'cv2.VideoWriter', 'cv2.VideoWriter', (['fileout', 'fourcc', 'fps', 'viddims'], {}), '(fileout, fourcc, fps, viddims)\n', (3144, 3175), False, 'import cv2\n'), ((3302, 3327), 'cv2.VideoCapture', 'cv2.VideoCapture', (['vidfile'], {}), '(vidfile)\n', (3318, 3327), False, 'import cv2\n'), ((4008, 4024), 'pythutils.mathutils.sort_points', 'sort_points', (['pts'], {}), '(pts)\n', (4019, 4024), False, 'from pythutils.mathutils import closenr, sort_points\n'), ((4067, 4119), 'numpy.sqrt', 'np.sqrt', (['((br[0] - bl[0]) ** 2 + (br[1] - bl[1]) ** 2)'], {}), '((br[0] - bl[0]) ** 2 + (br[1] - bl[1]) ** 2)\n', (4074, 4119), True, 'import numpy as np\n'), ((4137, 4189), 'numpy.sqrt', 'np.sqrt', (['((tr[0] - tl[0]) ** 2 + (tr[1] - tl[1]) ** 2)'], {}), '((tr[0] - tl[0]) ** 2 + (tr[1] - tl[1]) ** 2)\n', (4144, 4189), True, 'import numpy as np\n'), ((4254, 4306), 'numpy.sqrt', 'np.sqrt', (['((tr[0] - br[0]) ** 2 + (tr[1] - br[1]) ** 2)'], {}), '((tr[0] - br[0]) ** 2 + (tr[1] - br[1]) ** 2)\n', (4261, 4306), True, 'import numpy as np\n'), ((4325, 4377), 'numpy.sqrt', 'np.sqrt', (['((tl[0] - bl[0]) ** 2 + (tl[1] - bl[1]) ** 2)'], {}), '((tl[0] - bl[0]) ** 2 + (tl[1] - bl[1]) ** 2)\n', (4332, 4377), True, 'import numpy as np\n'), ((4441, 4551), 'numpy.array', 'np.array', (['[[0, 0], [maxWidth - 1, 0], [maxWidth - 1, maxHeight - 1], [0, maxHeight - 1]]'], {'dtype': '"""float32"""'}), "([[0, 0], [maxWidth - 1, 0], [maxWidth - 1, maxHeight - 1], [0, \n maxHeight - 1]], dtype='float32')\n", (4449, 4551), True, 'import numpy as np\n'), ((4598, 4636), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['rect', 'dst'], {}), '(rect, dst)\n', (4625, 4636), False, 'import cv2\n'), ((4650, 4702), 'cv2.warpPerspective', 'cv2.warpPerspective', (['image', 'M', '(maxWidth, maxHeight)'], {}), '(image, M, (maxWidth, maxHeight))\n', (4669, 4702), False, 'import cv2\n'), ((7232, 7277), 'cv2.resize', 'cv2.resize', (['img', 'dims'], {'interpolation': 'interpol'}), '(img, dims, interpolation=interpol)\n', (7242, 7277), False, 'import cv2\n'), ((7508, 7537), 'numpy.zeros', 'np.zeros', (['(h, w, 3)', 'np.uint8'], {}), '((h, w, 3), np.uint8)\n', (7516, 7537), True, 'import numpy as np\n'), ((1307, 1330), 'os.path.dirname', 'os.path.dirname', (['source'], {}), '(source)\n', (1322, 1330), False, 'import os\n'), ((1652, 1676), 'cv2.VideoCapture', 'cv2.VideoCapture', (['source'], {}), '(source)\n', (1668, 1676), False, 'import cv2\n'), ((2014, 2041), 'cv2.VideoCapture', 'cv2.VideoCapture', (['mediafile'], {}), '(mediafile)\n', (2030, 2041), False, 'import cv2\n'), ((2391, 2418), 'cv2.VideoCapture', 'cv2.VideoCapture', (['mediafile'], {}), '(mediafile)\n', (2407, 2418), False, 'import cv2\n'), ((5754, 5780), 'pythutils.mathutils.closenr', 'closenr', (['resolution[0]', '(32)'], {}), '(resolution[0], 32)\n', (5761, 5780), False, 'from pythutils.mathutils import closenr, sort_points\n'), ((5809, 5835), 'pythutils.mathutils.closenr', 'closenr', (['resolution[1]', '(16)'], {}), '(resolution[1], 16)\n', (5816, 5835), False, 'from pythutils.mathutils import closenr, sort_points\n'), ((1501, 1523), 'os.path.isfile', 'os.path.isfile', (['source'], {}), '(source)\n', (1515, 1523), False, 'import os\n'), ((2096, 2117), 
'cv2.imread', 'cv2.imread', (['mediafile'], {}), '(mediafile)\n', (2106, 2117), False, 'import cv2\n'), ((2271, 2289), 'pythutils.fileutils.get_ext', 'get_ext', (['mediafile'], {}), '(mediafile)\n', (2278, 2289), False, 'from pythutils.fileutils import get_ext\n'), ((1376, 1398), 'os.path.isdir', 'os.path.isdir', (['filedir'], {}), '(filedir)\n', (1389, 1398), False, 'import os\n')] |
import boto3
import subprocess
successes = 0
# Dummy AWS Handler to kick off high level processes
def lambda_handler(source_region, destination_region, credentials):
session = boto3.Session()
# Load Records into KINESIS
CLIENT_NAME = 'kinesis'
    kinesis = session.client(CLIENT_NAME, region_name=source_region, aws_access_key_id=credentials['AccessKeyId'],
aws_secret_access_key=credentials['SecretAccessKey'],
aws_session_token=credentials['SessionToken'])
process_kinesis(kinesis, "some_file_path.txt")
# Get SNS Topic ARNs
CLIENT_NAME = 'sns'
for region in [source_region, destination_region]:
        sns = session.client(CLIENT_NAME, region_name=region, aws_access_key_id=credentials['AccessKeyId'],
aws_secret_access_key=credentials['SecretAccessKey'],
aws_session_token=credentials['SessionToken'])
topic_arns = list_sns(sns)
print(len(topic_arns))
# Sync Source DDB to Destination Region
CLIENT_NAME = 'dynamodb'
source_ddb = session.client(CLIENT_NAME, region_name=source_region, aws_access_key_id=credentials['AccessKeyId'],
aws_secret_access_key=credentials['SecretAccessKey'],
aws_session_token=credentials['SessionToken'])
destination_ddb = session.client(CLIENT_NAME, region_name=destination_region)
sync_ddb_table(source_ddb, destination_ddb)
# Scan returns paginated results, so only partial data will be copied
def sync_ddb_table(source_ddb, destination_ddb):
response = source_ddb.scan(
TableName="table1"
)
for item in response['Items']:
destination_ddb.put_item(
TableName="table2",
Item=item
)
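# A minimal sketch (not part of the original sample) of draining the paginated
# scan that the comment above warns about, using boto3's LastEvaluatedKey /
# ExclusiveStartKey cursor; the "table1"/"table2" names are reused from above.
def sync_ddb_table_full(source_ddb, destination_ddb):
    kwargs = {'TableName': 'table1'}
    while True:
        response = source_ddb.scan(**kwargs)
        for item in response['Items']:
            destination_ddb.put_item(TableName='table2', Item=item)
        if 'LastEvaluatedKey' not in response:
            break  # no more pages
        kwargs['ExclusiveStartKey'] = response['LastEvaluatedKey']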
# This code uses a mutable default argument and modifies it to return. This would leak results across calls
def list_sns(sns, topics=[]):
response = sns.list_topics()
for topic_arn in response["Topics"]:
topics.append(topic_arn["TopicArn"])
return topics
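# A minimal sketch (not part of the original sample): creating a fresh list
# per call avoids the shared-mutable-default leak described above.
def list_sns_safe(sns, topics=None):
    topics = [] if topics is None else topics
    response = sns.list_topics()
    for topic_arn in response["Topics"]:
        topics.append(topic_arn["TopicArn"])
    return topics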
# Infinite loop: the list is modified while it is being iterated over, and indices are not updated.
def infinite_loop():
words = ['aws', 'amazon', 'codeguru']
for w in words:
if len(w) > 4:
words.insert(0, w)
return words
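# A minimal sketch (not part of the original sample): iterating over a
# snapshot (words[:]) lets the loop terminate even though the list is
# modified inside it.
def finite_loop():
    words = ['aws', 'amazon', 'codeguru']
    for w in words[:]:
        if len(w) > 4:
            words.insert(0, w)
    return words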
# Prefer collections.defaultdict over dict.setdefault
def setdefault_example():
std_dict = dict()
for k, v in enumerate(range(5)):
std_dict.setdefault(k, []).append(v)
return std_dict
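# A minimal sketch (not part of the original sample) of the defaultdict
# alternative recommended above.
from collections import defaultdict

def defaultdict_example():
    dd_dict = defaultdict(list)
    for k, v in enumerate(range(5)):
        dd_dict[k].append(v)
    return dd_dict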
# This method reads multiple file paths and opens each file to load data, but forgets to close them, leading to resource leaks.
# Further, it selectively processes content based on a string find() condition. The find() operation can simply be replaced
# with a membership test because one does not need to know the position at which the search keyword appears.
def process_kinesis(kinesis, file_list_path):
flp = open(file_list_path, 'r')
for line in flp:
file_path = line.strip('\n').strip('\r\n')
fp = open(file_path, 'r')
for content in fp:
if content.find("kinesis") != -1:
record = load_kinesis_record(content)
save_kinesis_record(kinesis, record)
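# A minimal sketch (not part of the original sample) of the fixes suggested
# above: context managers close both files, and a membership test replaces
# find(); load_kinesis_record/save_kinesis_record are the functions below.
def process_kinesis_safely(kinesis, file_list_path):
    with open(file_list_path, 'r') as flp:
        for line in flp:
            file_path = line.strip('\n').strip('\r\n')
            with open(file_path, 'r') as fp:
                for content in fp:
                    if "kinesis" in content:
                        record = load_kinesis_record(content)
                        save_kinesis_record(kinesis, record)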
# Do not call this function unless you're sure that the "cmd" is secure to run
# This function can be misused to carry out shell injection attacks.
# Further, the code is simply passing an exception, which is not the best practice
# Further, the code keeps track of successful loads by writing to a global variable; which can lead to inaccuracies in
# case of concurrent read/writes to the global variable.
def load_kinesis_record(cmd, mode='subprocess'):
global successes
kinesis_record = None
try:
if mode == "subprocess":
kinesis_record = subprocess.call(cmd, shell=True)
else:
kinesis_record = eval(cmd)
successes += 1
except Exception as e:
pass
return kinesis_record
# This code saves records to Kinesis, but does not check and retry for failed records
# Further, it simply re-raises the caught exception without any additional steps. This redundancy will be flagged.
def save_kinesis_record(kinesis_client, record):
try:
kinesis_client.put_records(record)
except:
raise
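# A minimal sketch (not part of the original sample) of checking and retrying
# failed records as the comment above suggests; put_records reports failures
# via FailedRecordCount, and per-record ErrorCode entries come back in request
# order. The records/stream_name parameters are illustrative.
def save_kinesis_records_with_retry(kinesis_client, records, stream_name, attempts=3):
    for _ in range(attempts):
        response = kinesis_client.put_records(Records=records, StreamName=stream_name)
        if response.get('FailedRecordCount', 0) == 0:
            return []
        # keep only the records that failed and try again
        records = [rec for rec, res in zip(records, response['Records'])
                   if 'ErrorCode' in res]
    return records  # anything still here failed every attempt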
| [
"boto3.Session",
"subprocess.call"
] | [((183, 198), 'boto3.Session', 'boto3.Session', ([], {}), '()\n', (196, 198), False, 'import boto3\n'), ((3807, 3839), 'subprocess.call', 'subprocess.call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (3822, 3839), False, 'import subprocess\n')] |
# -*- coding: utf-8 -*-
import os
import sys
reload(sys).setdefaultencoding("UTF-8")
from setuptools import setup, find_packages
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
version = '0.2'
setup(
name='django-rules',
version=version,
description="Flexible per-object authorization backend for Django",
long_description=read('README.textile'),
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
"Framework :: Django",
"Environment :: Web Environment",
],
keywords=['authorization', 'backends', 'django', 'rules', 'permissions'],
author='<NAME>',
author_email='<EMAIL>',
url='http://github.com/maraujop/django-rules',
license='BSD',
packages=find_packages(),
zip_safe=False,
)
| [
"os.path.dirname",
"setuptools.find_packages"
] | [((1001, 1016), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1014, 1016), False, 'from setuptools import setup, find_packages\n'), ((210, 235), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (225, 235), False, 'import os\n')] |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import re
import numpy as np
import paddle as P
import paddle.distributed.fleet as fleet
from propeller.paddle.train.hooks import RunHook
log = logging.getLogger(__name__)
from demo.utils import create_if_not_exists, get_warmup_and_linear_decay
def optimization(
loss,
warmup_steps,
num_train_steps,
learning_rate,
train_program,
startup_prog,
weight_decay,
scheduler='linear_warmup_decay',
use_fp16=False, ):
"""do backword for static"""
def exclude_from_weight_decay(param):
name = param.rstrip('.master')
if name.find("layer_norm") > -1:
return True
bias_suffix = ["_bias", "_b", ".b_0"]
for suffix in bias_suffix:
if name.endswith(suffix):
return True
return False
g_clip = P.nn.ClipGradByGlobalNorm(1.0)
lr_scheduler = P.optimizer.lr.LambdaDecay(
learning_rate,
get_warmup_and_linear_decay(num_train_steps, warmup_steps))
optimizer = P.optimizer.AdamW(
learning_rate=lr_scheduler,
weight_decay=weight_decay,
grad_clip=g_clip,
apply_decay_param_fun=exclude_from_weight_decay)
if use_fp16:
log.info('AMP activated')
if weight_decay > 0.:
raise ValueError(
'paddle amp will ignore `weight_decay`, see https://github.com/PaddlePaddle/Paddle/issues/29794'
)
#amp_list = P.fluid.contrib.mixed_precision.AutoMixedPrecisionLists(
# custom_white_list=['softmax', 'layer_norm', 'gelu'])
optimizer = P.fluid.contrib.mixed_precision.decorate(
optimizer, init_loss_scaling=3**15, use_dynamic_loss_scaling=True)
_, param_grads = optimizer.minimize(loss)
loss_scaling = P.static.default_main_program().global_block().var(
'loss_scaling_0')
else:
_, param_grads = optimizer.minimize(loss)
loss_scaling = None
class LRStepHook(RunHook):
def after_run(self, _, __):
lr_scheduler.step()
log.debug('lr step: %.5f' % lr_scheduler.get_lr())
return LRStepHook(), loss_scaling
| [
"logging.getLogger",
"demo.utils.get_warmup_and_linear_decay",
"paddle.optimizer.AdamW",
"paddle.static.default_main_program",
"paddle.fluid.contrib.mixed_precision.decorate",
"paddle.nn.ClipGradByGlobalNorm"
] | [((963, 990), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (980, 990), False, 'import logging\n'), ((1666, 1696), 'paddle.nn.ClipGradByGlobalNorm', 'P.nn.ClipGradByGlobalNorm', (['(1.0)'], {}), '(1.0)\n', (1691, 1696), True, 'import paddle as P\n'), ((1852, 1995), 'paddle.optimizer.AdamW', 'P.optimizer.AdamW', ([], {'learning_rate': 'lr_scheduler', 'weight_decay': 'weight_decay', 'grad_clip': 'g_clip', 'apply_decay_param_fun': 'exclude_from_weight_decay'}), '(learning_rate=lr_scheduler, weight_decay=weight_decay,\n grad_clip=g_clip, apply_decay_param_fun=exclude_from_weight_decay)\n', (1869, 1995), True, 'import paddle as P\n'), ((1775, 1833), 'demo.utils.get_warmup_and_linear_decay', 'get_warmup_and_linear_decay', (['num_train_steps', 'warmup_steps'], {}), '(num_train_steps, warmup_steps)\n', (1802, 1833), False, 'from demo.utils import create_if_not_exists, get_warmup_and_linear_decay\n'), ((2427, 2541), 'paddle.fluid.contrib.mixed_precision.decorate', 'P.fluid.contrib.mixed_precision.decorate', (['optimizer'], {'init_loss_scaling': '(3 ** 15)', 'use_dynamic_loss_scaling': '(True)'}), '(optimizer, init_loss_scaling=3 ** \n 15, use_dynamic_loss_scaling=True)\n', (2467, 2541), True, 'import paddle as P\n'), ((2621, 2652), 'paddle.static.default_main_program', 'P.static.default_main_program', ([], {}), '()\n', (2650, 2652), True, 'import paddle as P\n')] |
#!/usr/bin/env python3
#
# Fix ToUnicode CMap in PDF
# https://github.com/trueroad/pdf-fix-tuc
#
# build_bfrange2.py:
# Build bfrange2 PDF.
#
# Copyright (C) 2021 <NAME>.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
import re
import sys
def main ():
re_begin = re.compile (r"(\d+)\s+beginbfrange\r?\n?")
for line in sys.stdin.buffer:
try:
line = line.decode ("utf-8")
except UnicodeDecodeError:
sys.stdout.buffer.write (line)
continue
m = re_begin.match (line)
if m:
print ("{} beginbfrange".format (int (m.group (1)) + 5))
print ("""\
<0001> <0003> <2E81>
<0004> <0006> <2E99>
<0007> <0009> <2EF2>
<000A> <000C> <2EFF>
<000D> <000F> <2FD4>\
""")
continue
print (line, end = "", flush = True)
if __name__ == "__main__":
main ()
| [
"sys.stdout.buffer.write",
"re.compile"
] | [((1535, 1579), 're.compile', 're.compile', (['"""(\\\\d+)\\\\s+beginbfrange\\\\r?\\\\n?"""'], {}), "('(\\\\d+)\\\\s+beginbfrange\\\\r?\\\\n?')\n", (1545, 1579), False, 'import re\n'), ((1714, 1743), 'sys.stdout.buffer.write', 'sys.stdout.buffer.write', (['line'], {}), '(line)\n', (1737, 1743), False, 'import sys\n')] |
from typing import List, Union
from fedot.core.data.data import data_has_categorical_features, InputData
from fedot.core.data.multi_modal import MultiModalData
from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node
from fedot.core.pipelines.pipeline import Pipeline
from fedot.core.repository.tasks import Task, TaskTypesEnum
NOT_FITTED_ERR_MSG = 'Model not fitted yet'
class ApiInitialAssumptionsHelper:
def get_initial_assumption(self,
data: Union[InputData, MultiModalData],
task: Task) -> Pipeline:
has_categorical_features = data_has_categorical_features(data)
if isinstance(data, MultiModalData):
node_final = self.create_multidata_pipeline(task, data, has_categorical_features)
elif isinstance(data, InputData):
node_final = self.create_unidata_pipeline(task, has_categorical_features)
else:
raise NotImplementedError(f"Don't handle {type(data)}")
init_pipeline = Pipeline(node_final)
return init_pipeline
def create_unidata_pipeline(self,
task: Task,
has_categorical_features: bool) -> Node:
node_imputation = PrimaryNode('simple_imputation')
if task.task_type == TaskTypesEnum.ts_forecasting:
node_lagged = SecondaryNode('lagged', [node_imputation])
node_final = SecondaryNode('ridge', [node_lagged])
else:
if has_categorical_features:
node_encoder = SecondaryNode('one_hot_encoding', [node_imputation])
node_preprocessing = SecondaryNode('scaling', [node_encoder])
else:
node_preprocessing = SecondaryNode('scaling', [node_imputation])
if task.task_type == TaskTypesEnum.classification:
node_final = SecondaryNode('xgboost', nodes_from=[node_preprocessing])
elif task.task_type == TaskTypesEnum.regression:
node_final = SecondaryNode('xgbreg', nodes_from=[node_preprocessing])
else:
raise NotImplementedError(f"Don't have initial pipeline for task type: {task.task_type}")
return node_final
def create_multidata_pipeline(self, task: Task, data: MultiModalData, has_categorical_features: bool) -> Node:
if task.task_type == TaskTypesEnum.ts_forecasting:
node_final = SecondaryNode('ridge', nodes_from=[])
for data_source_name, values in data.items():
if data_source_name.startswith('data_source_ts'):
node_primary = PrimaryNode(data_source_name)
node_imputation = SecondaryNode('simple_imputation', [node_primary])
node_lagged = SecondaryNode('lagged', [node_imputation])
node_last = SecondaryNode('ridge', [node_lagged])
node_final.nodes_from.append(node_last)
elif task.task_type == TaskTypesEnum.classification:
node_final = SecondaryNode('xgboost', nodes_from=[])
node_final.nodes_from = self.create_first_multimodal_nodes(data, has_categorical_features)
elif task.task_type == TaskTypesEnum.regression:
node_final = SecondaryNode('xgbreg', nodes_from=[])
node_final.nodes_from = self.create_first_multimodal_nodes(data, has_categorical_features)
else:
raise NotImplementedError(f"Don't have initial pipeline for task type: {task.task_type}")
return node_final
def create_first_multimodal_nodes(self, data: MultiModalData, has_categorical: bool) -> List[SecondaryNode]:
nodes_from = []
for data_source_name, values in data.items():
node_primary = PrimaryNode(data_source_name)
node_imputation = SecondaryNode('simple_imputation', [node_primary])
if data_source_name.startswith('data_source_table') and has_categorical:
node_encoder = SecondaryNode('one_hot_encoding', [node_imputation])
node_preprocessing = SecondaryNode('scaling', [node_encoder])
else:
node_preprocessing = SecondaryNode('scaling', [node_imputation])
node_last = SecondaryNode('ridge', [node_preprocessing])
nodes_from.append(node_last)
return nodes_from
| [
"fedot.core.data.data.data_has_categorical_features",
"fedot.core.pipelines.pipeline.Pipeline",
"fedot.core.pipelines.node.PrimaryNode",
"fedot.core.pipelines.node.SecondaryNode"
] | [((625, 660), 'fedot.core.data.data.data_has_categorical_features', 'data_has_categorical_features', (['data'], {}), '(data)\n', (654, 660), False, 'from fedot.core.data.data import data_has_categorical_features, InputData\n'), ((1036, 1056), 'fedot.core.pipelines.pipeline.Pipeline', 'Pipeline', (['node_final'], {}), '(node_final)\n', (1044, 1056), False, 'from fedot.core.pipelines.pipeline import Pipeline\n'), ((1269, 1301), 'fedot.core.pipelines.node.PrimaryNode', 'PrimaryNode', (['"""simple_imputation"""'], {}), "('simple_imputation')\n", (1280, 1301), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((1387, 1429), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""lagged"""', '[node_imputation]'], {}), "('lagged', [node_imputation])\n", (1400, 1429), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((1455, 1492), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""ridge"""', '[node_lagged]'], {}), "('ridge', [node_lagged])\n", (1468, 1492), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((2458, 2495), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""ridge"""'], {'nodes_from': '[]'}), "('ridge', nodes_from=[])\n", (2471, 2495), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((3797, 3826), 'fedot.core.pipelines.node.PrimaryNode', 'PrimaryNode', (['data_source_name'], {}), '(data_source_name)\n', (3808, 3826), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((3857, 3907), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""simple_imputation"""', '[node_primary]'], {}), "('simple_imputation', [node_primary])\n", (3870, 3907), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((4278, 4322), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""ridge"""', '[node_preprocessing]'], {}), "('ridge', [node_preprocessing])\n", (4291, 4322), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((1579, 1631), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""one_hot_encoding"""', '[node_imputation]'], {}), "('one_hot_encoding', [node_imputation])\n", (1592, 1631), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((1669, 1709), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""scaling"""', '[node_encoder]'], {}), "('scaling', [node_encoder])\n", (1682, 1709), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((1765, 1808), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""scaling"""', '[node_imputation]'], {}), "('scaling', [node_imputation])\n", (1778, 1808), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((1902, 1959), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""xgboost"""'], {'nodes_from': '[node_preprocessing]'}), "('xgboost', nodes_from=[node_preprocessing])\n", (1915, 1959), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((3067, 3106), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""xgboost"""'], {'nodes_from': '[]'}), "('xgboost', nodes_from=[])\n", (3080, 3106), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((4024, 4076), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""one_hot_encoding"""', '[node_imputation]'], {}), "('one_hot_encoding', [node_imputation])\n", (4037, 4076), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((4114, 4154), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""scaling"""', '[node_encoder]'], {}), "('scaling', [node_encoder])\n", (4127, 4154), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((4210, 4253), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""scaling"""', '[node_imputation]'], {}), "('scaling', [node_imputation])\n", (4223, 4253), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((2050, 2106), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""xgbreg"""'], {'nodes_from': '[node_preprocessing]'}), "('xgbreg', nodes_from=[node_preprocessing])\n", (2063, 2106), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((2655, 2684), 'fedot.core.pipelines.node.PrimaryNode', 'PrimaryNode', (['data_source_name'], {}), '(data_source_name)\n', (2666, 2684), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((2723, 2773), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""simple_imputation"""', '[node_primary]'], {}), "('simple_imputation', [node_primary])\n", (2736, 2773), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((2808, 2850), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""lagged"""', '[node_imputation]'], {}), "('lagged', [node_imputation])\n", (2821, 2850), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((2883, 2920), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""ridge"""', '[node_lagged]'], {}), "('ridge', [node_lagged])\n", (2896, 2920), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n'), ((3292, 3330), 'fedot.core.pipelines.node.SecondaryNode', 'SecondaryNode', (['"""xgbreg"""'], {'nodes_from': '[]'}), "('xgbreg', nodes_from=[])\n", (3305, 3330), False, 'from fedot.core.pipelines.node import PrimaryNode, SecondaryNode, Node\n')]
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
File: ParkingLot.py
Author: <NAME>(Scott)
Email: <EMAIL>
Copyright: Copyright (c) 2021, Skybility Software Co.,Ltd. All rights reserved.
Description:
"""
from datetime import datetime
from TException import SpotOccupiedError, NoSuitableSpotsError
from Ticket import Ticket
from Utils import CalculateTime
class ParkingLot:
def __init__(self, hourly_rate):
self._levels = []
self._hourly_rate = hourly_rate
def get_available_count(self):
count = 0
for level in self._levels:
count += level.get_available_count()
return count
def park_vehicle(self, vehicle):
try:
spots = self._find_spots_for_vehicle(vehicle)
except NoSuitableSpotsError:
# go to another Lot
raise
for spot in spots:
try:
spot.take_spot()
except SpotOccupiedError:
# need refind spots
raise
ticket = Ticket(spots, vehicle)
return ticket
def _find_spots_for_vehicle(self, vehicle):
# ret = []
for level in self._levels:
spots = level.find_spots_for_vehicle(vehicle)
if spots:
return spots
raise NoSuitableSpotsError('can not find spots for vehicle')
def clear_spot(self, ticket):
spots = ticket.spots()
for spot in spots:
spot.leave_spot()
def calculate_price(self, ticket):
end_time = datetime.now()
start_time = ticket.start_time()
cal_inst = CalculateTime(start_time, end_time)
if cal_inst.in_free_time():
return 0
else:
return self._hourly_rate * cal_inst.get_hours()
| [
"datetime.datetime.now",
"Ticket.Ticket",
"Utils.CalculateTime",
"TException.NoSuitableSpotsError"
] | [((1028, 1050), 'Ticket.Ticket', 'Ticket', (['spots', 'vehicle'], {}), '(spots, vehicle)\n', (1034, 1050), False, 'from Ticket import Ticket\n'), ((1300, 1354), 'TException.NoSuitableSpotsError', 'NoSuitableSpotsError', (['"""can not find spots for vehicle"""'], {}), "('can not find spots for vehicle')\n", (1320, 1354), False, 'from TException import SpotOccupiedError, NoSuitableSpotsError\n'), ((1537, 1551), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1549, 1551), False, 'from datetime import datetime\n'), ((1613, 1648), 'Utils.CalculateTime', 'CalculateTime', (['start_time', 'end_time'], {}), '(start_time, end_time)\n', (1626, 1648), False, 'from Utils import CalculateTime\n')] |
import kaldi_io
import numpy as np
import os
def get_parser():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("w2v_dir", help="wav2vec feature and text directory")
parser.add_argument("tar_root", help="output data directory in kaldi's format")
parser.add_argument("split", help="name of the subset")
parser.add_argument("--label", default="", help="if specified, copy labels too")
return parser
def main():
parser = get_parser()
args = parser.parse_args()
tar_dir = os.path.join(args.tar_root, args.split)
os.makedirs(tar_dir, exist_ok=True)
lengths_path = os.path.join(args.w2v_dir, f"{args.split}.lengths")
with open(lengths_path) as f:
lengths = [int(line.rstrip()) for line in f]
offsets = [0] + np.cumsum(lengths[:-1]).tolist()
feats = np.load(
os.path.join(args.w2v_dir, f"{args.split}.npy"),
mmap_mode="r"
)
assert feats.shape[0] == sum(lengths), \
f"lengths mismatch {feats.shape[0]} != {sum(lengths)}"
ark_path = os.path.join(tar_dir, "feats.ark")
scp_path = os.path.join(tar_dir, "feats.scp")
wspec = f"ark:| copy-feats --compress=true ark:- ark,scp:{ark_path},{scp_path}"
with kaldi_io.open_or_fd(wspec, "wb") as f:
for idx, (offset, length) in enumerate(zip(offsets, lengths)):
feat = feats[offset:offset+length]
kaldi_io.write_mat(f, feat, key=f"utt{idx:010d}")
u2s_path = os.path.join(tar_dir, "utt2spk")
s2u_path = os.path.join(tar_dir, "spk2utt")
with open(u2s_path, "w") as f_u2s, open(s2u_path, "w") as f_s2u:
for idx in range(len(lengths)):
f_u2s.write(f"utt{idx:010d} utt{idx:010d}\n")
f_s2u.write(f"utt{idx:010d} utt{idx:010d}\n")
if bool(args.label):
lab_path = os.path.join(args.w2v_dir, f"{args.split}.{args.label}")
txt_path = os.path.join(tar_dir, "text")
with open(lab_path) as f_lab, open(txt_path, "w") as f_txt:
for idx, line in enumerate(f_lab):
f_txt.write(f"utt{idx:010d} {line}")
if __name__ == "__main__":
main()
| [
"os.makedirs",
"argparse.ArgumentParser",
"os.path.join",
"kaldi_io.open_or_fd",
"numpy.cumsum",
"kaldi_io.write_mat"
] | [((105, 130), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (128, 130), False, 'import argparse\n'), ((552, 591), 'os.path.join', 'os.path.join', (['args.tar_root', 'args.split'], {}), '(args.tar_root, args.split)\n', (564, 591), False, 'import os\n'), ((597, 632), 'os.makedirs', 'os.makedirs', (['tar_dir'], {'exist_ok': '(True)'}), '(tar_dir, exist_ok=True)\n', (608, 632), False, 'import os\n'), ((655, 706), 'os.path.join', 'os.path.join', (['args.w2v_dir', 'f"""{args.split}.lengths"""'], {}), "(args.w2v_dir, f'{args.split}.lengths')\n", (667, 706), False, 'import os\n'), ((1092, 1126), 'os.path.join', 'os.path.join', (['tar_dir', '"""feats.ark"""'], {}), "(tar_dir, 'feats.ark')\n", (1104, 1126), False, 'import os\n'), ((1143, 1177), 'os.path.join', 'os.path.join', (['tar_dir', '"""feats.scp"""'], {}), "(tar_dir, 'feats.scp')\n", (1155, 1177), False, 'import os\n'), ((1513, 1545), 'os.path.join', 'os.path.join', (['tar_dir', '"""utt2spk"""'], {}), "(tar_dir, 'utt2spk')\n", (1525, 1545), False, 'import os\n'), ((1562, 1594), 'os.path.join', 'os.path.join', (['tar_dir', '"""spk2utt"""'], {}), "(tar_dir, 'spk2utt')\n", (1574, 1594), False, 'import os\n'), ((885, 932), 'os.path.join', 'os.path.join', (['args.w2v_dir', 'f"""{args.split}.npy"""'], {}), "(args.w2v_dir, f'{args.split}.npy')\n", (897, 932), False, 'import os\n'), ((1273, 1305), 'kaldi_io.open_or_fd', 'kaldi_io.open_or_fd', (['wspec', '"""wb"""'], {}), "(wspec, 'wb')\n", (1292, 1305), False, 'import kaldi_io\n'), ((1872, 1928), 'os.path.join', 'os.path.join', (['args.w2v_dir', 'f"""{args.split}.{args.label}"""'], {}), "(args.w2v_dir, f'{args.split}.{args.label}')\n", (1884, 1928), False, 'import os\n'), ((1949, 1978), 'os.path.join', 'os.path.join', (['tar_dir', '"""text"""'], {}), "(tar_dir, 'text')\n", (1961, 1978), False, 'import os\n'), ((1445, 1494), 'kaldi_io.write_mat', 'kaldi_io.write_mat', (['f', 'feat'], {'key': 'f"""utt{idx:010d}"""'}), "(f, feat, key=f'utt{idx:010d}')\n", (1463, 1494), False, 'import kaldi_io\n'), ((821, 844), 'numpy.cumsum', 'np.cumsum', (['lengths[:-1]'], {}), '(lengths[:-1])\n', (830, 844), True, 'import numpy as np\n')] |
from datetime import date
from uuid import UUID
import pytest
from app.domain.models.Arkivuttrekk import Arkivuttrekk, ArkivuttrekkStatus, ArkivuttrekkType
from app.domain.models.Depotinstitusjoner import DepotinstitusjonerEnum
from app.routers.dto.Arkivuttrekk import ArkivuttrekkCreate as Arkivuttrekk_dto
@pytest.fixture
def _arkivuttrekk_domain() -> Arkivuttrekk:
return Arkivuttrekk(
id_=None,
obj_id=UUID("df53d1d8-39bf-4fea-a741-58d472664ce2"),
status=ArkivuttrekkStatus.OPPRETTET,
type_=ArkivuttrekkType.NOARK5,
tittel="tittel",
sjekksum_sha256="2afeec307b0573339b3292e27e7971b5b040a5d7e8f7432339cae2fcd0eb936a",
avgiver_navn="<NAME>",
avgiver_epost="<EMAIL>",
koordinator_epost="<EMAIL>",
metadatafil_id=1,
arkiv_startdato=date.fromisoformat("1864-04-10"),
arkiv_sluttdato=date.fromisoformat("1900-05-12"),
storrelse=45620,
avtalenummer="01/12345",
depotinstitusjon=DepotinstitusjonerEnum("ARKIVVERKET"),
opprettet=None,
endret=None
)
@pytest.fixture
def _arkivuttrekk_dto() -> Arkivuttrekk_dto:
return Arkivuttrekk_dto(
obj_id=UUID("df53d1d8-39bf-4fea-a741-58d472664ce2"),
status=ArkivuttrekkStatus.OPPRETTET,
type=ArkivuttrekkType.NOARK5,
tittel="tittel",
sjekksum_sha256="2afeec307b0573339b3292e27e7971b5b040a5d7e8f7432339cae2fcd0eb936a",
avgiver_navn="<NAME>",
avgiver_epost="<EMAIL>",
koordinator_epost="<EMAIL>",
metadatafil_id=1,
arkiv_startdato=date.fromisoformat("1864-04-10"),
arkiv_sluttdato=date.fromisoformat("1900-05-12"),
storrelse=45620,
avtalenummer="01/12345",
depotinstitusjon="ARKIVVERKET",
)
def test_to_domain(_arkivuttrekk_dto, _arkivuttrekk_domain):
"""
GIVEN an object of type ArkivuttrekkBase
WHEN calling the internal method to_domain()
THEN check that returned domain object Arkivuttrekk is correct
"""
expected = _arkivuttrekk_domain
actual = _arkivuttrekk_dto.to_domain()
assert vars(actual) == vars(expected)
| [
"uuid.UUID",
"datetime.date.fromisoformat",
"app.domain.models.Depotinstitusjoner.DepotinstitusjonerEnum"
] | [((430, 474), 'uuid.UUID', 'UUID', (['"""df53d1d8-39bf-4fea-a741-58d472664ce2"""'], {}), "('df53d1d8-39bf-4fea-a741-58d472664ce2')\n", (434, 474), False, 'from uuid import UUID\n'), ((828, 860), 'datetime.date.fromisoformat', 'date.fromisoformat', (['"""1864-04-10"""'], {}), "('1864-04-10')\n", (846, 860), False, 'from datetime import date\n'), ((886, 918), 'datetime.date.fromisoformat', 'date.fromisoformat', (['"""1900-05-12"""'], {}), "('1900-05-12')\n", (904, 918), False, 'from datetime import date\n'), ((1003, 1040), 'app.domain.models.Depotinstitusjoner.DepotinstitusjonerEnum', 'DepotinstitusjonerEnum', (['"""ARKIVVERKET"""'], {}), "('ARKIVVERKET')\n", (1025, 1040), False, 'from app.domain.models.Depotinstitusjoner import DepotinstitusjonerEnum\n'), ((1199, 1243), 'uuid.UUID', 'UUID', (['"""df53d1d8-39bf-4fea-a741-58d472664ce2"""'], {}), "('df53d1d8-39bf-4fea-a741-58d472664ce2')\n", (1203, 1243), False, 'from uuid import UUID\n'), ((1596, 1628), 'datetime.date.fromisoformat', 'date.fromisoformat', (['"""1864-04-10"""'], {}), "('1864-04-10')\n", (1614, 1628), False, 'from datetime import date\n'), ((1654, 1686), 'datetime.date.fromisoformat', 'date.fromisoformat', (['"""1900-05-12"""'], {}), "('1900-05-12')\n", (1672, 1686), False, 'from datetime import date\n')] |
from modern_jokes import modern_jokes
from soviet_jokes import soviet_jokes
import random
import argparse
def soviet_joke():
return random.choice(soviet_jokes)
def modern_joke():
return random.choice(modern_jokes)
def random_joke():
return random.choice(soviet_jokes + modern_jokes)
# TODO: Auto-generated jokes
if __name__ == "__main__":
parser = argparse.ArgumentParser(add_help=True,
description='Displays a funny (or not) USSR/Russian joke (also called anecdote).')
parser.add_argument("-m", "--modern", action="store_true", help="display a modern Russian joke")
parser.add_argument("-s", "--soviet", action="store_true", help="display an old USSR joke")
parser.add_argument("-a", "--any", action="store_true", help="display a USSR/Russian joke (default)")
args = parser.parse_args()
if args.modern:
print(modern_joke())
elif args.soviet:
print(soviet_joke())
else:
print(random_joke())
| [
"random.choice",
"argparse.ArgumentParser"
] | [((138, 165), 'random.choice', 'random.choice', (['soviet_jokes'], {}), '(soviet_jokes)\n', (151, 165), False, 'import random\n'), ((198, 225), 'random.choice', 'random.choice', (['modern_jokes'], {}), '(modern_jokes)\n', (211, 225), False, 'import random\n'), ((258, 300), 'random.choice', 'random.choice', (['(soviet_jokes + modern_jokes)'], {}), '(soviet_jokes + modern_jokes)\n', (271, 300), False, 'import random\n'), ((373, 499), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(True)', 'description': '"""Displays a funny (or not) USSR/Russian joke (also called anecdote)."""'}), "(add_help=True, description=\n 'Displays a funny (or not) USSR/Russian joke (also called anecdote).')\n", (396, 499), False, 'import argparse\n')] |
#!/opt/tljh/user/bin/python
import cgitb; cgitb.enable() # pour débogage
import os
entete_http = "Content-type: text/html; charset=utf-8\n"
# gabarit html ... deux zones d'insertions
html_tpl = """
<!DOCTYPE html>
<head>
<title>cookie</title>
</head>
<body>
<a href="/">retour...</a>
<h2>{message}</h2>
{suppr}
</body>
</html>
"""
# négliger cela pour l'instant
lien_suppression = """
<a href="/" onclick="document.cookie='test=; Max-Age=-99999999;'">oublie moi ...</a>
"""
# récupérons le cookie éventuel
cookie = os.environ["HTTP_COOKIE"]
# l'a-t-on déjà enregistré ?
if "test" in cookie: # oui
message = "Tu es déjà venue par ici toi..."
suppr = lien_suppression
else: # non
# demandons au navigateur d'enregistrer un cookie
entete_http += "Set-Cookie: test=ok\n"
message = "Première visite sur cette page?"
suppr = ""
# nous sommes prêt pour produire la page finale
html = html_tpl.format(
message=message,
suppr=suppr,
)
# envoie de la réponse
print(entete_http)
print(html) | [
"cgitb.enable"
] | [((43, 57), 'cgitb.enable', 'cgitb.enable', ([], {}), '()\n', (55, 57), False, 'import cgitb\n')] |
import attr
from attr import attrib, s
from typing import Tuple, List, Optional, Callable, Mapping, Union, Set
from collections import defaultdict
from ..tensor import Operator
@attr.s(auto_attribs=True)
class GOp:
cost : float
size : Tuple[int]
alias : Tuple[int]
args : Tuple['GTensor']
result : Tuple['GTensor']
name : str
meta : dict
def __attrs_post_init__(self):
assert len(self.size) == len(self.alias) == len(self.result)
for i in range(len(self.size)):
assert self.alias[i] == -1 or self.size[i] == 0
def is_aliasing(self) -> bool:
return any([a >= 0 for a in self.alias])
def all_aliasing(self) -> bool:
return all([a >= 0 for a in self.alias])
def is_tuple(self) -> bool:
return len(self.result) > 1
def __str__(self): return self.name
@staticmethod
def make(g : 'Graph',
args : Tuple['GTensor'],
cost : float,
size : Tuple[int],
alias : Tuple[int],
name : str,
res_names : Tuple[str],
meta : dict,
make_uname : bool = True) -> ('GOp', Tuple['GTensor']):
assert len(size) == len(alias) == len(res_names)
uname = '{}/{}'.format(name, g._next_id()) if make_uname else name
result = tuple([GTensor(None, i, res_names[i], None) for i in range(len(res_names))])
op = GOp(cost, size, alias, args, result, uname, meta)
for r in result:
r.op = op
r.storage_size = r.size() if not r.alias() else r.alias().storage_size
assert r.storage_size is not None
g.add_op(op)
return op, result
GOp.CONST_NAME = 'constant'
@attr.s(auto_attribs=True)
class GTensor:
op : 'GOp'
index : int
name : str
storage_size : int
meta : dict = attrib(factory=dict)
def size(self) -> int:
return self.op.size[self.index]
def alias(self) -> Optional['GTensor']:
a = self.op.alias[self.index]
return self.op.args[a] if a >= 0 else None
def __str__(self): return self.name
@attr.s(auto_attribs=True)
class GCompute:
op : 'GOp'
def __str__(self):
return '({},)=Compute({})'.format(
','.join([r.name for r in self.op.result]),
self.op.name
)
@attr.s(auto_attribs=True)
class GGet:
tensor : 'GTensor'
pin : bool
def __str__(self):
op = 'Pin' if self.pin else 'Get'
return '{}({})'.format(op, self.tensor.name)
@attr.s(auto_attribs=True)
class GRelease:
tensor : 'GTensor'
def __str__(self):
return 'Release({})'.format(self.tensor.name)
class Graph:
def __init__(self):
self._id : int = 0
self.schedule : List[Union['GCompute', 'GGet', 'GRelease']] = []
self.ops : Mapping[str, 'GOp'] = {}
self.fwd_ops : Mapping[str, 'GOp'] = {}
self.bwd_ops : Mapping[str, 'GOp'] = {}
self.tensors : Mapping[str, 'GTensor'] = {}
self.op_children : Mapping[str, Set[str]] = defaultdict(set)
self.op_parents : Mapping[str, Set[str]] = defaultdict(set)
self.meta = {
'compute': 0
}
def _next_id(self) -> int:
i = self._id
self._id += 1
return i
def add_op(self, op : 'GOp') -> None:
assert op.name not in self.ops
self.ops[op.name] = op
if op.meta.get('bwd', False):
self.bwd_ops[op.name] = op
else:
self.fwd_ops[op.name] = op
for ti in op.args:
assert ti.name in self.tensors
op_parents = set([ti.op.name for ti in op.args])
for ps in op_parents:
self.op_children[ps].add(op.name)
self.op_parents[op.name] = op_parents
for to in op.result:
assert to.name not in self.tensors
self.tensors[to.name] = to
self.meta['compute'] += op.cost
# returns op names, not ops
def ops_topological(self) -> List[str]:
visited = {v : False for v in self.ops}
stack = []
def visit(v):
visited[v] = True
for u in self.op_children[v]:
if not visited[u]:
visit(u)
stack.insert(0, v)
for v in self.ops:
if not visited[v]:
visit(v)
return stack
def get_closure(self) -> Callable[['Runtime'], None]:
def f(rt):
tensor_map = {}
for cmd in self.schedule:
if isinstance(cmd, GCompute):
# TODO: add a rematerialize cmd? this assumes once-compute only
for x in cmd.op.args:
assert x.name in tensor_map
args = [tensor_map[x.name] for x in cmd.op.args]
rt_op = Operator(
cmd.op.cost,
cmd.op.size,
cmd.op.alias,
cmd.op.name
)
res = rt.compute(args, rt_op, names=tuple([o.name for o in cmd.op.result]))
for i, r in enumerate(res):
assert cmd.op.result[i].name not in tensor_map
tensor_map[cmd.op.result[i].name] = r
elif isinstance(cmd, GGet):
assert cmd.tensor.name in tensor_map
t = tensor_map[cmd.tensor.name]
if cmd.pin:
if not t.defined:
rt.rematerialize(t)
assert t.defined
rt.pin(t)
else:
rt.get(t)
elif isinstance(cmd, GRelease):
assert cmd.tensor.name in tensor_map
rt.release(tensor_map[cmd.tensor.name])
return f
def rewrite_collapse_aliases(g : 'Graph') -> 'Graph':
g_r = Graph()
g_r.meta = g.meta.copy()
g_r.meta['compute'] = 0
ops_topological = g.ops_topological()
# maps old -> new
tensor_map : Mapping[str, 'GTensor'] = {}
op_map : Mapping[str, 'GOp'] = {}
for op_name in ops_topological:
op = g.ops[op_name]
if op.is_aliasing():
if not op.all_aliasing():
raise RuntimeError(
'cannot collapse aliases, {} is not all aliasing'
.format(op)
)
for r in op.result:
tensor_map[r.name] = tensor_map[r.alias().name]
else:
# keep operator
args = [tensor_map[x.name] for x in op.args]
op_new, res = GOp.make(
g_r, args, op.cost, op.size, op.alias,
op.name, tuple([o.name for o in op.result]), op.meta,
make_uname=False
)
for r in res:
tensor_map[r.name] = r
op_map[op.name] = op_new
# rewrite schedule
for cmd in g.schedule:
if isinstance(cmd, GCompute):
if cmd.op.name in op_map:
g_r.schedule.append(GCompute(op_map[cmd.op.name]))
else:
# aliasing op; increase refcount
for r in cmd.op.result:
g_r.schedule.append(GGet(tensor_map[r.name], pin=False))
elif isinstance(cmd, GGet):
g_r.schedule.append(GGet(tensor_map[cmd.tensor.name], pin=cmd.pin))
elif isinstance(cmd, GRelease):
g_r.schedule.append(GRelease(tensor_map[cmd.tensor.name]))
g_r.meta['no_aliases'] = True
g_r.meta['tensor_map'] = {old: new.name for old, new in tensor_map.items()}
g_r.meta['op_map'] = {old: new.name for old, new in op_map.items()}
return g_r
def rewrite_merge_tuples(g : 'Graph') -> 'Graph':
g_r = Graph()
g_r.meta = g.meta.copy()
g_r.meta['compute'] = 0
ops_topological = g.ops_topological()
# maps old -> new
tensor_map : Mapping[str, 'GTensor'] = {}
op_map : Mapping[str, 'GOp'] = {}
for op_name in ops_topological:
op = g.ops[op_name]
assert not op.is_aliasing()
if op.is_tuple():
args = tuple([tensor_map[x.name] for x in op.args])
op_new, res = GOp.make(
g_r, args, op.cost, (sum(op.size),), (-1,),
op.name, ('+'.join([o.name for o in op.result]),), op.meta,
make_uname=False
)
for r in op.result:
tensor_map[r.name] = res[0]
op_map[op.name] = op_new
else:
# keep
args = [tensor_map[x.name] for x in op.args]
op_new, res = GOp.make(
g_r, args, op.cost, op.size, op.alias,
op.name, (op.result[0].name,), op.meta,
make_uname=False
)
tensor_map[res[0].name] = res[0]
op_map[op.name] = op_new
for cmd in g.schedule:
if isinstance(cmd, GCompute):
op_new = op_map[cmd.op.name]
g_r.schedule.append(GCompute(op_new))
# need to get more refs for each missing tuple output
for _ in range(len(cmd.op.result) - 1):
g_r.schedule.append(GGet(op_new.result[0], pin=False))
elif isinstance(cmd, GGet):
g_r.schedule.append(GGet(tensor_map[cmd.tensor.name], pin=cmd.pin))
elif isinstance(cmd, GRelease):
g_r.schedule.append(GRelease(tensor_map[cmd.tensor.name]))
g_r.meta['no_tuples'] = True
g_r.meta['tensor_map'] = {old: new.name for old, new in tensor_map.items()}
g_r.meta['op_map'] = {old: new.name for old, new in op_map.items()}
return g_r
def rewrite_constant_elim(g : 'Graph') -> 'Graph':
if not g.meta.get('no_aliases', False):
raise RuntimeError('cannot eliminate constants, input graph may have aliases')
g_r = Graph()
g_r.meta = g.meta.copy()
compute_pre = g_r.meta['compute']
g_r.meta['compute'] = 0
g_r.meta['constant_ram'] = 0
ops_topological = g.ops_topological()
# maps old -> new
tensor_map : Mapping[str, 'GTensor'] = {}
op_map : Mapping[str, 'GOp'] = {}
for op_name in ops_topological:
op = g.ops[op_name]
if op_name.split('/')[0] == GOp.CONST_NAME:
args = [tensor_map[x.name] for x in op.args]
assert len(args) == 0
g_r.meta['constant_ram'] += sum(op.size)
else:
# keep operator
args = [tensor_map[x.name] for x in op.args if x.name in tensor_map]
op_new, res = GOp.make(
g_r, args, op.cost, op.size, op.alias,
op.name, tuple([o.name for o in op.result]), op.meta,
make_uname=False
)
for r in res:
tensor_map[r.name] = r
op_map[op.name] = op_new
for cmd in g.schedule:
if isinstance(cmd, GCompute):
if cmd.op.name in op_map:
op_new = op_map[cmd.op.name]
g_r.schedule.append(GCompute(op_new))
elif isinstance(cmd, GGet):
if cmd.tensor.name in tensor_map:
g_r.schedule.append(GGet(tensor_map[cmd.tensor.name], pin=cmd.pin))
elif isinstance(cmd, GRelease):
if cmd.tensor.name in tensor_map:
g_r.schedule.append(GRelease(tensor_map[cmd.tensor.name]))
g_r.meta['no_constants'] = True
g_r.meta['tensor_map'] = {old: new.name for old, new in tensor_map.items()}
g_r.meta['op_map'] = {old: new.name for old, new in op_map.items()}
assert compute_pre == g_r.meta['compute']
return g_r
def rewrite_checkmate(g : 'Graph') -> 'Graph':
g_r = rewrite_collapse_aliases(g)
g_r = rewrite_merge_tuples(g_r)
g_r = rewrite_constant_elim(g_r)
return g_r
| [
"attr.attrib",
"attr.s",
"collections.defaultdict"
] | [((180, 205), 'attr.s', 'attr.s', ([], {'auto_attribs': '(True)'}), '(auto_attribs=True)\n', (186, 205), False, 'import attr\n'), ((1632, 1657), 'attr.s', 'attr.s', ([], {'auto_attribs': '(True)'}), '(auto_attribs=True)\n', (1638, 1657), False, 'import attr\n'), ((1998, 2023), 'attr.s', 'attr.s', ([], {'auto_attribs': '(True)'}), '(auto_attribs=True)\n', (2004, 2023), False, 'import attr\n'), ((2192, 2217), 'attr.s', 'attr.s', ([], {'auto_attribs': '(True)'}), '(auto_attribs=True)\n', (2198, 2217), False, 'import attr\n'), ((2375, 2400), 'attr.s', 'attr.s', ([], {'auto_attribs': '(True)'}), '(auto_attribs=True)\n', (2381, 2400), False, 'import attr\n'), ((1750, 1770), 'attr.attrib', 'attrib', ([], {'factory': 'dict'}), '(factory=dict)\n', (1756, 1770), False, 'from attr import attrib, s\n'), ((2870, 2886), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (2881, 2886), False, 'from collections import defaultdict\n'), ((2935, 2951), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (2946, 2951), False, 'from collections import defaultdict\n')] |
from __future__ import annotations
from typing import Generic, TypeVar, Optional, Any, Dict
from collections.abc import Mapping
from ..base_schema import Schema, SchemaAnything
from .DOMElement import DOMElement
from .DOMObject import DOMObject
from . import DOMInfo
from .DOMProperties import DOMProperties
T_co = TypeVar("T_co")
class DOMDict(DOMObject, Generic[T_co]):
"""
An object with dynamic properties (corresponding to a Python dict).
"""
def __init__(
self,
value: Optional[Mapping[str, Any]] = None,
json_dom_info: Optional[DOMInfo] = None,
item_type: Optional[Schema] = None,
) -> None:
"""
:param value: A dict (or any :class:`collections.abc.Mapping`) containing the data to populate this
object's properties.
:param json_dom_info: A :class:`~wysdom.dom.DOMInfo` named tuple containing information about this object's
position in the DOM.
:param item_type: A :class:`~wysdom.Schema` object specifying what constitutes a valid property
of this object.
"""
self.__json_schema_properties__ = DOMProperties(
additional_properties=(item_type or SchemaAnything())
)
super().__init__(value or {}, json_dom_info)
def __getitem__(self, key: str) -> T_co:
return super().__getitem__(key)
def __deepcopy__(self, memo: Dict[int, DOMElement]) -> DOMDict:
cls = self.__class__
result = cls(
value=self.to_builtin(),
json_dom_info=self.__json_dom_info__,
            item_type=self.__json_schema_properties__.additional_properties,
)
memo[id(self)] = result
return result
| [
"typing.TypeVar"
] | [((319, 334), 'typing.TypeVar', 'TypeVar', (['"""T_co"""'], {}), "('T_co')\n", (326, 334), False, 'from typing import Generic, TypeVar, Optional, Any, Dict\n')] |
import dataclasses
import typing
from dataclasses import dataclass
from typing import List
from typing import Optional
from profiles.settings import DENOM_DKEY, VALUE_DKEY, GEOG_DKEY, TIME_DKEY
if typing.TYPE_CHECKING:
from indicators.models import CensusVariable, CKANVariable
@dataclass
class Datum:
variable: str
geog: str
time: str
value: Optional[float] = None
moe: Optional[float] = None
percent: Optional[float] = None
denom: Optional[float] = None
@staticmethod
def from_census_response_datum(variable: 'CensusVariable', census_datum) -> 'Datum':
return Datum(
variable=variable.slug,
geog=census_datum.get('geog'),
time=census_datum.get('time'),
value=census_datum.get('value'),
moe=census_datum.get('moe'),
denom=census_datum.get('denom'),
percent=census_datum.get('percent'), )
@staticmethod
def from_census_response_data(variable: 'CensusVariable', census_data: list[dict]) -> List['Datum']:
return [Datum.from_census_response_datum(variable, census_datum) for census_datum in census_data]
@staticmethod
def from_ckan_response_datum(variable: 'CKANVariable', ckan_datum) -> 'Datum':
denom, percent = None, None
if DENOM_DKEY in ckan_datum:
denom = ckan_datum[DENOM_DKEY]
percent = (ckan_datum[VALUE_DKEY] / ckan_datum[DENOM_DKEY])
return Datum(variable=variable.slug,
geog=ckan_datum[GEOG_DKEY],
time=ckan_datum[TIME_DKEY],
value=ckan_datum[VALUE_DKEY],
denom=denom,
percent=percent)
@staticmethod
def from_ckan_response_data(variable: 'CKANVariable', ckan_data: list[dict]) -> List['Datum']:
return [Datum.from_ckan_response_datum(variable, ckan_datum) for ckan_datum in ckan_data]
def update(self, **kwargs):
""" Creates new Datum similar to the instance with new values from kwargs """
return Datum(**{**self.as_dict(), **kwargs})
def with_denom_val(self, denom_val: Optional[float]):
""" Merge the denom value and generate the percent """
return dataclasses.replace(self, denom=denom_val, percent=(self.value / denom_val))
def as_dict(self):
return {'variable': self.variable, 'geog': self.geog, 'time': self.time,
'value': self.value, 'moe': self.moe, 'percent': self.percent, 'denom': self.denom}
def as_value_dict(self):
return {'value': self.value, 'moe': self.moe, 'percent': self.percent, 'denom': self.denom}
| [
"dataclasses.replace"
] | [((2238, 2312), 'dataclasses.replace', 'dataclasses.replace', (['self'], {'denom': 'denom_val', 'percent': '(self.value / denom_val)'}), '(self, denom=denom_val, percent=self.value / denom_val)\n', (2257, 2312), False, 'import dataclasses\n')] |
import os
import re
import logging
import sqlite3
import json
import threading
from picaapi import PicaApi
from urllib import parse
from multiprocessing.pool import ThreadPool
class PicaAction:
def __init__(self, account, password,
proxies=None, threadn=5,
data_path=os.path.join(os.path.split(__file__)[0], "data"),
db_path=os.path.join(os.path.split(__file__)[0], "data", "data.db"),
global_url="https://picaapi.picacomic.com/",
api_key="C69BAF41DA5ABD1FFEDC6D2FEA56B",
secret_key="~<KEY>"):
logging.info("PicaAction启动中......")
self.picaapi = PicaApi(proxies=proxies,
global_url=global_url,
api_key=api_key,
secret_key=secret_key)
self.download_path = data_path
if not os.path.exists(data_path):
os.makedirs(data_path)
self.db = sqlite3.connect(db_path)
self.__login(account, password)
self.account = account
self.threadn = threadn
def __ExecuteSQL(self, sql, args=None):
cur = self.db.cursor()
if args == None:
logging.info("Executing in DB: %s" % sql)
__res = cur.execute(sql).fetchall()
else:
logging.info("Executing in DB: %s,%s" % (sql, str(args)))
__res = cur.execute(sql, args).fetchall()
self.db.commit()
return __res
def __login(self, account, password):
logging.info("%s 登录......" % account)
_ = self.__ExecuteSQL(
"create table if not exists account (email text PRIMARY KEY NOT NULL, password text, token text);")
logging.info("从数据库中查找 %s 的token......" % account)
token = self.__ExecuteSQL("select token from account where email=?;",
(account,))
def gettoken():
logging.info("为 %s 获取新的token......" % account)
token = self.picaapi.login(account, password)
logging.info("%s 的新token已获取: %s" % (account, token))
return token
if len(token) > 0:
token = token[0][0]
logging.info("数据库中有 %s 的token: %s" % (account, token))
self.picaapi.set_authorization(token)
logging.info("测试数据库中 %s 的token是否有效......" % account)
profile = self.picaapi.profile()
if profile["code"] == 200:
logging.info("数据库中 %s 的token有效" % account)
else:
logging.info("数据库中 %s 的token失效" % account)
token = gettoken()
self.__ExecuteSQL("update account set token=? where email=?;",
(token, account))
else:
logging.info("数据库中没有 %s 的token" % account)
token = gettoken()
self.__ExecuteSQL("insert into account (email, password, token)values (?, ?, ?);",
(account, password, token))
self.picaapi.set_authorization(token)
def __travel_favourites_ol(self, limit=None, order="dd"):
def islimited():
nonlocal limit
if limit != None:
limit -= 1
if limit <= 0:
return True
return False
pages = self.picaapi.favourite(1, order=order)['pages']
if pages < 2:
return
for i in range(1, pages+1):
docs = self.picaapi.favourite(i, order=order)['docs']
for favourite in docs:
yield favourite
if islimited():
return
def __travel_favourites_db(self, limit=None):
favourites = []
if limit != None:
favourites = self.__ExecuteSQL(
"select * from comics limit %d;" % limit)
else:
favourites = self.__ExecuteSQL("select * from comics;")
for favourite in favourites:
data = json.loads(favourite[1])
detail = json.loads(favourite[2])
yield data, detail
def gather_favourites_ol(self, n=None, order="dd"):
favourites = []
for favourite in self.__travel_favourites_ol(limit=n, order=order):
favourites.append(favourite)
return favourites
def gather_favourites_db(self, n=None):
favourites = []
details = []
for favourite, detail in self.__travel_favourites_db(limit=n):
favourites.append(favourite)
details.append(detail)
return favourites, details
def __insert_favourite(self, favourite):
detail = self.picaapi.comics(favourite["_id"])
if detail == None:
return
_ = self.__ExecuteSQL("insert or REPLACE into comics (id, data, detail)values(?, ?, ?);",
(favourite["_id"], json.dumps(favourite), json.dumps(detail)))
_ = self.__ExecuteSQL("insert or REPLACE into favourites (id, user)values(?, ?);",
(favourite["_id"], self.account))
def init_favourites(self, n=None):
logging.info("初始化 %s 的收藏列表......" % self.account)
_ = self.__ExecuteSQL(
"create table if not exists comics (id text PRIMARY KEY NOT NULL, data json, detail json);")
_ = self.__ExecuteSQL(
"create table if not exists favourites (id text, user text, PRIMARY KEY(id, user)," +
"FOREIGN KEY(id) REFERENCES comics(id)," +
"FOREIGN KEY(user) REFERENCES account(email));")
for favourite in self.__travel_favourites_ol(limit=n, order="da"):
self.__insert_favourite(favourite)
logging.info("%s 的收藏列表初始化完成" % self.account)
def append_favourites(self, n=None):
logging.info("将 %s 的收藏列表中的新增收藏写入数据库......" % self.account)
for favourite in self.__travel_favourites_ol(limit=n):
fs = self.__ExecuteSQL("select * from favourites where id=? and user=?;",
(favourite["_id"], self.account))
if len(fs) > 0:
logging.info("%s 的收藏 %s 已入数据库......" %
(self.account, favourite["_id"]))
if n == None:
break
else:
logging.info("设置了更新收藏的数量为 %d,继续......" % n)
else:
logging.info("%s 的收藏 %s 未入数据库......" %
(self.account, favourite["_id"]))
self.__insert_favourite(favourite)
logging.info("%s 的收藏列表中的新增收藏已写入数据库" % self.account)
def update_finish_status(self):
logging.info("更新数据库中已有收藏的finish状态......")
for favourite, _ in self.__travel_favourites_db():
if not favourite["finished"]:
self.__insert_favourite(favourite)
logging.info("数据库中已有收藏的finish状态已更新......")
def __travel_episodes_ol(self, id):
data = self.picaapi.eps(id, 1)
if data is None:
return
pages, epss = data["pages"], data["docs"]
for eps in epss:
yield eps
if pages < 2:
return
        for i in range(2, pages+1):
epss = self.picaapi.eps(id, i)['docs']
for eps in epss:
yield eps
def init_episode(self, id):
logging.info("初始化漫画%s的分话列表......" % id)
for eps in self.__travel_episodes_ol(id):
_ = self.__ExecuteSQL("insert or REPLACE into episodes (id, data, comic)values(?, ?, ?);",
(eps["_id"], json.dumps(eps), id))
logging.info("漫画%s的分话列表初始化完成" % id)
def init_episodes(self):
logging.info("初始化系统内所有漫画的分话列表......")
_ = self.__ExecuteSQL(
"create table if not exists episodes (id text PRIMARY KEY NOT NULL, data json, comic text," +
"FOREIGN KEY(id) REFERENCES comics(id));")
for favourite, _ in self.__travel_favourites_db():
self.init_episode(favourite["_id"])
logging.info("系统内所有漫画的分话列表初始化完成")
def update_episodes(self):
logging.info("更新系统内所有未完成漫画的分话列表......")
for favourite, _ in self.__travel_favourites_db():
if not favourite["finished"]:
self.init_episode(favourite["_id"])
logging.info("系统内所有未完成漫画的分话列表初始化完成")
def append_download_status(self):
logging.info("为系统内新增的分话添加下载状态记录......")
_ = self.__ExecuteSQL(
"create table if not exists status (id text PRIMARY KEY NOT NULL, finished bool," +
"FOREIGN KEY(id) REFERENCES episodes(id));")
_ = self.__ExecuteSQL(
"insert into status(id, finished) select id,FALSE from" +
"(select * from episodes left outer join status on status.id=episodes.id)" +
"where finished IS NULL;")
logging.info("已为系统内新增的分话添加下载状态记录")
def reset_download_status(self):
logging.info("重置系统内的分话下载状态记录......")
_ = self.__ExecuteSQL(
"insert or REPLACE into status(id, finished) select id,FALSE from episodes;")
logging.info("系统内的分话下载状态记录已重置")
def __travel_img(self, comic, order):
data = self.picaapi.pages(comic, order, 1)
pages, docs = data['pages'], data["docs"]
for img in docs:
yield img
if pages < 2:
return
for i in range(2, pages+1):
docs = self.picaapi.pages(comic, order, i)['docs']
for img in docs:
yield img
def __download(self, comic, eps):
order = eps["order"]
logging.info("开始下载漫画%s的分话%s" % (comic["_id"], eps["_id"]))
threadpool = ThreadPool(processes=self.threadn)
for data in self.__travel_img(comic["_id"], order):
media = data["media"]
url = parse.urljoin(media["fileServer"],
"static/"+media["path"])
path = None
def cor_dirname(dn):
dn = re.sub('[\/:*?"<>|]', '', dn)
dn = dn.strip()
return dn
author = 'null'
if 'author' in comic:
author = cor_dirname(comic['author'])
ctitle = cor_dirname(comic['title'])
etitle = cor_dirname(eps['title'])
if comic['finished'] and comic['epsCount'] <= 1:
path = os.path.join(self.download_path,
author, ctitle,
media['originalName'])
else:
path = os.path.join(self.download_path,
author, ctitle, etitle,
media['originalName'])
threadpool.apply_async(self.picaapi.download, (url, path,))
threadpool.close()
threadpool.join()
_ = self.__ExecuteSQL("update status set finished=true where id=?;",
(eps["_id"],))
logging.info("漫画%s的分话%s下载完成" % (comic["_id"], eps["_id"]))
def download_all(self):
episodes = self.__ExecuteSQL(
"select episodes.data, comics.data from episodes inner join status on status.id=episodes.id and status.finished=false inner join comics on episodes.comic=comics.id;")
n = len(episodes)
logging.info("开始下载系统内所有未完成分话(共%d个)" % n)
for eps_data, comic_data in episodes:
eps_data, comic_data = json.loads(eps_data), json.loads(comic_data)
self.__download(comic_data, eps_data)
n -= 1
logging.info("系统内未完成分话还有%d个" % n)
logging.info("系统内所有分话下载完成")
| [
"os.path.exists",
"json.loads",
"sqlite3.connect",
"os.makedirs",
"json.dumps",
"os.path.join",
"os.path.split",
"multiprocessing.pool.ThreadPool",
"urllib.parse.urljoin",
"picaapi.PicaApi",
"re.sub",
"logging.info"
] | [((610, 645), 'logging.info', 'logging.info', (['"""PicaAction启动中......"""'], {}), "('PicaAction启动中......')\n", (622, 645), False, 'import logging\n'), ((669, 761), 'picaapi.PicaApi', 'PicaApi', ([], {'proxies': 'proxies', 'global_url': 'global_url', 'api_key': 'api_key', 'secret_key': 'secret_key'}), '(proxies=proxies, global_url=global_url, api_key=api_key, secret_key\n =secret_key)\n', (676, 761), False, 'from picaapi import PicaApi\n'), ((984, 1008), 'sqlite3.connect', 'sqlite3.connect', (['db_path'], {}), '(db_path)\n', (999, 1008), False, 'import sqlite3\n'), ((1549, 1586), 'logging.info', 'logging.info', (["('%s 登录......' % account)"], {}), "('%s 登录......' % account)\n", (1561, 1586), False, 'import logging\n'), ((1738, 1787), 'logging.info', 'logging.info', (["('从数据库中查找 %s 的token......' % account)"], {}), "('从数据库中查找 %s 的token......' % account)\n", (1750, 1787), False, 'import logging\n'), ((5144, 5193), 'logging.info', 'logging.info', (["('初始化 %s 的收藏列表......' % self.account)"], {}), "('初始化 %s 的收藏列表......' % self.account)\n", (5156, 5193), False, 'import logging\n'), ((5705, 5749), 'logging.info', 'logging.info', (["('%s 的收藏列表初始化完成' % self.account)"], {}), "('%s 的收藏列表初始化完成' % self.account)\n", (5717, 5749), False, 'import logging\n'), ((5800, 5858), 'logging.info', 'logging.info', (["('将 %s 的收藏列表中的新增收藏写入数据库......' % self.account)"], {}), "('将 %s 的收藏列表中的新增收藏写入数据库......' % self.account)\n", (5812, 5858), False, 'import logging\n'), ((6556, 6607), 'logging.info', 'logging.info', (["('%s 的收藏列表中的新增收藏已写入数据库' % self.account)"], {}), "('%s 的收藏列表中的新增收藏已写入数据库' % self.account)\n", (6568, 6607), False, 'import logging\n'), ((6653, 6694), 'logging.info', 'logging.info', (['"""更新数据库中已有收藏的finish状态......"""'], {}), "('更新数据库中已有收藏的finish状态......')\n", (6665, 6694), False, 'import logging\n'), ((6855, 6897), 'logging.info', 'logging.info', (['"""数据库中已有收藏的finish状态已更新......"""'], {}), "('数据库中已有收藏的finish状态已更新......')\n", (6867, 6897), False, 'import logging\n'), ((7338, 7377), 'logging.info', 'logging.info', (["('初始化漫画%s的分话列表......' % id)"], {}), "('初始化漫画%s的分话列表......' % id)\n", (7350, 7377), False, 'import logging\n'), ((7608, 7643), 'logging.info', 'logging.info', (["('漫画%s的分话列表初始化完成' % id)"], {}), "('漫画%s的分话列表初始化完成' % id)\n", (7620, 7643), False, 'import logging\n'), ((7682, 7719), 'logging.info', 'logging.info', (['"""初始化系统内所有漫画的分话列表......"""'], {}), "('初始化系统内所有漫画的分话列表......')\n", (7694, 7719), False, 'import logging\n'), ((8027, 8060), 'logging.info', 'logging.info', (['"""系统内所有漫画的分话列表初始化完成"""'], {}), "('系统内所有漫画的分话列表初始化完成')\n", (8039, 8060), False, 'import logging\n'), ((8101, 8140), 'logging.info', 'logging.info', (['"""更新系统内所有未完成漫画的分话列表......"""'], {}), "('更新系统内所有未完成漫画的分话列表......')\n", (8113, 8140), False, 'import logging\n'), ((8302, 8338), 'logging.info', 'logging.info', (['"""系统内所有未完成漫画的分话列表初始化完成"""'], {}), "('系统内所有未完成漫画的分话列表初始化完成')\n", (8314, 8338), False, 'import logging\n'), ((8386, 8425), 'logging.info', 'logging.info', (['"""为系统内新增的分话添加下载状态记录......"""'], {}), "('为系统内新增的分话添加下载状态记录......')\n", (8398, 8425), False, 'import logging\n'), ((8847, 8881), 'logging.info', 'logging.info', (['"""已为系统内新增的分话添加下载状态记录"""'], {}), "('已为系统内新增的分话添加下载状态记录')\n", (8859, 8881), False, 'import logging\n'), ((8928, 8964), 'logging.info', 'logging.info', (['"""重置系统内的分话下载状态记录......"""'], {}), "('重置系统内的分话下载状态记录......')\n", (8940, 8964), False, 'import logging\n'), ((9094, 9125), 'logging.info', 'logging.info', (['"""系统内的分话下载状态记录已重置"""'], {}), "('系统内的分话下载状态记录已重置')\n", (9106, 9125), False, 'import logging\n'), ((9588, 9646), 'logging.info', 'logging.info', (["('开始下载漫画%s的分话%s' % (comic['_id'], eps['_id']))"], {}), "('开始下载漫画%s的分话%s' % (comic['_id'], eps['_id']))\n", (9600, 9646), False, 'import logging\n'), ((9668, 9702), 'multiprocessing.pool.ThreadPool', 'ThreadPool', ([], {'processes': 'self.threadn'}), '(processes=self.threadn)\n', (9678, 9702), False, 'from multiprocessing.pool import ThreadPool\n'), ((10962, 11020), 'logging.info', 'logging.info', (["('漫画%s的分话%s下载完成' % (comic['_id'], eps['_id']))"], {}), "('漫画%s的分话%s下载完成' % (comic['_id'], eps['_id']))\n", (10974, 11020), False, 'import logging\n'), ((11301, 11341), 'logging.info', 'logging.info', (["('开始下载系统内所有未完成分话(共%d个)' % n)"], {}), "('开始下载系统内所有未完成分话(共%d个)' % n)\n", (11313, 11341), False, 'import logging\n'), ((11591, 11618), 'logging.info', 'logging.info', (['"""系统内所有分话下载完成"""'], {}), "('系统内所有分话下载完成')\n", (11603, 11618), False, 'import logging\n'), ((904, 929), 'os.path.exists', 'os.path.exists', (['data_path'], {}), '(data_path)\n', (918, 929), False, 'import os\n'), ((943, 965), 'os.makedirs', 'os.makedirs', (['data_path'], {}), '(data_path)\n', (954, 965), False, 'import os\n'), ((1224, 1265), 'logging.info', 'logging.info', (["('Executing in DB: %s' % sql)"], {}), "('Executing in DB: %s' % sql)\n", (1236, 1265), False, 'import logging\n'), ((1949, 1995), 'logging.info', 'logging.info', (["('为 %s 获取新的token......' % account)"], {}), "('为 %s 获取新的token......' % account)\n", (1961, 1995), False, 'import logging\n'), ((2066, 2118), 'logging.info', 'logging.info', (["('%s 的新token已获取: %s' % (account, token))"], {}), "('%s 的新token已获取: %s' % (account, token))\n", (2078, 2118), False, 'import logging\n'), ((2216, 2270), 'logging.info', 'logging.info', (["('数据库中有 %s 的token: %s' % (account, token))"], {}), "('数据库中有 %s 的token: %s' % (account, token))\n", (2228, 2270), False, 'import logging\n'), ((2333, 2385), 'logging.info', 'logging.info', (["('测试数据库中 %s 的token是否有效......' % account)"], {}), "('测试数据库中 %s 的token是否有效......' % account)\n", (2345, 2385), False, 'import logging\n'), ((2798, 2840), 'logging.info', 'logging.info', (["('数据库中没有 %s 的token' % account)"], {}), "('数据库中没有 %s 的token' % account)\n", (2810, 2840), False, 'import logging\n'), ((4005, 4029), 'json.loads', 'json.loads', (['favourite[1]'], {}), '(favourite[1])\n', (4015, 4029), False, 'import json\n'), ((4051, 4075), 'json.loads', 'json.loads', (['favourite[2]'], {}), '(favourite[2])\n', (4061, 4075), False, 'import json\n'), ((9815, 9876), 'urllib.parse.urljoin', 'parse.urljoin', (["media['fileServer']", "('static/' + media['path'])"], {}), "(media['fileServer'], 'static/' + media['path'])\n", (9828, 9876), False, 'from urllib import parse\n'), ((11549, 11582), 'logging.info', 'logging.info', (["('系统内未完成分话还有%d个' % n)"], {}), "('系统内未完成分话还有%d个' % n)\n", (11561, 11582), False, 'import logging\n'), ((320, 343), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (333, 343), False, 'import os\n'), ((395, 418), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (408, 418), False, 'import os\n'), ((2486, 2528), 'logging.info', 'logging.info', (["('数据库中 %s 的token有效' % account)"], {}), "('数据库中 %s 的token有效' % account)\n", (2498, 2528), False, 'import logging\n'), ((2563, 2605), 'logging.info', 'logging.info', (["('数据库中 %s 的token失效' % account)"], {}), "('数据库中 %s 的token失效' % account)\n", (2575, 2605), False, 'import logging\n'), ((4897, 4918), 'json.dumps', 'json.dumps', (['favourite'], {}), '(favourite)\n', (4907, 4918), False, 'import json\n'), ((4920, 4938), 'json.dumps', 'json.dumps', (['detail'], {}), '(detail)\n', (4930, 4938), False, 'import json\n'), ((6121, 6193), 'logging.info', 'logging.info', (["('%s 的收藏 %s 已入数据库......' % (self.account, favourite['_id']))"], {}), "('%s 的收藏 %s 已入数据库......' % (self.account, favourite['_id']))\n", (6133, 6193), False, 'import logging\n'), ((6399, 6471), 'logging.info', 'logging.info', (["('%s 的收藏 %s 未入数据库......' % (self.account, favourite['_id']))"], {}), "('%s 的收藏 %s 未入数据库......' % (self.account, favourite['_id']))\n", (6411, 6471), False, 'import logging\n'), ((9986, 10016), 're.sub', 're.sub', (['"""[\\\\/:*?"<>|]"""', '""""""', 'dn'], {}), '(\'[\\\\/:*?"<>|]\', \'\', dn)\n', (9992, 10016), False, 'import re\n'), ((10370, 10441), 'os.path.join', 'os.path.join', (['self.download_path', 'author', 'ctitle', "media['originalName']"], {}), "(self.download_path, author, ctitle, media['originalName'])\n", (10382, 10441), False, 'import os\n'), ((10555, 10634), 'os.path.join', 'os.path.join', (['self.download_path', 'author', 'ctitle', 'etitle', "media['originalName']"], {}), "(self.download_path, author, ctitle, etitle, media['originalName'])\n", (10567, 10634), False, 'import os\n'), ((11423, 11443), 'json.loads', 'json.loads', (['eps_data'], {}), '(eps_data)\n', (11433, 11443), False, 'import json\n'), ((11445, 11467), 'json.loads', 'json.loads', (['comic_data'], {}), '(comic_data)\n', (11455, 11467), False, 'import json\n'), ((6321, 6364), 'logging.info', 'logging.info', (["('设置了更新收藏的数量为 %d,继续......' % n)"], {}), "('设置了更新收藏的数量为 %d,继续......' % n)\n", (6333, 6364), False, 'import logging\n'), ((7578, 7593), 'json.dumps', 'json.dumps', (['eps'], {}), '(eps)\n', (7588, 7593), False, 'import json\n')]
'''HAllA setup
To install: python setup.py install
'''
import sys
try:
import setuptools
from setuptools.command.install import install
except ImportError:
sys.exit('Please install setuptools.')
VERSION = '0.8.20'
AUTHOR = 'HAllA Development Team'
MAINTAINER_EMAIL = '<EMAIL>'
class PostInstallCommand(install):
'''Post-installation for installation mode'''
def run(self):
install.run(self)
# post-install script
from rpy2.robjects.packages import importr
try:
eva = importr('eva')
except:
utils = importr('utils')
utils.chooseCRANmirror(ind=1)
utils.install_packages('EnvStats')
utils.install_packages('https://cran.r-project.org/src/contrib/Archive/eva/eva_0.2.5.tar.gz')
# check if eva has been successfully installed
eva = importr('eva')
try:
XICOR = importr('XICOR')
except:
utils = importr('utils')
utils.chooseCRANmirror(ind=1)
utils.install_packages("XICOR")
XICOR = importr('XICOR')
# Installing requirements.txt dependencies
dependencies = []
with open('requirements.txt', 'r') as requirements:
    for dependency in requirements:
        dependencies.append(str(dependency))
setuptools.setup(
name='HAllA',
author=AUTHOR,
author_email=MAINTAINER_EMAIL,
version=VERSION,
license='MIT',
description='HAllA: Hierarchical All-against All Association Testing',
long_description="Given two high-dimensional 'omics datasets X and Y (continuous and/or categorical features) from the same n biosamples, HAllA (Hierarchical All-against-All Association Testing) discovers densely-associated blocks of features in the X vs. Y association matrix where: 1) each block is defined as all associations between features in a subtree of X hierarchy and features in a subtree of Y hierarchy and 2) a block is densely associated if (1 - FNR)% of pairwise associations are FDR significant (FNR is the pre-defined expected false negative rate)",
url='https://github.com/biobakery/halla',
keywords=['halla', 'association testing'],
platforms=['Linux','MacOS'],
install_requires=dependencies,
classifiers=[
'Programming Language :: Python',
'Operating System :: MacOS',
'Operating System :: Unix',
'Programming Language :: Python :: 3.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
],
packages=setuptools.find_packages(),
package_data={
'halla': ['config.yaml']
},
entry_points={
'console_scripts': [
'halla = scripts.halla:main',
'halladata = scripts.synthetic_data:main',
'hallagram = scripts.hallagram:main',
'hallagnostic = scripts.diagnostic_plot:main',
]
},
cmdclass={
'install': PostInstallCommand,
},
test_suite= 'tests',
)
| [
"setuptools.command.install.install.run",
"setuptools.find_packages",
"rpy2.robjects.packages.importr",
"sys.exit"
] | [((171, 209), 'sys.exit', 'sys.exit', (['"""Please install setuptools."""'], {}), "('Please install setuptools.')\n", (179, 209), False, 'import sys\n'), ((407, 424), 'setuptools.command.install.install.run', 'install.run', (['self'], {}), '(self)\n', (418, 424), False, 'from setuptools.command.install import install\n'), ((2503, 2529), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (2527, 2529), False, 'import setuptools\n'), ((537, 551), 'rpy2.robjects.packages.importr', 'importr', (['"""eva"""'], {}), "('eva')\n", (544, 551), False, 'from rpy2.robjects.packages import importr\n'), ((925, 941), 'rpy2.robjects.packages.importr', 'importr', (['"""XICOR"""'], {}), "('XICOR')\n", (932, 941), False, 'from rpy2.robjects.packages import importr\n'), ((588, 604), 'rpy2.robjects.packages.importr', 'importr', (['"""utils"""'], {}), "('utils')\n", (595, 604), False, 'from rpy2.robjects.packages import importr\n'), ((877, 891), 'rpy2.robjects.packages.importr', 'importr', (['"""eva"""'], {}), "('eva')\n", (884, 891), False, 'from rpy2.robjects.packages import importr\n'), ((978, 994), 'rpy2.robjects.packages.importr', 'importr', (['"""utils"""'], {}), "('utils')\n", (985, 994), False, 'from rpy2.robjects.packages import importr\n'), ((1101, 1117), 'rpy2.robjects.packages.importr', 'importr', (['"""XICOR"""'], {}), "('XICOR')\n", (1108, 1117), False, 'from rpy2.robjects.packages import importr\n')] |
"""
Plot an all-sky average proper motion map, using statistics downloaded from the Gaia archive with a query similar to the
following:
select
gaia_healpix_index(5, source_id) as healpix_5,
avg(pmra) as avg_pmra,
avg(pmdec) as avg_pmdec
from gaiaedr3.gaia_source
where parallax_over_error>=10
and parallax*parallax - 2*parallax - parallax_error*parallax_error < -1
group by healpix_5
<NAME> Oct 2020 - Dec 2020
"""
import argparse
import astropy.units as u
import astropy_healpix.healpy as hp
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
import numpy as np
from astropy.coordinates import ICRS, Galactic
from astropy.table import Table
from matplotlib import cm
from matplotlib.gridspec import GridSpec
from matplotlib.patches import ArrowStyle
def make_plot(args):
"""
Take the steps to make the plot.
Parameters
----------
args: dict
Command line arguments
Returns
-------
Nothing
"""
infile = './data/' + args['inputFile']
basename = 'PMmap-' + args['inputFile'].split('.')[0]
default_proj = ccrs.PlateCarree()
sky_proj = ccrs.Mollweide()
backgr = plt.imread('../star-trail-animation/sky-images/GaiaSky-colour-2k.png')
nside = hp.order2nside(args['hplevel'])
hpcol = 'healpix_{0}'.format(args['hplevel'])
edr3data = Table.read(infile)
alpha, delta = hp.pix2ang(nside, edr3data[hpcol], lonlat=True, nest=True)
pmra = edr3data['avg_pmra']
pmdec = edr3data['avg_pmdec']
icrs = ICRS(ra=alpha * u.degree, dec=delta * u.degree, pm_ra_cosdec=pmra * u.mas / u.yr,
pm_dec=pmdec * u.mas / u.yr)
galactic = icrs.transform_to(Galactic)
pmtot = np.sqrt(galactic.pm_l_cosb.value ** 2 + galactic.pm_b.value ** 2)
fig = plt.figure(figsize=(16, 9), dpi=120, frameon=False, tight_layout={'pad': 0.01})
gs = GridSpec(1, 1, figure=fig)
ax = fig.add_subplot(gs[0, 0], projection=sky_proj)
ax.imshow(np.fliplr(backgr), transform=default_proj, zorder=-1, origin='upper')
pmcmap = cm.viridis
veccolor = plt.cm.get_cmap('tab10').colors[9]
linecolor = plt.cm.get_cmap('tab10').colors[9]
if args['quiver']:
vscale = np.median(pmtot) / 10
ax.quiver(galactic.l.value, galactic.b.value, galactic.pm_l_cosb.value, galactic.pm_b.value,
transform=default_proj, angles='xy', scale=vscale, scale_units='dots', color=veccolor,
headwidth=1, headlength=3, headaxislength=2.5)
else:
if args['colourstreams']:
ax.streamplot(galactic.l.value, galactic.b.value, galactic.pm_l_cosb.value, galactic.pm_b.value,
transform=default_proj, linewidth=2.0, density=2, color=pmtot, cmap=pmcmap, maxlength=0.5,
arrowsize=1, arrowstyle=ArrowStyle.Fancy(head_length=1.0, head_width=.4, tail_width=.4))
elif args['lwcode'] > 0:
ax.streamplot(galactic.l.value, galactic.b.value, galactic.pm_l_cosb.value, galactic.pm_b.value,
transform=default_proj, linewidth=args['lwcode'] * pmtot / np.median(pmtot), density=2,
color=linecolor,
maxlength=0.5, arrowsize=1, arrowstyle=ArrowStyle.Fancy(head_length=1.0, head_width=.4,
tail_width=.4))
else:
ax.streamplot(galactic.l.value, galactic.b.value, galactic.pm_l_cosb.value, galactic.pm_b.value,
transform=default_proj, linewidth=1.5, density=2, color=linecolor, maxlength=0.5, arrowsize=1,
arrowstyle=ArrowStyle.Fancy(head_length=1.0, head_width=.4, tail_width=.4))
ax.invert_xaxis()
if args['pdfOutput']:
plt.savefig(basename + '.pdf')
elif args['pngOutput']:
plt.savefig(basename + '.png')
else:
plt.show()
def parse_command_line_arguments():
"""
Set up command line parsing.
"""
parser = argparse.ArgumentParser("Produce all-sky proper motion map.")
parser.add_argument('inputFile', type=str, help="""VOT file with proper motion stats by Healpix.""")
parser.add_argument('hplevel', type=int, nargs='?', default=4, help="""Healpix level of input table.""")
parser.add_argument('--vectors', action="store_true", dest="quiver", help="Plot vectors instead of streamlines")
parser.add_argument('--colourcode', action='store_true', dest='colourstreams', help="""Plot streamlines colour coded
by magnitude of proper motion""")
parser.add_argument('--lwcode', type=float, default=0.0, help="""Plot streamlines with the width indicating the
magnitude of proper motion. Scale the widths by the factor provided""")
parser.add_argument("-p", action="store_true", dest="pdfOutput", help="Make PDF plot")
parser.add_argument("-b", action="store_true", dest="pngOutput", help="Make PNG plot")
args = vars(parser.parse_args())
return args
if __name__ == '__main__':
cmdargs = parse_command_line_arguments()
make_plot(cmdargs)
| [
"numpy.median",
"numpy.sqrt",
"matplotlib.pyplot.savefig",
"argparse.ArgumentParser",
"matplotlib.pyplot.show",
"numpy.fliplr",
"cartopy.crs.Mollweide",
"matplotlib.pyplot.imread",
"cartopy.crs.PlateCarree",
"matplotlib.patches.ArrowStyle.Fancy",
"matplotlib.pyplot.figure",
"matplotlib.gridspe... | [((1082, 1100), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (1098, 1100), True, 'import cartopy.crs as ccrs\n'), ((1116, 1132), 'cartopy.crs.Mollweide', 'ccrs.Mollweide', ([], {}), '()\n', (1130, 1132), True, 'import cartopy.crs as ccrs\n'), ((1147, 1217), 'matplotlib.pyplot.imread', 'plt.imread', (['"""../star-trail-animation/sky-images/GaiaSky-colour-2k.png"""'], {}), "('../star-trail-animation/sky-images/GaiaSky-colour-2k.png')\n", (1157, 1217), True, 'import matplotlib.pyplot as plt\n'), ((1231, 1262), 'astropy_healpix.healpy.order2nside', 'hp.order2nside', (["args['hplevel']"], {}), "(args['hplevel'])\n", (1245, 1262), True, 'import astropy_healpix.healpy as hp\n'), ((1328, 1346), 'astropy.table.Table.read', 'Table.read', (['infile'], {}), '(infile)\n', (1338, 1346), False, 'from astropy.table import Table\n'), ((1367, 1425), 'astropy_healpix.healpy.pix2ang', 'hp.pix2ang', (['nside', 'edr3data[hpcol]'], {'lonlat': '(True)', 'nest': '(True)'}), '(nside, edr3data[hpcol], lonlat=True, nest=True)\n', (1377, 1425), True, 'import astropy_healpix.healpy as hp\n'), ((1504, 1618), 'astropy.coordinates.ICRS', 'ICRS', ([], {'ra': '(alpha * u.degree)', 'dec': '(delta * u.degree)', 'pm_ra_cosdec': '(pmra * u.mas / u.yr)', 'pm_dec': '(pmdec * u.mas / u.yr)'}), '(ra=alpha * u.degree, dec=delta * u.degree, pm_ra_cosdec=pmra * u.mas /\n u.yr, pm_dec=pmdec * u.mas / u.yr)\n', (1508, 1618), False, 'from astropy.coordinates import ICRS, Galactic\n'), ((1686, 1751), 'numpy.sqrt', 'np.sqrt', (['(galactic.pm_l_cosb.value ** 2 + galactic.pm_b.value ** 2)'], {}), '(galactic.pm_l_cosb.value ** 2 + galactic.pm_b.value ** 2)\n', (1693, 1751), True, 'import numpy as np\n'), ((1763, 1842), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 9)', 'dpi': '(120)', 'frameon': '(False)', 'tight_layout': "{'pad': 0.01}"}), "(figsize=(16, 9), dpi=120, frameon=False, tight_layout={'pad': 0.01})\n", (1773, 1842), True, 'import matplotlib.pyplot as plt\n'), ((1852, 1878), 'matplotlib.gridspec.GridSpec', 'GridSpec', (['(1)', '(1)'], {'figure': 'fig'}), '(1, 1, figure=fig)\n', (1860, 1878), False, 'from matplotlib.gridspec import GridSpec\n'), ((4004, 4065), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Produce all-sky proper motion map."""'], {}), "('Produce all-sky proper motion map.')\n", (4027, 4065), False, 'import argparse\n'), ((1949, 1966), 'numpy.fliplr', 'np.fliplr', (['backgr'], {}), '(backgr)\n', (1958, 1966), True, 'import numpy as np\n'), ((3777, 3807), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(basename + '.pdf')"], {}), "(basename + '.pdf')\n", (3788, 3807), True, 'import matplotlib.pyplot as plt\n'), ((2058, 2082), 'matplotlib.pyplot.cm.get_cmap', 'plt.cm.get_cmap', (['"""tab10"""'], {}), "('tab10')\n", (2073, 2082), True, 'import matplotlib.pyplot as plt\n'), ((2109, 2133), 'matplotlib.pyplot.cm.get_cmap', 'plt.cm.get_cmap', (['"""tab10"""'], {}), "('tab10')\n", (2124, 2133), True, 'import matplotlib.pyplot as plt\n'), ((2185, 2201), 'numpy.median', 'np.median', (['pmtot'], {}), '(pmtot)\n', (2194, 2201), True, 'import numpy as np\n'), ((3844, 3874), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(basename + '.png')"], {}), "(basename + '.png')\n", (3855, 3874), True, 'import matplotlib.pyplot as plt\n'), ((3893, 3903), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3901, 3903), True, 'import matplotlib.pyplot as plt\n'), ((2798, 2863), 'matplotlib.patches.ArrowStyle.Fancy', 'ArrowStyle.Fancy', ([], {'head_length': 
'(1.0)', 'head_width': '(0.4)', 'tail_width': '(0.4)'}), '(head_length=1.0, head_width=0.4, tail_width=0.4)\n', (2814, 2863), False, 'from matplotlib.patches import ArrowStyle\n'), ((3227, 3292), 'matplotlib.patches.ArrowStyle.Fancy', 'ArrowStyle.Fancy', ([], {'head_length': '(1.0)', 'head_width': '(0.4)', 'tail_width': '(0.4)'}), '(head_length=1.0, head_width=0.4, tail_width=0.4)\n', (3243, 3292), False, 'from matplotlib.patches import ArrowStyle\n'), ((3655, 3720), 'matplotlib.patches.ArrowStyle.Fancy', 'ArrowStyle.Fancy', ([], {'head_length': '(1.0)', 'head_width': '(0.4)', 'tail_width': '(0.4)'}), '(head_length=1.0, head_width=0.4, tail_width=0.4)\n', (3671, 3720), False, 'from matplotlib.patches import ArrowStyle\n'), ((3090, 3106), 'numpy.median', 'np.median', (['pmtot'], {}), '(pmtot)\n', (3099, 3106), True, 'import numpy as np\n')] |
# python3
import sys
def compute_min_refills(distance, tank, stops):
    # Greedy strategy: keep driving and refill only when the next stop
    # (or the destination) would be out of range.
    if distance <= tank:
        return 0
    elif not stops:
        # no stops and the destination is beyond one tank: unreachable
        return -1
    else:
        stops.append(distance)
        n_stops = len(stops) - 1
        count = 0
        refill = tank
        for i in range(n_stops):
            # fail if the current stop is out of range, or if the last real
            # stop plus a full tank still cannot reach the destination
            if refill < stops[i] or (stops[-2] + tank < distance):
                return -1
            if refill < stops[i + 1]:
                refill = tank + stops[i]
                count += 1
        return count
if __name__ == '__main__':
d, m, _, *stops = map(int, sys.stdin.read().split())
print(compute_min_refills(d, m, stops))
| [
"sys.stdin.read"
] | [((566, 582), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (580, 582), False, 'import sys\n')] |
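A quick sanity check for the greedy routine above; the inputs are illustrative, not taken from the original grader:
# Trip of 950 units, 400-unit tank, stops at 200/375/550/750: two refills
# (at 375 and at 750) are enough.
assert compute_min_refills(950, 400, [200, 375, 550, 750]) == 2
# No stops and a destination beyond one tank: unreachable.
assert compute_min_refills(10, 3, []) == -1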
import xlwt
if __name__ == '__main__':
    workbook = xlwt.Workbook(encoding='utf-8')  # create the workbook object
    worksheet = workbook.add_sheet('sheet1')  # create the worksheet 'sheet1'
    # write into the sheet: the first argument is the row, the second the column, the third the content
worksheet.write(0, 0, 'hello world')
worksheet.write(0, 1, '你好')
    workbook.save('first.xls')  # save the workbook as first.xls
| [
"xlwt.Workbook"
] | [((55, 86), 'xlwt.Workbook', 'xlwt.Workbook', ([], {'encoding': '"""utf-8"""'}), "(encoding='utf-8')\n", (68, 86), False, 'import xlwt\n')] |
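A slightly larger sketch of the same xlwt pattern; the file name and rows below are made up for illustration:
import xlwt

workbook = xlwt.Workbook(encoding='utf-8')
sheet = workbook.add_sheet('scores')
# header row
for col, title in enumerate(['name', 'score']):
    sheet.write(0, col, title)
# data rows start at row 1
for row, (name, score) in enumerate([('Ada', 95), ('Bob', 88)], start=1):
    sheet.write(row, 0, name)
    sheet.write(row, 1, score)
workbook.save('scores.xls')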
# Generated by Django 2.0.7 on 2018-07-19 14:38
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('kiestze', '0003_gemeente'),
]
operations = [
migrations.RemoveField(
model_name='gemeente',
name='id',
),
migrations.AlterField(
model_name='gemeente',
name='nis',
field=models.IntegerField(primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='partij',
name='nis',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kiestze.Gemeente'),
),
]
| [
"django.db.models.ForeignKey",
"django.db.migrations.RemoveField",
"django.db.models.IntegerField"
] | [((258, 314), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""gemeente"""', 'name': '"""id"""'}), "(model_name='gemeente', name='id')\n", (280, 314), False, 'from django.db import migrations, models\n'), ((459, 513), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (478, 513), False, 'from django.db import migrations, models\n'), ((632, 722), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""kiestze.Gemeente"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'kiestze.Gemeente')\n", (649, 722), False, 'from django.db import migrations, models\n')] |
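For context, a sketch of what the models plausibly look like once this migration is applied; the field layout is inferred from the operations above and the real models.py may differ:
from django.db import models

class Gemeente(models.Model):
    nis = models.IntegerField(primary_key=True)

class Partij(models.Model):
    nis = models.ForeignKey('Gemeente', on_delete=models.CASCADE)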
import random
from Card import Card
suits = ('Hearts', 'Diamonds', 'Spades', 'Clubs')
ranks = ('Two', 'Three', 'Four', 'Five', 'Six', 'Seven', 'Eight', 'Nine', 'Ten', 'Jack', 'Queen', 'King', 'Ace')
class Deck:
def __init__(self):
# Note this only happens once upon creation of a new Deck
self.all_cards = []
for suit in suits:
for rank in ranks:
# This assumes the Card class has already been defined!
self.all_cards.append(Card(suit, rank))
def shuffle(self):
# Note this doesn't return anything
random.shuffle(self.all_cards)
def deal(self):
# Note we remove one card from the list of all_cards
return self.all_cards.pop()
| [
"Card.Card",
"random.shuffle"
] | [((614, 644), 'random.shuffle', 'random.shuffle', (['self.all_cards'], {}), '(self.all_cards)\n', (628, 644), False, 'import random\n'), ((516, 532), 'Card.Card', 'Card', (['suit', 'rank'], {}), '(suit, rank)\n', (520, 532), False, 'from Card import Card\n')] |
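Minimal usage sketch of the class above (assumes the imported Card class behaves as the comments describe):
deck = Deck()
deck.shuffle()
top_card = deck.deal()
print(len(deck.all_cards))  # 51: one card dealt from a 52-card deck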
# -*- coding: utf-8 -*-
"""
users.py
~~~~~~~~~~~
    user management
:copyright: (c) 2015 by <NAME>.
:license: Apache, see LICENSE for more details.
"""
import hashlib
from datetime import datetime
from werkzeug import generate_password_hash, check_password_hash, \
cached_property
from flask.ext.sqlalchemy import BaseQuery
from flask.ext.principal import RoleNeed, UserNeed, Permission
from Smaug.extensions import db
from Smaug.permissions import null
class UserQuery(BaseQuery):
def from_identity(self, identity):
"""
Loads user from flask.ext.principal.Identity instance and
assigns permissions from user.
A "user" instance is monkeypatched to the identity instance.
If no user found then None is returned.
"""
try:
user = self.get(int(identity.id))
except ValueError:
user = None
if user:
identity.provides.update(user.provides)
identity.user = user
return user
def authenticate(self, login, password):
user = self.filter(db.or_(User.username==login,
User.email==login)).first()
if user:
authenticate = user.check_password(password)
else:
authenticate = False
return user, authenticate
def authenticate_openid(self, email, openid):
user = self.filter(User.email==email).first()
if user:
authenticate = user.check_openid(openid)
else:
authenticate = False
return user, authenticate
class User(db.Model):
__tablename__ = "users"
query_class = UserQuery
# user roles
MEMBER = 100
MODERATOR = 200
ADMIN = 300
id = db.Column(db.Integer, primary_key = True)
username = db.Column(db.Unicode(60), unique=True, nullable=False)
email = db.Column(db.String(150), unique=True, nullable=False)
role = db.Column(db.Integer, default = MEMBER)
_password = db.Column("password", db.String(80))
_openid = db.Column("openid", db.String(80), unique=True)
class Permissions(object):
def __init__(self, obj):
self.obj = obj
@cached_property
def send_message(self):
if not self.obj.receive_email:
return null
needs = [UserNeed(user_id) for user_id in self.obj.friends]
if not needs:
return null
return Permission(*needs)
def __str__(self):
return self.username
def __repr__(self):
return "<%s>" % self
@cached_property
def permissions(self):
return self.Permissions(self)
def _get_password(self):
return self._password
def _set_password(self, password):
self._password = generate_password_hash(password)
password = db.synonym("_password",
descriptor=property(_get_password,
_set_password))
def check_password(self, password):
if self.password is None:
return False
return check_password_hash(self.password, password)
def _get_openid(self):
return self._openid
def _set_openid(self, openid):
self._openid = generate_password_hash(openid)
openid = db.synonym("_openid",
descriptor=property(_get_openid,
_set_openid))
def check_openid(self, openid):
if self.openid is None:
return False
return check_password_hash(self.openid, openid)
@cached_property
def provides(self):
needs = [RoleNeed('authenticated'),
UserNeed(self.id)]
if self.is_moderator:
needs.append(RoleNeed('moderator'))
if self.is_admin:
needs.append(RoleNeed('admin'))
return needs
@property
def is_moderator(self):
return self.role >= self.MODERATOR
@property
def is_admin(self):
        return self.role >= self.ADMIN
 | [
"werkzeug.check_password_hash",
"flask.ext.principal.Permission",
"flask.ext.principal.UserNeed",
"flask.ext.principal.RoleNeed",
"Smaug.extensions.db.String",
"werkzeug.generate_password_hash",
"Smaug.extensions.db.or_",
"Smaug.extensions.db.Unicode",
"Smaug.extensions.db.Column"
] | [((1763, 1802), 'Smaug.extensions.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (1772, 1802), False, 'from Smaug.extensions import db\n'), ((1953, 1990), 'Smaug.extensions.db.Column', 'db.Column', (['db.Integer'], {'default': 'MEMBER'}), '(db.Integer, default=MEMBER)\n', (1962, 1990), False, 'from Smaug.extensions import db\n'), ((1830, 1844), 'Smaug.extensions.db.Unicode', 'db.Unicode', (['(60)'], {}), '(60)\n', (1840, 1844), False, 'from Smaug.extensions import db\n'), ((1897, 1911), 'Smaug.extensions.db.String', 'db.String', (['(150)'], {}), '(150)\n', (1906, 1911), False, 'from Smaug.extensions import db\n'), ((2031, 2044), 'Smaug.extensions.db.String', 'db.String', (['(80)'], {}), '(80)\n', (2040, 2044), False, 'from Smaug.extensions import db\n'), ((2080, 2093), 'Smaug.extensions.db.String', 'db.String', (['(80)'], {}), '(80)\n', (2089, 2093), False, 'from Smaug.extensions import db\n'), ((2815, 2847), 'werkzeug.generate_password_hash', 'generate_password_hash', (['password'], {}), '(password)\n', (2837, 2847), False, 'from werkzeug import generate_password_hash, check_password_hash, cached_property\n'), ((3127, 3171), 'werkzeug.check_password_hash', 'check_password_hash', (['self.password', 'password'], {}), '(self.password, password)\n', (3146, 3171), False, 'from werkzeug import generate_password_hash, check_password_hash, cached_property\n'), ((3287, 3317), 'werkzeug.generate_password_hash', 'generate_password_hash', (['openid'], {}), '(openid)\n', (3309, 3317), False, 'from werkzeug import generate_password_hash, check_password_hash, cached_property\n'), ((3583, 3623), 'werkzeug.check_password_hash', 'check_password_hash', (['self.openid', 'openid'], {}), '(self.openid, openid)\n', (3602, 3623), False, 'from werkzeug import generate_password_hash, check_password_hash, cached_property\n'), ((2477, 2495), 'flask.ext.principal.Permission', 'Permission', (['*needs'], {}), '(*needs)\n', (2487, 2495), False, 'from flask.ext.principal import RoleNeed, UserNeed, Permission\n'), ((3687, 3712), 'flask.ext.principal.RoleNeed', 'RoleNeed', (['"""authenticated"""'], {}), "('authenticated')\n", (3695, 3712), False, 'from flask.ext.principal import RoleNeed, UserNeed, Permission\n'), ((3731, 3748), 'flask.ext.principal.UserNeed', 'UserNeed', (['self.id'], {}), '(self.id)\n', (3739, 3748), False, 'from flask.ext.principal import RoleNeed, UserNeed, Permission\n'), ((2352, 2369), 'flask.ext.principal.UserNeed', 'UserNeed', (['user_id'], {}), '(user_id)\n', (2360, 2369), False, 'from flask.ext.principal import RoleNeed, UserNeed, Permission\n'), ((3806, 3827), 'flask.ext.principal.RoleNeed', 'RoleNeed', (['"""moderator"""'], {}), "('moderator')\n", (3814, 3827), False, 'from flask.ext.principal import RoleNeed, UserNeed, Permission\n'), ((3881, 3898), 'flask.ext.principal.RoleNeed', 'RoleNeed', (['"""admin"""'], {}), "('admin')\n", (3889, 3898), False, 'from flask.ext.principal import RoleNeed, UserNeed, Permission\n'), ((1097, 1148), 'Smaug.extensions.db.or_', 'db.or_', (['(User.username == login)', '(User.email == login)'], {}), '(User.username == login, User.email == login)\n', (1103, 1148), False, 'from Smaug.extensions import db\n')] |
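Hypothetical use of the query class above from a view; it assumes the Flask app and database session are already configured, and relies on query_class being set to UserQuery on the model:
user, authenticated = User.query.authenticate('alice', 'secret')
if authenticated:
    print('logged in as', user)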
import os
from libdotfiles.util import (
HOME_DIR,
PKG_DIR,
REPO_ROOT_DIR,
create_symlink,
run,
)
create_symlink(
PKG_DIR / "launcher.json", HOME_DIR / ".config" / "launcher.json"
)
os.chdir(REPO_ROOT_DIR / "opt" / "launcher")
run(
["python3", "-m", "pip", "install", "--user", "--upgrade", "."],
check=False,
)
| [
"os.chdir",
"libdotfiles.util.create_symlink",
"libdotfiles.util.run"
] | [((120, 205), 'libdotfiles.util.create_symlink', 'create_symlink', (["(PKG_DIR / 'launcher.json')", "(HOME_DIR / '.config' / 'launcher.json')"], {}), "(PKG_DIR / 'launcher.json', HOME_DIR / '.config' /\n 'launcher.json')\n", (134, 205), False, 'from libdotfiles.util import HOME_DIR, PKG_DIR, REPO_ROOT_DIR, create_symlink, run\n'), ((209, 253), 'os.chdir', 'os.chdir', (["(REPO_ROOT_DIR / 'opt' / 'launcher')"], {}), "(REPO_ROOT_DIR / 'opt' / 'launcher')\n", (217, 253), False, 'import os\n'), ((254, 340), 'libdotfiles.util.run', 'run', (["['python3', '-m', 'pip', 'install', '--user', '--upgrade', '.']"], {'check': '(False)'}), "(['python3', '-m', 'pip', 'install', '--user', '--upgrade', '.'], check=\n False)\n", (257, 340), False, 'from libdotfiles.util import HOME_DIR, PKG_DIR, REPO_ROOT_DIR, create_symlink, run\n')] |
"""
Test for OWL 2 RL/RDF rules from
Table 8. The Semantics of Datatypes
https://www.w3.org/TR/owl2-profiles/#Reasoning_in_OWL_2_RL_and_RDF_Graphs_using_Rules
NOTE: The following axioms are skipped on purpose
- dt-eq
- dt-diff
"""
from rdflib import Graph, Literal, Namespace, RDF, XSD, RDFS
import owlrl
DAML = Namespace('http://www.daml.org/2002/03/agents/agent-ont#')
T = Namespace('http://test.org/')
def test_dt_type1():
"""
Test dt-type1 rule for OWL 2 RL.
"""
g = Graph()
owlrl.DeductiveClosure(owlrl.OWLRL_Semantics).expand(g)
assert (RDF.PlainLiteral, RDF.type, RDFS.Datatype) in g
assert (RDF.XMLLiteral, RDF.type, RDFS.Datatype) in g
assert (RDFS.Literal, RDF.type, RDFS.Datatype) in g
assert (XSD.decimal, RDF.type, RDFS.Datatype) in g
assert (XSD.integer, RDF.type, RDFS.Datatype) in g
assert (XSD.nonNegativeInteger, RDF.type, RDFS.Datatype) in g
assert (XSD.nonPositiveInteger, RDF.type, RDFS.Datatype) in g
assert (XSD.positiveInteger, RDF.type, RDFS.Datatype) in g
assert (XSD.negativeInteger, RDF.type, RDFS.Datatype) in g
assert (XSD.long, RDF.type, RDFS.Datatype) in g
assert (XSD.int, RDF.type, RDFS.Datatype) in g
assert (XSD.short, RDF.type, RDFS.Datatype) in g
assert (XSD.byte, RDF.type, RDFS.Datatype) in g
assert (XSD.unsignedLong, RDF.type, RDFS.Datatype) in g
assert (XSD.unsignedInt, RDF.type, RDFS.Datatype) in g
assert (XSD.unsignedShort, RDF.type, RDFS.Datatype) in g
assert (XSD.unsignedByte, RDF.type, RDFS.Datatype) in g
assert (XSD.float, RDF.type, RDFS.Datatype) in g
assert (XSD.double, RDF.type, RDFS.Datatype) in g
assert (XSD.string, RDF.type, RDFS.Datatype) in g
assert (XSD.normalizedString, RDF.type, RDFS.Datatype) in g
assert (XSD.token, RDF.type, RDFS.Datatype) in g
assert (XSD.language, RDF.type, RDFS.Datatype) in g
assert (XSD.Name, RDF.type, RDFS.Datatype) in g
assert (XSD.NCName, RDF.type, RDFS.Datatype) in g
assert (XSD.NMTOKEN, RDF.type, RDFS.Datatype) in g
assert (XSD.boolean, RDF.type, RDFS.Datatype) in g
assert (XSD.hexBinary, RDF.type, RDFS.Datatype) in g
assert (XSD.base64Binary, RDF.type, RDFS.Datatype) in g
assert (XSD.anyURI, RDF.type, RDFS.Datatype) in g
assert (XSD.dateTime, RDF.type, RDFS.Datatype) in g
assert (XSD.dateTimeStamp, RDF.type, RDFS.Datatype) in g
def test_dt_type2():
"""
Test dt-type2 rule for OWL 2 RL.
"""
p_one = Literal(1, datatype=XSD.positiveInteger)
g = Graph()
g.add((T.A, T.prop, p_one))
owlrl.DeductiveClosure(owlrl.OWLRL_Semantics).expand(g)
assert (T.A, T.prop, p_one) in g
assert (p_one, RDF.type, XSD.positiveInteger) in g
def test_dt_not_type():
"""
Test dt-not-type rule for OWL 2 RL.
"""
m_one = Literal(-1, datatype=XSD.nonNegativeInteger)
g = Graph()
g.add((T.A, T.prop, m_one))
owlrl.DeductiveClosure(owlrl.OWLRL_Semantics).expand(g)
# TODO, we know this one fails. It is not supposed to.
#assert (m_one, RDF.type, XSD.nonNegativeInteger) not in g
assert True
result = next(g.objects(predicate=DAML.error))
expected = Literal(
'Lexical value of the literal \'-1\' does not match its datatype'
' (http://www.w3.org/2001/XMLSchema#nonNegativeInteger)'
)
assert expected == result
| [
"rdflib.Namespace",
"rdflib.Graph",
"owlrl.DeductiveClosure",
"rdflib.Literal"
] | [((323, 381), 'rdflib.Namespace', 'Namespace', (['"""http://www.daml.org/2002/03/agents/agent-ont#"""'], {}), "('http://www.daml.org/2002/03/agents/agent-ont#')\n", (332, 381), False, 'from rdflib import Graph, Literal, Namespace, RDF, XSD, RDFS\n'), ((386, 415), 'rdflib.Namespace', 'Namespace', (['"""http://test.org/"""'], {}), "('http://test.org/')\n", (395, 415), False, 'from rdflib import Graph, Literal, Namespace, RDF, XSD, RDFS\n'), ((499, 506), 'rdflib.Graph', 'Graph', ([], {}), '()\n', (504, 506), False, 'from rdflib import Graph, Literal, Namespace, RDF, XSD, RDFS\n'), ((2483, 2523), 'rdflib.Literal', 'Literal', (['(1)'], {'datatype': 'XSD.positiveInteger'}), '(1, datatype=XSD.positiveInteger)\n', (2490, 2523), False, 'from rdflib import Graph, Literal, Namespace, RDF, XSD, RDFS\n'), ((2533, 2540), 'rdflib.Graph', 'Graph', ([], {}), '()\n', (2538, 2540), False, 'from rdflib import Graph, Literal, Namespace, RDF, XSD, RDFS\n'), ((2823, 2867), 'rdflib.Literal', 'Literal', (['(-1)'], {'datatype': 'XSD.nonNegativeInteger'}), '(-1, datatype=XSD.nonNegativeInteger)\n', (2830, 2867), False, 'from rdflib import Graph, Literal, Namespace, RDF, XSD, RDFS\n'), ((2877, 2884), 'rdflib.Graph', 'Graph', ([], {}), '()\n', (2882, 2884), False, 'from rdflib import Graph, Literal, Namespace, RDF, XSD, RDFS\n'), ((3183, 3319), 'rdflib.Literal', 'Literal', (['"""Lexical value of the literal \'-1\' does not match its datatype (http://www.w3.org/2001/XMLSchema#nonNegativeInteger)"""'], {}), '(\n "Lexical value of the literal \'-1\' does not match its datatype (http://www.w3.org/2001/XMLSchema#nonNegativeInteger)"\n )\n', (3190, 3319), False, 'from rdflib import Graph, Literal, Namespace, RDF, XSD, RDFS\n'), ((511, 556), 'owlrl.DeductiveClosure', 'owlrl.DeductiveClosure', (['owlrl.OWLRL_Semantics'], {}), '(owlrl.OWLRL_Semantics)\n', (533, 556), False, 'import owlrl\n'), ((2577, 2622), 'owlrl.DeductiveClosure', 'owlrl.DeductiveClosure', (['owlrl.OWLRL_Semantics'], {}), '(owlrl.OWLRL_Semantics)\n', (2599, 2622), False, 'import owlrl\n'), ((2921, 2966), 'owlrl.DeductiveClosure', 'owlrl.DeductiveClosure', (['owlrl.OWLRL_Semantics'], {}), '(owlrl.OWLRL_Semantics)\n', (2943, 2966), False, 'import owlrl\n')] |
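The same expansion pattern on a tiny ad-hoc graph; T.B and T.knows are made-up terms for illustration:
g = Graph()
g.add((T.B, T.knows, Literal('x')))
owlrl.DeductiveClosure(owlrl.OWLRL_Semantics).expand(g)
print(len(g))  # the closure adds the OWL 2 RL axiomatic triples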
# pylint: disable=import-error, wrong-import-position, wrong-import-order, invalid-name
"""Test model provider interface"""
from common import *
from trustyai.model import Model, feature
def foo():
return "works!"
def test_basic_model():
"""Test basic model"""
def test_model(inputs):
outputs = [output(name=feature.name, dtype="number", value=feature.value.as_number()) for feature in
inputs]
return [PredictionOutput(outputs)]
model = Model(test_model)
features = [
feature(name=f"f-num{i}", value=i * 2.0, dtype="number")
for i in range(5)
]
result = model.predictAsync(features).get()
assert len(result[0].outputs) == 5
| [
"trustyai.model.feature",
"trustyai.model.feature.value.as_number",
"trustyai.model.Model"
] | [((496, 513), 'trustyai.model.Model', 'Model', (['test_model'], {}), '(test_model)\n', (501, 513), False, 'from trustyai.model import Model, feature\n'), ((540, 596), 'trustyai.model.feature', 'feature', ([], {'name': 'f"""f-num{i}"""', 'value': '(i * 2.0)', 'dtype': '"""number"""'}), "(name=f'f-num{i}', value=i * 2.0, dtype='number')\n", (547, 596), False, 'from trustyai.model import Model, feature\n'), ((371, 396), 'trustyai.model.feature.value.as_number', 'feature.value.as_number', ([], {}), '()\n', (394, 396), False, 'from trustyai.model import Model, feature\n')] |
#!/usr/bin/env python
from __future__ import print_function
from sklearn.feature_extraction import FeatureHasher
from sklearn.ensemble import RandomForestClassifier
from sklearn.pipeline import make_pipeline
from sklearn.metrics import log_loss
import ctr
learner = RandomForestClassifier(verbose = False, n_jobs = -1)
for ID,x,y in ctr.data(ctr.train, batchsize = 1):
    pass
 | [
"ctr.data",
"sklearn.ensemble.RandomForestClassifier"
] | [((269, 317), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {'verbose': '(False)', 'n_jobs': '(-1)'}), '(verbose=False, n_jobs=-1)\n', (291, 317), False, 'from sklearn.ensemble import RandomForestClassifier\n'), ((337, 369), 'ctr.data', 'ctr.data', (['ctr.train'], {'batchsize': '(1)'}), '(ctr.train, batchsize=1)\n', (345, 369), False, 'import ctr\n')] |
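The unused FeatureHasher/make_pipeline/log_loss imports above hint at the intended design; a sketch of how they could fit together, assuming each x yielded by ctr.data() is a dict of feature names to values:
from sklearn.ensemble import RandomForestClassifier
from sklearn.feature_extraction import FeatureHasher
from sklearn.pipeline import make_pipeline

model = make_pipeline(
    FeatureHasher(n_features=2 ** 20, input_type='dict'),
    RandomForestClassifier(n_jobs=-1),
)
# model.fit(X_batch, y_batch) and log_loss(y_batch, model.predict_proba(X_batch))
# would then train and score a batch.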
"""Add Review Table
Revision ID: <PASSWORD>
Revises:
Create Date: 2019-08-07 13:09:49.691184
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<PASSWORD>'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('review',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('date_created', sa.DateTime(), nullable=True),
sa.Column('date_modified', sa.DateTime(), nullable=True),
sa.Column('comment', sa.Text(), nullable=True),
sa.Column('tag_id', sa.Integer(), nullable=True),
sa.Column('score', sa.Integer(), nullable=True),
sa.Column('submission_id', sa.Integer(), nullable=True),
sa.Column('author_id', sa.Integer(), nullable=True),
sa.Column('assignment_version', sa.Integer(), nullable=True),
sa.Column('submission_version', sa.Integer(), nullable=True),
sa.Column('version', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(('author_id',), ['user.id'], ),
sa.ForeignKeyConstraint(('submission_id',), ['submission.id'], ),
sa.ForeignKeyConstraint(('tag_id',), ['assignment_tag.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_foreign_key(None, 'submission', 'assignment_group', ['assignment_group_id'], ['id'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'submission', type_='foreignkey')
op.drop_table('review')
# ### end Alembic commands ###
| [
"sqlalchemy.ForeignKeyConstraint",
"alembic.op.create_foreign_key",
"sqlalchemy.DateTime",
"alembic.op.drop_constraint",
"alembic.op.drop_table",
"sqlalchemy.Text",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.Integer"
] | [((1266, 1365), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""submission"""', '"""assignment_group"""', "['assignment_group_id']", "['id']"], {}), "(None, 'submission', 'assignment_group', [\n 'assignment_group_id'], ['id'])\n", (1287, 1365), False, 'from alembic import op\n'), ((1485, 1543), 'alembic.op.drop_constraint', 'op.drop_constraint', (['None', '"""submission"""'], {'type_': '"""foreignkey"""'}), "(None, 'submission', type_='foreignkey')\n", (1503, 1543), False, 'from alembic import op\n'), ((1548, 1571), 'alembic.op.drop_table', 'op.drop_table', (['"""review"""'], {}), "('review')\n", (1561, 1571), False, 'from alembic import op\n'), ((1029, 1081), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["('author_id',)", "['user.id']"], {}), "(('author_id',), ['user.id'])\n", (1052, 1081), True, 'import sqlalchemy as sa\n'), ((1089, 1151), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["('submission_id',)", "['submission.id']"], {}), "(('submission_id',), ['submission.id'])\n", (1112, 1151), True, 'import sqlalchemy as sa\n'), ((1159, 1218), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["('tag_id',)", "['assignment_tag.id']"], {}), "(('tag_id',), ['assignment_tag.id'])\n", (1182, 1218), True, 'import sqlalchemy as sa\n'), ((1226, 1255), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1249, 1255), True, 'import sqlalchemy as sa\n'), ((407, 419), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (417, 419), True, 'import sqlalchemy as sa\n'), ((468, 481), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (479, 481), True, 'import sqlalchemy as sa\n'), ((530, 543), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (541, 543), True, 'import sqlalchemy as sa\n'), ((586, 595), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (593, 595), True, 'import sqlalchemy as sa\n'), ((637, 649), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (647, 649), True, 'import sqlalchemy as sa\n'), ((690, 702), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (700, 702), True, 'import sqlalchemy as sa\n'), ((751, 763), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (761, 763), True, 'import sqlalchemy as sa\n'), ((808, 820), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (818, 820), True, 'import sqlalchemy as sa\n'), ((874, 886), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (884, 886), True, 'import sqlalchemy as sa\n'), ((940, 952), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (950, 952), True, 'import sqlalchemy as sa\n'), ((995, 1007), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1005, 1007), True, 'import sqlalchemy as sa\n')] |
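Typical command-line workflow for a revision like this one (standard Alembic commands; the revision id is elided in the source):
# alembic upgrade head    # runs upgrade(): creates the review table
# alembic downgrade -1    # runs downgrade(): drops it again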
#!/usr/bin/env python
# Copyright (c) 2019, The Personal Robotics Lab, The MuSHR Team, The Contributors of MuSHR
# License: BSD 3-Clause. See LICENSE.md file in root directory.
from threading import Lock
import numpy as np
import rospy
from std_msgs.msg import Float64
from vesc_msgs.msg import VescStateStamped
# Tune these Values!
KM_V_NOISE = 0.4 # Kinematic car velocity noise std dev
KM_DELTA_NOISE = 0.2 # Kinematic car delta noise std dev
KM_X_FIX_NOISE = 3e-2 # Kinematic car x position constant noise std dev
KM_Y_FIX_NOISE = 3e-2 # Kinematic car y position constant noise std dev
KM_THETA_FIX_NOISE = 1e-1 # Kinematic car theta constant noise std dev
# #Tune these Values!
# KM_V_NOISE = 0.01 # Kinematic car velocity noise std dev
# KM_DELTA_NOISE = 0.06 # Kinematic car delta noise std dev
# KM_X_FIX_NOISE = 3e-2 # Kinematic car x position constant noise std dev
# KM_Y_FIX_NOISE = 1e-3 # Kinematic car y position constant noise std dev
# KM_THETA_FIX_NOISE = 1e-2 # Kinematic car theta constant noise std dev
# #Tune these Values!
# KM_V_NOISE = 0.015 # Kinematic car velocity noise std dev
# KM_DELTA_NOISE = 0.065 # Kinematic car delta noise std dev
# KM_X_FIX_NOISE = 1e-2 # Kinematic car x position constant noise std dev
# KM_Y_FIX_NOISE = 1e-2 # Kinematic car y position constant noise std dev
# KM_THETA_FIX_NOISE = 1e-2 # Kinematic car theta constant noise std dev
"""
Propagates the particles forward based on the velocity and steering angle of the car
"""
class KinematicMotionModel:
"""
Initializes the kinematic motion model
motor_state_topic: The topic containing motor state information
servo_state_topic: The topic containing servo state information
speed_to_erpm_offset: Offset conversion param from rpm to speed
speed_to_erpm_gain: Gain conversion param from rpm to speed
    steering_to_servo_offset: Offset conversion param from servo position to steering angle
    steering_to_servo_gain: Gain conversion param from servo position to steering angle
car_length: The length of the car
particles: The particles to propagate forward
state_lock: Controls access to particles
"""
def __init__(
self,
motor_state_topic,
servo_state_topic,
speed_to_erpm_offset,
speed_to_erpm_gain,
steering_to_servo_offset,
steering_to_servo_gain,
car_length,
particles,
state_lock=None,
):
self.last_servo_cmd = None # The most recent servo command
self.last_vesc_stamp = None # The time stamp from the previous vesc state msg
self.particles = particles
self.SPEED_TO_ERPM_OFFSET = (
speed_to_erpm_offset # Offset conversion param from rpm to speed
)
self.SPEED_TO_ERPM_GAIN = (
speed_to_erpm_gain # Gain conversion param from rpm to speed
)
self.STEERING_TO_SERVO_OFFSET = steering_to_servo_offset # Offset conversion param from servo position to steering angle
self.STEERING_TO_SERVO_GAIN = steering_to_servo_gain # Gain conversion param from servo position to steering angle
self.CAR_LENGTH = car_length # The length of the car
if state_lock is None:
self.state_lock = Lock()
else:
self.state_lock = state_lock
# This subscriber just caches the most recent servo position command
self.servo_pos_sub = rospy.Subscriber(
servo_state_topic, Float64, self.servo_cb, queue_size=1
)
# Subscribe to the state of the vesc
self.motion_sub = rospy.Subscriber(
motor_state_topic, VescStateStamped, self.motion_cb, queue_size=1
)
"""
Caches the most recent servo command
msg: A std_msgs/Float64 message
"""
def servo_cb(self, msg):
self.last_servo_cmd = msg.data # Update servo command
"""
Converts messages to controls and applies the kinematic car model to the
particles
msg: a vesc_msgs/VescStateStamped message
"""
def motion_cb(self, msg):
self.state_lock.acquire()
if self.last_servo_cmd is None:
self.state_lock.release()
return
if self.last_vesc_stamp is None:
print("Vesc callback called for first time....")
self.last_vesc_stamp = msg.header.stamp
self.state_lock.release()
return
# Convert raw msgs to controls
# Note that control = (raw_msg_val - offset_param) / gain_param
curr_speed = (
msg.state.speed - self.SPEED_TO_ERPM_OFFSET
) / self.SPEED_TO_ERPM_GAIN
curr_steering_angle = (
self.last_servo_cmd - self.STEERING_TO_SERVO_OFFSET
) / self.STEERING_TO_SERVO_GAIN
dt = (msg.header.stamp - self.last_vesc_stamp).to_sec()
# Propagate particles forward in place
self.apply_motion_model(
proposal_dist=self.particles, control=[curr_speed, curr_steering_angle, dt]
)
self.last_vesc_stamp = msg.header.stamp
self.state_lock.release()
def apply_motion_model(self, proposal_dist, control):
"""
Propagates particles forward (in-place) by applying the kinematic model and adding
sampled gaussian noise
proposal_dist: The particles to propagate
        control: List containing velocity, steering angle, and time interval - [v,delta,dt]
returns: nothing
"""
# Separate control
v, delta, dt = control
# Add control noise
v = np.random.normal(loc=v, scale=KM_V_NOISE, size=proposal_dist[:, 0].shape)
delta = np.random.normal(
loc=delta, scale=KM_DELTA_NOISE, size=proposal_dist[:, 0].shape
)
# apply motion model's update rule
theta = proposal_dist[:, 2]
theta_new = theta + v / self.CAR_LENGTH * np.tan(delta) * dt
# x
proposal_dist[:, 0] += (
self.CAR_LENGTH / np.tan(delta) * (np.sin(theta_new) - np.sin(theta))
)
# y
proposal_dist[:, 1] += (
self.CAR_LENGTH / np.tan(delta) * (-np.cos(theta_new) + np.cos(theta))
)
# Add noise
proposal_dist[:, 0] = np.random.normal(
loc=proposal_dist[:, 0],
scale=KM_X_FIX_NOISE,
size=proposal_dist[:, 0].shape,
)
proposal_dist[:, 1] = np.random.normal(
loc=proposal_dist[:, 1],
scale=KM_Y_FIX_NOISE,
size=proposal_dist[:, 1].shape,
)
proposal_dist[:, 2] = np.random.normal(
loc=theta_new, scale=KM_THETA_FIX_NOISE, size=proposal_dist[:, 2].shape
)
# print 'v: %f, delta: %f, x: %f, y: %f, theta: %f'%(np.mean(v), np.mean(delta), np.mean(proposal_dist[:,0]), np.mean(proposal_dist[:,1]), np.mean(proposal_dist[:,2]))
# Limit particle rotation to be between -pi and pi
proposal_dist[proposal_dist[:, 2] < -1 * np.pi, 2] += 2 * np.pi
proposal_dist[proposal_dist[:, 2] > np.pi, 2] -= 2 * np.pi
| [
"numpy.random.normal",
"numpy.tan",
"threading.Lock",
"numpy.cos",
"numpy.sin",
"rospy.Subscriber"
] | [((3544, 3617), 'rospy.Subscriber', 'rospy.Subscriber', (['servo_state_topic', 'Float64', 'self.servo_cb'], {'queue_size': '(1)'}), '(servo_state_topic, Float64, self.servo_cb, queue_size=1)\n', (3560, 3617), False, 'import rospy\n'), ((3711, 3798), 'rospy.Subscriber', 'rospy.Subscriber', (['motor_state_topic', 'VescStateStamped', 'self.motion_cb'], {'queue_size': '(1)'}), '(motor_state_topic, VescStateStamped, self.motion_cb,\n queue_size=1)\n', (3727, 3798), False, 'import rospy\n'), ((5694, 5767), 'numpy.random.normal', 'np.random.normal', ([], {'loc': 'v', 'scale': 'KM_V_NOISE', 'size': 'proposal_dist[:, 0].shape'}), '(loc=v, scale=KM_V_NOISE, size=proposal_dist[:, 0].shape)\n', (5710, 5767), True, 'import numpy as np\n'), ((5784, 5870), 'numpy.random.normal', 'np.random.normal', ([], {'loc': 'delta', 'scale': 'KM_DELTA_NOISE', 'size': 'proposal_dist[:, 0].shape'}), '(loc=delta, scale=KM_DELTA_NOISE, size=proposal_dist[:, 0].\n shape)\n', (5800, 5870), True, 'import numpy as np\n'), ((6363, 6463), 'numpy.random.normal', 'np.random.normal', ([], {'loc': 'proposal_dist[:, 0]', 'scale': 'KM_X_FIX_NOISE', 'size': 'proposal_dist[:, 0].shape'}), '(loc=proposal_dist[:, 0], scale=KM_X_FIX_NOISE, size=\n proposal_dist[:, 0].shape)\n', (6379, 6463), True, 'import numpy as np\n'), ((6536, 6636), 'numpy.random.normal', 'np.random.normal', ([], {'loc': 'proposal_dist[:, 1]', 'scale': 'KM_Y_FIX_NOISE', 'size': 'proposal_dist[:, 1].shape'}), '(loc=proposal_dist[:, 1], scale=KM_Y_FIX_NOISE, size=\n proposal_dist[:, 1].shape)\n', (6552, 6636), True, 'import numpy as np\n'), ((6709, 6803), 'numpy.random.normal', 'np.random.normal', ([], {'loc': 'theta_new', 'scale': 'KM_THETA_FIX_NOISE', 'size': 'proposal_dist[:, 2].shape'}), '(loc=theta_new, scale=KM_THETA_FIX_NOISE, size=\n proposal_dist[:, 2].shape)\n', (6725, 6803), True, 'import numpy as np\n'), ((3375, 3381), 'threading.Lock', 'Lock', ([], {}), '()\n', (3379, 3381), False, 'from threading import Lock\n'), ((6112, 6125), 'numpy.tan', 'np.tan', (['delta'], {}), '(delta)\n', (6118, 6125), True, 'import numpy as np\n'), ((6129, 6146), 'numpy.sin', 'np.sin', (['theta_new'], {}), '(theta_new)\n', (6135, 6146), True, 'import numpy as np\n'), ((6149, 6162), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (6155, 6162), True, 'import numpy as np\n'), ((6249, 6262), 'numpy.tan', 'np.tan', (['delta'], {}), '(delta)\n', (6255, 6262), True, 'import numpy as np\n'), ((6287, 6300), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (6293, 6300), True, 'import numpy as np\n'), ((6018, 6031), 'numpy.tan', 'np.tan', (['delta'], {}), '(delta)\n', (6024, 6031), True, 'import numpy as np\n'), ((6267, 6284), 'numpy.cos', 'np.cos', (['theta_new'], {}), '(theta_new)\n', (6273, 6284), True, 'import numpy as np\n')] |
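A standalone sketch of the update rule above applied to a small particle set; all numbers are made up, and the ROS plumbing is skipped by bypassing __init__ (apply_motion_model only reads CAR_LENGTH from self):
import numpy as np

particles = np.zeros((100, 3))  # columns: x, y, theta
model = KinematicMotionModel.__new__(KinematicMotionModel)
model.CAR_LENGTH = 0.33
model.apply_motion_model(particles, control=[1.0, 0.1, 0.02])
print(particles.mean(axis=0))  # mean pose after one noisy update step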
from collections import defaultdict
from heapq import heappop, heappush
class Region(object):
def __init__(self, y, x, gi, el, t):
self.y = y
self.x = x
self.gi = gi
self.el = el
self.t = t
def pos(self):
return (self.y, self.x)
tools = {
'.': set(['c', 't']),
'=': set(['c', 'n']),
'|': set(['t', 'n']),
}
moves = {
'n': lambda p: (p[0]-1,p[1]),
's': lambda p: (p[0]+1,p[1]),
'w': lambda p: (p[0],p[1]-1),
'e': lambda p: (p[0],p[1]+1),
}
def solve(depth, target):
# Mapping cave
cave = defaultdict(Region)
for y in range(1000):
for x in range(1000):
pos = (y,x)
if pos == (0,0) or pos == target:
gi = 0
elif y == 0:
gi = x * 16807
elif x == 0:
gi = y * 48271
else:
gi = cave[(y,x-1)].el * cave[(y-1,x)].el
el = (gi + depth) % 20183
if el % 3 == 0:
t = '.'
elif el % 3 == 1:
t = '='
else:
t = '|'
cave[(y,x)] = Region(y, x, gi, el, t)
# Find quickest route
pos = (0,0)
q = [(0,'t',pos, [(0,'t',pos)])]
seen = {}
while q:
time, tool, pos, steps = heappop(q)
if pos == target:
if tool == 't':
for s in steps:
print(s)
print(time)
return
else:
next_time = time+7
next_pos = pos
next_steps = steps[:]
next_steps.append((next_time, 't', next_pos))
heappush(q, (next_time, 't', next_pos, next_steps))
continue
if (tool,pos) in seen: # and seen[(tool,pos)] <= time:
continue
seen[(tool,pos)] = time
for m in moves.values():
next_pos = m(pos)
# print(next_pos)
if next_pos[0] >= 0 and next_pos[1] >= 0:
p_tools = tools[cave[pos].t]
np_tools = tools[cave[next_pos].t]
common_tools = p_tools.intersection(np_tools)
for next_tool in common_tools:
next_time = time+1
if next_tool != tool:
next_time += 7
next_steps = steps[:]
next_steps.append((next_time, next_tool, next_pos))
heappush(q, (next_time, next_tool, next_pos, next_steps))
solve(510, (10,10))
# solve(11991, (797,6))
 | [
"heapq.heappop",
"heapq.heappush",
"collections.defaultdict"
] | [((588, 607), 'collections.defaultdict', 'defaultdict', (['Region'], {}), '(Region)\n', (599, 607), False, 'from collections import defaultdict\n'), ((1322, 1332), 'heapq.heappop', 'heappop', (['q'], {}), '(q)\n', (1329, 1332), False, 'from heapq import heappop, heappush\n'), ((1699, 1750), 'heapq.heappush', 'heappush', (['q', "(next_time, 't', next_pos, next_steps)"], {}), "(q, (next_time, 't', next_pos, next_steps))\n", (1707, 1750), False, 'from heapq import heappop, heappush\n'), ((2500, 2557), 'heapq.heappush', 'heappush', (['q', '(next_time, next_tool, next_pos, next_steps)'], {}), '(q, (next_time, next_tool, next_pos, next_steps))\n', (2508, 2557), False, 'from heapq import heappop, heappush\n')] |
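The search above is Dijkstra's algorithm over (position, tool) states; the same heapq pattern in miniature, on a made-up toy graph:
from heapq import heappop, heappush

graph = {'a': [('b', 1), ('c', 7)], 'b': [('c', 1)], 'c': []}
dist, q = {}, [(0, 'a')]
while q:
    d, node = heappop(q)
    if node in dist:
        continue  # the first pop of a node carries its shortest distance
    dist[node] = d
    for nxt, w in graph[node]:
        heappush(q, (d + w, nxt))
print(dist)  # {'a': 0, 'b': 1, 'c': 2}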
#!/usr/bin/python
# -*-coding: utf-8 -*-
# https://github.com/frictionlessdata/tableschema-py/tree/1d9750248de06a075029c1278404c5db5311fbc5/tableschema/types
# type and format
# Supported types and formats:
# string
# email
# uri
# uuid
# ipv4
# ipv6
# hostname
# datetime
# number
# integer
# boolean
#
import rfc3986
import re
import uuid
import datetime
import ipaddress
from pycsvschema import defaults
class TypeValidator(object):
def __init__(self, field_schema):
self.field_schema = field_schema
self.format = self.field_schema.get('format', defaults.FIELDS_FORMAT)
self.value = None
self.to_type = None
def try_convert_value(self, value, to_type, convertor_config=None, update=False):
if not convertor_config:
convertor_config = {}
try:
v = to_type(value, **convertor_config)
except Exception:
return False
if update:
self.value = v
else:
self.value = value
return True
def validate(self, value):
pass
class StringValidator(TypeValidator):
EMAIL_PATTERN = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"
HOSTNAME_PATTERN = r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*" \
r"([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$"
def __init__(self, field_schema):
super().__init__(field_schema=field_schema)
self.to_type = str
self.pattern = ''
def validate(self, value):
if value is None:
return
self.value = value
if self.format == 'email':
if not re.match(self.EMAIL_PATTERN, value):
return False
elif self.format == 'uri':
if not rfc3986.is_valid_uri(value, require_scheme=True):
return False
elif self.format == 'uuid':
return self.try_convert_value(value=value, to_type=uuid.UUID, convertor_config={'version': 4})
elif self.format == 'ipv4':
return self.try_convert_value(value=value, to_type=ipaddress.IPv4Address)
elif self.format == 'ipv6':
return self.try_convert_value(value=value, to_type=ipaddress.IPv6Address)
elif self.format == 'hostname':
if not re.match(self.HOSTNAME_PATTERN, value):
return False
elif self.format == 'datetime':
self.pattern = self.field_schema.get('datetimePattern', defaults.FIELDS_FORMAT_DATETIME_PATTERN)
try:
datetime.datetime.strptime(value, self.pattern)
except Exception:
return False
elif self.field_schema.get('pattern', defaults.FIELDS_TYPE_STRING_PATTERN):
self.pattern = self.field_schema.get('pattern', defaults.FIELDS_TYPE_STRING_PATTERN)
if not re.match(self.pattern, value):
return False
return True
class NumberValidator(TypeValidator):
def __init__(self, field_schema):
super().__init__(field_schema=field_schema)
self.to_type = float
self.groupchar = self.field_schema.get('groupChar', defaults.FIELDS_GROUPCHAR)
def validate(self, value):
if value is None:
return
value = value.replace(self.groupchar, '')
return self.try_convert_value(value=value, to_type=self.to_type, update=True)
class IntegerValidator(TypeValidator):
def __init__(self, field_schema):
super().__init__(field_schema=field_schema)
self.to_type = int
self.groupchar = self.field_schema.get('groupChar', defaults.FIELDS_GROUPCHAR)
def validate(self, value):
if value is None:
return
value = value.replace(self.groupchar, '')
return self.try_convert_value(value=value, to_type=self.to_type, update=True)
class BooleanValidator(TypeValidator):
def __init__(self, field_schema):
super().__init__(field_schema=field_schema)
self.to_type = bool
self.truevalues = self.field_schema.get('trueValues', defaults.FIELDS_TRUEVALUES)
self.falsevalues = self.field_schema.get('falseValues', defaults.FIELDS_FALSEVALUES)
def validate(self, value):
if value in self.truevalues:
self.value = True
elif value in self.falsevalues:
self.value = False
else:
return False
return True
TYPE_MAPPER = {
'string': StringValidator,
'number': NumberValidator,
'integer': IntegerValidator,
'boolean': BooleanValidator,
}
| [
"datetime.datetime.strptime",
"re.match",
"rfc3986.is_valid_uri"
] | [((1672, 1707), 're.match', 're.match', (['self.EMAIL_PATTERN', 'value'], {}), '(self.EMAIL_PATTERN, value)\n', (1680, 1707), False, 'import re\n'), ((1792, 1840), 'rfc3986.is_valid_uri', 'rfc3986.is_valid_uri', (['value'], {'require_scheme': '(True)'}), '(value, require_scheme=True)\n', (1812, 1840), False, 'import rfc3986\n'), ((2317, 2355), 're.match', 're.match', (['self.HOSTNAME_PATTERN', 'value'], {}), '(self.HOSTNAME_PATTERN, value)\n', (2325, 2355), False, 'import re\n'), ((2568, 2615), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['value', 'self.pattern'], {}), '(value, self.pattern)\n', (2594, 2615), False, 'import datetime\n'), ((2875, 2904), 're.match', 're.match', (['self.pattern', 'value'], {}), '(self.pattern, value)\n', (2883, 2904), False, 'import re\n')] |
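Example round-trip through the mapper above; the field schemas are made up:
v = TYPE_MAPPER['integer']({'groupChar': ','})
print(v.validate('1,234'), v.value)  # True 1234
v = TYPE_MAPPER['string']({'format': 'email'})
print(v.validate('not-an-email'))    # False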
from difflib import SequenceMatcher
"""
A Python program to demonstrate the adjacency
list representation of the graph
"""
# weights array by value
weights = [1 / 7, 1 / 7, 1 / 7, 1 / 7, 1 / 7, 1 / 7, 1 / 7, 1 / 7]
# A class to represent the adjacency list of the node
class AdjNode:
def __init__(self, data, weight):
self.vertex = data
self.weight = weight
self.next = None
# A class to represent a graph. A graph
# is the list of the adjacency lists.
# Size of the array will be the no. of the
# vertices "V"
class Graph:
    def __init__(self, num_vertices):
        self.V = num_vertices  # print_graph()/print_data_matches() iterate over this
        self.graph = [None] * num_vertices  # one adjacency-list head per vertex, so add_edge() can index into it
# Function to add an edge in an undirected graph
def add_edge(self, src, dest, weight):
# Adding the node to the source node
node = AdjNode(dest, weight)
node.next = self.graph[src]
self.graph[src] = node
# Adding the source node to the destination as
# it is the undirected graph
node = AdjNode(src, weight)
node.next = self.graph[dest]
self.graph[dest] = node
# Function to print the graph (in adj list format)
def print_graph(self):
for i in range(self.V):
# get current node
temp = self.graph[i]
# if no links skip
if(temp == None):
continue
print("Adjacency list of vertex {}\n head".format(i), end="")
while temp:
print(F" -> Node Position = {temp.vertex} : weight = {round(temp.weight,3)}", end="")
temp = temp.next
print(" \n")
# Function to print the graph (in the given data format)
def print_data_matches(self, NodeArray):
for i in range(self.V):
# get current adjancey list node
temp = self.graph[i]
# if no links skip
if (temp == None):
continue
# get current data point
current_data = NodeArray[i]
print("Account Name {}\n".format(current_data[0]), end="")
while temp:
connected_to = NodeArray[temp.vertex]
print(F" -> Connected To: = {connected_to[0]} with a confidence of {round((temp.weight*100), 1)}%", end="\n")
temp = temp.next
print(" \n")
    # goes through and adds edges to the adjacency list
    def add_edges(self, Nodes):
        for i in range(len(Nodes)):
            for j in range(i + 1, len(Nodes)):
                # get weight of each node with the others
                w = calculate_weights(Nodes[i], Nodes[j])
if w >= (2 / 7):
self.add_edge(i, j, w)
# calculate the connected-ness of each node
def calculate_weights(node1, node2):
w = 0
# calculate weight - does not include play time or is banned
# Account and Character name
for i in range(0, 2):
if node1[i] == node2[i]:
w += weights[i]
else:
match = SequenceMatcher(None, node1[i], node2[i]).find_longest_match(0, len(node1[i]), 0, len(node2[i]))
if match.size >= 3:
w += weights[i] * min(weights[i], weights[i] * match.size / 10)
# IP, UUID, Location
for i in range(2, 5):
if node1[i] == node2[i]:
w += weights[i]
return w
| [
"difflib.SequenceMatcher"
] | [((3108, 3149), 'difflib.SequenceMatcher', 'SequenceMatcher', (['None', 'node1[i]', 'node2[i]'], {}), '(None, node1[i], node2[i])\n', (3123, 3149), False, 'from difflib import SequenceMatcher\n')] |
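A tiny usage sketch with the constructor fix above; the node tuples are invented and follow the [account, character, ip, uuid, location, ...] layout the weights assume:
accounts = [
    ['alice', 'mage01', '1.2.3.4', 'uuid-1', 'EU', 120, 0, 0],
    ['alice2', 'mage01', '1.2.3.4', 'uuid-2', 'EU', 80, 0, 0],
]
g = Graph(len(accounts))
g.add_edges(accounts)
g.print_data_matches(accounts)  # reports the two accounts as likely linked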
from selenium.webdriver.firefox.options import Options
from webdriver_manager.firefox import GeckoDriverManager
from seleniumwire import webdriver
from .fetch import fetchUser as fetchUser_
class TwitterUser(object):
# __CONSTRUCTOR
def __init__(self, allowed_connection_retries=20, allowed_parsing_retries=500, headless=True):
# allowed_connection_retries: AMOUNT OF TIMES ALLOWED TO RECOVER FROM AN ERROR DURING THE WEB REQUEST/SESSION.
# # # PRIVATE METHODS/OBJECTS # # #
# FIREFOX OPTIONS
firefox_options = Options()
if headless:
firefox_options.add_argument("--headless")
# # # PUBLIC METHODS/OBJECTS # # #
# WEBDRIVER HARNESS
self.driver = webdriver.Firefox(executable_path=GeckoDriverManager().install(), options=firefox_options)
# ALLOWED CONNECTION RETRIES
self.allowed_connection_retries = allowed_connection_retries
# ALLOWED PARSING RETRIES
self.allowed_parsing_retries = allowed_parsing_retries
# FETCH-USER METHOD
#@classmethod
def fetchUser(self, screen_name):
return fetchUser_(self, screen_name)
| [
"selenium.webdriver.firefox.options.Options",
"webdriver_manager.firefox.GeckoDriverManager"
] | [((581, 590), 'selenium.webdriver.firefox.options.Options', 'Options', ([], {}), '()\n', (588, 590), False, 'from selenium.webdriver.firefox.options import Options\n'), ((825, 845), 'webdriver_manager.firefox.GeckoDriverManager', 'GeckoDriverManager', ([], {}), '()\n', (843, 845), False, 'from webdriver_manager.firefox import GeckoDriverManager\n')] |
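Hypothetical usage; it needs network access, a local Firefox, and the relative .fetch import to resolve, and 'some_handle' is a placeholder screen name:
user = TwitterUser(headless=True)
profile = user.fetchUser('some_handle')
user.driver.quit()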
import cv2 as cv
from os import listdir
from os.path import isfile, join
import json
folders = [
# Img sets to use
"octopus",
"elephant",
"flamingo",
"kangaroo",
"leopards",
"sea_horse"
]
files = [f for f in listdir("./data/camera") if isfile(join("./data/camera", f))] # Get all file names
train_file_names = files[:20] # First 20 is train
test_file_names = files[20:30] # Next 10 is test
def get_train_set():
img_set = [[cv.imread("./data/" + folder + "/" + file_name) for file_name in train_file_names] for folder in folders]
return img_set
def get_test_set():
img_set = [[cv.imread("./data/" + folder + "/" + file_name) for file_name in test_file_names] for folder in folders]
return img_set
def save(file_name, data):
with open("./data/" + file_name, "w+") as f:
f.write(json.dumps(data))
def read(file_name):
with open("./data/" + file_name, "r+") as f:
        return json.loads(f.read())
 | [
"json.dumps",
"os.listdir",
"os.path.join",
"cv2.imread"
] | [((219, 243), 'os.listdir', 'listdir', (['"""./data/camera"""'], {}), "('./data/camera')\n", (226, 243), False, 'from os import listdir\n'), ((254, 278), 'os.path.join', 'join', (['"""./data/camera"""', 'f'], {}), "('./data/camera', f)\n", (258, 278), False, 'from os.path import isfile, join\n'), ((447, 494), 'cv2.imread', 'cv.imread', (["('./data/' + folder + '/' + file_name)"], {}), "('./data/' + folder + '/' + file_name)\n", (456, 494), True, 'import cv2 as cv\n'), ((603, 650), 'cv2.imread', 'cv.imread', (["('./data/' + folder + '/' + file_name)"], {}), "('./data/' + folder + '/' + file_name)\n", (612, 650), True, 'import cv2 as cv\n'), ((808, 824), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (818, 824), False, 'import json\n')] |
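A hedged usage sketch; it expects the ./data/<folder>/ image layout the helpers above assume:
train = get_train_set()
print(len(train), len(train[0]))  # 6 classes x 20 training images each
save('meta.json', {'classes': folders})
print(read('meta.json'))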
from ase import Atoms
from ase.calculators.emt import EMT
from ase.io.trajectory import Trajectory
from ase.io import read
import numpy as np
import pandas as pd
import argparse
import copy
import os
import pdb
import pickle
from model_eval import model_evaluation
from gmp_feature_selection import backward_elimination
def main():
dir_prefix = "/storage/home/hpaceice1/plai30/sandbox"
parallel_workspace = os.path.join(dir_prefix, "pace/parallel_workspace")
OUTPUT_DIR = os.path.join(dir_prefix, "output")
#setup dataset
np.random.seed(3)
distances = np.linspace(2, 5, 500)
images = []
for i in range(len(distances)):
l = distances[i]
image = Atoms(
"CuCO",
[
(-l * np.sin(0.65), l * np.cos(0.65), np.random.uniform(low=-4.0, high=4.0)),
(0, 0, 0),
(l * np.sin(0.65), l * np.cos(0.65), np.random.uniform(low=-4.0, high=4.0))
],
)
image.set_cell([10, 10, 10])
image.wrap(pbc=True)
image.set_calculator(EMT())
images.append(image)
elements = ["Cu","C","O"]
atom_gaussians = {"C": os.path.join(dir_prefix, "config/MCSH_potential/C_coredensity_5.g"),
"O": os.path.join(dir_prefix, "config/MCSH_potential/O_totaldensity_7.g"),
"Cu": os.path.join(dir_prefix, "config/MCSH_potential/Cu_totaldensity_5.g")}
data = model_evaluation.dataset(elements, images, atom_gaussians=atom_gaussians)
#set up evaluation parameters
cutoff = 8
sigmas = (np.logspace(np.log10(0.05), np.log10(1.0), num=5)).tolist()
model_eval_params = model_evaluation.get_model_eval_params(
fp_type="gmp", eval_type="k_fold_cv", eval_num_folds=2, eval_cv_iters=1,
cutoff=cutoff, sigmas=sigmas, nn_layers=3, nn_nodes=20, nn_learning_rate=1e-3,
nn_batch_size=32, nn_epochs=1000)
back_elim = backward_elimination.backward_elimination(data, model_eval_params)
back_elim.run(enable_parallel=True, parallel_workspace=parallel_workspace, seed=1, output_dir=OUTPUT_DIR)
if __name__ == "__main__":
main()
| [
"model_eval.model_evaluation.dataset",
"gmp_feature_selection.backward_elimination.backward_elimination",
"numpy.log10",
"os.path.join",
"model_eval.model_evaluation.get_model_eval_params",
"ase.calculators.emt.EMT",
"numpy.linspace",
"numpy.random.seed",
"numpy.cos",
"numpy.random.uniform",
"nu... | [((419, 470), 'os.path.join', 'os.path.join', (['dir_prefix', '"""pace/parallel_workspace"""'], {}), "(dir_prefix, 'pace/parallel_workspace')\n", (431, 470), False, 'import os\n'), ((488, 522), 'os.path.join', 'os.path.join', (['dir_prefix', '"""output"""'], {}), "(dir_prefix, 'output')\n", (500, 522), False, 'import os\n'), ((547, 564), 'numpy.random.seed', 'np.random.seed', (['(3)'], {}), '(3)\n', (561, 564), True, 'import numpy as np\n'), ((581, 603), 'numpy.linspace', 'np.linspace', (['(2)', '(5)', '(500)'], {}), '(2, 5, 500)\n', (592, 603), True, 'import numpy as np\n'), ((1442, 1515), 'model_eval.model_evaluation.dataset', 'model_evaluation.dataset', (['elements', 'images'], {'atom_gaussians': 'atom_gaussians'}), '(elements, images, atom_gaussians=atom_gaussians)\n', (1466, 1515), False, 'from model_eval import model_evaluation\n'), ((1664, 1901), 'model_eval.model_evaluation.get_model_eval_params', 'model_evaluation.get_model_eval_params', ([], {'fp_type': '"""gmp"""', 'eval_type': '"""k_fold_cv"""', 'eval_num_folds': '(2)', 'eval_cv_iters': '(1)', 'cutoff': 'cutoff', 'sigmas': 'sigmas', 'nn_layers': '(3)', 'nn_nodes': '(20)', 'nn_learning_rate': '(0.001)', 'nn_batch_size': '(32)', 'nn_epochs': '(1000)'}), "(fp_type='gmp', eval_type='k_fold_cv',\n eval_num_folds=2, eval_cv_iters=1, cutoff=cutoff, sigmas=sigmas,\n nn_layers=3, nn_nodes=20, nn_learning_rate=0.001, nn_batch_size=32,\n nn_epochs=1000)\n", (1702, 1901), False, 'from model_eval import model_evaluation\n'), ((1993, 2059), 'gmp_feature_selection.backward_elimination.backward_elimination', 'backward_elimination.backward_elimination', (['data', 'model_eval_params'], {}), '(data, model_eval_params)\n', (2034, 2059), False, 'from gmp_feature_selection import backward_elimination\n'), ((1166, 1233), 'os.path.join', 'os.path.join', (['dir_prefix', '"""config/MCSH_potential/C_coredensity_5.g"""'], {}), "(dir_prefix, 'config/MCSH_potential/C_coredensity_5.g')\n", (1178, 1233), False, 'import os\n'), ((1262, 1330), 'os.path.join', 'os.path.join', (['dir_prefix', '"""config/MCSH_potential/O_totaldensity_7.g"""'], {}), "(dir_prefix, 'config/MCSH_potential/O_totaldensity_7.g')\n", (1274, 1330), False, 'import os\n'), ((1360, 1429), 'os.path.join', 'os.path.join', (['dir_prefix', '"""config/MCSH_potential/Cu_totaldensity_5.g"""'], {}), "(dir_prefix, 'config/MCSH_potential/Cu_totaldensity_5.g')\n", (1372, 1429), False, 'import os\n'), ((1072, 1077), 'ase.calculators.emt.EMT', 'EMT', ([], {}), '()\n', (1075, 1077), False, 'from ase.calculators.emt import EMT\n'), ((1592, 1606), 'numpy.log10', 'np.log10', (['(0.05)'], {}), '(0.05)\n', (1600, 1606), True, 'import numpy as np\n'), ((1608, 1621), 'numpy.log10', 'np.log10', (['(1.0)'], {}), '(1.0)\n', (1616, 1621), True, 'import numpy as np\n'), ((792, 829), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(-4.0)', 'high': '(4.0)'}), '(low=-4.0, high=4.0)\n', (809, 829), True, 'import numpy as np\n'), ((912, 949), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(-4.0)', 'high': '(4.0)'}), '(low=-4.0, high=4.0)\n', (929, 949), True, 'import numpy as np\n'), ((760, 772), 'numpy.sin', 'np.sin', (['(0.65)'], {}), '(0.65)\n', (766, 772), True, 'import numpy as np\n'), ((778, 790), 'numpy.cos', 'np.cos', (['(0.65)'], {}), '(0.65)\n', (784, 790), True, 'import numpy as np\n'), ((880, 892), 'numpy.sin', 'np.sin', (['(0.65)'], {}), '(0.65)\n', (886, 892), True, 'import numpy as np\n'), ((898, 910), 'numpy.cos', 'np.cos', (['(0.65)'], {}), '(0.65)\n', (904, 910), True, 
'import numpy as np\n')] |
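The sigma grid used above, shown standalone: five log-spaced Gaussian widths between 0.05 and 1.0:
import numpy as np

sigmas = (np.logspace(np.log10(0.05), np.log10(1.0), num=5)).tolist()
print(sigmas)  # [0.05, ~0.106, ~0.224, ~0.473, 1.0]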
from cytoolz import assoc
from cytoolz import merge
from functools import partial
from merlin import chipmunk
from merlin import chips
from merlin import dates
from merlin import formats
from merlin import specs
import os
ubids = {'chipmunk-ard': {'reds': ['LC08_SRB4', 'LE07_SRB3', 'LT05_SRB3', 'LT04_SRB3'],
'greens': ['LC08_SRB3', 'LE07_SRB2', 'LT05_SRB2', 'LT04_SRB2'],
'blues': ['LC08_SRB2', 'LE07_SRB1', 'LT05_SRB1', 'LT04_SRB1'],
'nirs': ['LC08_SRB5', 'LE07_SRB4', 'LT05_SRB4', 'LT04_SRB4'],
'swir1s': ['LC08_SRB6', 'LE07_SRB5', 'LT05_SRB5', 'LT04_SRB5'],
'swir2s': ['LC08_SRB7', 'LE07_SRB7', 'LT05_SRB7', 'LT04_SRB7'],
'thermals': ['LC08_BTB10', 'LE07_BTB6', 'LT05_BTB6', 'LT04_BTB6'],
'qas': ['LC08_PIXELQA', 'LE07_PIXELQA', 'LT05_PIXELQA', 'LT04_PIXELQA']},
'chipmunk-aux': {'nlcd': ['AUX_NLCD'],
'nlcdtrn': ['AUX_NLCDTRN'],
'posidex': ['AUX_POSIDEX'],
'mpw': ['AUX_MPW'],
'aspect': ['AUX_ASPECT'],
'slope': ['AUX_SLOPE'],
'dem': ['AUX_DEM']}
}
def profiles(env, profile=None):
"""Retrieve a configuration profile with env applied.
Args:
env (dict): Environment variables
profile (str): Name of profile to load. If no profile is supplied all profiles
are returned.
Returns:
dict: Profile or profiles with env substitutions.
"""
__profiles = {
'chipmunk-ard' : {
'grid_fn': partial(chipmunk.grid,
url=env.get('CHIPMUNK_URL', None),
resource=env.get('CHIPMUNK_GRID_RESOURCE', '/grid')),
'dates_fn': dates.symmetric,
'chips_fn': partial(chipmunk.chips,
url=env.get('CHIPMUNK_URL', None),
resource=env.get('CHIPMUNK_CHIPS_RESOURCE', '/chips')),
'specs_fn': partial(specs.mapped, ubids=ubids['chipmunk-ard']),
'format_fn': formats.pyccd,
'registry_fn': partial(chipmunk.registry,
url=env.get('CHIPMUNK_URL', None),
resource=env.get('CHIPMUNK_REGISTRY_RESOURCE', '/registry')),
'snap_fn': partial(chipmunk.snap,
url=env.get('CHIPMUNK_URL', None),
resource=env.get('CHIPMUNK_SNAP_RESOURCE', '/grid/snap')),
'near_fn': partial(chipmunk.near,
url=env.get('CHIPMUNK_URL', None),
resource=env.get('CHIPMUNK_NEAR_RESOURCE', '/grid/near'))},
'chipmunk-aux' : {
'grid_fn': partial(chipmunk.grid,
url=env.get('CHIPMUNK_URL', None),
resource=env.get('CHIPMUNK_GRID_RESOURCE', '/grid')),
'dates_fn': dates.single,
'chips_fn': partial(chipmunk.chips,
url=env.get('CHIPMUNK_URL', None),
resource=env.get('CHIPMUNK_CHIPS_RESOURCE', '/chips')),
'specs_fn': partial(specs.mapped, ubids=ubids['chipmunk-aux']),
'format_fn': formats.aux,
'registry_fn': partial(chipmunk.registry,
url=env.get('CHIPMUNK_URL', None),
resource=env.get('CHIPMUNK_REGISTRY_RESOURCE', '/registry')),
'snap_fn': partial(chipmunk.snap,
url=env.get('CHIPMUNK_URL', None),
resource=env.get('CHIPMUNK_SNAP_RESOURCE', '/grid/snap')),
'near_fn': partial(chipmunk.near,
url=env.get('CHIPMUNK_URL', None),
resource=env.get('CHIPMUNK_NEAR_RESOURCE', '/grid/near'))},
}
return __profiles.get(profile, None) if profile else __profiles
def get(profile='chipmunk-ard', env=None):
"""Return a configuration profile.
Args:
profile (str): Name of profile.
env (dict): Environment variables to override os.environ
Returns:
dict: A Merlin configuration
"""
p = profiles(env=merge(os.environ, env if env else {}),
profile=profile)
return assoc(p, 'profile', profile)
| [
"cytoolz.merge",
"functools.partial",
"cytoolz.assoc"
] | [((4768, 4796), 'cytoolz.assoc', 'assoc', (['p', '"""profile"""', 'profile'], {}), "(p, 'profile', profile)\n", (4773, 4796), False, 'from cytoolz import assoc\n'), ((2314, 2364), 'functools.partial', 'partial', (['specs.mapped'], {'ubids': "ubids['chipmunk-ard']"}), "(specs.mapped, ubids=ubids['chipmunk-ard'])\n", (2321, 2364), False, 'from functools import partial\n'), ((3574, 3624), 'functools.partial', 'partial', (['specs.mapped'], {'ubids': "ubids['chipmunk-aux']"}), "(specs.mapped, ubids=ubids['chipmunk-aux'])\n", (3581, 3624), False, 'from functools import partial\n'), ((4683, 4720), 'cytoolz.merge', 'merge', (['os.environ', '(env if env else {})'], {}), '(os.environ, env if env else {})\n', (4688, 4720), False, 'from cytoolz import merge\n')] |
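Example of pulling a configured profile with the helper above; the CHIPMUNK_URL value is a placeholder:
cfg = get(profile='chipmunk-ard', env={'CHIPMUNK_URL': 'http://localhost:5656'})
print(cfg['profile'])  # 'chipmunk-ard'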
from vivid.core import BaseBlock, network_hash
def test_network_hash():
a = BaseBlock('a')
b = BaseBlock('b')
assert network_hash(a) != network_hash(b)
assert network_hash(a) == network_hash(a)
c = BaseBlock('c', parent=[a, b])
hash1 = network_hash(c)
a._parent = [BaseBlock('z')]
hash2 = network_hash(c)
assert hash1 != hash2
| [
"vivid.core.network_hash",
"vivid.core.BaseBlock"
] | [((82, 96), 'vivid.core.BaseBlock', 'BaseBlock', (['"""a"""'], {}), "('a')\n", (91, 96), False, 'from vivid.core import BaseBlock, network_hash\n'), ((105, 119), 'vivid.core.BaseBlock', 'BaseBlock', (['"""b"""'], {}), "('b')\n", (114, 119), False, 'from vivid.core import BaseBlock, network_hash\n'), ((221, 250), 'vivid.core.BaseBlock', 'BaseBlock', (['"""c"""'], {'parent': '[a, b]'}), "('c', parent=[a, b])\n", (230, 250), False, 'from vivid.core import BaseBlock, network_hash\n'), ((263, 278), 'vivid.core.network_hash', 'network_hash', (['c'], {}), '(c)\n', (275, 278), False, 'from vivid.core import BaseBlock, network_hash\n'), ((324, 339), 'vivid.core.network_hash', 'network_hash', (['c'], {}), '(c)\n', (336, 339), False, 'from vivid.core import BaseBlock, network_hash\n'), ((131, 146), 'vivid.core.network_hash', 'network_hash', (['a'], {}), '(a)\n', (143, 146), False, 'from vivid.core import BaseBlock, network_hash\n'), ((150, 165), 'vivid.core.network_hash', 'network_hash', (['b'], {}), '(b)\n', (162, 165), False, 'from vivid.core import BaseBlock, network_hash\n'), ((177, 192), 'vivid.core.network_hash', 'network_hash', (['a'], {}), '(a)\n', (189, 192), False, 'from vivid.core import BaseBlock, network_hash\n'), ((196, 211), 'vivid.core.network_hash', 'network_hash', (['a'], {}), '(a)\n', (208, 211), False, 'from vivid.core import BaseBlock, network_hash\n'), ((296, 310), 'vivid.core.BaseBlock', 'BaseBlock', (['"""z"""'], {}), "('z')\n", (305, 310), False, 'from vivid.core import BaseBlock, network_hash\n')] |
from __future__ import absolute_import
from chainer import backend
from chainer import functions as F
from chainer.functions import sigmoid_cross_entropy
from chainer.functions import softmax_cross_entropy
from .sigmoid_soft_cross_entropy import sigmoid_soft_cross_entropy
def noised_softmax_cross_entropy(y, t, mc_iteration,
normalize=True, cache_score=True, class_weight=None,
ignore_label=-1, reduce='mean', enable_double_backprop=False):
""" Softmax Cross-entropy for aleatoric uncertainty estimates.
See: https://arxiv.org/pdf/1703.04977.pdf
Args:
        y (list of ~chainer.Variable): logits and the log of the noise std (log_std)
t (~numpy.ndarray or ~cupy.ndarray): ground-truth
mc_iteration (int): number of iteration of MCMC.
normalize (bool, optional): Defaults to True.
reduce (str, optional): Defaults to 'mean'.
Returns:
[~chainer.Variable]: Loss value.
"""
assert isinstance(y, (list, tuple))
logits, log_std = y
assert logits.shape[0] == log_std.shape[0]
assert log_std.shape[1] in (logits.shape[1], 1)
assert logits.shape[2:] == log_std.shape[2:]
xp = backend.get_array_module(t)
# std = F.sqrt(F.exp(log_var))
std = F.exp(log_std)
loss = 0.
for _ in range(mc_iteration):
noise = std * xp.random.normal(0., 1., std.shape)
loss += softmax_cross_entropy(logits + noise, t,
normalize=False,
cache_score=cache_score,
class_weight=class_weight,
ignore_label=ignore_label,
reduce='no',
enable_double_backprop=enable_double_backprop)
if not reduce == 'mean':
return loss
if normalize:
count = loss.size * mc_iteration
else:
count = max(1, len(loss)) * mc_iteration
return F.sum(loss) / count
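# Note (hedged reading of the loop above): each iteration re-corrupts the logits
# with zero-mean Gaussian noise scaled by exp(log_std), so the accumulated loss is
# a Monte Carlo estimate of mc_iteration * E[CE(logits + noise, t)] before the
# final division by `count` averages it back down.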
def noised_sigmoid_cross_entropy(y, t, mc_iteration, normalize=True, reduce='mean'):
""" Sigmoid Cross-entropy for aleatoric uncertainty estimates.
Args:
        y (list of ~chainer.Variable): logits and the log of the noise std (log_std)
t (~numpy.ndarray or ~cupy.ndarray): ground-truth
mc_iteration (int): number of iteration of MCMC.
normalize (bool, optional): Defaults to True.
reduce (str, optional): Defaults to 'mean'.
Returns:
[~chainer.Variable]: Loss value.
"""
assert isinstance(y, (list, tuple))
logits, log_std = y
assert logits.shape[0] == log_std.shape[0]
assert log_std.shape[1] in (logits.shape[1], 1)
assert logits.shape[2:] == log_std.shape[2:]
assert logits.shape == t.shape
xp = backend.get_array_module(t)
# std = F.sqrt(F.exp(log_var))
std = F.exp(log_std)
loss = 0.
for _ in range(mc_iteration):
noise = std * xp.random.normal(0., 1., std.shape)
loss += sigmoid_cross_entropy(logits + noise, t,
normalize=False,
reduce='no')
if not reduce == 'mean':
return loss
if normalize:
count = loss.size * mc_iteration
else:
count = max(1, len(loss)) * mc_iteration
return F.sum(loss) / count
def noised_sigmoid_soft_cross_entropy(y, t, mc_iteration, normalize=True, reduce='mean'):
""" Sigmoid Soft Cross-entropy for aleatoric uncertainty estimates.
Args:
        y (list of ~chainer.Variable): logits and the log of the noise std (log_std)
t (~numpy.ndarray or ~cupy.ndarray): ground-truth
mc_iteration (int): number of iteration of MCMC.
normalize (bool, optional): Defaults to True.
reduce (str, optional): Defaults to 'mean'.
Returns:
[~chainer.Variable]: Loss value.
"""
assert isinstance(y, (list, tuple))
logits, log_std = y
assert logits.shape == log_std.shape
assert logits.shape == t.shape
xp = backend.get_array_module(t)
# std = F.sqrt(F.exp(log_var))
std = F.exp(log_std)
loss = 0.
for _ in range(mc_iteration):
noise = std * xp.random.normal(0., 1., std.shape)
loss += sigmoid_soft_cross_entropy(logits + noise, t,
normalize=False,
reduce='no')
if not reduce == 'mean':
return loss
if normalize:
count = loss.size * mc_iteration
else:
count = max(1, len(loss)) * mc_iteration
return F.sum(loss) / count
| [
"chainer.functions.exp",
"chainer.functions.sum",
"chainer.functions.softmax_cross_entropy",
"chainer.functions.sigmoid_cross_entropy",
"chainer.backend.get_array_module"
] | [((1201, 1228), 'chainer.backend.get_array_module', 'backend.get_array_module', (['t'], {}), '(t)\n', (1225, 1228), False, 'from chainer import backend\n'), ((1275, 1289), 'chainer.functions.exp', 'F.exp', (['log_std'], {}), '(log_std)\n', (1280, 1289), True, 'from chainer import functions as F\n'), ((2804, 2831), 'chainer.backend.get_array_module', 'backend.get_array_module', (['t'], {}), '(t)\n', (2828, 2831), False, 'from chainer import backend\n'), ((2878, 2892), 'chainer.functions.exp', 'F.exp', (['log_std'], {}), '(log_std)\n', (2883, 2892), True, 'from chainer import functions as F\n'), ((4032, 4059), 'chainer.backend.get_array_module', 'backend.get_array_module', (['t'], {}), '(t)\n', (4056, 4059), False, 'from chainer import backend\n'), ((4106, 4120), 'chainer.functions.exp', 'F.exp', (['log_std'], {}), '(log_std)\n', (4111, 4120), True, 'from chainer import functions as F\n'), ((1414, 1619), 'chainer.functions.softmax_cross_entropy', 'softmax_cross_entropy', (['(logits + noise)', 't'], {'normalize': '(False)', 'cache_score': 'cache_score', 'class_weight': 'class_weight', 'ignore_label': 'ignore_label', 'reduce': '"""no"""', 'enable_double_backprop': 'enable_double_backprop'}), "(logits + noise, t, normalize=False, cache_score=\n cache_score, class_weight=class_weight, ignore_label=ignore_label,\n reduce='no', enable_double_backprop=enable_double_backprop)\n", (1435, 1619), False, 'from chainer.functions import softmax_cross_entropy\n'), ((2020, 2031), 'chainer.functions.sum', 'F.sum', (['loss'], {}), '(loss)\n', (2025, 2031), True, 'from chainer import functions as F\n'), ((3017, 3087), 'chainer.functions.sigmoid_cross_entropy', 'sigmoid_cross_entropy', (['(logits + noise)', 't'], {'normalize': '(False)', 'reduce': '"""no"""'}), "(logits + noise, t, normalize=False, reduce='no')\n", (3038, 3087), False, 'from chainer.functions import sigmoid_cross_entropy\n'), ((3345, 3356), 'chainer.functions.sum', 'F.sum', (['loss'], {}), '(loss)\n', (3350, 3356), True, 'from chainer import functions as F\n'), ((4588, 4599), 'chainer.functions.sum', 'F.sum', (['loss'], {}), '(loss)\n', (4593, 4599), True, 'from chainer import functions as F\n')] |
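A hedged usage sketch of the first helper above — it assumes noised_softmax_cross_entropy is importable from this module and that chainer/numpy are installed; shapes and labels are illustrative:

import numpy as np
import chainer
# noised_softmax_cross_entropy is assumed importable from the module above

logits  = chainer.Variable(np.random.randn(4, 3).astype(np.float32))
log_std = chainer.Variable(np.zeros((4, 3), dtype=np.float32))  # zero noise scale here
t = np.asarray([0, 2, 1, 0], dtype=np.int32)            # integer class labels

loss = noised_softmax_cross_entropy([logits, log_std], t, mc_iteration=10)
print(loss)  # scalar Variable: Monte Carlo estimate of the noised cross-entropy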
#!/usr/bin/python
"""
Run_long_script governs the running of long gazebo_ros_tensorflow simulations.
The core functionality lies in:
1. parsing the correct arguments at different levels (tensorflow dnn, gazebo environment, ros supervision)
  2. handling crashes that occur when, for instance, gazebo or tensorflow fails to start
The script is organized in different steps:
1. Parsing arguments saved in a name space
2. launching ROS and robot related parameters
3. launching tensorflow in machine (docker/singularity/virtualenv) environment
4. launching experiment with potentially autogenerated gazebo world
Exit code:
0) normal exit code
2) tensorflow stopped working
3) communication with logfolder (Opal) is blocked
4) config file is missing
Example usage:
Let behavior arbitration fly with drone through default canyon in singularity environment 1 time while saving images.
python run_script.py --robot drone_sim --fsm oracle_drone_fsm --world canyon --reuse_default_world -n 1 -ds -p params.yaml
Author: <NAME>
Dependecies: simulation_supervised, pilot, klaas_robots
"""
import rospy
from std_srvs.srv import Empty as Emptyservice
from std_srvs.srv import EmptyRequest # for pausing and unpausing physics engine
from geometry_msgs.msg import Pose
from gazebo_msgs.srv import SetModelState
from gazebo_msgs.srv import SetModelStateRequest
from gazebo_msgs.msg import ModelState
import ast # to parse startingpositions as string to list
import sys, os, os.path
import subprocess, shlex
import shutil
import time
import signal
import argparse
import yaml
import fnmatch
import numpy as np
class bcolors:
""" Colors to print in terminal with color!
"""
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
# global variables for Popen objects used for terminating sessions
ros_popen = None
python_popen = None
gazebo_popen = None
crash_number = 0
run_number = 0
def myprint(message):
"""
  Output is not captured on the computing cluster,
  so also write it to logfolder/output.
"""
print(message)
with open(FLAGS.summary_dir+FLAGS.log_tag+'/output','a') as f:
f.write(message+'\n')
# Predefined functions.
def load_param_file(location):
"""Load yaml as dict and change to proper string arguments.
Note that current implementation will by default change both --key True and --key False to --key."""
yaml_dict={}
with open(location, 'r') as stream:
try:
yaml_dict=yaml.load(stream)
except yaml.YAMLError as exc:
myprint(exc)
yaml_str=""
for k in yaml_dict.keys():
if isinstance(yaml_dict[k],bool):
yaml_str = "{0} --{1}".format(yaml_str, k)
else:
yaml_str = "{0} --{1} {2}".format(yaml_str, k, yaml_dict[k])
return yaml_str
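# Worked example (hedged, illustrative yaml): {'evaluate': True, 'lr': 0.1}
# yields " --evaluate --lr 0.1". Boolean values are dropped entirely, so an
# entry "evaluate: False" would *also* be emitted as "--evaluate".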
def wait_for_gazebo():
"""gazebo popen is not enough to get gzserver to stop so wait longer..."""
p_ps = subprocess.Popen(["ps", "-ef"], stdout=subprocess.PIPE)
p_grep = subprocess.Popen(["grep","gz"],stdin=p_ps.stdout, stdout=subprocess.PIPE)
myprint("{0}: wait for gazebo".format(time.strftime("%Y-%m-%d_%I:%M:%S")))
out = p_grep.communicate()[0]
while "gzserver" in out:
p_ps = subprocess.Popen(["ps", "-ef"], stdout=subprocess.PIPE)
p_grep = subprocess.Popen(["grep","gz"],stdin=p_ps.stdout, stdout=subprocess.PIPE)
out = p_grep.communicate()[0]
time.sleep(0.2)
time.sleep(1)
def wait_for_create_dataset():
"""gazebo popen is not enough to get gzserver to stop so wait longer..."""
p_ps = subprocess.Popen(["ps", "-ef"], stdout=subprocess.PIPE)
p_grep = subprocess.Popen(["grep","create_dataset"],stdin=p_ps.stdout, stdout=subprocess.PIPE)
myprint("{0}: wait for create_dataset".format(time.strftime("%Y-%m-%d_%I:%M:%S")))
out = p_grep.communicate()[0]
while "create_dataset" in out:
p_ps = subprocess.Popen(["ps", "-ef"], stdout=subprocess.PIPE)
p_grep = subprocess.Popen(["grep","create_dataset"],stdin=p_ps.stdout, stdout=subprocess.PIPE)
out = p_grep.communicate()[0]
time.sleep(0.2)
def wait_for_ros_to_start():
"""Ros might take some time to start the first time so wait till its well in the ps -ef"""
time.sleep(1)
p_ps = subprocess.call(["rosparam", "list"], stdout=subprocess.PIPE)
while p_ps == 1:
myprint("{0}: wait for ros".format(time.strftime("%Y-%m-%d_%I:%M:%S")))
time.sleep(1)
p_ps = subprocess.call(["rosparam", "list"], stdout=subprocess.PIPE)
def kill_popen(process_name, process_popen):
"""Check status, terminate popen and wait for it to stop."""
myprint("{0}: terminate {1}".format(time.strftime("%Y-%m-%d_%I:%M:%S"), process_name))
if process_popen.poll() == None:
process_popen.terminate()
process_popen.wait()
def kill_combo():
"""kill ros, python and gazebo pids and wait for them to finish"""
global ros_popen, python_popen, gazebo_popen
if gazebo_popen: kill_popen('gazebo', gazebo_popen)
wait_for_gazebo()
if python_popen: kill_popen('python', python_popen)
if ros_popen: kill_popen('ros', ros_popen)
time.sleep(5)
##########################################################################################################################
# STEP 1 Load Parameters
parser = argparse.ArgumentParser(description="""Run_simulation_scripts governs the running of long gazebo_ros_tensorflow simulations.
The core functionality lies in:
1. parsing the correct arguments at different levels (tensorflow dnn, gazebo environment, ros supervision)
    2. handling crashes that occur when, for instance, gazebo or tensorflow fails to start""")
# ==========================
# General Settings
# ==========================
parser.add_argument("--summary_dir", default='tensorflow/log/', type=str, help="Choose the directory to which tensorflow should save the summaries.")
parser.add_argument("--data_root", default='pilot_data/', type=str, help="Choose the directory to which tensorflow should save the summaries.")
parser.add_argument("--code_root", default='~', type=str, help="Choose the directory to which tensorflow should save the summaries.")
parser.add_argument("-t", "--log_tag", default='testing_on_policy', type=str, help="LOGTAG: tag used to name logfolder.")
parser.add_argument("--data_location", default='', type=str, help="Datalocation is by default the log_tag but than in data_root instead of summary_dir, otherwise FLAG should indicate relative path to data_root.")
parser.add_argument("-n", "--number_of_runs", default=-1, type=int, help="NUMBER_OF_RUNS: define the number of runs the robot will be trained/evaluated. n=1 avoids a hard stop after 5minutes.")
parser.add_argument("-g", "--graphics", action='store_true', help="Add extra nodes for visualization e.g.: Gazebo GUI, control display, depth prediction, ...")
parser.add_argument("-e", "--evaluation", action='store_true',help="This script can launch 2 modes of experiments: training (default) or evaluation.")
parser.add_argument("--evaluate_every", default=10, type=int, help="Evaluate every N runs when training.")
parser.add_argument("--final_evaluation_runs", default=5, type=int, help="Evaluate N times after training is finished..")
parser.add_argument("-ds", "--create_dataset", action='store_true',help="In case of True, sensor data is saved.")
parser.add_argument("--owr", action='store_true',help="Delete dataset if it is already there.")
parser.add_argument("--save_only_success", action='store_true',help="In case of True, sensor data is saved.")
parser.add_argument("--random_seed", type=int, help="If provided, the simulation is seeded (as good as possible).")
# ==========================
# Robot Settings
# ==========================
parser.add_argument("--robot",default='drone_sim', type=str, help="Specify the robot configuration file: turtle_sim(default), drone_sim, turtle_real, drone_real.")
parser.add_argument("-r", "--recovery", action='store_true',help="Use drone with recovery camera's attached.")
# ==========================
# Tensorflow Settings
# ==========================
parser.add_argument("-m","--checkpoint_path", type=str, help="Specify the directory of the checkpoint of the earlier trained model.")
parser.add_argument("-pe","--python_environment",default='sing', type=str, help="Define which environment should be loaded in shell when launching tensorlfow. Possibilities: sing, docker, virtualenv.")
parser.add_argument("-pp","--python_project",default='pytorch_pilot/pilot', type=str, help="Define which python module should be started with ~/tenorflow/PROJECT_NAME/main.py: q-learning/pilot, pilot/pilot, ddpg, ....")
# ==========================
# Environment Settings
# ==========================
parser.add_argument("--auto_go", action='store_true',help="Publish /go signal after few launching gazebo to start experiment automatically")
parser.add_argument("--reuse_default_world", action='store_true',help="reuse the default forest/canyon/sandbox instead of generating them on the fly.")
parser.add_argument("--one_world", action='store_true',help="Reuse one world to train in over and over again.")
parser.add_argument("-w","--world",dest='worlds', action='append', nargs=1, help="Define different worlds: corridor, canyon, forest, sandbox, esat_v1, esat_v2, ... .")
# parser.add_argument("-p","--paramfile",default='eva_params.yaml',type=str, help="Add more parameters to the command loading the DNN in tensorflow ex: eva_params.yaml or params.yaml.")
parser.add_argument("--fsm",default='nn_drone_fsm',type=str, help="Define the fsm loaded from /simsup/config/fsm: nn_turtle_fsm, console_fsm, console_nn_db_turtle_fsm, ...")
parser.add_argument("--x_pos",default=999,type=float, help="Specify x position.")
parser.add_argument("--x_var",default=0,type=float, help="Specify variation in x position.")
parser.add_argument("--y_pos",default=999,type=float, help="Specify y position.")
parser.add_argument("--y_var",default=0,type=float, help="Specify variation in y position.")
parser.add_argument("--z_pos",default=999,type=float, help="Specify z position.")
parser.add_argument("--z_var",default=0,type=float, help="Specify variation z position.")
parser.add_argument("--yaw_or",default=999,type=float, help="Specify yaw orientation.")
# parser.add_argument("--yaw_var",default=2*3.14,type=float, help="Specify variation in yaw orientation.")
parser.add_argument("--yaw_var",default=0,type=float, help="Specify variation in yaw orientation.")
FLAGS, others = parser.parse_known_args()
# FLAGS=parser.parse_args()
# get simulation_supervised dir
simulation_supervised_dir=subprocess.check_output(shlex.split("rospack find simulation_supervised"))[:-1]
# 3 main directories have to be defined in order to make it also runnable from a read-only system-installed singularity image.
if FLAGS.summary_dir[0] != '/': # 1. Tensorflow log directory for saving tensorflow logs and xterm logs
FLAGS.summary_dir=os.environ['HOME']+'/'+FLAGS.summary_dir
if FLAGS.data_root[0] != '/': # 2. Pilot_data directory for saving data
FLAGS.data_root=os.environ['HOME']+'/'+FLAGS.data_root
if FLAGS.code_root == '~': # 3. location for tensorflow code (and also catkin workspace though they are found with rospack)
#no explicit directory for code is set so try to parse first from environment
try:
FLAGS.code_root = os.environ['CODE']
except KeyError: # in case environment variable is not set, take home dir
FLAGS.code_root = os.environ['HOME']
if FLAGS.log_tag == 'testing_on_policy':
if os.path.isdir(FLAGS.summary_dir+FLAGS.log_tag): shutil.rmtree(FLAGS.summary_dir+FLAGS.log_tag)
if os.path.isdir(FLAGS.data_root+FLAGS.log_tag): shutil.rmtree(FLAGS.data_root+FLAGS.log_tag)
# add default values to be able to operate
if FLAGS.worlds == None : FLAGS.worlds=['canyon']
else: #worlds are appended in a nested list... so get them out.
worlds=[]
for w in FLAGS.worlds: worlds.append(w[0])
FLAGS.worlds = worlds[:]
# FLAGS.params=load_param_file(FLAGS.paramfile) if FLAGS.paramfile else ""
FLAGS.params=others[:]
if FLAGS.random_seed:
np.random.seed(FLAGS.random_seed)
FLAGS.params.append('--random_seed '+str(FLAGS.random_seed))
# check if robot configuration exists is there:
if not os.path.isfile(simulation_supervised_dir+'/config/robot/'+FLAGS.robot+'.yaml'):
myprint("Could not find robot configuration for {}".format(w[0]))
sys.exit(4)
# try to extract condor host
try:
FLAGS.condor_host=subprocess.check_output(shlex.split("cat $_CONDOR_JOB_AD | grep RemoteHost | head -1 | cut -d '=' -f 2 | cut -d '@' -f 2 | cut -d '.' -f 1)"))
except:
FLAGS.condor_host='unknown_host'
# Clear log folder if desired
if FLAGS.owr and os.path.isdir("{0}{1}".format(FLAGS.summary_dir, FLAGS.log_tag)):
shutil.rmtree("{0}{1}".format(FLAGS.summary_dir, FLAGS.log_tag))
# Create main log folder if necessary
if not os.path.isdir("{0}{1}".format(FLAGS.summary_dir, FLAGS.log_tag)):
os.makedirs("{0}{1}".format(FLAGS.summary_dir, FLAGS.log_tag))
else:
  # Load the last position to start from if the log folder already exists and contains last_position.txt
if os.path.isfile("{0}{1}/last_position.txt".format(FLAGS.summary_dir, FLAGS.log_tag)):
try:
with open("{0}{1}/last_position.txt".format(FLAGS.summary_dir, FLAGS.log_tag),'r') as f:
last_position=f.readlines()
FLAGS.x_pos,FLAGS.y_pos,FLAGS.z_pos,FLAGS.yaw_or= [ float(x) for x in last_position[-1].strip().split(',')]
myprint("[run_script] obtained last position as {0} {1} {2} {3}".format(FLAGS.x_pos,FLAGS.y_pos,FLAGS.z_pos,FLAGS.yaw_or))
except:
myprint("[run_script] failed to obtain last position from {0}{1}/last_position.txt".format(FLAGS.summary_dir, FLAGS.log_tag))
# in case of data_creation, make data_location in ~/pilot_data
if FLAGS.create_dataset:
if FLAGS.data_location == "":
FLAGS.data_location = "{0}{1}".format(FLAGS.data_root, FLAGS.log_tag)
else:
FLAGS.data_location = "{0}{1}".format(FLAGS.data_root, FLAGS.data_location)
if os.path.isdir(FLAGS.data_location) and (FLAGS.number_of_runs == 1 or FLAGS.owr):
shutil.rmtree(FLAGS.data_location)
if not os.path.isdir(FLAGS.data_location):
os.makedirs(FLAGS.data_location)
else:
# check number of items already recorded
if len(os.listdir(FLAGS.data_location)) >= 1:
# in case there is already data recorded, parse the number of runs and continue from there
last_run=sorted([d for d in os.listdir(FLAGS.data_location) if os.path.isdir("{0}/{1}".format(FLAGS.data_location,d))])[-1]
run_number=int(last_run.split('_')[0]) +1 #assuming number occurs at first 5 digits xxxxx_name_of_data
myprint("Found data from previous run so adjusted run_number to {}".format(run_number))
# display and save all settings
myprint("\nSettings:")
for f in sorted(FLAGS.__dict__): myprint("{0}: {1}".format( f, FLAGS.__dict__[f]))
with open("{0}{1}/run_conf".format(FLAGS.summary_dir, FLAGS.log_tag),'w') as c:
c.write("Settings of Run_simulation_scripts:\n\n")
for f in FLAGS.__dict__: c.write("{0}: {1}\n".format(f, FLAGS.__dict__[f]))
##########################################################################################################################
# STEP 2 Start ROS with ROBOT specific parameters
# ensure location for logging the xterm outputs exists.
ros_xterm_log_dir="{0}{1}/xterm_ros".format(FLAGS.summary_dir,FLAGS.log_tag)
if not os.path.isdir(ros_xterm_log_dir): os.makedirs(ros_xterm_log_dir)
def start_ros():
"""Start ros core with robot parameters loaded"""
global ros_popen
command="roslaunch simulation_supervised load_params.launch robot_config:={0}.yaml {1}".format(FLAGS.robot, 'random_seed:='+str(FLAGS.random_seed) if FLAGS.random_seed else '')
if os.path.isfile(simulation_supervised_dir+'/config/environment/'+worlds[0]+'.yaml'):
command="{0} world_config:={1}".format(command, simulation_supervised_dir+'/config/environment/'+worlds[0]+'.yaml')
xterm_log_file='{0}/xterm_ros_{1}.txt'.format(ros_xterm_log_dir,time.strftime("%Y-%m-%d_%I%M"))
if os.path.isfile(xterm_log_file): os.remove(xterm_log_file)
args = shlex.split("xterm -iconic -l -lf {0} -hold -e {1}".format(xterm_log_file,command))
ros_popen = subprocess.Popen(args)
pid_ros = ros_popen.pid
myprint("{0}: start_ros pid {1}\n".format(time.strftime("%Y-%m-%d_%I:%M:%S"),pid_ros))
wait_for_ros_to_start()
rospy.set_param('evaluate_every',FLAGS.evaluate_every if not FLAGS.evaluation else 1)
rospy.set_param('recovery',FLAGS.recovery)
start_ros()
##########################################################################################################################
# STEP 3 Start tensorflow
python_xterm_log_dir="{0}{1}/xterm_python".format(FLAGS.summary_dir,FLAGS.log_tag)
if not os.path.isdir(python_xterm_log_dir): os.makedirs(python_xterm_log_dir)
def start_python():
"""Function that initializes python code."""
# delete default test folder
  # if the logdir already exists, the condor job was probably restarted somewhere, so reuse the last saved q when training
global python_popen
# Add parameters
FLAGS.log_folder = "{0}{1}".format(FLAGS.summary_dir,FLAGS.log_tag)
FLAGS.params.append("--log_tag {0}".format(FLAGS.log_tag))
if not '--on_policy' in FLAGS.params: FLAGS.params.append("--on_policy")
if FLAGS.checkpoint_path: FLAGS.params.append("--checkpoint_path {0}".format(FLAGS.checkpoint_path))
# Create command
params=""
for p in FLAGS.params: params="{0} {1}".format(params,p)
command="{0}/scripts/launch_python/{1}.sh {2}/tensorflow/{3}/main.py {4}".format(simulation_supervised_dir,
FLAGS.python_environment,
FLAGS.code_root,
FLAGS.python_project,
params)
myprint("Tensorflow command:\n {}".format(command))
xterm_log_file='{0}/xterm_python_{1}.txt'.format(python_xterm_log_dir,time.strftime("%Y-%m-%d_%I%M"))
if os.path.isfile(xterm_log_file): os.remove(xterm_log_file)
args = shlex.split("xterm -l -lf {0} -hold -e {1}".format(xterm_log_file, command))
# Execute command
python_popen = subprocess.Popen(args)
pid_python = python_popen.pid
myprint("{0}: start_python pid {1} \n\n".format(time.strftime("%Y-%m-%d_%I:%M:%S"),pid_python))
# Wait for creation of tensorflow log file to know the python node is running
start_time = time.time()
wait_time=10
if os.path.isfile(FLAGS.log_folder+'/nn_ready'):
prev_stat_nn_ready=subprocess.check_output(shlex.split("stat -c %Y "+FLAGS.log_folder+'/nn_ready'))
while prev_stat_nn_ready == subprocess.check_output(shlex.split("stat -c %Y "+FLAGS.log_folder+'/nn_ready')):
if time.time()-start_time > wait_time*60:
myprint("{0}: Waited for {3}minutes on nn_ready in {2} to start, seems like tensorflow has crashed on {1} so exit with error code 2.".format(time.strftime("%Y-%m-%d_%I:%M"), FLAGS.condor_host, FLAGS.log_folder, wait_time))
kill_combo()
sys.exit(2)
time.sleep(1)
else:
while(not os.path.isfile(FLAGS.log_folder+'/nn_ready')):
time.sleep(1)
if time.time()-start_time > wait_time*60:
myprint("{0}: Waited for {3}minutes on nn_ready in {2} to start, seems like tensorflow has crashed on {1} so exit with error code 2.".format(time.strftime("%Y-%m-%d_%I:%M"), FLAGS.condor_host, FLAGS.log_folder, wait_time))
kill_combo()
sys.exit(2)
start_python()
myprint("[runscript] set recovery to {0}".format(rospy.get_param('recovery')))
##########################################################################################################################
# STEP 4 Start gazebo environment
def create_environment(run_number, world_name):
"""Call correct python script for generating potentially new environment.
Returns a string with arguments for the launch file to be concatenated to the launch command.
"""
# generate world if it is possible and allowed, this also changes the loaded world file location from the default simsup_demo/worlds to log_folder
world_file=''
world_config=''
background=''
# don't create a new world if one_world is on
if FLAGS.one_world and run_number > 0: return ''
if world_name in ['canyon', 'forest', 'sandbox'] and not FLAGS.reuse_default_world:
generator_file="{0}/python/generators/{1}_generator.py".format(subprocess.check_output(shlex.split("rospack find simulation_supervised_tools"))[:-1],world_name)
subprocess.Popen(shlex.split("python "+generator_file+" "+FLAGS.log_folder)).wait()
background=FLAGS.log_folder+'/'+world_name+'.png'
world_file=FLAGS.log_folder+'/'+world_name+'.world'
elif world_name in ['canyon', 'corridor', 'different_corridor'] and FLAGS.reuse_default_world:
# reuse default 10 evaluation canyons or corridors
world_file='{0}/../simulation_supervised_demo/worlds/{2}_evaluation/{1:05d}_{2}.world'.format(simulation_supervised_dir,run_number%10, world_name)
background='{0}/../simulation_supervised_demo/worlds/{2}_evaluation/{1:05d}_{2}.png'.format(simulation_supervised_dir,run_number%10, world_name)
if 'corridor' in world_name:
command="{0} world_config:={1}/config/environment/{2:05d}_{3}.yaml".format(command, simulation_supervised_dir,run_number%10, world_name)
elif world_name in ['corridor'] and not FLAGS.reuse_default_world:
generator_file="{0}/python/generators/world_generator.py".format(subprocess.check_output(shlex.split("rospack find simulation_supervised_tools"))[:-1])
generator_command="python "+generator_file+" --output_dir "+FLAGS.log_folder+" --output_file "+world_name+"_"+str(run_number)
for p in others: generator_command="{0} {1}".format(generator_command, p)
print("[runscript] Generate command: {0}".format(generator_command))
return_val=subprocess.call(shlex.split(generator_command))
if return_val != 0:
kill_combo()
myprint("Failed to create env {0}, return value: {1}".format(world_name, return_val))
sys.exit(2)
world_file=FLAGS.log_folder+'/'+world_name+"_"+str(run_number)+'.world'
world_config=FLAGS.log_folder+'/'+world_name+"_"+str(run_number)+'.yaml'
arguments='world_name:='+world_name
for arg in ["world_file", "world_config", "background"]:
if len(eval(arg)) != 0: arguments=arguments+" "+arg+":="+eval(arg)
return arguments
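# Example (hedged): for a freshly generated canyon this returns roughly
#   "world_name:=canyon world_file:=<log_folder>/canyon.world background:=<log_folder>/canyon.png"
# which is spliced into the roslaunch command assembled in step 4.2.2 below.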
def sample_new_position(starting_positions=[]):
""" Parse a new x,y,z,yaw(quaternion) pose for the robot given the world name and current robot
returns positions: x, y, z and orientation yaw in quaternion (1 ~ +90)
"""
# default with arguments
x, y, z, yaw = 0,0,0,0
if len(starting_positions) != 0:
pos = starting_positions[np.random.choice(range(len(starting_positions)))]
if len(pos) == 2:
x, y = pos
elif len(pos) == 3:
x, y, yaw = pos
elif len(pos) == 4:
x, y, z, yaw = pos
else:
myprint("[run_script] failed to parse starting_position {0}".format(pos))
# overwrite sampled starting positions if they were manually set
if FLAGS.x_pos != 999: x=FLAGS.x_pos
if FLAGS.y_pos != 999: y=FLAGS.y_pos
if FLAGS.z_pos != 999: z=FLAGS.z_pos
if FLAGS.yaw_or != 999: yaw=FLAGS.yaw_or
# add some variation
x += np.random.uniform(-FLAGS.x_var,FLAGS.x_var)
y += np.random.uniform(-FLAGS.y_var,FLAGS.y_var)
z += np.random.uniform(-FLAGS.z_var,FLAGS.z_var)
yaw += np.random.uniform(-FLAGS.yaw_var,FLAGS.yaw_var)
return x, y, z, yaw
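# Worked example (hedged): with starting_positions=[[1.0, 2.0]] and all *_var
# flags left at 0, this returns (1.0, 2.0, 0, 0); explicit --x_pos/--y_pos/
# --z_pos/--yaw_or flags override the sampled values before the jitter is added.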
# ensure location for logging the xterm outputs exists.
gazebo_xterm_log_dir="{0}{1}/xterm_gazebo".format(FLAGS.summary_dir,FLAGS.log_tag)
if not os.path.isdir(gazebo_xterm_log_dir): os.makedirs(gazebo_xterm_log_dir)
# Some local variables for running different simulations
prev_environment_arguments=''
reset_gazebo_service=rospy.ServiceProxy('/gazebo/reset_simulation',Emptyservice)
model_state_gazebo_service=rospy.ServiceProxy('/gazebo/set_model_state',SetModelState)
unpause_physics_client=rospy.ServiceProxy('/gazebo/unpause_physics',Emptyservice)
gazebo_popen=None
prev_stat_nn_log=''
prev_stat_fsm_log=''
fsm_file = FLAGS.log_folder+'/fsm_log'
if not os.path.isfile(fsm_file):
with open(fsm_file,'a') as f:
f.write('{0}: {1}\n'.format(time.strftime("%Y-%m-%d_%I-%M-%S"), FLAGS.log_folder))
crashed=False
while (run_number < FLAGS.number_of_runs) or FLAGS.number_of_runs==-1:
######################################
# 4.1 Prepare Run
world_name = FLAGS.worlds[run_number%len(FLAGS.worlds)]
# save current status of NN nn_ready to compare afterwards
if os.path.isfile(FLAGS.log_folder+'/nn_ready'):
prev_stat_nn_log=subprocess.check_output(shlex.split("stat -c %Y "+FLAGS.log_folder+'/nn_ready'))
  else: # we have lost communication with our log folder so exit with code 3
myprint("{2}: lost communication with our log folder {0} on host {1} so exit with code 3.".format(FLAGS.log_folder, FLAGS.condor_host, time.strftime("%Y-%m-%d_%I:%M:%S")))
kill_combo()
sys.exit(3)
# clean up gazebo ros folder every now and then
if run_number%50 == 0 : shutil.rmtree("{0}/.gazebo/log".format(os.environ['HOME']),ignore_errors=True)
evaluate=((run_number%FLAGS.evaluate_every) == 0 and run_number != 0 and FLAGS.evaluate_every != -1) or FLAGS.evaluation
# if evaluate:
# rospy.set_param('max_duration', 120)
# else:
# rospy.set_param('max_duration', 5)
new_environment_arguments=create_environment(run_number, world_name)
######################################
# 4.2 Create environment and perform next run
if rospy.has_param('/starting_positions'):
starting_positions = rospy.get_param('starting_positions')
if isinstance(starting_positions,str):
starting_positions=ast.literal_eval(starting_positions)
else:
starting_positions = []
if (new_environment_arguments == prev_environment_arguments or len(new_environment_arguments) == 0) and not crashed and gazebo_popen != None:
# 4.2.1 Reset environment for next run if possible
# 4.2.1a Ensure correct settings
rospy.set_param('/evaluate',evaluate)
# 4.2.1b Reset environment ==> causes gt_node to freeze for more than a minute...
# reset_gazebo_service(EmptyRequest())
# 4.2.1c Change position of drone according to new selected starting position
pose=Pose()
pose.position.x, pose.position.y, starting_height, yaw = sample_new_position(starting_positions)
# pose.position.x, pose.position.y, starting_height, yaw=0,0,1,0
myprint("[run_script]: x: {0}, y: {1}, z: {2}, yaw:{3}".format(pose.position.x, pose.position.y, starting_height, yaw))
      # yaw to quaternion (note: this uses sin(yaw)/cos(yaw); the standard half-angle form is sin(yaw/2)/cos(yaw/2)):
pose.orientation.z=np.sin(yaw)
pose.orientation.w=np.cos(yaw)
pose.position.z = 0.1
model_state = ModelState()
model_state.model_name = 'quadrotor' if FLAGS.robot.startswith('drone') else 'turtlebot3_burger'
model_state.pose=pose
state_request = SetModelStateRequest()
state_request.model_state = model_state
retvals = model_state_gazebo_service(state_request)
rospy.set_param('starting_height', starting_height)
myprint("Changed pose with return values: {0}".format(retvals))
time.sleep(5) # HAS to be 5 otherwise '/overtake' and '/ready' overlap resulting in empty images in gt_listener
unpause_physics_client(EmptyRequest())
else:
# 4.2.2 Launch Gazebo again
# 4.2.2a Ensure previous Gazebo is not running anymore
if gazebo_popen!=None:
kill_popen('gazebo', gazebo_popen)
wait_for_gazebo()
prev_environment_arguments = new_environment_arguments
# 4.2.2b Build command with correct settings
# remove if saving location already exists (probably due to crash previously)
if FLAGS.create_dataset:
data_location="{0}/{1:05d}_{2}".format(FLAGS.data_location,run_number,world_name)
if os.path.isdir(data_location): shutil.rmtree(data_location)
os.makedirs(data_location)
new_environment_arguments+=" save_images:=true"
new_environment_arguments+=" data_location:={0}".format(data_location)
if 'world_file' in new_environment_arguments:
world_file=[a for a in new_environment_arguments.split(' ') if 'world_file' in a][0].split(':=')[1]
myprint("[runscript] world_file {0}".format(world_file))
shutil.copyfile(world_file, data_location+'/'+os.path.basename(world_file))
x,y,z,yaw=sample_new_position(starting_positions)
# x,y,z,yaw=-54, -4, 1, -3.14
command="roslaunch simulation_supervised_demo {0}.launch fsm_config:={1} log_folder:={2} evaluate:={3} {4} graphics:={5} x:={6} y:={7} Yspawned:={9} starting_height:={8} {10}".format(FLAGS.robot,
FLAGS.fsm,
FLAGS.log_folder,
'true' if evaluate else 'false',
new_environment_arguments,
'true' if FLAGS.graphics else 'false',
x,y,z,yaw,
'random_seed:='+str(FLAGS.random_seed) if FLAGS.random_seed else '')
# 4.2.2c Launch command
# Execute command
myprint( "gazebo_command: {0}".format(command))
xterm_log_file='{0}/xterm_gazebo_{1}.txt'.format(gazebo_xterm_log_dir,time.strftime("%Y-%m-%d_%I-%M-%S"))
args = shlex.split("xterm -iconic -l -lf {0} -hold -e {1}".format(xterm_log_file,command))
gazebo_popen = subprocess.Popen(args)
pid_gazebo = gazebo_popen.pid
######################################
# 4.3 Wait for run to finish
  # at this moment the run has not crashed (yet).
crashed=False
crash_checked=False
#print starting positions for visualizing later.
with open(FLAGS.log_folder+'/starting_positions.txt','a') as f:
f.write('{0}, {1}, {2}\n'.format(x,y,yaw))
prev_stat_fsm_log=subprocess.check_output(shlex.split("stat -c %Y "+fsm_file))
time.sleep(0.1)
myprint("\n{0}: started run {1} of the {2} in {4} {3} {5}".format(time.strftime("%Y-%m-%d_%I:%M:%S"),
run_number+1,
FLAGS.number_of_runs,
world_name,
bcolors.OKBLUE,
bcolors.ENDC))
start_time=time.time()
time_spend=0
# while fsm_file has not been updated, wait...
while prev_stat_fsm_log == subprocess.check_output(shlex.split("stat -c %Y "+fsm_file)):
# Check on job suspension:
# if between last update and now has been more than 30 seconds (should be less than 0.1s)
if time.time() - start_time - time_spend > 30:
myprint("{0}: Job got suspended.".format(time.strftime("%Y-%m-%d_%I:%M:%S")))
time.sleep(30) #wait for big tick to update
start_time=time.time()
else:
time_spend=time.time() - start_time
# automatically start with /go after 10s
if FLAGS.auto_go:
if 10.05 <= time_spend<10.15:
go_popen=subprocess.Popen(shlex.split("rostopic pub /go std_msgs/Empty"))
elif 11.15 <= time_spend < 11.25 and go_popen.poll()==None:
kill_popen('go', go_popen)
# if False:
# if time_spend > 60*10 and FLAGS.number_of_runs != 1: #don't interupt if this is a single run
if time_spend > 5 and not crash_checked:
crash_checked = True
# check for crash
with open(xterm_log_file, 'r') as f:
for l in f.readlines():
if 'process has died' in l:
myprint("[run_script] {0}: found gz crash in {1}: {2}.".format(time.strftime("%Y-%m-%d_%I:%M:%S"), os.path.basename(xterm_log_file),l[:50]))
crashed=True
crash_number+=1
if crashed:
      if crash_number < 10: # after 10 crashes it is time to restart everything
kill_popen('gazebo', gazebo_popen)
else:
myprint("{0}: crashed for 10the time so restart everything.".format(time.strftime("%Y-%m-%d_%I:%M:%S")))
kill_combo()
start_ros()
start_python()
crash_number = 0
break # get out of this loop
time.sleep(0.1)
######################################
# 4.4 Clean up run
# 4.4.1 Wait for NN framework if it is running
if not crashed and 'nn' in FLAGS.fsm:
# wait for nn_ready and stop in case of no tensorflow communication
if os.path.isfile(FLAGS.log_folder+'/nn_ready'):
current_stat=subprocess.check_output(shlex.split("stat -c %Y "+FLAGS.log_folder+'/nn_ready'))
start_time=time.time()
myprint("{0}: waiting for nn_ready.".format(time.strftime("%Y-%m-%d_%I:%M:%S")))
while current_stat == prev_stat_nn_log:
current_stat=subprocess.check_output(shlex.split("stat -c %Y "+FLAGS.log_folder+'/nn_ready'))
time.sleep(1)
if time.time()-start_time > 8*60:
myprint("{0}: waited for 8minutes on nn_ready to finish training so something went wrong on {1} exit with code 2.".format(time.strftime("%Y-%m-%d_%I:%M:%S"), FLAGS.condor_host))
kill_combo()
sys.exit(2)
else:
myprint("{2}: we have lost communication with our log folder {0} on host {1} so exit with code 3.".format(FLAGS.log_folder, FLAGS.condor_host, time.strftime("%Y-%m-%d_%I:%M:%S")))
kill_combo()
sys.exit(3)
if not crashed:
message = open(fsm_file,'r').readlines()[-1].strip()
myprint("{0}: ended run {1} with {3}{2}{4}".format(time.strftime("%Y-%m-%d_%I:%M:%S"), run_number+1, message, bcolors.OKGREEN if 'success' in message else bcolors.FAIL, bcolors.ENDC))
# increment also in case of crash as drone has zero turning speed:
run_number+=1
if message == 'FINISHED': # make this the final run for evaluation
FLAGS.number_of_runs=run_number+FLAGS.final_evaluation_runs
FLAGS.evaluation=True
# run_number = FLAGS.number_of_runs-1
time.sleep(3)
# extra second needed to save image in gt_listener
# after all required runs are finished
kill_combo()
myprint("\n{0}: done.".format(time.strftime("%Y-%m-%d_%I:%M:%S")))
| [
"shlex.split",
"yaml.load",
"time.sleep",
"sys.exit",
"numpy.sin",
"geometry_msgs.msg.Pose",
"os.remove",
"os.listdir",
"argparse.ArgumentParser",
"subprocess.Popen",
"rospy.ServiceProxy",
"os.path.isdir",
"subprocess.call",
"numpy.random.seed",
"gazebo_msgs.srv.SetModelStateRequest",
... | [((5271, 5653), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run_simulation_scripts governs the running of long gazebo_ros_tensorflow simulations.\n The core functionality lies in:\n 1. parsing the correct arguments at different levels (tensorflow dnn, gazebo environment, ros supervision)\n 2. different crash handling when for instance starting gazebo / tensorflow fails"""'}), '(description=\n """Run_simulation_scripts governs the running of long gazebo_ros_tensorflow simulations.\n The core functionality lies in:\n 1. parsing the correct arguments at different levels (tensorflow dnn, gazebo environment, ros supervision)\n 2. different crash handling when for instance starting gazebo / tensorflow fails"""\n )\n', (5294, 5653), False, 'import argparse\n'), ((24021, 24081), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""/gazebo/reset_simulation"""', 'Emptyservice'], {}), "('/gazebo/reset_simulation', Emptyservice)\n", (24039, 24081), False, 'import rospy\n'), ((24108, 24168), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""/gazebo/set_model_state"""', 'SetModelState'], {}), "('/gazebo/set_model_state', SetModelState)\n", (24126, 24168), False, 'import rospy\n'), ((24191, 24250), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""/gazebo/unpause_physics"""', 'Emptyservice'], {}), "('/gazebo/unpause_physics', Emptyservice)\n", (24209, 24250), False, 'import rospy\n'), ((2953, 3008), 'subprocess.Popen', 'subprocess.Popen', (["['ps', '-ef']"], {'stdout': 'subprocess.PIPE'}), "(['ps', '-ef'], stdout=subprocess.PIPE)\n", (2969, 3008), False, 'import subprocess, shlex\n'), ((3020, 3095), 'subprocess.Popen', 'subprocess.Popen', (["['grep', 'gz']"], {'stdin': 'p_ps.stdout', 'stdout': 'subprocess.PIPE'}), "(['grep', 'gz'], stdin=p_ps.stdout, stdout=subprocess.PIPE)\n", (3036, 3095), False, 'import subprocess, shlex\n'), ((3440, 3453), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3450, 3453), False, 'import time\n'), ((3574, 3629), 'subprocess.Popen', 'subprocess.Popen', (["['ps', '-ef']"], {'stdout': 'subprocess.PIPE'}), "(['ps', '-ef'], stdout=subprocess.PIPE)\n", (3590, 3629), False, 'import subprocess, shlex\n'), ((3641, 3733), 'subprocess.Popen', 'subprocess.Popen', (["['grep', 'create_dataset']"], {'stdin': 'p_ps.stdout', 'stdout': 'subprocess.PIPE'}), "(['grep', 'create_dataset'], stdin=p_ps.stdout, stdout=\n subprocess.PIPE)\n", (3657, 3733), False, 'import subprocess, shlex\n'), ((4224, 4237), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4234, 4237), False, 'import time\n'), ((4247, 4308), 'subprocess.call', 'subprocess.call', (["['rosparam', 'list']"], {'stdout': 'subprocess.PIPE'}), "(['rosparam', 'list'], stdout=subprocess.PIPE)\n", (4262, 4308), False, 'import subprocess, shlex\n'), ((5095, 5108), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (5105, 5108), False, 'import time\n'), ((11536, 11584), 'os.path.isdir', 'os.path.isdir', (['(FLAGS.summary_dir + FLAGS.log_tag)'], {}), '(FLAGS.summary_dir + FLAGS.log_tag)\n', (11549, 11584), False, 'import sys, os, os.path\n'), ((11640, 11686), 'os.path.isdir', 'os.path.isdir', (['(FLAGS.data_root + FLAGS.log_tag)'], {}), '(FLAGS.data_root + FLAGS.log_tag)\n', (11653, 11686), False, 'import sys, os, os.path\n'), ((12099, 12132), 'numpy.random.seed', 'np.random.seed', (['FLAGS.random_seed'], {}), '(FLAGS.random_seed)\n', (12113, 12132), True, 'import numpy as np\n'), ((12252, 12340), 'os.path.isfile', 'os.path.isfile', (["(simulation_supervised_dir + '/config/robot/' + FLAGS.robot + 
'.yaml')"], {}), "(simulation_supervised_dir + '/config/robot/' + FLAGS.robot +\n '.yaml')\n", (12266, 12340), False, 'import sys, os, os.path\n'), ((12402, 12413), 'sys.exit', 'sys.exit', (['(4)'], {}), '(4)\n', (12410, 12413), False, 'import sys, os, os.path\n'), ((15421, 15453), 'os.path.isdir', 'os.path.isdir', (['ros_xterm_log_dir'], {}), '(ros_xterm_log_dir)\n', (15434, 15453), False, 'import sys, os, os.path\n'), ((15455, 15485), 'os.makedirs', 'os.makedirs', (['ros_xterm_log_dir'], {}), '(ros_xterm_log_dir)\n', (15466, 15485), False, 'import sys, os, os.path\n'), ((15759, 15852), 'os.path.isfile', 'os.path.isfile', (["(simulation_supervised_dir + '/config/environment/' + worlds[0] + '.yaml')"], {}), "(simulation_supervised_dir + '/config/environment/' + worlds[\n 0] + '.yaml')\n", (15773, 15852), False, 'import sys, os, os.path\n'), ((16066, 16096), 'os.path.isfile', 'os.path.isfile', (['xterm_log_file'], {}), '(xterm_log_file)\n', (16080, 16096), False, 'import sys, os, os.path\n'), ((16231, 16253), 'subprocess.Popen', 'subprocess.Popen', (['args'], {}), '(args)\n', (16247, 16253), False, 'import subprocess, shlex\n'), ((16397, 16488), 'rospy.set_param', 'rospy.set_param', (['"""evaluate_every"""', '(FLAGS.evaluate_every if not FLAGS.evaluation else 1)'], {}), "('evaluate_every', FLAGS.evaluate_every if not FLAGS.\n evaluation else 1)\n", (16412, 16488), False, 'import rospy\n'), ((16487, 16530), 'rospy.set_param', 'rospy.set_param', (['"""recovery"""', 'FLAGS.recovery'], {}), "('recovery', FLAGS.recovery)\n", (16502, 16530), False, 'import rospy\n'), ((16785, 16820), 'os.path.isdir', 'os.path.isdir', (['python_xterm_log_dir'], {}), '(python_xterm_log_dir)\n', (16798, 16820), False, 'import sys, os, os.path\n'), ((16822, 16855), 'os.makedirs', 'os.makedirs', (['python_xterm_log_dir'], {}), '(python_xterm_log_dir)\n', (16833, 16855), False, 'import sys, os, os.path\n'), ((18186, 18216), 'os.path.isfile', 'os.path.isfile', (['xterm_log_file'], {}), '(xterm_log_file)\n', (18200, 18216), False, 'import sys, os, os.path\n'), ((18367, 18389), 'subprocess.Popen', 'subprocess.Popen', (['args'], {}), '(args)\n', (18383, 18389), False, 'import subprocess, shlex\n'), ((18615, 18626), 'time.time', 'time.time', ([], {}), '()\n', (18624, 18626), False, 'import time\n'), ((18648, 18694), 'os.path.isfile', 'os.path.isfile', (["(FLAGS.log_folder + '/nn_ready')"], {}), "(FLAGS.log_folder + '/nn_ready')\n", (18662, 18694), False, 'import sys, os, os.path\n'), ((23467, 23511), 'numpy.random.uniform', 'np.random.uniform', (['(-FLAGS.x_var)', 'FLAGS.x_var'], {}), '(-FLAGS.x_var, FLAGS.x_var)\n', (23484, 23511), True, 'import numpy as np\n'), ((23518, 23562), 'numpy.random.uniform', 'np.random.uniform', (['(-FLAGS.y_var)', 'FLAGS.y_var'], {}), '(-FLAGS.y_var, FLAGS.y_var)\n', (23535, 23562), True, 'import numpy as np\n'), ((23569, 23613), 'numpy.random.uniform', 'np.random.uniform', (['(-FLAGS.z_var)', 'FLAGS.z_var'], {}), '(-FLAGS.z_var, FLAGS.z_var)\n', (23586, 23613), True, 'import numpy as np\n'), ((23622, 23670), 'numpy.random.uniform', 'np.random.uniform', (['(-FLAGS.yaw_var)', 'FLAGS.yaw_var'], {}), '(-FLAGS.yaw_var, FLAGS.yaw_var)\n', (23639, 23670), True, 'import numpy as np\n'), ((23841, 23876), 'os.path.isdir', 'os.path.isdir', (['gazebo_xterm_log_dir'], {}), '(gazebo_xterm_log_dir)\n', (23854, 23876), False, 'import sys, os, os.path\n'), ((23878, 23911), 'os.makedirs', 'os.makedirs', (['gazebo_xterm_log_dir'], {}), '(gazebo_xterm_log_dir)\n', (23889, 23911), False, 'import sys, os, 
os.path\n'), ((24356, 24380), 'os.path.isfile', 'os.path.isfile', (['fsm_file'], {}), '(fsm_file)\n', (24370, 24380), False, 'import sys, os, os.path\n'), ((24779, 24825), 'os.path.isfile', 'os.path.isfile', (["(FLAGS.log_folder + '/nn_ready')"], {}), "(FLAGS.log_folder + '/nn_ready')\n", (24793, 24825), False, 'import sys, os, os.path\n'), ((25779, 25817), 'rospy.has_param', 'rospy.has_param', (['"""/starting_positions"""'], {}), "('/starting_positions')\n", (25794, 25817), False, 'import rospy\n'), ((29998, 30013), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (30008, 30013), False, 'import time\n'), ((30502, 30513), 'time.time', 'time.time', ([], {}), '()\n', (30511, 30513), False, 'import time\n'), ((34050, 34063), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (34060, 34063), False, 'import time\n'), ((3241, 3296), 'subprocess.Popen', 'subprocess.Popen', (["['ps', '-ef']"], {'stdout': 'subprocess.PIPE'}), "(['ps', '-ef'], stdout=subprocess.PIPE)\n", (3257, 3296), False, 'import subprocess, shlex\n'), ((3310, 3385), 'subprocess.Popen', 'subprocess.Popen', (["['grep', 'gz']"], {'stdin': 'p_ps.stdout', 'stdout': 'subprocess.PIPE'}), "(['grep', 'gz'], stdin=p_ps.stdout, stdout=subprocess.PIPE)\n", (3326, 3385), False, 'import subprocess, shlex\n'), ((3422, 3437), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3432, 3437), False, 'import time\n'), ((3888, 3943), 'subprocess.Popen', 'subprocess.Popen', (["['ps', '-ef']"], {'stdout': 'subprocess.PIPE'}), "(['ps', '-ef'], stdout=subprocess.PIPE)\n", (3904, 3943), False, 'import subprocess, shlex\n'), ((3957, 4049), 'subprocess.Popen', 'subprocess.Popen', (["['grep', 'create_dataset']"], {'stdin': 'p_ps.stdout', 'stdout': 'subprocess.PIPE'}), "(['grep', 'create_dataset'], stdin=p_ps.stdout, stdout=\n subprocess.PIPE)\n", (3973, 4049), False, 'import subprocess, shlex\n'), ((4081, 4096), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (4091, 4096), False, 'import time\n'), ((4408, 4421), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4418, 4421), False, 'import time\n'), ((4433, 4494), 'subprocess.call', 'subprocess.call', (["['rosparam', 'list']"], {'stdout': 'subprocess.PIPE'}), "(['rosparam', 'list'], stdout=subprocess.PIPE)\n", (4448, 4494), False, 'import subprocess, shlex\n'), ((10640, 10689), 'shlex.split', 'shlex.split', (['"""rospack find simulation_supervised"""'], {}), "('rospack find simulation_supervised')\n", (10651, 10689), False, 'import subprocess, shlex\n'), ((11584, 11632), 'shutil.rmtree', 'shutil.rmtree', (['(FLAGS.summary_dir + FLAGS.log_tag)'], {}), '(FLAGS.summary_dir + FLAGS.log_tag)\n', (11597, 11632), False, 'import shutil\n'), ((11686, 11732), 'shutil.rmtree', 'shutil.rmtree', (['(FLAGS.data_root + FLAGS.log_tag)'], {}), '(FLAGS.data_root + FLAGS.log_tag)\n', (11699, 11732), False, 'import shutil\n'), ((12493, 12620), 'shlex.split', 'shlex.split', (['"""cat $_CONDOR_JOB_AD | grep RemoteHost | head -1 | cut -d \'=\' -f 2 | cut -d \'@\' -f 2 | cut -d \'.\' -f 1)"""'], {}), '(\n "cat $_CONDOR_JOB_AD | grep RemoteHost | head -1 | cut -d \'=\' -f 2 | cut -d \'@\' -f 2 | cut -d \'.\' -f 1)"\n )\n', (12504, 12620), False, 'import subprocess, shlex\n'), ((14019, 14053), 'os.path.isdir', 'os.path.isdir', (['FLAGS.data_location'], {}), '(FLAGS.data_location)\n', (14032, 14053), False, 'import sys, os, os.path\n'), ((14104, 14138), 'shutil.rmtree', 'shutil.rmtree', (['FLAGS.data_location'], {}), '(FLAGS.data_location)\n', (14117, 14138), False, 'import shutil\n'), ((14148, 14182), 
'os.path.isdir', 'os.path.isdir', (['FLAGS.data_location'], {}), '(FLAGS.data_location)\n', (14161, 14182), False, 'import sys, os, os.path\n'), ((14188, 14220), 'os.makedirs', 'os.makedirs', (['FLAGS.data_location'], {}), '(FLAGS.data_location)\n', (14199, 14220), False, 'import sys, os, os.path\n'), ((16029, 16059), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I%M"""'], {}), "('%Y-%m-%d_%I%M')\n", (16042, 16059), False, 'import time\n'), ((16098, 16123), 'os.remove', 'os.remove', (['xterm_log_file'], {}), '(xterm_log_file)\n', (16107, 16123), False, 'import sys, os, os.path\n'), ((18149, 18179), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I%M"""'], {}), "('%Y-%m-%d_%I%M')\n", (18162, 18179), False, 'import time\n'), ((18218, 18243), 'os.remove', 'os.remove', (['xterm_log_file'], {}), '(xterm_log_file)\n', (18227, 18243), False, 'import sys, os, os.path\n'), ((19727, 19754), 'rospy.get_param', 'rospy.get_param', (['"""recovery"""'], {}), "('recovery')\n", (19742, 19754), False, 'import rospy\n'), ((25201, 25212), 'sys.exit', 'sys.exit', (['(3)'], {}), '(3)\n', (25209, 25212), False, 'import sys, os, os.path\n'), ((25844, 25881), 'rospy.get_param', 'rospy.get_param', (['"""starting_positions"""'], {}), "('starting_positions')\n", (25859, 25881), False, 'import rospy\n'), ((26264, 26302), 'rospy.set_param', 'rospy.set_param', (['"""/evaluate"""', 'evaluate'], {}), "('/evaluate', evaluate)\n", (26279, 26302), False, 'import rospy\n'), ((26532, 26538), 'geometry_msgs.msg.Pose', 'Pose', ([], {}), '()\n', (26536, 26538), False, 'from geometry_msgs.msg import Pose\n'), ((26911, 26922), 'numpy.sin', 'np.sin', (['yaw'], {}), '(yaw)\n', (26917, 26922), True, 'import numpy as np\n'), ((26946, 26957), 'numpy.cos', 'np.cos', (['yaw'], {}), '(yaw)\n', (26952, 26957), True, 'import numpy as np\n'), ((27002, 27014), 'gazebo_msgs.msg.ModelState', 'ModelState', ([], {}), '()\n', (27012, 27014), False, 'from gazebo_msgs.msg import ModelState\n'), ((27162, 27184), 'gazebo_msgs.srv.SetModelStateRequest', 'SetModelStateRequest', ([], {}), '()\n', (27182, 27184), False, 'from gazebo_msgs.srv import SetModelStateRequest\n'), ((27289, 27340), 'rospy.set_param', 'rospy.set_param', (['"""starting_height"""', 'starting_height'], {}), "('starting_height', starting_height)\n", (27304, 27340), False, 'import rospy\n'), ((27418, 27431), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (27428, 27431), False, 'import time\n'), ((29528, 29550), 'subprocess.Popen', 'subprocess.Popen', (['args'], {}), '(args)\n', (29544, 29550), False, 'import subprocess, shlex\n'), ((29959, 29996), 'shlex.split', 'shlex.split', (["('stat -c %Y ' + fsm_file)"], {}), "('stat -c %Y ' + fsm_file)\n", (29970, 29996), False, 'import subprocess, shlex\n'), ((32299, 32314), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (32309, 32314), False, 'import time\n'), ((32546, 32592), 'os.path.isfile', 'os.path.isfile', (["(FLAGS.log_folder + '/nn_ready')"], {}), "(FLAGS.log_folder + '/nn_ready')\n", (32560, 32592), False, 'import sys, os, os.path\n'), ((34201, 34235), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (34214, 34235), False, 'import time\n'), ((2547, 2564), 'yaml.load', 'yaml.load', (['stream'], {}), '(stream)\n', (2556, 2564), False, 'import yaml\n'), ((3134, 3168), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (3147, 3168), False, 'import time\n'), ((3775, 3809), 'time.strftime', 'time.strftime', 
(['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (3788, 3809), False, 'import time\n'), ((4642, 4676), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (4655, 4676), False, 'import time\n'), ((16324, 16358), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (16337, 16358), False, 'import time\n'), ((18472, 18506), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (18485, 18506), False, 'import time\n'), ((18741, 18800), 'shlex.split', 'shlex.split', (["('stat -c %Y ' + FLAGS.log_folder + '/nn_ready')"], {}), "('stat -c %Y ' + FLAGS.log_folder + '/nn_ready')\n", (18752, 18800), False, 'import subprocess, shlex\n'), ((19238, 19251), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (19248, 19251), False, 'import time\n'), ((19274, 19320), 'os.path.isfile', 'os.path.isfile', (["(FLAGS.log_folder + '/nn_ready')"], {}), "(FLAGS.log_folder + '/nn_ready')\n", (19288, 19320), False, 'import sys, os, os.path\n'), ((19327, 19340), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (19337, 19340), False, 'import time\n'), ((24870, 24929), 'shlex.split', 'shlex.split', (["('stat -c %Y ' + FLAGS.log_folder + '/nn_ready')"], {}), "('stat -c %Y ' + FLAGS.log_folder + '/nn_ready')\n", (24881, 24929), False, 'import subprocess, shlex\n'), ((25950, 25986), 'ast.literal_eval', 'ast.literal_eval', (['starting_positions'], {}), '(starting_positions)\n', (25966, 25986), False, 'import ast\n'), ((27557, 27571), 'std_srvs.srv.EmptyRequest', 'EmptyRequest', ([], {}), '()\n', (27569, 27571), False, 'from std_srvs.srv import EmptyRequest\n'), ((28088, 28116), 'os.path.isdir', 'os.path.isdir', (['data_location'], {}), '(data_location)\n', (28101, 28116), False, 'import sys, os, os.path\n'), ((28153, 28179), 'os.makedirs', 'os.makedirs', (['data_location'], {}), '(data_location)\n', (28164, 28179), False, 'import sys, os, os.path\n'), ((29378, 29412), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I-%M-%S"""'], {}), "('%Y-%m-%d_%I-%M-%S')\n", (29391, 29412), False, 'import time\n'), ((30082, 30116), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (30095, 30116), False, 'import time\n'), ((30631, 30668), 'shlex.split', 'shlex.split', (["('stat -c %Y ' + fsm_file)"], {}), "('stat -c %Y ' + fsm_file)\n", (30642, 30668), False, 'import subprocess, shlex\n'), ((30936, 30950), 'time.sleep', 'time.sleep', (['(30)'], {}), '(30)\n', (30946, 30950), False, 'import time\n'), ((30997, 31008), 'time.time', 'time.time', ([], {}), '()\n', (31006, 31008), False, 'import time\n'), ((32709, 32720), 'time.time', 'time.time', ([], {}), '()\n', (32718, 32720), False, 'import time\n'), ((33474, 33485), 'sys.exit', 'sys.exit', (['(3)'], {}), '(3)\n', (33482, 33485), False, 'import sys, os, os.path\n'), ((4367, 4401), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (4380, 4401), False, 'import time\n'), ((14285, 14316), 'os.listdir', 'os.listdir', (['FLAGS.data_location'], {}), '(FLAGS.data_location)\n', (14295, 14316), False, 'import sys, os, os.path\n'), ((18854, 18913), 'shlex.split', 'shlex.split', (["('stat -c %Y ' + FLAGS.log_folder + '/nn_ready')"], {}), "('stat -c %Y ' + FLAGS.log_folder + '/nn_ready')\n", (18865, 18913), False, 'import subprocess, shlex\n'), ((19220, 19231), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (19228, 19231), False, 'import sys, os, os.path\n'), ((19649, 
19660), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (19657, 19660), False, 'import sys, os, os.path\n'), ((24447, 24481), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I-%M-%S"""'], {}), "('%Y-%m-%d_%I-%M-%S')\n", (24460, 24481), False, 'import time\n'), ((25143, 25177), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (25156, 25177), False, 'import time\n'), ((28118, 28146), 'shutil.rmtree', 'shutil.rmtree', (['data_location'], {}), '(data_location)\n', (28131, 28146), False, 'import shutil\n'), ((31036, 31047), 'time.time', 'time.time', ([], {}), '()\n', (31045, 31047), False, 'import time\n'), ((32635, 32694), 'shlex.split', 'shlex.split', (["('stat -c %Y ' + FLAGS.log_folder + '/nn_ready')"], {}), "('stat -c %Y ' + FLAGS.log_folder + '/nn_ready')\n", (32646, 32694), False, 'import subprocess, shlex\n'), ((32964, 32977), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (32974, 32977), False, 'import time\n'), ((33616, 33650), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (33629, 33650), False, 'import time\n'), ((18921, 18932), 'time.time', 'time.time', ([], {}), '()\n', (18930, 18932), False, 'import time\n'), ((19350, 19361), 'time.time', 'time.time', ([], {}), '()\n', (19359, 19361), False, 'import time\n'), ((20620, 20675), 'shlex.split', 'shlex.split', (['"""rospack find simulation_supervised_tools"""'], {}), "('rospack find simulation_supervised_tools')\n", (20631, 20675), False, 'import subprocess, shlex\n'), ((20715, 20779), 'shlex.split', 'shlex.split', (["('python ' + generator_file + ' ' + FLAGS.log_folder)"], {}), "('python ' + generator_file + ' ' + FLAGS.log_folder)\n", (20726, 20779), False, 'import subprocess, shlex\n'), ((22057, 22087), 'shlex.split', 'shlex.split', (['generator_command'], {}), '(generator_command)\n', (22068, 22087), False, 'import subprocess, shlex\n'), ((22230, 22241), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (22238, 22241), False, 'import sys, os, os.path\n'), ((30802, 30813), 'time.time', 'time.time', ([], {}), '()\n', (30811, 30813), False, 'import time\n'), ((30893, 30927), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (30906, 30927), False, 'import time\n'), ((31200, 31246), 'shlex.split', 'shlex.split', (['"""rostopic pub /go std_msgs/Empty"""'], {}), "('rostopic pub /go std_msgs/Empty')\n", (31211, 31246), False, 'import subprocess, shlex\n'), ((32771, 32805), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (32784, 32805), False, 'import time\n'), ((32899, 32958), 'shlex.split', 'shlex.split', (["('stat -c %Y ' + FLAGS.log_folder + '/nn_ready')"], {}), "('stat -c %Y ' + FLAGS.log_folder + '/nn_ready')\n", (32910, 32958), False, 'import subprocess, shlex\n'), ((33241, 33252), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (33249, 33252), False, 'import sys, os, os.path\n'), ((33412, 33446), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (33425, 33446), False, 'import time\n'), ((19109, 19140), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M"""'], {}), "('%Y-%m-%d_%I:%M')\n", (19122, 19140), False, 'import time\n'), ((19538, 19569), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M"""'], {}), "('%Y-%m-%d_%I:%M')\n", (19551, 19569), False, 'import time\n'), ((28590, 28618), 'os.path.basename', 'os.path.basename', (['world_file'], {}), '(world_file)\n', (28606, 28618), False, 
'import sys, os, os.path\n'), ((32989, 33000), 'time.time', 'time.time', ([], {}), '()\n', (32998, 33000), False, 'import time\n'), ((14455, 14486), 'os.listdir', 'os.listdir', (['FLAGS.data_location'], {}), '(FLAGS.data_location)\n', (14465, 14486), False, 'import sys, os, os.path\n'), ((21682, 21737), 'shlex.split', 'shlex.split', (['"""rospack find simulation_supervised_tools"""'], {}), "('rospack find simulation_supervised_tools')\n", (21693, 21737), False, 'import subprocess, shlex\n'), ((32124, 32158), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (32137, 32158), False, 'import time\n'), ((33152, 33186), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (33165, 33186), False, 'import time\n'), ((31753, 31787), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d_%I:%M:%S"""'], {}), "('%Y-%m-%d_%I:%M:%S')\n", (31766, 31787), False, 'import time\n'), ((31789, 31821), 'os.path.basename', 'os.path.basename', (['xterm_log_file'], {}), '(xterm_log_file)\n', (31805, 31821), False, 'import sys, os, os.path\n')] |
from rubrik_polaris import PolarisClient
domain = 'my-company'
username = '<EMAIL>'
password = '<PASSWORD>'
client = PolarisClient(domain, username, password, insecure=True)
print(client.get_storage_object_ids_ebs(tags = {"Class": "Management"}))
| [
"rubrik_polaris.PolarisClient"
] | [((122, 178), 'rubrik_polaris.PolarisClient', 'PolarisClient', (['domain', 'username', 'password'], {'insecure': '(True)'}), '(domain, username, password, insecure=True)\n', (135, 178), False, 'from rubrik_polaris import PolarisClient\n')] |
import torch
from torch.utils.data import DataLoader
from src import qmodel, rmodel
from src.loss import IQALoss, PredictLoss
from src.framework import IQAnModel, RecognitionModel, TripRecognitionModel
from src.dataset import get_eye_dataset, EyePairDataset, FaceDataset
def set_r_model(config):
if 'r_model_name' in config:
model_name = config['r_model_name']
else:
model_name = config['model_name'].split('_')[-1]
# model
if model_name.lower() == 'maxout':
model = rmodel.Maxout(num_classes=config['num_classes'])
elif model_name.lower() == 'maxout_o':
model = rmodel.MaxoutO(num_classes=config['num_classes'])
elif model_name.lower() == 'nlightcnn':
model = rmodel.LightCNN(num_classes=config['num_classes'], norm=True)
elif model_name.lower() == 'lightcnn':
model = rmodel.LightCNN(num_classes=config['num_classes'], norm=False)
elif model_name.lower() == 'embedding':
model = rmodel.Embedding(num_classes=config['num_classes'])
elif model_name.lower() == 'vninet':
model = rmodel.VniNet(num_classes=config['num_classes'])
elif model_name.lower() == 'resnet18':
model = rmodel.Resnet18(num_classes=config['num_classes'],
norm=False,
pretrained=config['pretrained'])
elif model_name.lower() == 'resnet18n':
model = rmodel.Resnet18(num_classes=config['num_classes'],
norm=True,
pretrained=config['pretrained'])
elif model_name.lower() == 'vgg11bn':
model = rmodel.VGG11BN(num_classes=config['num_classes'],
pretrained=config['pretrained'])
else:
raise ValueError('Unsupported model: ' + model_name)
# criterion
criterion = PredictLoss(loss_type=config['rec_loss'])
if config['rec_loss'] == 'triplet':
model = TripRecognitionModel(model, criterion)
else:
model = RecognitionModel(model, criterion)
return model
def set_q_model(config):
if 'q_model_name' in config:
model_name = config['q_model_name']
else:
model_name = config['model_name'].split('_')[0]
# model
if model_name.lower() == 'resunet':
model = qmodel.ResUnet()
elif model_name.lower() == 'unet':
model = qmodel.Unet()
else:
raise ValueError('Unsupported model: ' + model_name)
# criterion
criterion = IQALoss(loss_type=config['qua_loss'], alpha=config['alpha'])
model = IQAnModel(model, criterion)
return model
def set_eye_dataloaders(config, mode='rtrain', pdfs=None):
if config['qua_loss'] == 'triplet':
train_data = EyePairDataset(dataset=config['dataset'],
mode=mode,
less_data=config['less_data'],
dfs=pdfs,
weight=config['weight'])
else:
train_data, num_classes = get_eye_dataset(
datasets=config['dataset'],
mode=mode,
less_data=config['less_data'],
dfs=pdfs,
weight=config['weight'])
train_data_loader = DataLoader(train_data,
config['r_batchsize'],
drop_last=True,
shuffle=True,
pin_memory=True,
num_workers=config['num_workers'])
val_data, num_classes = get_eye_dataset(datasets=config['dataset'],
mode='val',
less_data=config['less_data'])
val_data_loader = DataLoader(val_data,
config['r_batchsize'],
shuffle=True,
drop_last=True,
pin_memory=True,
num_workers=config['num_workers'])
# num_classes = train_data.num_classes
return (train_data_loader, val_data_loader), num_classes
def set_face_dataloaders(config, mode='qtrain', dfs=(None, None)):  # tuple default avoids the mutable-default-argument pitfall
train_data = FaceDataset(
dataset=config['dataset'],
mode=mode,
less_data=config['less_data'],
dfs=dfs[0],
)
train_data_loader = DataLoader(train_data,
config['q_batchsize'],
drop_last=True,
shuffle=True,
pin_memory=True,
num_workers=config['num_workers'])
val_data = FaceDataset(
dataset=config['dataset'],
mode='val',
less_data=config['less_data'],
dfs=dfs[1],
)
val_data_loader = DataLoader(val_data,
config['q_batchsize'],
shuffle=True,
drop_last=True,
pin_memory=True,
num_workers=config['num_workers'])
return (train_data_loader, val_data_loader) | [
"src.rmodel.VniNet",
"src.rmodel.VGG11BN",
"src.loss.IQALoss",
"src.dataset.FaceDataset",
"src.framework.RecognitionModel",
"src.qmodel.ResUnet",
"src.rmodel.LightCNN",
"src.rmodel.Resnet18",
"src.qmodel.Unet",
"src.rmodel.Embedding",
"src.rmodel.Maxout",
"src.loss.PredictLoss",
"src.dataset... | [((1845, 1886), 'src.loss.PredictLoss', 'PredictLoss', ([], {'loss_type': "config['rec_loss']"}), "(loss_type=config['rec_loss'])\n", (1856, 1886), False, 'from src.loss import IQALoss, PredictLoss\n'), ((2490, 2550), 'src.loss.IQALoss', 'IQALoss', ([], {'loss_type': "config['qua_loss']", 'alpha': "config['alpha']"}), "(loss_type=config['qua_loss'], alpha=config['alpha'])\n", (2497, 2550), False, 'from src.loss import IQALoss, PredictLoss\n'), ((2563, 2590), 'src.framework.IQAnModel', 'IQAnModel', (['model', 'criterion'], {}), '(model, criterion)\n', (2572, 2590), False, 'from src.framework import IQAnModel, RecognitionModel, TripRecognitionModel\n'), ((3244, 3375), 'torch.utils.data.DataLoader', 'DataLoader', (['train_data', "config['r_batchsize']"], {'drop_last': '(True)', 'shuffle': '(True)', 'pin_memory': '(True)', 'num_workers': "config['num_workers']"}), "(train_data, config['r_batchsize'], drop_last=True, shuffle=True,\n pin_memory=True, num_workers=config['num_workers'])\n", (3254, 3375), False, 'from torch.utils.data import DataLoader\n'), ((3575, 3666), 'src.dataset.get_eye_dataset', 'get_eye_dataset', ([], {'datasets': "config['dataset']", 'mode': '"""val"""', 'less_data': "config['less_data']"}), "(datasets=config['dataset'], mode='val', less_data=config[\n 'less_data'])\n", (3590, 3666), False, 'from src.dataset import get_eye_dataset, EyePairDataset, FaceDataset\n'), ((3772, 3901), 'torch.utils.data.DataLoader', 'DataLoader', (['val_data', "config['r_batchsize']"], {'shuffle': '(True)', 'drop_last': '(True)', 'pin_memory': '(True)', 'num_workers': "config['num_workers']"}), "(val_data, config['r_batchsize'], shuffle=True, drop_last=True,\n pin_memory=True, num_workers=config['num_workers'])\n", (3782, 3901), False, 'from torch.utils.data import DataLoader\n'), ((4254, 4351), 'src.dataset.FaceDataset', 'FaceDataset', ([], {'dataset': "config['dataset']", 'mode': 'mode', 'less_data': "config['less_data']", 'dfs': 'dfs[0]'}), "(dataset=config['dataset'], mode=mode, less_data=config[\n 'less_data'], dfs=dfs[0])\n", (4265, 4351), False, 'from src.dataset import get_eye_dataset, EyePairDataset, FaceDataset\n'), ((4410, 4541), 'torch.utils.data.DataLoader', 'DataLoader', (['train_data', "config['q_batchsize']"], {'drop_last': '(True)', 'shuffle': '(True)', 'pin_memory': '(True)', 'num_workers': "config['num_workers']"}), "(train_data, config['q_batchsize'], drop_last=True, shuffle=True,\n pin_memory=True, num_workers=config['num_workers'])\n", (4420, 4541), False, 'from torch.utils.data import DataLoader\n'), ((4728, 4826), 'src.dataset.FaceDataset', 'FaceDataset', ([], {'dataset': "config['dataset']", 'mode': '"""val"""', 'less_data': "config['less_data']", 'dfs': 'dfs[1]'}), "(dataset=config['dataset'], mode='val', less_data=config[\n 'less_data'], dfs=dfs[1])\n", (4739, 4826), False, 'from src.dataset import get_eye_dataset, EyePairDataset, FaceDataset\n'), ((4883, 5012), 'torch.utils.data.DataLoader', 'DataLoader', (['val_data', "config['q_batchsize']"], {'shuffle': '(True)', 'drop_last': '(True)', 'pin_memory': '(True)', 'num_workers': "config['num_workers']"}), "(val_data, config['q_batchsize'], shuffle=True, drop_last=True,\n pin_memory=True, num_workers=config['num_workers'])\n", (4893, 5012), False, 'from torch.utils.data import DataLoader\n'), ((511, 559), 'src.rmodel.Maxout', 'rmodel.Maxout', ([], {'num_classes': "config['num_classes']"}), "(num_classes=config['num_classes'])\n", (524, 559), False, 'from src import qmodel, rmodel\n'), ((1943, 1981), 
'src.framework.TripRecognitionModel', 'TripRecognitionModel', (['model', 'criterion'], {}), '(model, criterion)\n', (1963, 1981), False, 'from src.framework import IQAnModel, RecognitionModel, TripRecognitionModel\n'), ((2008, 2042), 'src.framework.RecognitionModel', 'RecognitionModel', (['model', 'criterion'], {}), '(model, criterion)\n', (2024, 2042), False, 'from src.framework import IQAnModel, RecognitionModel, TripRecognitionModel\n'), ((2300, 2316), 'src.qmodel.ResUnet', 'qmodel.ResUnet', ([], {}), '()\n', (2314, 2316), False, 'from src import qmodel, rmodel\n'), ((2731, 2854), 'src.dataset.EyePairDataset', 'EyePairDataset', ([], {'dataset': "config['dataset']", 'mode': 'mode', 'less_data': "config['less_data']", 'dfs': 'pdfs', 'weight': "config['weight']"}), "(dataset=config['dataset'], mode=mode, less_data=config[\n 'less_data'], dfs=pdfs, weight=config['weight'])\n", (2745, 2854), False, 'from src.dataset import get_eye_dataset, EyePairDataset, FaceDataset\n'), ((3038, 3163), 'src.dataset.get_eye_dataset', 'get_eye_dataset', ([], {'datasets': "config['dataset']", 'mode': 'mode', 'less_data': "config['less_data']", 'dfs': 'pdfs', 'weight': "config['weight']"}), "(datasets=config['dataset'], mode=mode, less_data=config[\n 'less_data'], dfs=pdfs, weight=config['weight'])\n", (3053, 3163), False, 'from src.dataset import get_eye_dataset, EyePairDataset, FaceDataset\n'), ((619, 668), 'src.rmodel.MaxoutO', 'rmodel.MaxoutO', ([], {'num_classes': "config['num_classes']"}), "(num_classes=config['num_classes'])\n", (633, 668), False, 'from src import qmodel, rmodel\n'), ((2372, 2385), 'src.qmodel.Unet', 'qmodel.Unet', ([], {}), '()\n', (2383, 2385), False, 'from src import qmodel, rmodel\n'), ((729, 790), 'src.rmodel.LightCNN', 'rmodel.LightCNN', ([], {'num_classes': "config['num_classes']", 'norm': '(True)'}), "(num_classes=config['num_classes'], norm=True)\n", (744, 790), False, 'from src import qmodel, rmodel\n'), ((850, 912), 'src.rmodel.LightCNN', 'rmodel.LightCNN', ([], {'num_classes': "config['num_classes']", 'norm': '(False)'}), "(num_classes=config['num_classes'], norm=False)\n", (865, 912), False, 'from src import qmodel, rmodel\n'), ((973, 1024), 'src.rmodel.Embedding', 'rmodel.Embedding', ([], {'num_classes': "config['num_classes']"}), "(num_classes=config['num_classes'])\n", (989, 1024), False, 'from src import qmodel, rmodel\n'), ((1082, 1130), 'src.rmodel.VniNet', 'rmodel.VniNet', ([], {'num_classes': "config['num_classes']"}), "(num_classes=config['num_classes'])\n", (1095, 1130), False, 'from src import qmodel, rmodel\n'), ((1190, 1290), 'src.rmodel.Resnet18', 'rmodel.Resnet18', ([], {'num_classes': "config['num_classes']", 'norm': '(False)', 'pretrained': "config['pretrained']"}), "(num_classes=config['num_classes'], norm=False, pretrained=\n config['pretrained'])\n", (1205, 1290), False, 'from src import qmodel, rmodel\n'), ((1410, 1509), 'src.rmodel.Resnet18', 'rmodel.Resnet18', ([], {'num_classes': "config['num_classes']", 'norm': '(True)', 'pretrained': "config['pretrained']"}), "(num_classes=config['num_classes'], norm=True, pretrained=\n config['pretrained'])\n", (1425, 1509), False, 'from src import qmodel, rmodel\n'), ((1627, 1714), 'src.rmodel.VGG11BN', 'rmodel.VGG11BN', ([], {'num_classes': "config['num_classes']", 'pretrained': "config['pretrained']"}), "(num_classes=config['num_classes'], pretrained=config[\n 'pretrained'])\n", (1641, 1714), False, 'from src import qmodel, rmodel\n')] |
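# A minimal wiring sketch for the builders in the record above; every config
# value is an illustrative assumption (in particular the loss-type strings,
# which must match whatever IQALoss/PredictLoss actually accept):
#
#     config = {
#         'model_name': 'resunet_lightcnn', 'num_classes': 100,
#         'rec_loss': 'softmax', 'qua_loss': 'l1', 'alpha': 0.5,
#         'dataset': 'some_dataset', 'less_data': False, 'weight': None,
#         'pretrained': False, 'r_batchsize': 32, 'q_batchsize': 32,
#         'num_workers': 4,
#     }
#     q_model = set_q_model(config)    # ResUnet wrapped in IQAnModel
#     r_model = set_r_model(config)    # LightCNN wrapped in RecognitionModel
#     face_loaders = set_face_dataloaders(config)
#     eye_loaders, num_classes = set_eye_dataloaders(config)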
from win32ctypes.pywin32 import win32api
import win32.lib.win32con as win32con
import win32.win32gui as win32gui
from wx.lib.delayedresult import startWorker
import PIL
import wx
import wx.aui as aui
import wx.adv as adv
import wx.lib.newevent
import os
import threading
import datetime
import random
import mouse
from screeninfo import get_monitors
import Plugins.tama_drawer.tama_drawer_events
from Plugins.tama_drawer.tama_drawer_events import TamaMoodEvent
from Plugins.tama_drawer.tama_drawer_events import EVT_TAMA_MOOD
#
# The two tools below were acquired from the below wxPythonWiki tutorial:
# https://wiki.wxpython.org/WorkingWithImages
#
import Plugins.tama_drawer.ImgConv # wxImage <==> PilImage
import Plugins.tama_drawer.BitmapManip # mask wxBmap <==> PilImage <== file
from PIL import Image, ImageDraw, ImageChops, ImageSequence
def GetRandomWxColorAndInverse() :
    r, g, b = (random.randint(0, 127), random.randint(0, 127), random.randint(0, 127))
    if random.randint(0, 1) :    # Guarantee a large contrast
        r, g, b = (255-r, 255-g, 255-b)
    #end if
    R, G, B = (255-r, 255-g, 255-b)    # The inverse
    return (wx.Colour(r, g, b), wx.Colour(R, G, B))
def CreateInnerMaskBmapFromOuterMask( srcBmap ) :
"""
    Derive the inner mask wxBitmap from the outer mask wxBitmap.
    The srcBmap must be "well behaved" in that a continuous border
    must be present so that a floodfill of the perimeter area cannot reach
    into the inner area. The border color must be >=128. So
    the srcBmap consists of a transparent/BLACK perimeter, a white/opaque
    frame border and a transparent/BLACK inner area.
    When completed, the outer area + border will be transparent/BLACK,
    the parent's frame border will be transparent/BLACK and the inner area
    will be opaque/WHITE.
    1. Outer perimeter (black) --> floodfill to white/255
       (now both the perimeter and the border are white).
    2. Invert the image and return it as a wxBitmap.
    """
# Start with an 'L' Pil copy of the RGB input wxBitmap.
dstPilImage = ImgConv.PilImageFromWxBitmap( srcBmap ).convert( 'L' )
# Make sure the image is quantized to binary.
dstPilImage = dstPilImage.point(lambda i: (i / 128) * 255)
size = dstPilImage.size
ImageDraw.floodfill( dstPilImage, (0, 0), (255) )
return ImgConv.WxBitmapFromPilImage( ImageChops.invert( dstPilImage ) )
#end def
#------------------------------------------------------------------------------
class TamaFrame(wx.Frame):
"""
Shaped window from disk image files and optional disk transparency mask files.
The user cannot resize the window because there are no resizing decorations !
The entire popup is just a control-less bitmap image.
However, all that is visible (opaque) can be repositioned by dragging.
"""
def __init__( self, parent, image_filename=None, mask_filename=None,
outer_or_inner_window=1, # default to a shaped frame window
posn=(0, 0), bgTransparency=100 ) :
style = ( wx.STAY_ON_TOP )
"""
The TamaFrame inherits from wx.Frame, and thus receives the ability to be used in a wxpython (wx) app
This is the window that is created for the application, Tama's actual form will be inside of this frame,
and the frame itself is only slightly visible (This can be tweaked).
"""
wx.Frame.__init__(self, parent, wx.ID_ANY, style=style, title = 'Tama', name = 'Tama')
self.bgTransparency = bgTransparency
self.SetBackgroundStyle(wx.BG_STYLE_ERASE)
self.image_filename = image_filename
self.image_wxBitmaps = []
self.parent = parent
self.current_bitmap = None
self.timer = wx.Timer(self, wx.ID_ANY)
self.timer.Start(60)
#Will be used to get locations of screens, so that the correct
#screen is drawn to when drawing with a ScreenDC
self.screens = []
self.screen_positions = []
for screen in get_monitors():
self.screens.append(screen)
#Bounding_boxes is a list of rect objects where bounding_boxes[0]
#Represents the client size of screen[0]
#This will be used to draw with a ScreenDC, which considers all
#monitors to be one screen.
self.bounding_boxes = []
for screen_idx in range(len(self.screens)):
self.bounding_boxes.append(wx.Display(screen_idx).GetClientArea())
self.SetTitle('Tama')
self.SetSize( (250, 250) )
self.current_screen = wx.Display().GetFromPoint((self.bounding_boxes[0].GetX(), self.bounding_boxes[0].GetY()))
self.SetPosition((self.bounding_boxes[0].GetX(), self.bounding_boxes[0].GetY()))
self.current_mood = None
self.last_mouse_pos = wx.Point(0,0)
self.tama_widget = TamaWidget(self)
self.previous_update = datetime.datetime.now()
self.screenContext = None
self.is_border_window = outer_or_inner_window
self.is_inner_window = not outer_or_inner_window
if wx.Platform == '__WXGTK__' : # GTK-only, use as an event handler.
self.Bind( wx.EVT_WINDOW_CREATE, self.DrawWindow )
#end if
#------------------------------
# This handler is always required.
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_TIMER, self.OnTimer)
#self.Bind(wx.EVT_ERASE_BACKGROUND, self.DoNothing)
# Enable the user to quit the app by pressing <ESC>.
self.Bind( wx.EVT_KEY_UP, self.OnKeyDown ) # Comment this to disable.
# Enable window dragging.
self.Bind( wx.EVT_MOTION, self.OnMotion ) # Comment this to disable.
self.Bind(wx.EVT_LEFT_UP, self.OnRelease)
self.Bind(wx.EVT_CLOSE, parent.OnClose)
#mouse.on_right_click(self.ShowRightMenu)
self.Bind(wx.EVT_CONTEXT_MENU, self.ShowRightMenu)
#Linux and Windows will have different ways to create this kind of transparent frame.
if wx.Platform == '__WXMSW__':
hwnd = self.GetHandle()
extendedStyleSettings = win32gui.GetWindowLong(hwnd, win32con.GWL_EXSTYLE)
win32gui.SetWindowLong(hwnd, win32con.GWL_EXSTYLE, extendedStyleSettings | win32con.WS_EX_LAYERED | win32con.WS_CHILD)
win32gui.SetLayeredWindowAttributes(hwnd, 0, 255, win32con.LWA_COLORKEY)
self.SetTransparent(190)
elif wx.Platform == '__WXGTK__':
pass
else:
pass
self.SetDoubleBuffered(True)
self.Layout()
self.Show()
#--------------------------------------------
def SetOtherWindow( self, otherWindow ) :
""" Allow the other ShapedWindow to be referenced in this instantiation. """
self.otherWindow = otherWindow
#end def
def SetMyPosition( self, posn ) :
""" This is for "OtherWindow" to call, never "self"."""
self.SetPosition( posn )
#end def
def OnPaint( self, event ) :
self.DrawWindow()
event.Skip()
return # Very important to let all higher level handlers be called.
#end def
def DoNothing(self, event):
pass
def SetImage(self, pil_image):
if pil_image:
width, height = pil_image.size
self.image_wxBitmaps.append(wx.BitmapFromBuffer(width, height, pil_image.convert('RGB').tobytes(), pil_image.convert('RGBA').getchannel('A').tobytes()))
return
def DrawWindow(self) :
"""Implement window drawing at any time."""
# screenContext will be drawn to after memoryContext is given the right combined bitmap
context = wx.PaintDC(self)
# Blit will copy the pixels from self.combined_image, which is a
# MemoryDC that contains the current Tama Image to be displayed.
# This image is newly generated within the Tama task system, in order to
# reduce image display time.
if len(self.image_wxBitmaps) and context.CanDrawBitmap():
context.DrawBitmap(self.image_wxBitmaps.pop(), 0, 0, False)
del context
#end def DrawWindow
#--------------------------------------------
def OnTimer(self, event):
if not self.tama_widget.is_grabbed() and self.tama_widget.is_moving():
self.move_direction()
self.SetImage(self.tama_widget.next())
self.Refresh()
return
def show_window_pinning(self, event):
self.parent.frames[2].Show()
return
def show_copyx(self, event):
self.parent.frames[3].Show()
return
def show_macro_recorder(self, event):
self.parent.frames[4].Show()
return
def show_settings(self, event):
self.parent.frames[5].Show()
return
def ShowRightMenu(self, *args) :
"""
Create and show a Context Menu
"""
# only do this part the first time so the events are only bound once
if not hasattr(self, "itemOneId"):
self.itemOneId = wx.NewId()
self.itemTwoId = wx.NewId()
self.itemThreeId = wx.NewId()
self.itemFourId = wx.NewId()
self.itemFiveId = wx.NewId()
self.Bind(wx.EVT_MENU, self.show_window_pinning, id=self.itemOneId)
self.Bind(wx.EVT_MENU, self.show_copyx, id=self.itemTwoId)
self.Bind(wx.EVT_MENU, self.show_macro_recorder, id=self.itemThreeId)
self.Bind(wx.EVT_MENU, self.show_settings, id=self.itemFourId)
self.Bind(wx.EVT_MENU, self.parent.OnClose, id=self.itemFiveId)
# build the menu
menu = wx.Menu()
itemOne = menu.Append(self.itemOneId, "Pin a Window...")
itemTwo = menu.Append(self.itemTwoId, "Copy X...")
itemThree = menu.Append(self.itemThreeId, "Record Mouse Events...")
itemFour = menu.Append(self.itemFourId, "Settings")
itemFive = menu.Append(self.itemFiveId, "Exit")
# show the popup menu
self.PopupMenu(menu)
menu.Destroy()
def OnKeyDown( self, event ) :
"""Quit the app if the user presses Q, q or Esc"""
keyCode = event.GetKeyCode()
quitCodes = [27, ord('Q'), ord('q')]
event.Skip() # Allow any following event processing.
if (keyCode in quitCodes) :
self.Close( force=True )
#end if
#end def
#--------------------------------------------
def OnMotion( self, event ) :
"""Implement window client area dragging since this window has no frame to grab."""
if not event.Dragging() : # Mouse is moving but no button is down.
self.dragPosn = None
return
#end if
#self.CaptureMouse()
        if self.dragPosn is None :   # Previous non-dragging mouse position
# Capture the first mouse coord after pressing any button
self.dragPosn = event.GetPosition()
else:
if not self.tama_widget.is_grabbed():
self.tama_widget.is_grabbed(True)
currPosn = event.GetPosition()
self.current_screen = wx.Display().GetFromWindow(self)
displacement = self.dragPosn - currPosn
newPosn = self.GetPosition() - displacement
self.SetPosition( newPosn )
self.Update()
def move_direction(self):
window_pos = self.GetScreenPosition()
if self.tama_widget.is_moving() and 'Move' in self.tama_widget.get_anim_name():
#box represents the client area of the current screen that Tama is located on.
#and the upper left corner does not have to be 0,0
box = self.bounding_boxes[self.current_screen]
if self.tama_widget.get_movement_direction() == 'Move Left':
if self.bounding_boxes[self.current_screen].Contains(
wx.Point(window_pos[0]-2, window_pos[1])):
self.Move(window_pos[0]-2, window_pos[1])
else:
self.tama_widget.is_moving(False)
elif self.tama_widget.get_movement_direction() == 'Move Right':
if self.bounding_boxes[self.current_screen].Contains(
wx.Point(window_pos[0] + self.GetSize().GetWidth(), window_pos[1])):
self.Move(window_pos[0]+2, window_pos[1])
else:
self.tama_widget.is_moving(False)
else:
pass
self.Update()
def OnRelease(self, event):
if self.tama_widget.is_grabbed():
self.tama_widget.is_grabbed(False)
def needs_update(self):
return self.tama_widget.needs_update()
def needs_mood(self):
if self.current_mood is None:
return True
return False
def generate(self, event):
if self.tama_widget.is_moving():
self.tama_widget.is_moving(False, None)
if 'Sleeping' in event.get_modifiers():
self.tama_widget.set_animation('Sleeping')
elif 'Eating' in event.get_modifiers():
self.tama_widget.set_animation('Eating')
elif 'Thinking_of_Food' in event.get_modifiers():
self.tama_widget.set_animation('Thinking_of_Food')
else:
self.tama_widget.set_animation('Idle')
self.Refresh()
return
def set_current_mood(self, current_mood):
self.tama_widget.set_current_mood(current_mood)
self.Show()
return
def get_bounding_boxes(self):
return self.bounding_boxes
def OnClose(self, e):
e.Skip()
class TamaWidget():
"""
    Holds the logic that generates Tama's frames from the layered gif assets.
    next() yields successive images as the animation is switched by each
    current_mood update.
"""
def __init__(self, parent):
self.assets_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), "Assets")
self.parent = parent
self.available_folders = []
self.current_mood = None
self.animation_duration = 0
self.frame_idx = 0
#self.tama_generator = wx.Process()
self.current_animation_name = 'Idle'
self.idle_animation_path = os.path.join(os.path.join(self.assets_folder, 'Idle'), 'Idle_0.gif')
self.current_folder_animations = [self.idle_animation_path]
#GenericAnimationCtrl is used here in order to detect when an animation is done playing.
self.current_gif = Image.open(self.idle_animation_path)
self.current_animation = []
self.prev_animation = None
self.grabbed = False
self.moving = False
self.direction = None
def set_current_mood(self, current_mood):
self.current_mood = current_mood
return
def get_movement_direction(self):
return self.direction
def needs_update(self):
if (self.is_grabbed() and self.current_animation_name != 'Grabbed') \
or (not self.is_grabbed() and self.current_animation_name == 'Grabbed'):
return True
elif self.frame_idx - self.animation_duration >= -1:
return True
return False
def get_current_animation(self):
if self.current_animation:
return self.current_animation[self.frame_idx]
return None
def get_anim_name(self):
return self.current_animation_name
# Returns the current frame and increments the frame_idx by one.
def next(self):
if self.frame_idx >= self.animation_duration-1:
self.set_animation(self.current_animation_name)
im = self.get_current_animation()
if im:
self.frame_idx += 1
return im
else:
return None
def is_grabbed(self, ishe = None):
'''
This allows other classes to "grab" Tama as well.
Returns whether or not 'grabbed' animations will play if used without the bool
Sets grabbed animations to play and returns the screen position if used with the bool
'''
if ishe is None:
return self.grabbed
self.grabbed = ishe
if self.is_moving():
if ishe == True:
if 'Move' not in self.current_animation_name \
and 'Grabbed' not in self.current_animation_name:
self.prev_animation = self.current_animation_name
self.set_animation('Grabbed')
self.is_moving(False)
if ishe == False:
return
else:
if ishe == True:
if 'Move' not in self.current_animation_name \
and 'Grabbed' not in self.current_animation_name:
self.prev_animation = self.current_animation_name
self.set_animation('Grabbed')
if ishe == False:
self.set_animation(self.prev_animation)
def is_moving(self, ishe = None, dir = -1):
'''
This allows other classes to trigger Tama left-right movements
Returns whether or not moving animations are playing
Sets moving animations to play
'''
if ishe is None:
return self.moving
self.moving = ishe
if dir == 0:
self.direction = 'Move Left'
elif dir == 1:
self.direction = 'Move Right'
else:
self.direction = "Idle"
self.moving = False
if not self.is_grabbed():
self.moving = ishe
if ishe == True:
if 'Move' not in self.current_animation_name \
and 'Grabbed' not in self.current_animation_name:
self.prev_animation = self.current_animation_name
self.set_animation(self.direction)
elif ishe == False:
self.set_animation(self.prev_animation)
else:
self.moving = False
def pngs_exist(self, gif_idx, anim_name):
if os.path.exists(os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen')):
pngs = [file.name for file in os.scandir(os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen')) if file.is_dir() != True and '.png' in file.name.lower()]
for png in pngs:
if str(gif_idx) + "_" in png:
return True
return False
def set_animation(self, anim_name):
#This has to happen every time set_animation is called, or indices will go out of range when calling self.next()
self.frame_idx = 0
if self.is_grabbed():
#in the future, we can set a grabbed + anim_name animation here, and rotate the animation on user drag.
anim_name = 'Grabbed'
elif random.randrange(0, 2) == 0:
if not self.is_moving():
dir = random.randrange(0, 2)
self.is_moving(True, dir)
else:
self.is_moving(False)
return
gifs = [file.path for file in os.scandir(os.path.join(self.assets_folder, anim_name)) if file.is_dir() != True and '.gif' in file.name.lower()]
if os.path.exists(os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen')):
pngs = [file.path for file in os.scandir(os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen')) if file.is_dir() != True and '.png' in file.name.lower()]
else:
pngs = []
if len(gifs) < 1:
self.current_animation_name = 'Idle'
current_gif = Image.open(self.idle_animation_path)
if not os.path.exists(os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen')):
                os.mkdir(os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen'))  # default mode; mode 0 would create an unwritable dir
for frame in ImageSequence.Iterator(current_gif):
combined_anim_name = "" + str(0) + "_" + anim_name + "_frame" + str(self.animation_duration) + ".png"
path_to_frame = os.path.join(os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen'), combined_anim_name)
gif_info = frame.info
frame.save(path_to_frame, **gif_info)
self.current_animation.append(Image.open(path_to_frame))
self.animation_duration += 1
else:
self.animation_duration = 0
self.current_animation_name = anim_name
self.current_animation = []
gif_idx = random.randrange(0, len(gifs), 1)
#if there aren't any pngs yet for this animation, create them
if self.pngs_exist(gif_idx, anim_name):
pngs = [file.path for file in os.scandir(os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen')) if file.is_dir() != True and '.png' in file.name.lower() and str(gif_idx) + "_" + anim_name + "_frame" in file.name]
for png in pngs:
combined_anim_name = "" + str(gif_idx) + "_" + anim_name + "_frame" + str(self.animation_duration) + ".png"
path_to_frame = os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen')
self.current_animation.append(Image.open(pngs[pngs.index(os.path.join(path_to_frame, combined_anim_name))]))
self.animation_duration += 1
else:
current_gif = Image.open(gifs[gif_idx])
if not os.path.exists(os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen')):
                    os.mkdir(os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen'))  # default mode; mode 0 would create an unwritable dir
for frame in ImageSequence.Iterator(current_gif):
combined_anim_name = "" + str(gif_idx) + "_" + anim_name + "_frame" + str(self.animation_duration) + ".png"
path_to_frame = os.path.join(os.path.join(os.path.join(self.assets_folder, anim_name), 'Gen'), combined_anim_name)
gif_info = frame.info
frame.save(path_to_frame, **gif_info)
self.current_animation.append(Image.open(path_to_frame))
self.animation_duration += 1
return
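
# A minimal, hypothetical launcher for TamaFrame, shown here as a sketch only:
# the real application supplies a controller with a populated `frames` list,
# and the Assets/Idle gifs must exist on disk for TamaWidget to load.
if __name__ == '__main__':
    app = wx.App(False)
    class _Shell(wx.Frame):  # stand-in for the real parent/controller
        def __init__(self):
            super(_Shell, self).__init__(None)
            self.frames = []  # the real app appends its tool frames here
        def OnClose(self, event):
            event.Skip()
    tama = TamaFrame(_Shell())
    app.MainLoop()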
| [
"wx.Display",
"wx.NewId",
"wx.Frame.__init__",
"wx.Colour",
"random.randint",
"wx.PaintDC",
"random.randrange",
"PIL.ImageChops.invert",
"PIL.ImageDraw.floodfill",
"win32.win32gui.SetWindowLong",
"wx.Point",
"PIL.Image.open",
"win32.win32gui.GetWindowLong",
"os.path.join",
"wx.Timer",
... | [((991, 1011), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (1005, 1011), False, 'import random\n'), ((2348, 2393), 'PIL.ImageDraw.floodfill', 'ImageDraw.floodfill', (['dstPilImage', '(0, 0)', '(255)'], {}), '(dstPilImage, (0, 0), 255)\n', (2367, 2393), False, 'from PIL import Image, ImageDraw, ImageChops, ImageSequence\n'), ((907, 929), 'random.randint', 'random.randint', (['(0)', '(127)'], {}), '(0, 127)\n', (921, 929), False, 'import random\n'), ((931, 953), 'random.randint', 'random.randint', (['(0)', '(127)'], {}), '(0, 127)\n', (945, 953), False, 'import random\n'), ((955, 977), 'random.randint', 'random.randint', (['(0)', '(127)'], {}), '(0, 127)\n', (969, 977), False, 'import random\n'), ((1181, 1199), 'wx.Colour', 'wx.Colour', (['r', 'g', 'b'], {}), '(r, g, b)\n', (1190, 1199), False, 'import wx\n'), ((1201, 1219), 'wx.Colour', 'wx.Colour', (['R', 'G', 'B'], {}), '(R, G, B)\n', (1210, 1219), False, 'import wx\n'), ((2444, 2474), 'PIL.ImageChops.invert', 'ImageChops.invert', (['dstPilImage'], {}), '(dstPilImage)\n', (2461, 2474), False, 'from PIL import Image, ImageDraw, ImageChops, ImageSequence\n'), ((3518, 3605), 'wx.Frame.__init__', 'wx.Frame.__init__', (['self', 'parent', 'wx.ID_ANY'], {'style': 'style', 'title': '"""Tama"""', 'name': '"""Tama"""'}), "(self, parent, wx.ID_ANY, style=style, title='Tama', name=\n 'Tama')\n", (3535, 3605), False, 'import wx\n'), ((3865, 3890), 'wx.Timer', 'wx.Timer', (['self', 'wx.ID_ANY'], {}), '(self, wx.ID_ANY)\n', (3873, 3890), False, 'import wx\n'), ((4131, 4145), 'screeninfo.get_monitors', 'get_monitors', ([], {}), '()\n', (4143, 4145), False, 'from screeninfo import get_monitors\n'), ((4932, 4946), 'wx.Point', 'wx.Point', (['(0)', '(0)'], {}), '(0, 0)\n', (4940, 4946), False, 'import wx\n'), ((5021, 5044), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5042, 5044), False, 'import datetime\n'), ((7870, 7886), 'wx.PaintDC', 'wx.PaintDC', (['self'], {}), '(self)\n', (7880, 7886), False, 'import wx\n'), ((9847, 9856), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (9854, 9856), False, 'import wx\n'), ((14827, 14863), 'PIL.Image.open', 'Image.open', (['self.idle_animation_path'], {}), '(self.idle_animation_path)\n', (14837, 14863), False, 'from PIL import Image, ImageDraw, ImageChops, ImageSequence\n'), ((6279, 6329), 'win32.win32gui.GetWindowLong', 'win32gui.GetWindowLong', (['hwnd', 'win32con.GWL_EXSTYLE'], {}), '(hwnd, win32con.GWL_EXSTYLE)\n', (6301, 6329), True, 'import win32.win32gui as win32gui\n'), ((6342, 6464), 'win32.win32gui.SetWindowLong', 'win32gui.SetWindowLong', (['hwnd', 'win32con.GWL_EXSTYLE', '(extendedStyleSettings | win32con.WS_EX_LAYERED | win32con.WS_CHILD)'], {}), '(hwnd, win32con.GWL_EXSTYLE, extendedStyleSettings |\n win32con.WS_EX_LAYERED | win32con.WS_CHILD)\n', (6364, 6464), True, 'import win32.win32gui as win32gui\n'), ((6474, 6546), 'win32.win32gui.SetLayeredWindowAttributes', 'win32gui.SetLayeredWindowAttributes', (['hwnd', '(0)', '(255)', 'win32con.LWA_COLORKEY'], {}), '(hwnd, 0, 255, win32con.LWA_COLORKEY)\n', (6509, 6546), True, 'import win32.win32gui as win32gui\n'), ((9246, 9256), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (9254, 9256), False, 'import wx\n'), ((9286, 9296), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (9294, 9296), False, 'import wx\n'), ((9328, 9338), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (9336, 9338), False, 'import wx\n'), ((9369, 9379), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (9377, 9379), False, 'import wx\n'), ((9410, 9420), 'wx.NewId', 
'wx.NewId', ([], {}), '()\n', (9418, 9420), False, 'import wx\n'), ((14579, 14619), 'os.path.join', 'os.path.join', (['self.assets_folder', '"""Idle"""'], {}), "(self.assets_folder, 'Idle')\n", (14591, 14619), False, 'import os\n'), ((19908, 19944), 'PIL.Image.open', 'Image.open', (['self.idle_animation_path'], {}), '(self.idle_animation_path)\n', (19918, 19944), False, 'from PIL import Image, ImageDraw, ImageChops, ImageSequence\n'), ((20165, 20200), 'PIL.ImageSequence.Iterator', 'ImageSequence.Iterator', (['current_gif'], {}), '(current_gif)\n', (20187, 20200), False, 'from PIL import Image, ImageDraw, ImageChops, ImageSequence\n'), ((4690, 4702), 'wx.Display', 'wx.Display', ([], {}), '()\n', (4700, 4702), False, 'import wx\n'), ((14243, 14268), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (14258, 14268), False, 'import os\n'), ((18370, 18413), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (18382, 18413), False, 'import os\n'), ((19111, 19133), 'random.randrange', 'random.randrange', (['(0)', '(2)'], {}), '(0, 2)\n', (19127, 19133), False, 'import random\n'), ((19531, 19574), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (19543, 19574), False, 'import os\n'), ((21717, 21742), 'PIL.Image.open', 'Image.open', (['gifs[gif_idx]'], {}), '(gifs[gif_idx])\n', (21727, 21742), False, 'from PIL import Image, ImageDraw, ImageChops, ImageSequence\n'), ((21975, 22010), 'PIL.ImageSequence.Iterator', 'ImageSequence.Iterator', (['current_gif'], {}), '(current_gif)\n', (21997, 22010), False, 'from PIL import Image, ImageDraw, ImageChops, ImageSequence\n'), ((11415, 11427), 'wx.Display', 'wx.Display', ([], {}), '()\n', (11425, 11427), False, 'import wx\n'), ((12159, 12201), 'wx.Point', 'wx.Point', (['(window_pos[0] - 2)', 'window_pos[1]'], {}), '(window_pos[0] - 2, window_pos[1])\n', (12167, 12201), False, 'import wx\n'), ((19199, 19221), 'random.randrange', 'random.randrange', (['(0)', '(2)'], {}), '(0, 2)\n', (19215, 19221), False, 'import random\n'), ((19389, 19432), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (19401, 19432), False, 'import os\n'), ((20589, 20614), 'PIL.Image.open', 'Image.open', (['path_to_frame'], {}), '(path_to_frame)\n', (20599, 20614), False, 'from PIL import Image, ImageDraw, ImageChops, ImageSequence\n'), ((4542, 4564), 'wx.Display', 'wx.Display', (['screen_idx'], {}), '(screen_idx)\n', (4552, 4564), False, 'import wx\n'), ((19992, 20035), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (20004, 20035), False, 'import os\n'), ((20084, 20127), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (20096, 20127), False, 'import os\n'), ((20378, 20421), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (20390, 20421), False, 'import os\n'), ((21439, 21482), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (21451, 21482), False, 'import os\n'), ((22425, 22450), 'PIL.Image.open', 'Image.open', (['path_to_frame'], {}), '(path_to_frame)\n', (22435, 22450), False, 'from PIL import Image, ImageDraw, ImageChops, ImageSequence\n'), ((18490, 18533), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), 
'(self.assets_folder, anim_name)\n', (18502, 18533), False, 'import os\n'), ((19651, 19694), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (19663, 19694), False, 'import os\n'), ((21794, 21837), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (21806, 21837), False, 'import os\n'), ((21890, 21933), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (21902, 21933), False, 'import os\n'), ((22202, 22245), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (22214, 22245), False, 'import os\n'), ((21059, 21102), 'os.path.join', 'os.path.join', (['self.assets_folder', 'anim_name'], {}), '(self.assets_folder, anim_name)\n', (21071, 21102), False, 'import os\n'), ((21568, 21615), 'os.path.join', 'os.path.join', (['path_to_frame', 'combined_anim_name'], {}), '(path_to_frame, combined_anim_name)\n', (21580, 21615), False, 'import os\n')] |
#!/usr/bin/env python
#
# Copyright (c) 2019, Pycom Limited.
#
# This software is licensed under the GNU GPL version 3 or any
# later version, with permitted additional terms. For more information
# see the Pycom Licence v1.0 document supplied with this file, or
# available at https://www.pycom.io/opensource/licensing
#
__version__ = '1'
from Message import Message
class MessageBoard:
def __init__(self, meshState):
self.received = []
self.toBeSent = []
self.sent = []
self.meshState = meshState
def sendMessage(self, message):
self.toBeSent.append(message)
def receiveMessage(self, message):
#we only care about our own messages
#print(message.target)
#print(self.meshState.me.rloc16)
if self.meshState.isDirectedToMe(message.target):
if message.isACK:
                self._receivedAccMessageForMe(message)
else:
self.sendAcc(message)
self.received.append(message)
elif message.isBroadCast():
if message.isDecoration():
self.meshState.updateOthersDecorations(message)
else:
self.received.append(message)
def _receivedAccMessageForMe(self, message):
newSendList = []
for sent in self.toBeSent:
            if not message.isAccForMessage(sent):
newSendList.append(sent)
else:
self.sent.append(sent)
        self.toBeSent = newSendList  # drop acknowledged messages from the send list
def sendCompleted(self):
newSendList = []
for sent in self.toBeSent:
            if not sent.isACK and not sent.isBroadCast():
newSendList.append(sent)
else:
if sent.isDecoration():
pass
else:
self.sent.append(sent)
self.toBeSent = newSendList
def getReceivedMessages(self):
return self.received
def getMessagesToBeSent(self):
return self.toBeSent
def getMessagesSent(self):
return self.sent
def sendAcc(self, message):
#note sender and target swapped places here...
        accMessage = Message(message.content, message.sender, message.target, 0, True, False, False)
self.sendMessage(accMessage)
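
# A sketch of the intended message flow, inferred from the methods above
# (the full Message constructor signature lives in Message.py):
#   1. board.sendMessage(msg)            queues msg in toBeSent
#   2. the mesh delivers msg; the peer's receiveMessage() answers via sendAcc()
#   3. the ACK arrives back; _receivedAccMessageForMe() moves msg to sent
#   4. sendCompleted() prunes ACKs and broadcasts, which need no acknowledgement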
| [
"Message.Message"
] | [((2237, 2316), 'Message.Message', 'Message', (['message.content', 'message.sender', 'message.target', '(0)', '(True)', '(False)', '(False)'], {}), '(message.content, message.sender, message.target, 0, True, False, False)\n', (2244, 2316), False, 'from Message import Message\n')] |
from new_movies.random_data_utility import random_generator
available_movies = random_generator.generate_random_movies(movies_number=15)
available_games = random_generator.generate_random_games()
def add_movie(movie):
available_movies.append(movie)
| [
"new_movies.random_data_utility.random_generator.generate_random_movies",
"new_movies.random_data_utility.random_generator.generate_random_games"
] | [((80, 137), 'new_movies.random_data_utility.random_generator.generate_random_movies', 'random_generator.generate_random_movies', ([], {'movies_number': '(15)'}), '(movies_number=15)\n', (119, 137), False, 'from new_movies.random_data_utility import random_generator\n'), ((156, 196), 'new_movies.random_data_utility.random_generator.generate_random_games', 'random_generator.generate_random_games', ([], {}), '()\n', (194, 196), False, 'from new_movies.random_data_utility import random_generator\n')] |
from django.shortcuts import render
# Create your views here.
from rest_framework.views import APIView
from contents.serializers import HotSKUListSerializer
from goods.models import SKU
class HomeAPIView(APIView):
pass
'''
List data.
Hot-sale data: fetch the hot-sale items of whichever category is requested.
1. Get the category id
2. Query the data by that id
3. Serialize the data into dicts
4. Return the response
'''
from rest_framework.generics import ListAPIView
class HotSKUListAPIView(ListAPIView):
def get_queryset(self):
category_id = self.kwargs['category_id']
return SKU.objects.filter(category_id=category_id).order_by('-sales')[:2]
serializer_class = HotSKUListSerializer | [
"goods.models.SKU.objects.filter"
] | [((487, 530), 'goods.models.SKU.objects.filter', 'SKU.objects.filter', ([], {'category_id': 'category_id'}), '(category_id=category_id)\n', (505, 530), False, 'from goods.models import SKU\n')] |
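# A hedged sketch of the URL conf this view assumes: ListAPIView reads
# `category_id` from self.kwargs, so the route must capture a kwarg with
# exactly that name (the path prefix below is an illustrative assumption).
#
#     from django.urls import re_path
#     from . import views
#     urlpatterns = [
#         re_path(r'^categories/(?P<category_id>\d+)/hotskus/$',
#                 views.HotSKUListAPIView.as_view()),
#     ]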
# -*- coding: utf-8 -*-
"""
Tera Python SDK. It needs a libtera_c.so
TODO(taocipian) __init__.py
"""
from ctypes import CFUNCTYPE, POINTER
from ctypes import byref, cdll, string_at
from ctypes import c_bool, c_char_p, c_void_p
from ctypes import c_int32, c_int64, c_ubyte, c_uint64
class ScanDescriptor(object):
""" scan操作描述符
scan出[start_key, end_key)范围内的所有数据,每个cell默认返回最新的1个版本
"""
def __init__(self, start_key):
"""
Args:
            start_key(string): start position of the scan; the result includes start_key
"""
self.desc = lib.tera_scan_descriptor(start_key,
c_uint64(len(start_key)))
def SetEnd(self, end_key):
"""
        If this method is not called, end_key is treated as "infinity".
Args:
            end_key(string): end position of the scan; the result excludes end_key
"""
lib.tera_scan_descriptor_set_end(self.desc, end_key,
c_uint64(len(end_key)))
def SetMaxVersions(self, versions):
"""
        If this method is not called, only the latest version of each
        cell is scanned by default.
Args:
            versions(long): the maximum number of versions selected per cell during a scan
"""
lib.tera_scan_descriptor_set_max_versions(self.desc, versions)
def SetBufferSize(self, buffer_size):
lib.tera_scan_descriptor_set_buffer_size(self.desc, buffer_size)
def SetIsAsync(self, is_async):
lib.tera_scan_descriptor_set_is_async(self.desc, is_async)
def SetPackInterval(self, interval):
lib.tera_scan_descriptor_set_pack_interval(self.desc, interval)
def AddColumn(self, cf, qu):
lib.tera_scan_descriptor_add_column(self.desc, cf,
qu, c_uint64(len(qu)))
def AddColumnFamily(self, cf):
lib.tera_scan_descriptor_add_column_family(self.desc, cf)
def IsAsync(self):
return lib.tera_scan_descriptor_is_async(self.desc)
def SetFilterString(self, filter_string):
lib.tera_scan_descriptor_set_filter_string(self.desc, filter_string)
def SetSnapshot(self, sid):
lib.tera_scan_descriptor_set_snapshot(self.desc, sid)
def SetTimeRange(self, start, end):
lib.tera_scan_descriptor_set_time_range(self.desc, start, end)
class ResultStream(object):
""" scan操作返回的输出流
"""
def __init__(self, stream):
self.stream = stream
def Done(self):
""" 此stream是否已经读完
Returns:
(bool) 如果已经读完,则返回 true, 否则返回 false.
"""
err = c_char_p()
return lib.tera_result_stream_done(self.stream, byref(err))
def Next(self):
""" 迭代到下一个cell
"""
lib.tera_result_stream_next(self.stream)
def RowName(self):
"""
Returns:
            (string) the row key of the current cell
"""
value = POINTER(c_ubyte)()
vallen = c_uint64()
lib.tera_result_stream_row_name(self.stream,
byref(value), byref(vallen))
return copy_string_to_user(value, long(vallen.value))
def Family(self):
"""
Returns:
            (string) the ColumnFamily of the current cell
"""
value = POINTER(c_ubyte)()
vallen = c_uint64()
lib.tera_result_stream_family(self.stream, byref(value), byref(vallen))
return copy_string_to_user(value, long(vallen.value))
def Qualifier(self):
"""
Returns:
            (string) the Qualifier of the current cell
"""
value = POINTER(c_ubyte)()
vallen = c_uint64()
lib.tera_result_stream_qualifier(self.stream,
byref(value), byref(vallen))
return copy_string_to_user(value, long(vallen.value))
def ColumnName(self):
"""
Returns:
            (string) the ColumnName (i.e. ColumnFamily:Qualifier) of the current cell
"""
value = POINTER(c_ubyte)()
vallen = c_uint64()
lib.tera_result_stream_column_name(self.stream,
byref(value), byref(vallen))
return copy_string_to_user(value, long(vallen.value))
def Value(self):
"""
Returns:
            (string) the value of the current cell
"""
value = POINTER(c_ubyte)()
vallen = c_uint64()
lib.tera_result_stream_value(self.stream, byref(value), byref(vallen))
return copy_string_to_user(value, long(vallen.value))
def Timestamp(self):
"""
Returns:
            (long) the timestamp of the current cell, in Unix time
"""
return lib.tera_result_stream_timestamp(self.stream)
class Client(object):
""" 通过Client对象访问一个tera集群
使用建议:一个集群对应一个Client即可,如需访问多个Client,需要创建多个
"""
def __init__(self, conf_path, log_prefix):
"""
Raises:
            TeraSdkException: failed to create the Client object
"""
err = c_char_p()
self.client = lib.tera_client_open(conf_path, log_prefix, byref(err))
if self.client is None:
raise TeraSdkException("open client failed:" + str(err.value))
def OpenTable(self, name):
""" 打开名为<name>的表
Args:
name(string): 表名
Returns:
(Table) 打开的Table指针
Raises:
TeraSdkException: 打开table时出错
"""
err = c_char_p()
table_ptr = lib.tera_table_open(self.client, name, byref(err))
if table_ptr is None:
raise TeraSdkException("open table failed:" + err.value)
return Table(table_ptr)
MUTATION_CALLBACK = CFUNCTYPE(None, c_void_p)
class RowMutation(object):
""" 对某一行的变更
在Table.ApplyMutation()调用之前,
RowMutation的所有操作(如Put/DeleteColumn)都不会立即生效
"""
def __init__(self, mutation):
self.mutation = mutation
def Put(self, cf, qu, value):
""" 写入(修改)这一行上
ColumnFamily为<cf>, Qualifier为<qu>的cell值为<value>
Args:
cf(string): ColumnFamily名
qu(string): Qualifier名
value(string): cell的值
"""
lib.tera_row_mutation_put(self.mutation, cf,
qu, c_uint64(len(qu)),
value, c_uint64(len(value)))
def DeleteColumn(self, cf, qu):
""" 删除这一行上
ColumnFamily为<cf>, Qualifier为<qu>的cell
Args:
cf(string): ColumnFamily名
qu(string): Qualifier名
"""
lib.tera_row_mutation_delete_column(self.mutation, cf,
qu, c_uint64(len(qu)))
def RowKey(self):
"""
Returns:
            (string): the rowkey of this RowMutation, e.g. for use in a callback
"""
value = POINTER(c_ubyte)()
vallen = c_uint64()
lib.tera_row_mutation_rowkey(self.mutation,
byref(value), byref(vallen))
return copy_string_to_user(value, long(vallen.value))
def SetCallback(self, callback):
""" 设置回调
调用此函数则本次变更为异步(Table.ApplyMutation()立即返回);
否则本次变更为同步(Table.ApplyMutation()等待写入操作完成后返回)。
Args:
callback(MUTATION_CALLBACK): 用户回调,不论任何情况,最终都会被调用
"""
lib.tera_row_mutation_set_callback(self.mutation, callback)
class Table(object):
""" 对表格的所有增删查改操作由此发起
通过Client.OpenTable()获取一个Table对象
"""
def __init__(self, table):
self.table = table
def NewRowMutation(self, rowkey):
""" 生成一个对 rowkey 的RowMutation对象
Args:
rowkey(string): 待变更的rowkey
Returns:
(RowMutation): RowMutation对象
"""
return RowMutation(lib.tera_row_mutation(self.table, rowkey,
c_uint64(len(rowkey))))
def ApplyMutation(self, mutation):
""" 应用一次变更,
如果之前调用过 SetCallback() 则本次调用为异步,否则为同步
Args:
mutation(RowMutation): RowMutation对象
"""
lib.tera_table_apply_mutation(self.table, mutation.mutation)
def IsPutFinished(self):
""" table的异步写操作是否*全部*完成
Returns:
(bool) 全部完成则返回true,否则返回false.
"""
return lib.tera_table_is_put_finished(self.table)
def Get(self, rowkey, cf, qu, snapshot):
""" 同步get一个cell的值
Args:
rowkey(string): Rowkey的值
cf(string): ColumnFamily名
qu(string): Qualifier名
snapshot(long): 快照,不关心的用户设置为0即可
Raises:
TeraSdkException: 读操作失败
"""
err = c_char_p()
value = POINTER(c_ubyte)()
vallen = c_uint64()
result = lib.tera_table_get(
self.table, rowkey, c_uint64(len(rowkey)), cf,
qu, c_uint64(len(qu)), byref(value), byref(vallen), byref(err),
c_uint64(snapshot)
)
if not result:
raise TeraSdkException("get record failed:" + err.value)
return copy_string_to_user(value, long(vallen.value))
def Put(self, rowkey, cf, qu, value):
""" 同步put一个cell的值
Args:
rowkey(string): Rowkey的值
cf(string): ColumnFamily名
qu(string): Qualifier名
value(string): cell的值
Raises:
TeraSdkException: 写操作失败
"""
err = c_char_p()
result = lib.tera_table_put(
self.table, rowkey, c_uint64(len(rowkey)), cf,
qu, c_uint64(len(qu)), value, c_uint64(len(value)), byref(err)
)
if not result:
raise TeraSdkException("put record failed:" + err.value)
def Delete(self, rowkey, cf, qu):
""" 同步删除某个cell
Args:
rowkey(string): Rowkey的值
cf(string): ColumnFamily名
qu(string): Qualifier名
"""
lib.tera_table_delete(
self.table, rowkey, c_uint64(len(rowkey)),
cf, qu, c_uint64(len(qu))
)
def Scan(self, desc):
""" 发起一次scan操作
Args:
desc(ScanDescriptor): scan操作描述符
Raises:
TeraSdkException: scan失败
"""
err = c_char_p()
stream = lib.tera_table_scan(
self.table,
desc.desc,
byref(err)
)
if stream is None:
raise TeraSdkException("scan failed:" + err.value)
return ResultStream(stream)
class TeraSdkException(Exception):
def __init__(self, reason):
self.reason = reason
def __str__(self):
return self.reason
def init_function_prototype():
######################
# scan result stream #
######################
lib.tera_result_stream_done.argtypes = [c_void_p,
POINTER(c_char_p)]
lib.tera_result_stream_done.restype = c_bool
lib.tera_result_stream_timestamp.argtypes = [c_void_p]
lib.tera_result_stream_timestamp.restype = c_int64
lib.tera_result_stream_column_name.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_result_stream_column_name.restype = None
lib.tera_result_stream_family.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_result_stream_family.restype = None
lib.tera_result_stream_next.argtypes = [c_void_p]
lib.tera_result_stream_next.restype = None
lib.tera_result_stream_qualifier.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_result_stream_qualifier.restype = None
lib.tera_result_stream_row_name.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_result_stream_row_name.restype = None
lib.tera_result_stream_value.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_result_stream_value.restype = None
###################
# scan descriptor #
###################
lib.tera_scan_descriptor.argtypes = [c_char_p, c_uint64]
lib.tera_scan_descriptor.restype = c_void_p
lib.tera_scan_descriptor_add_column.argtypes = [c_void_p, c_char_p,
c_void_p, c_uint64]
lib.tera_scan_descriptor_add_column.restype = None
lib.tera_scan_descriptor_add_column_family.argtypes = [c_void_p, c_char_p]
lib.tera_scan_descriptor_add_column_family.restype = None
lib.tera_scan_descriptor_is_async.argtypes = [c_void_p]
lib.tera_scan_descriptor_is_async.restype = c_bool
lib.tera_scan_descriptor_set_buffer_size.argtypes = [c_void_p, c_int64]
lib.tera_scan_descriptor_set_buffer_size.restype = None
lib.tera_scan_descriptor_set_end.argtypes = [c_void_p, c_void_p, c_uint64]
lib.tera_scan_descriptor_set_end.restype = None
lib.tera_scan_descriptor_set_filter_string.argtypes = [c_void_p, c_char_p]
lib.tera_scan_descriptor_set_filter_string.restype = None
    lib.tera_scan_descriptor_set_pack_interval.argtypes = [c_void_p, c_int64]  # first arg is the descriptor pointer, not a string
lib.tera_scan_descriptor_set_pack_interval.restype = None
lib.tera_scan_descriptor_set_is_async.argtypes = [c_void_p, c_bool]
lib.tera_scan_descriptor_set_is_async.restype = None
lib.tera_scan_descriptor_set_max_versions.argtypes = [c_void_p, c_int32]
lib.tera_scan_descriptor_set_max_versions.restype = None
lib.tera_scan_descriptor_set_snapshot.argtypes = [c_void_p, c_uint64]
lib.tera_scan_descriptor_set_snapshot.restype = None
lib.tera_scan_descriptor_set_time_range.argtypes = [c_void_p,
c_int64, c_int64]
lib.tera_scan_descriptor_set_time_range.restype = None
##########
# client #
##########
lib.tera_client_open.argtypes = [c_char_p, c_char_p, POINTER(c_char_p)]
lib.tera_client_open.restype = c_void_p
lib.tera_table_open.argtypes = [c_void_p, c_char_p, POINTER(c_char_p)]
lib.tera_table_open.restype = c_void_p
################
# row_mutation #
################
lib.tera_row_mutation_put.argtypes = [c_void_p, c_char_p,
c_char_p, c_uint64,
c_char_p, c_uint64]
lib.tera_row_mutation_put.restype = None
lib.tera_row_mutation_set_callback.argtypes = [c_void_p, MUTATION_CALLBACK]
lib.tera_row_mutation_set_callback.restype = None
lib.tera_row_mutation_delete_column.argtypes = [c_void_p, c_char_p,
c_char_p, c_uint64]
lib.tera_row_mutation_delete_column.restype = None
lib.tera_row_mutation_rowkey.argtypes = [c_void_p,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64)]
lib.tera_row_mutation_rowkey.restype = None
#########
# table #
#########
lib.tera_table_get.argtypes = [c_void_p, c_char_p, c_uint64,
c_char_p, c_char_p, c_uint64,
POINTER(POINTER(c_ubyte)),
POINTER(c_uint64),
POINTER(c_char_p), c_uint64]
lib.tera_table_get.restype = c_bool
lib.tera_table_put.argtypes = [c_void_p, c_char_p, c_uint64, c_char_p,
c_char_p, c_uint64, c_char_p, c_uint64,
POINTER(c_char_p)]
lib.tera_table_put.restype = c_bool
lib.tera_table_delete.argtypes = [c_void_p, c_char_p, c_uint64,
c_char_p, c_char_p, c_uint64]
lib.tera_table_delete.restype = None
lib.tera_table_apply_mutation.argtypes = [c_void_p, c_void_p]
lib.tera_table_apply_mutation.restype = None
lib.tera_table_is_put_finished.argtypes = [c_void_p]
lib.tera_table_is_put_finished.restype = c_bool
lib.tera_row_mutation.argtypes = [c_void_p, c_char_p, c_uint64]
lib.tera_row_mutation.restype = c_void_p
def copy_string_to_user(value, size):
result = string_at(value, size)
libc = cdll.LoadLibrary('libc.so.6')
libc.free.argtypes = [c_void_p]
libc.free.restype = None
libc.free(value)
return result
lib = cdll.LoadLibrary('./libtera_c.so')
init_function_prototype()
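
# A minimal end-to-end usage sketch of the wrapper above. The flag-file path,
# table name and keys are illustrative assumptions; a reachable tera cluster
# and ./libtera_c.so are required for this to actually run.
if __name__ == '__main__':
    try:
        client = Client('./tera.flag', 'pysdk')
        table = client.OpenTable('demo_table')
        table.Put('row1', 'cf0', 'qu0', 'value1')       # synchronous write
        print(table.Get('row1', 'cf0', 'qu0', 0))       # synchronous read
        mu = table.NewRowMutation('row2')
        mu.Put('cf0', 'qu0', 'value2')
        table.ApplyMutation(mu)                         # sync: no callback set
        desc = ScanDescriptor('row')
        desc.SetEnd('row~')
        stream = table.Scan(desc)
        while not stream.Done():
            print(stream.RowName() + ':' + stream.Value())
            stream.Next()
    except TeraSdkException as e:
        print(e.reason)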
| [
"ctypes.CFUNCTYPE",
"ctypes.byref",
"ctypes.POINTER",
"ctypes.cdll.LoadLibrary",
"ctypes.string_at",
"ctypes.c_char_p",
"ctypes.c_uint64"
] | [((5437, 5462), 'ctypes.CFUNCTYPE', 'CFUNCTYPE', (['None', 'c_void_p'], {}), '(None, c_void_p)\n', (5446, 5462), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((16413, 16447), 'ctypes.cdll.LoadLibrary', 'cdll.LoadLibrary', (['"""./libtera_c.so"""'], {}), "('./libtera_c.so')\n", (16429, 16447), False, 'from ctypes import byref, cdll, string_at\n'), ((16237, 16259), 'ctypes.string_at', 'string_at', (['value', 'size'], {}), '(value, size)\n', (16246, 16259), False, 'from ctypes import byref, cdll, string_at\n'), ((16271, 16300), 'ctypes.cdll.LoadLibrary', 'cdll.LoadLibrary', (['"""libc.so.6"""'], {}), "('libc.so.6')\n", (16287, 16300), False, 'from ctypes import byref, cdll, string_at\n'), ((2459, 2469), 'ctypes.c_char_p', 'c_char_p', ([], {}), '()\n', (2467, 2469), False, 'from ctypes import c_bool, c_char_p, c_void_p\n'), ((2797, 2807), 'ctypes.c_uint64', 'c_uint64', ([], {}), '()\n', (2805, 2807), False, 'from ctypes import c_int32, c_int64, c_ubyte, c_uint64\n'), ((3151, 3161), 'ctypes.c_uint64', 'c_uint64', ([], {}), '()\n', (3159, 3161), False, 'from ctypes import c_int32, c_int64, c_ubyte, c_uint64\n'), ((3462, 3472), 'ctypes.c_uint64', 'c_uint64', ([], {}), '()\n', (3470, 3472), False, 'from ctypes import c_int32, c_int64, c_ubyte, c_uint64\n'), ((3846, 3856), 'ctypes.c_uint64', 'c_uint64', ([], {}), '()\n', (3854, 3856), False, 'from ctypes import c_int32, c_int64, c_ubyte, c_uint64\n'), ((4198, 4208), 'ctypes.c_uint64', 'c_uint64', ([], {}), '()\n', (4206, 4208), False, 'from ctypes import c_int32, c_int64, c_ubyte, c_uint64\n'), ((4774, 4784), 'ctypes.c_char_p', 'c_char_p', ([], {}), '()\n', (4782, 4784), False, 'from ctypes import c_bool, c_char_p, c_void_p\n'), ((5202, 5212), 'ctypes.c_char_p', 'c_char_p', ([], {}), '()\n', (5210, 5212), False, 'from ctypes import c_bool, c_char_p, c_void_p\n'), ((6603, 6613), 'ctypes.c_uint64', 'c_uint64', ([], {}), '()\n', (6611, 6613), False, 'from ctypes import c_int32, c_int64, c_ubyte, c_uint64\n'), ((8371, 8381), 'ctypes.c_char_p', 'c_char_p', ([], {}), '()\n', (8379, 8381), False, 'from ctypes import c_bool, c_char_p, c_void_p\n'), ((8434, 8444), 'ctypes.c_uint64', 'c_uint64', ([], {}), '()\n', (8442, 8444), False, 'from ctypes import c_int32, c_int64, c_ubyte, c_uint64\n'), ((9118, 9128), 'ctypes.c_char_p', 'c_char_p', ([], {}), '()\n', (9126, 9128), False, 'from ctypes import c_bool, c_char_p, c_void_p\n'), ((9923, 9933), 'ctypes.c_char_p', 'c_char_p', ([], {}), '()\n', (9931, 9933), False, 'from ctypes import c_bool, c_char_p, c_void_p\n'), ((10539, 10556), 'ctypes.POINTER', 'POINTER', (['c_char_p'], {}), '(c_char_p)\n', (10546, 10556), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((10913, 10930), 'ctypes.POINTER', 'POINTER', (['c_uint64'], {}), '(c_uint64)\n', (10920, 10930), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((11162, 11179), 'ctypes.POINTER', 'POINTER', (['c_uint64'], {}), '(c_uint64)\n', (11169, 11179), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((11517, 11534), 'ctypes.POINTER', 'POINTER', (['c_uint64'], {}), '(c_uint64)\n', (11524, 11534), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((11770, 11787), 'ctypes.POINTER', 'POINTER', (['c_uint64'], {}), '(c_uint64)\n', (11777, 11787), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((12013, 12030), 'ctypes.POINTER', 'POINTER', (['c_uint64'], {}), '(c_uint64)\n', (12020, 12030), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((13976, 13993), 'ctypes.POINTER', 'POINTER', (['c_char_p'], {}), '(c_char_p)\n', (13983, 13993), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((14096, 14113), 'ctypes.POINTER', 'POINTER', (['c_char_p'], {}), '(c_char_p)\n', (14103, 14113), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((14961, 14978), 'ctypes.POINTER', 'POINTER', (['c_uint64'], {}), '(c_uint64)\n', (14968, 14978), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((15298, 15315), 'ctypes.POINTER', 'POINTER', (['c_uint64'], {}), '(c_uint64)\n', (15305, 15315), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((15352, 15369), 'ctypes.POINTER', 'POINTER', (['c_char_p'], {}), '(c_char_p)\n', (15359, 15369), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((15607, 15624), 'ctypes.POINTER', 'POINTER', (['c_char_p'], {}), '(c_char_p)\n', (15614, 15624), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((2526, 2536), 'ctypes.byref', 'byref', (['err'], {}), '(err)\n', (2531, 2536), False, 'from ctypes import byref, cdll, string_at\n'), ((2761, 2777), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (2768, 2777), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((2901, 2913), 'ctypes.byref', 'byref', (['value'], {}), '(value)\n', (2906, 2913), False, 'from ctypes import byref, cdll, string_at\n'), ((2915, 2928), 'ctypes.byref', 'byref', (['vallen'], {}), '(vallen)\n', (2920, 2928), False, 'from ctypes import byref, cdll, string_at\n'), ((3115, 3131), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (3122, 3131), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((3213, 3225), 'ctypes.byref', 'byref', (['value'], {}), '(value)\n', (3218, 3225), False, 'from ctypes import byref, cdll, string_at\n'), ((3227, 3240), 'ctypes.byref', 'byref', (['vallen'], {}), '(vallen)\n', (3232, 3240), False, 'from ctypes import byref, cdll, string_at\n'), ((3426, 3442), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (3433, 3442), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((3568, 3580), 'ctypes.byref', 'byref', (['value'], {}), '(value)\n', (3573, 3580), False, 'from ctypes import byref, cdll, string_at\n'), ((3582, 3595), 'ctypes.byref', 'byref', (['vallen'], {}), '(vallen)\n', (3587, 3595), False, 'from ctypes import byref, cdll, string_at\n'), ((3810, 3826), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (3817, 3826), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((3956, 3968), 'ctypes.byref', 'byref', (['value'], {}), '(value)\n', (3961, 3968), False, 'from ctypes import byref, cdll, string_at\n'), ((3970, 3983), 'ctypes.byref', 'byref', (['vallen'], {}), '(vallen)\n', (3975, 3983), False, 'from ctypes import byref, cdll, string_at\n'), ((4162, 4178), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (4169, 4178), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((4259, 4271), 'ctypes.byref', 'byref', (['value'], {}), '(value)\n', (4264, 4271), False, 'from ctypes import byref, cdll, string_at\n'), ((4273, 4286), 'ctypes.byref', 'byref', (['vallen'], {}), '(vallen)\n', (4278, 4286), False, 'from ctypes import byref, cdll, string_at\n'), ((4851, 4861), 'ctypes.byref', 'byref', (['err'], {}), '(err)\n', (4856, 4861), False, 'from ctypes import byref, cdll, string_at\n'), ((5272, 5282), 'ctypes.byref', 'byref', (['err'], {}), '(err)\n', (5277, 5282), False, 'from ctypes import byref, cdll, string_at\n'), ((6567, 6583), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (6574, 6583), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((6703, 6715), 'ctypes.byref', 'byref', (['value'], {}), '(value)\n', (6708, 6715), False, 'from ctypes import byref, cdll, string_at\n'), ((6717, 6730), 'ctypes.byref', 'byref', (['vallen'], {}), '(vallen)\n', (6722, 6730), False, 'from ctypes import byref, cdll, string_at\n'), ((8398, 8414), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (8405, 8414), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((8576, 8588), 'ctypes.byref', 'byref', (['value'], {}), '(value)\n', (8581, 8588), False, 'from ctypes import byref, cdll, string_at\n'), ((8590, 8603), 'ctypes.byref', 'byref', (['vallen'], {}), '(vallen)\n', (8595, 8603), False, 'from ctypes import byref, cdll, string_at\n'), ((8605, 8615), 'ctypes.byref', 'byref', (['err'], {}), '(err)\n', (8610, 8615), False, 'from ctypes import byref, cdll, string_at\n'), ((8629, 8647), 'ctypes.c_uint64', 'c_uint64', (['snapshot'], {}), '(snapshot)\n', (8637, 8647), False, 'from ctypes import c_int32, c_int64, c_ubyte, c_uint64\n'), ((9289, 9299), 'ctypes.byref', 'byref', (['err'], {}), '(err)\n', (9294, 9299), False, 'from ctypes import byref, cdll, string_at\n'), ((10031, 10041), 'ctypes.byref', 'byref', (['err'], {}), '(err)\n', (10036, 10041), False, 'from ctypes import byref, cdll, string_at\n'), ((10843, 10859), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (10850, 10859), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((11097, 11113), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (11104, 11113), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((11449, 11465), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (11456, 11465), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((11703, 11719), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (11710, 11719), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((11949, 11965), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (11956, 11965), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((14897, 14913), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (14904, 14913), False, 'from ctypes import CFUNCTYPE, POINTER\n'), ((15244, 15260), 'ctypes.POINTER', 'POINTER', (['c_ubyte'], {}), '(c_ubyte)\n', (15251, 15260), False, 'from ctypes import CFUNCTYPE, POINTER\n')]
#!/usr/bin/env python
import roslib; roslib.load_manifest('tms_ts_smach')
import rospy
import smach
import smach_ros
from smach_ros import ServiceState
from smach import Concurrence
from tms_msg_rp.srv import *
from tms_msg_ts.srv import *
def smc0():
    # Concurrence container: run the random_move (9006) and sensing (9007)
    # rp_cmd calls in parallel; 'succeeded' requires both children to succeed,
    # and the child_termination_cb preempts the rest once either one finishes.
    smc0 = smach.Concurrence(outcomes=['succeeded', 'aborted'],
                             default_outcome='aborted',
                             outcome_map={'succeeded': {'0random_move': 'succeeded', '1sensing': 'succeeded'}},
                             child_termination_cb=lambda arg: True)
with smc0:
smach.Concurrence.add('0random_move',
ServiceState('rp_cmd',
rp_cmd,
request = rp_cmdRequest(9006, True, 2005, [0])))
smach.Concurrence.add('1sensing',
ServiceState('rp_cmd',
rp_cmd,
request = rp_cmdRequest(9007, True, 2005, [0])))
return smc0
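# Illustrative helper (an addition, not wired into the original containers):
# the rp_cmd ServiceState pattern above repeats with only the command id
# changing, so it can be factored like this; robot id 2005 is taken from the
# calls above.
def _make_rp_cmd_state(cmd_id, robot_id=2005):
    return ServiceState('rp_cmd', rp_cmd,
                        request=rp_cmdRequest(cmd_id, True, robot_id, [0]))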
def main():
    rospy.init_node('tms_ts_smach_executive1')
    # Root container: run the concurrence first, then alternate state-control
    # and move service calls in sequence.
    sm_root = smach.StateMachine(['succeeded', 'aborted', 'preempted'])
with sm_root:
smach.StateMachine.add('smc0', smc0(), transitions={'succeeded':'control0'})
smach.StateMachine.add('control0',
ServiceState('ts_state_control',
ts_state_control,
request = ts_state_controlRequest(0, 0, 0, 2, "")),
transitions={'succeeded':'move1', 'aborted':'aborted'})
smach.StateMachine.add('move1',
ServiceState('rp_cmd',
rp_cmd,
request = rp_cmdRequest(9001, True, 2005, [0])),
transitions={'succeeded':'control1'})
smach.StateMachine.add('control1',
ServiceState('ts_state_control',
ts_state_control,
request = ts_state_controlRequest(0, 0, 0, 0, "")),
transitions={'succeeded':'succeeded', 'aborted':'aborted'})
    # Publish the state machine structure for smach_viewer, execute the
    # machine, and report the final outcome before shutting down.
    sis = smach_ros.IntrospectionServer('tms_ts_smach_test', sm_root, '/ROS_TMS')
    sis.start()
    outcome = sm_root.execute()
    rospy.loginfo('sm_root finished with outcome: %s', outcome)
    rospy.spin()
    sis.stop()
if __name__ == '__main__':
main()
| [
"smach_ros.IntrospectionServer",
"rospy.init_node",
"smach.Concurrence",
"smach.StateMachine",
"roslib.load_manifest",
"rospy.spin"
] | [((38, 74), 'roslib.load_manifest', 'roslib.load_manifest', (['"""tms_ts_smach"""'], {}), "('tms_ts_smach')\n", (58, 74), False, 'import roslib\n'), ((267, 477), 'smach.Concurrence', 'smach.Concurrence', ([], {'outcomes': "['succeeded', 'aborted']", 'default_outcome': '"""aborted"""', 'outcome_map': "{'succeeded': {'0random_move': 'succeeded', '1sensing': 'succeeded'}}", 'child_termination_cb': '(lambda arg: True)'}), "(outcomes=['succeeded', 'aborted'], default_outcome=\n 'aborted', outcome_map={'succeeded': {'0random_move': 'succeeded',\n '1sensing': 'succeeded'}}, child_termination_cb=lambda arg: True)\n", (284, 477), False, 'import smach\n'), ((1070, 1112), 'rospy.init_node', 'rospy.init_node', (['"""tms_ts_smach_executive1"""'], {}), "('tms_ts_smach_executive1')\n", (1085, 1112), False, 'import rospy\n'), ((1128, 1185), 'smach.StateMachine', 'smach.StateMachine', (["['succeeded', 'aborted', 'preempted']"], {}), "(['succeeded', 'aborted', 'preempted'])\n", (1146, 1185), False, 'import smach\n'), ((2271, 2342), 'smach_ros.IntrospectionServer', 'smach_ros.IntrospectionServer', (['"""tms_ts_smach_test"""', 'sm_root', '"""/ROS_TMS"""'], {}), "('tms_ts_smach_test', sm_root, '/ROS_TMS')\n", (2300, 2342), False, 'import smach_ros\n'), ((2397, 2409), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (2407, 2409), False, 'import rospy\n')] |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.bigtable_admin_v2.proto import (
bigtable_table_admin_pb2 as google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2,
)
from google.cloud.bigtable_admin_v2.proto import (
table_pb2 as google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2,
)
from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2
from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2
from google.longrunning import (
operations_pb2 as google_dot_longrunning_dot_operations__pb2,
)
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class BigtableTableAdminStub(object):
"""Service for creating, configuring, and deleting Cloud Bigtable tables.
Provides access to the table schemas only, not the data stored within
the tables.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.CreateTable = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/CreateTable",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateTableRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.FromString,
)
self.CreateTableFromSnapshot = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/CreateTableFromSnapshot",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateTableFromSnapshotRequest.SerializeToString,
response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
)
self.ListTables = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/ListTables",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListTablesRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListTablesResponse.FromString,
)
self.GetTable = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/GetTable",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetTableRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.FromString,
)
self.DeleteTable = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/DeleteTable",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteTableRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ModifyColumnFamilies = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/ModifyColumnFamilies",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ModifyColumnFamiliesRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.FromString,
)
self.DropRowRange = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/DropRowRange",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DropRowRangeRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GenerateConsistencyToken = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/GenerateConsistencyToken",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GenerateConsistencyTokenRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GenerateConsistencyTokenResponse.FromString,
)
self.CheckConsistency = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/CheckConsistency",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CheckConsistencyRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CheckConsistencyResponse.FromString,
)
self.SnapshotTable = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/SnapshotTable",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.SnapshotTableRequest.SerializeToString,
response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
)
self.GetSnapshot = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/GetSnapshot",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetSnapshotRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Snapshot.FromString,
)
self.ListSnapshots = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/ListSnapshots",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListSnapshotsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListSnapshotsResponse.FromString,
)
self.DeleteSnapshot = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/DeleteSnapshot",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteSnapshotRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CreateBackup = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/CreateBackup",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateBackupRequest.SerializeToString,
response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
)
self.GetBackup = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/GetBackup",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetBackupRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Backup.FromString,
)
self.UpdateBackup = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/UpdateBackup",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.UpdateBackupRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Backup.FromString,
)
self.DeleteBackup = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/DeleteBackup",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteBackupRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ListBackups = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/ListBackups",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListBackupsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListBackupsResponse.FromString,
)
self.RestoreTable = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/RestoreTable",
request_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.RestoreTableRequest.SerializeToString,
response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
)
self.GetIamPolicy = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/GetIamPolicy",
request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString,
response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
)
self.SetIamPolicy = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/SetIamPolicy",
request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString,
response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
)
self.TestIamPermissions = channel.unary_unary(
"/google.bigtable.admin.v2.BigtableTableAdmin/TestIamPermissions",
request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString,
response_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString,
)
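# Illustrative sketch, not part of the generated file: constructing the stub
# over a secure channel. The endpoint string is an assumption based on the
# public Bigtable admin API; production code normally goes through the
# google-cloud-bigtable client wrappers instead of raw stubs.
def _example_make_stub(channel_credentials):
    channel = grpc.secure_channel(
        "bigtableadmin.googleapis.com:443", channel_credentials
    )
    return BigtableTableAdminStub(channel)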
class BigtableTableAdminServicer(object):
"""Service for creating, configuring, and deleting Cloud Bigtable tables.
Provides access to the table schemas only, not the data stored within
the tables.
"""
def CreateTable(self, request, context):
"""Creates a new table in the specified instance.
The table can be created with a full set of initial column families,
specified in the request.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CreateTableFromSnapshot(self, request, context):
"""Creates a new table from the specified snapshot. The target table must
not exist. The snapshot and the table must be in the same instance.
Note: This is a private alpha release of Cloud Bigtable snapshots. This
feature is not currently available to most Cloud Bigtable customers. This
feature might be changed in backward-incompatible ways and is not
recommended for production use. It is not subject to any SLA or deprecation
policy.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListTables(self, request, context):
"""Lists all tables served from a specified instance.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetTable(self, request, context):
"""Gets metadata information about the specified table.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteTable(self, request, context):
"""Permanently deletes a specified table and all of its data.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ModifyColumnFamilies(self, request, context):
"""Performs a series of column family modifications on the specified table.
Either all or none of the modifications will occur before this method
returns, but data requests received prior to that point may see a table
where only some modifications have taken effect.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DropRowRange(self, request, context):
"""Permanently drop/delete a row range from a specified table. The request can
specify whether to delete all rows in a table, or only those that match a
particular prefix.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GenerateConsistencyToken(self, request, context):
"""Generates a consistency token for a Table, which can be used in
CheckConsistency to check whether mutations to the table that finished
before this call started have been replicated. The tokens will be available
for 90 days.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CheckConsistency(self, request, context):
"""Checks replication consistency based on a consistency token, that is, if
replication has caught up based on the conditions specified in the token
and the check request.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SnapshotTable(self, request, context):
"""Creates a new snapshot in the specified cluster from the specified
source table. The cluster and the table must be in the same instance.
Note: This is a private alpha release of Cloud Bigtable snapshots. This
feature is not currently available to most Cloud Bigtable customers. This
feature might be changed in backward-incompatible ways and is not
recommended for production use. It is not subject to any SLA or deprecation
policy.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetSnapshot(self, request, context):
"""Gets metadata information about the specified snapshot.
Note: This is a private alpha release of Cloud Bigtable snapshots. This
feature is not currently available to most Cloud Bigtable customers. This
feature might be changed in backward-incompatible ways and is not
recommended for production use. It is not subject to any SLA or deprecation
policy.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListSnapshots(self, request, context):
"""Lists all snapshots associated with the specified cluster.
Note: This is a private alpha release of Cloud Bigtable snapshots. This
feature is not currently available to most Cloud Bigtable customers. This
feature might be changed in backward-incompatible ways and is not
recommended for production use. It is not subject to any SLA or deprecation
policy.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteSnapshot(self, request, context):
"""Permanently deletes the specified snapshot.
Note: This is a private alpha release of Cloud Bigtable snapshots. This
feature is not currently available to most Cloud Bigtable customers. This
feature might be changed in backward-incompatible ways and is not
recommended for production use. It is not subject to any SLA or deprecation
policy.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CreateBackup(self, request, context):
"""Starts creating a new Cloud Bigtable Backup. The returned backup
[long-running operation][google.longrunning.Operation] can be used to
track creation of the backup. The
[metadata][google.longrunning.Operation.metadata] field type is
[CreateBackupMetadata][google.bigtable.admin.v2.CreateBackupMetadata]. The
[response][google.longrunning.Operation.response] field type is
[Backup][google.bigtable.admin.v2.Backup], if successful. Cancelling the
returned operation will stop the creation and delete the backup.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetBackup(self, request, context):
"""Gets metadata on a pending or completed Cloud Bigtable Backup.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateBackup(self, request, context):
"""Updates a pending or completed Cloud Bigtable Backup.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteBackup(self, request, context):
"""Deletes a pending or completed Cloud Bigtable backup.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListBackups(self, request, context):
"""Lists Cloud Bigtable backups. Returns both completed and pending
backups.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def RestoreTable(self, request, context):
"""Create a new table by restoring from a completed backup. The new table
must be in the same instance as the instance containing the backup. The
returned table [long-running operation][google.longrunning.Operation] can
be used to track the progress of the operation, and to cancel it. The
[metadata][google.longrunning.Operation.metadata] field type is
[RestoreTableMetadata][google.bigtable.admin.RestoreTableMetadata]. The
[response][google.longrunning.Operation.response] type is
[Table][google.bigtable.admin.v2.Table], if successful.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetIamPolicy(self, request, context):
"""Gets the access control policy for a resource.
Returns an empty policy if the resource exists but does not have a policy
set.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetIamPolicy(self, request, context):
"""Sets the access control policy on a Table or Backup resource.
Replaces any existing policy.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def TestIamPermissions(self, request, context):
"""Returns permissions that the caller has on the specified table resource.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
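# Minimal concrete servicer sketch (illustrative, not generated code):
# override only the RPCs you actually implement; everything inherited from
# the base class keeps answering UNIMPLEMENTED.
class _ExampleTableAdminServicer(BigtableTableAdminServicer):
    def ListTables(self, request, context):
        # Return an empty page; a real implementation would enumerate tables.
        return google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListTablesResponse()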
def add_BigtableTableAdminServicer_to_server(servicer, server):
rpc_method_handlers = {
"CreateTable": grpc.unary_unary_rpc_method_handler(
servicer.CreateTable,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateTableRequest.FromString,
response_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.SerializeToString,
),
"CreateTableFromSnapshot": grpc.unary_unary_rpc_method_handler(
servicer.CreateTableFromSnapshot,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateTableFromSnapshotRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
"ListTables": grpc.unary_unary_rpc_method_handler(
servicer.ListTables,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListTablesRequest.FromString,
response_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListTablesResponse.SerializeToString,
),
"GetTable": grpc.unary_unary_rpc_method_handler(
servicer.GetTable,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetTableRequest.FromString,
response_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.SerializeToString,
),
"DeleteTable": grpc.unary_unary_rpc_method_handler(
servicer.DeleteTable,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteTableRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
"ModifyColumnFamilies": grpc.unary_unary_rpc_method_handler(
servicer.ModifyColumnFamilies,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ModifyColumnFamiliesRequest.FromString,
response_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.SerializeToString,
),
"DropRowRange": grpc.unary_unary_rpc_method_handler(
servicer.DropRowRange,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DropRowRangeRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
"GenerateConsistencyToken": grpc.unary_unary_rpc_method_handler(
servicer.GenerateConsistencyToken,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GenerateConsistencyTokenRequest.FromString,
response_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GenerateConsistencyTokenResponse.SerializeToString,
),
"CheckConsistency": grpc.unary_unary_rpc_method_handler(
servicer.CheckConsistency,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CheckConsistencyRequest.FromString,
response_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CheckConsistencyResponse.SerializeToString,
),
"SnapshotTable": grpc.unary_unary_rpc_method_handler(
servicer.SnapshotTable,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.SnapshotTableRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
"GetSnapshot": grpc.unary_unary_rpc_method_handler(
servicer.GetSnapshot,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetSnapshotRequest.FromString,
response_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Snapshot.SerializeToString,
),
"ListSnapshots": grpc.unary_unary_rpc_method_handler(
servicer.ListSnapshots,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListSnapshotsRequest.FromString,
response_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListSnapshotsResponse.SerializeToString,
),
"DeleteSnapshot": grpc.unary_unary_rpc_method_handler(
servicer.DeleteSnapshot,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteSnapshotRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
"CreateBackup": grpc.unary_unary_rpc_method_handler(
servicer.CreateBackup,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateBackupRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
"GetBackup": grpc.unary_unary_rpc_method_handler(
servicer.GetBackup,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetBackupRequest.FromString,
response_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Backup.SerializeToString,
),
"UpdateBackup": grpc.unary_unary_rpc_method_handler(
servicer.UpdateBackup,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.UpdateBackupRequest.FromString,
response_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Backup.SerializeToString,
),
"DeleteBackup": grpc.unary_unary_rpc_method_handler(
servicer.DeleteBackup,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteBackupRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
"ListBackups": grpc.unary_unary_rpc_method_handler(
servicer.ListBackups,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListBackupsRequest.FromString,
response_serializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListBackupsResponse.SerializeToString,
),
"RestoreTable": grpc.unary_unary_rpc_method_handler(
servicer.RestoreTable,
request_deserializer=google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.RestoreTableRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
"GetIamPolicy": grpc.unary_unary_rpc_method_handler(
servicer.GetIamPolicy,
request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString,
response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString,
),
"SetIamPolicy": grpc.unary_unary_rpc_method_handler(
servicer.SetIamPolicy,
request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString,
response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString,
),
"TestIamPermissions": grpc.unary_unary_rpc_method_handler(
servicer.TestIamPermissions,
request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString,
response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
"google.bigtable.admin.v2.BigtableTableAdmin", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
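# Illustrative hosting sketch (an addition, not generated): wiring a servicer
# into a gRPC server with the registration helper above. Port and worker
# count are arbitrary choices for the example.
def _example_serve(servicer, port=50051):
    from concurrent import futures

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    add_BigtableTableAdminServicer_to_server(servicer, server)
    server.add_insecure_port("[::]:%d" % port)
    server.start()
    return server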
# This class is part of an EXPERIMENTAL API.
class BigtableTableAdmin(object):
"""Service for creating, configuring, and deleting Cloud Bigtable tables.
Provides access to the table schemas only, not the data stored within
the tables.
"""
@staticmethod
def CreateTable(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/CreateTable",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateTableRequest.SerializeToString,
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def CreateTableFromSnapshot(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/CreateTableFromSnapshot",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateTableFromSnapshotRequest.SerializeToString,
google_dot_longrunning_dot_operations__pb2.Operation.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def ListTables(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/ListTables",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListTablesRequest.SerializeToString,
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListTablesResponse.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def GetTable(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/GetTable",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetTableRequest.SerializeToString,
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def DeleteTable(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/DeleteTable",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteTableRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def ModifyColumnFamilies(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/ModifyColumnFamilies",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ModifyColumnFamiliesRequest.SerializeToString,
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def DropRowRange(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/DropRowRange",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DropRowRangeRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def GenerateConsistencyToken(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/GenerateConsistencyToken",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GenerateConsistencyTokenRequest.SerializeToString,
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GenerateConsistencyTokenResponse.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def CheckConsistency(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/CheckConsistency",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CheckConsistencyRequest.SerializeToString,
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CheckConsistencyResponse.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def SnapshotTable(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/SnapshotTable",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.SnapshotTableRequest.SerializeToString,
google_dot_longrunning_dot_operations__pb2.Operation.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def GetSnapshot(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/GetSnapshot",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetSnapshotRequest.SerializeToString,
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Snapshot.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def ListSnapshots(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/ListSnapshots",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListSnapshotsRequest.SerializeToString,
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListSnapshotsResponse.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def DeleteSnapshot(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/DeleteSnapshot",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteSnapshotRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def CreateBackup(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/CreateBackup",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateBackupRequest.SerializeToString,
google_dot_longrunning_dot_operations__pb2.Operation.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def GetBackup(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/GetBackup",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetBackupRequest.SerializeToString,
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Backup.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def UpdateBackup(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/UpdateBackup",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.UpdateBackupRequest.SerializeToString,
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Backup.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def DeleteBackup(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/DeleteBackup",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteBackupRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def ListBackups(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/ListBackups",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListBackupsRequest.SerializeToString,
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListBackupsResponse.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def RestoreTable(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/RestoreTable",
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.RestoreTableRequest.SerializeToString,
google_dot_longrunning_dot_operations__pb2.Operation.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def GetIamPolicy(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/GetIamPolicy",
google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString,
google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def SetIamPolicy(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/SetIamPolicy",
google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString,
google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def TestIamPermissions(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.bigtable.admin.v2.BigtableTableAdmin/TestIamPermissions",
google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString,
google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
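# Illustrative one-shot call through the EXPERIMENTAL convenience class
# above. The target host is an assumption, and channel_credentials would
# normally carry TLS/auth material rather than the None default.
def _example_get_table(table_name, channel_credentials=None):
    request = google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetTableRequest(
        name=table_name
    )
    return BigtableTableAdmin.GetTable(
        request,
        "bigtableadmin.googleapis.com:443",
        channel_credentials=channel_credentials,
    )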
| [
"grpc.method_handlers_generic_handler",
"grpc.experimental.unary_unary",
"grpc.unary_unary_rpc_method_handler"
] | [((28917, 29026), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""google.bigtable.admin.v2.BigtableTableAdmin"""', 'rpc_method_handlers'], {}), "(\n 'google.bigtable.admin.v2.BigtableTableAdmin', rpc_method_handlers)\n", (28953, 29026), False, 'import grpc\n'), ((20697, 21024), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.CreateTable'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateTableRequest.FromString', 'response_serializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.SerializeToString'}), '(servicer.CreateTable,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .CreateTableRequest.FromString, response_serializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table\n .SerializeToString)\n', (20732, 21024), False, 'import grpc\n'), ((21084, 21411), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.CreateTableFromSnapshot'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateTableFromSnapshotRequest.FromString', 'response_serializer': 'google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString'}), '(servicer.CreateTableFromSnapshot,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .CreateTableFromSnapshotRequest.FromString, response_serializer=\n google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString)\n', (21119, 21411), False, 'import grpc\n'), ((21463, 21818), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.ListTables'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListTablesRequest.FromString', 'response_serializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListTablesResponse.SerializeToString'}), '(servicer.ListTables,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListTablesRequest.FromString, response_serializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListTablesResponse.SerializeToString)\n', (21498, 21818), False, 'import grpc\n'), ((21863, 22185), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetTable'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetTableRequest.FromString', 'response_serializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.SerializeToString'}), '(servicer.GetTable, request_deserializer\n =\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .GetTableRequest.FromString, response_serializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table\n .SerializeToString)\n', (21898, 22185), False, 'import grpc\n'), ((22232, 22523), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DeleteTable'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteTableRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.DeleteTable,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .DeleteTableRequest.FromString, response_serializer=\n google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString)\n', (22267, 22523), False, 'import grpc\n'), ((22585, 22930), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.ModifyColumnFamilies'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ModifyColumnFamiliesRequest.FromString', 'response_serializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.SerializeToString'}), '(servicer.ModifyColumnFamilies,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ModifyColumnFamiliesRequest.FromString, response_serializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table\n .SerializeToString)\n', (22620, 22930), False, 'import grpc\n'), ((22979, 23272), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DropRowRange'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DropRowRangeRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.DropRowRange,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .DropRowRangeRequest.FromString, response_serializer=\n google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString)\n', (23014, 23272), False, 'import grpc\n'), ((23338, 23735), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GenerateConsistencyToken'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GenerateConsistencyTokenRequest.FromString', 'response_serializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GenerateConsistencyTokenResponse.SerializeToString'}), '(servicer.GenerateConsistencyToken,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .GenerateConsistencyTokenRequest.FromString, response_serializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .GenerateConsistencyTokenResponse.SerializeToString)\n', (23373, 23735), False, 'import grpc\n'), ((23788, 24161), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.CheckConsistency'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CheckConsistencyRequest.FromString', 'response_serializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CheckConsistencyResponse.SerializeToString'}), '(servicer.CheckConsistency,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .CheckConsistencyRequest.FromString, response_serializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .CheckConsistencyResponse.SerializeToString)\n', (23823, 24161), False, 'import grpc\n'), ((24211, 24518), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SnapshotTable'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.SnapshotTableRequest.FromString', 'response_serializer': 'google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString'}), '(servicer.SnapshotTable,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .SnapshotTableRequest.FromString, response_serializer=\n google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString)\n', (24246, 24518), False, 'import grpc\n'), ((24571, 24901), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetSnapshot'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetSnapshotRequest.FromString', 'response_serializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Snapshot.SerializeToString'}), '(servicer.GetSnapshot,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .GetSnapshotRequest.FromString, response_serializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.\n Snapshot.SerializeToString)\n', (24606, 24901), False, 'import grpc\n'), ((24951, 25315), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.ListSnapshots'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListSnapshotsRequest.FromString', 'response_serializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListSnapshotsResponse.SerializeToString'}), '(servicer.ListSnapshots,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListSnapshotsRequest.FromString, response_serializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListSnapshotsResponse.SerializeToString)\n', (24986, 25315), False, 'import grpc\n'), ((25366, 25663), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DeleteSnapshot'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteSnapshotRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.DeleteSnapshot,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .DeleteSnapshotRequest.FromString, response_serializer=\n google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString)\n', (25401, 25663), False, 'import grpc\n'), ((25717, 26022), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.CreateBackup'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateBackupRequest.FromString', 'response_serializer': 'google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString'}), '(servicer.CreateBackup,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .CreateBackupRequest.FromString, response_serializer=\n google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString)\n', (25752, 26022), False, 'import grpc\n'), ((26073, 26397), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetBackup'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetBackupRequest.FromString', 'response_serializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Backup.SerializeToString'}), '(servicer.GetBackup,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .GetBackupRequest.FromString, response_serializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.\n Backup.SerializeToString)\n', (26108, 26397), False, 'import grpc\n'), ((26446, 26776), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.UpdateBackup'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.UpdateBackupRequest.FromString', 'response_serializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Backup.SerializeToString'}), '(servicer.UpdateBackup,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .UpdateBackupRequest.FromString, response_serializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.\n Backup.SerializeToString)\n', (26481, 26776), False, 'import grpc\n'), ((26825, 27118), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DeleteBackup'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteBackupRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.DeleteBackup,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .DeleteBackupRequest.FromString, response_serializer=\n google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString)\n', (26860, 27118), False, 'import grpc\n'), ((27171, 27529), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.ListBackups'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListBackupsRequest.FromString', 'response_serializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListBackupsResponse.SerializeToString'}), '(servicer.ListBackups,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListBackupsRequest.FromString, response_serializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListBackupsResponse.SerializeToString)\n', (27206, 27529), False, 'import grpc\n'), ((27578, 27883), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.RestoreTable'], {'request_deserializer': 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.RestoreTableRequest.FromString', 'response_serializer': 'google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString'}), '(servicer.RestoreTable,\n request_deserializer=\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .RestoreTableRequest.FromString, response_serializer=\n google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString)\n', (27613, 27883), False, 'import grpc\n'), ((27937, 28189), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetIamPolicy'], {'request_deserializer': 'google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString', 'response_serializer': 'google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString'}), '(servicer.GetIamPolicy,\n request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.\n GetIamPolicyRequest.FromString, response_serializer=\n google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString)\n', (27972, 28189), False, 'import grpc\n'), ((28248, 28500), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SetIamPolicy'], {'request_deserializer': 'google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString', 'response_serializer': 'google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString'}), '(servicer.SetIamPolicy,\n request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.\n SetIamPolicyRequest.FromString, response_serializer=\n google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString)\n', (28283, 28500), False, 'import grpc\n'), ((28565, 28859), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.TestIamPermissions'], {'request_deserializer': 'google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString', 'response_serializer': 'google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString'}), '(servicer.TestIamPermissions,\n request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.\n TestIamPermissionsRequest.FromString, response_serializer=\n google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.\n SerializeToString)\n', (28600, 28859), False, 'import grpc\n'), ((29631, 30064), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/CreateTable"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateTableRequest.SerializeToString', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/CreateTable',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .CreateTableRequest.SerializeToString,\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table\n .FromString, options, channel_credentials, call_credentials,\n compression, wait_for_ready, timeout, metadata)\n", (29660, 30064), False, 'import grpc\n'), ((30486, 30923), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/CreateTableFromSnapshot"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateTableFromSnapshotRequest.SerializeToString', 'google_dot_longrunning_dot_operations__pb2.Operation.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/CreateTableFromSnapshot',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .CreateTableFromSnapshotRequest.SerializeToString,\n google_dot_longrunning_dot_operations__pb2.Operation.FromString,\n options, channel_credentials, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (30515, 30923), False, 'import grpc\n'), ((31333, 31794), 
'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/ListTables"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListTablesRequest.SerializeToString', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListTablesResponse.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/ListTables',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListTablesRequest.SerializeToString,\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListTablesResponse.FromString, options, channel_credentials,\n call_credentials, compression, wait_for_ready, timeout, metadata)\n", (31362, 31794), False, 'import grpc\n'), ((32201, 32628), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/GetTable"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetTableRequest.SerializeToString', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/GetTable',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .GetTableRequest.SerializeToString,\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table\n .FromString, options, channel_credentials, call_credentials,\n compression, wait_for_ready, timeout, metadata)\n", (32230, 32628), False, 'import grpc\n'), ((33038, 33439), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/DeleteTable"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteTableRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/DeleteTable',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .DeleteTableRequest.SerializeToString,\n google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n channel_credentials, call_credentials, compression, wait_for_ready,\n timeout, metadata)\n", (33067, 33439), False, 'import grpc\n'), ((33859, 34310), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/ModifyColumnFamilies"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ModifyColumnFamiliesRequest.SerializeToString', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/ModifyColumnFamilies',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ModifyColumnFamiliesRequest.SerializeToString,\n 
google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Table\n .FromString, options, channel_credentials, call_credentials,\n compression, wait_for_ready, timeout, metadata)\n", (33888, 34310), False, 'import grpc\n'), ((34721, 35124), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/DropRowRange"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DropRowRangeRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/DropRowRange',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .DropRowRangeRequest.SerializeToString,\n google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n channel_credentials, call_credentials, compression, wait_for_ready,\n timeout, metadata)\n", (34750, 35124), False, 'import grpc\n'), ((35548, 36055), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/GenerateConsistencyToken"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GenerateConsistencyTokenRequest.SerializeToString', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GenerateConsistencyTokenResponse.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/GenerateConsistencyToken',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .GenerateConsistencyTokenRequest.SerializeToString,\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .GenerateConsistencyTokenResponse.FromString, options,\n channel_credentials, call_credentials, compression, wait_for_ready,\n timeout, metadata)\n", (35577, 36055), False, 'import grpc\n'), ((36466, 36945), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/CheckConsistency"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CheckConsistencyRequest.SerializeToString', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CheckConsistencyResponse.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/CheckConsistency',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .CheckConsistencyRequest.SerializeToString,\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .CheckConsistencyResponse.FromString, options, channel_credentials,\n call_credentials, compression, wait_for_ready, timeout, metadata)\n", (36495, 36945), False, 'import grpc\n'), ((37357, 37774), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/SnapshotTable"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.SnapshotTableRequest.SerializeToString', 
'google_dot_longrunning_dot_operations__pb2.Operation.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/SnapshotTable',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .SnapshotTableRequest.SerializeToString,\n google_dot_longrunning_dot_operations__pb2.Operation.FromString,\n options, channel_credentials, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (37386, 37774), False, 'import grpc\n'), ((38185, 38621), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/GetSnapshot"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetSnapshotRequest.SerializeToString', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Snapshot.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/GetSnapshot',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .GetSnapshotRequest.SerializeToString,\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.\n Snapshot.FromString, options, channel_credentials, call_credentials,\n compression, wait_for_ready, timeout, metadata)\n", (38214, 38621), False, 'import grpc\n'), ((39033, 39503), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/ListSnapshots"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListSnapshotsRequest.SerializeToString', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListSnapshotsResponse.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/ListSnapshots',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListSnapshotsRequest.SerializeToString,\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListSnapshotsResponse.FromString, options, channel_credentials,\n call_credentials, compression, wait_for_ready, timeout, metadata)\n", (39062, 39503), False, 'import grpc\n'), ((39916, 40323), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/DeleteSnapshot"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteSnapshotRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/DeleteSnapshot',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .DeleteSnapshotRequest.SerializeToString,\n google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n channel_credentials, call_credentials, compression, wait_for_ready,\n timeout, metadata)\n", (39945, 40323), False, 'import grpc\n'), ((40735, 41150), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', 
'"""/google.bigtable.admin.v2.BigtableTableAdmin/CreateBackup"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.CreateBackupRequest.SerializeToString', 'google_dot_longrunning_dot_operations__pb2.Operation.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/CreateBackup',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .CreateBackupRequest.SerializeToString,\n google_dot_longrunning_dot_operations__pb2.Operation.FromString,\n options, channel_credentials, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (40764, 41150), False, 'import grpc\n'), ((41559, 41989), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/GetBackup"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.GetBackupRequest.SerializeToString', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Backup.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/GetBackup',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .GetBackupRequest.SerializeToString,\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.\n Backup.FromString, options, channel_credentials, call_credentials,\n compression, wait_for_ready, timeout, metadata)\n", (41588, 41989), False, 'import grpc\n'), ((42400, 42836), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/UpdateBackup"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.UpdateBackupRequest.SerializeToString', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.Backup.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/UpdateBackup',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .UpdateBackupRequest.SerializeToString,\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_table__pb2.\n Backup.FromString, options, channel_credentials, call_credentials,\n compression, wait_for_ready, timeout, metadata)\n", (42429, 42836), False, 'import grpc\n'), ((43247, 43650), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/DeleteBackup"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.DeleteBackupRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/DeleteBackup',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .DeleteBackupRequest.SerializeToString,\n google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n channel_credentials, call_credentials, compression, wait_for_ready,\n timeout, metadata)\n", (43276, 43650), False, 'import grpc\n'), ((44061, 
44525), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/ListBackups"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListBackupsRequest.SerializeToString', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.ListBackupsResponse.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/ListBackups',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListBackupsRequest.SerializeToString,\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .ListBackupsResponse.FromString, options, channel_credentials,\n call_credentials, compression, wait_for_ready, timeout, metadata)\n", (44090, 44525), False, 'import grpc\n'), ((44936, 45351), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/RestoreTable"""', 'google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2.RestoreTableRequest.SerializeToString', 'google_dot_longrunning_dot_operations__pb2.Operation.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/RestoreTable',\n google_dot_cloud_dot_bigtable__admin__v2_dot_proto_dot_bigtable__table__admin__pb2\n .RestoreTableRequest.SerializeToString,\n google_dot_longrunning_dot_operations__pb2.Operation.FromString,\n options, channel_credentials, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (44965, 45351), False, 'import grpc\n'), ((45763, 46127), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/GetIamPolicy"""', 'google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString', 'google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/GetIamPolicy',\n google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.\n SerializeToString, google_dot_iam_dot_v1_dot_policy__pb2.Policy.\n FromString, options, channel_credentials, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (45792, 46127), False, 'import grpc\n'), ((46542, 46906), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/SetIamPolicy"""', 'google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString', 'google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/SetIamPolicy',\n google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.\n SerializeToString, google_dot_iam_dot_v1_dot_policy__pb2.Policy.\n FromString, options, channel_credentials, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (46571, 46906), False, 'import grpc\n'), ((47327, 47728), 'grpc.experimental.unary_unary', 
'grpc.experimental.unary_unary', (['request', 'target', '"""/google.bigtable.admin.v2.BigtableTableAdmin/TestIamPermissions"""', 'google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString', 'google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString', 'options', 'channel_credentials', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/google.bigtable.admin.v2.BigtableTableAdmin/TestIamPermissions',\n google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.\n SerializeToString, google_dot_iam_dot_v1_dot_iam__policy__pb2.\n TestIamPermissionsResponse.FromString, options, channel_credentials,\n call_credentials, compression, wait_for_ready, timeout, metadata)\n", (47356, 47728), False, 'import grpc\n')] |
# coding=utf-8
# Crawler that scrapes the school's official website homepage
import requests
from bs4 import BeautifulSoup
import os
import lxml  # ensures the "lxml" parser requested below is actually installed
# Save data to a file, creating parent directories as needed
def file_save(data, path):
if not os.path.exists(os.path.split(path)[0]):
os.makedirs(os.path.split(path)[0])
try:
with open(path, 'wb') as f:
f.write(data.encode('utf-8'))
            print('Save complete')
    except Exception as ex:
        print('Save failed:', ex)
def url_open(url):
    # Spoof browser request headers so the site serves us the normal page
headers = {
'Accept-Encoding': 'gzip, deflate, sdch',
'Accept-Language': 'zh-CN,zh;q=0.8,en;q=0.6',
'Cache-Control': 'max-age=0',
'Connection': 'keep-alive',
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'
}
    r = requests.get(url, headers=headers)  # headers must be a keyword argument; passed positionally it would be sent as query params
_data = r.text
return _data
url = "http://www.yvtc.edu.cn"
data = url_open(url)
soup = BeautifulSoup(data, "lxml")
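# CSS attribute selector: matches every <a> whose href starts with "/news/show",
# i.e. the links to individual news articles on the homepage.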
tag = soup.select('a[href^="/news/show"]')
s = ""
for item in tag:
    s += item.get("href", "") + "," + (item.get("title") or "") + "\n"  # tolerate anchors without a title attribute
print(s)
file_save(s, r"d:\1.txt") | [
"bs4.BeautifulSoup",
"requests.get",
"os.path.split"
] | [((963, 990), 'bs4.BeautifulSoup', 'BeautifulSoup', (['data', '"""lxml"""'], {}), "(data, 'lxml')\n", (976, 990), False, 'from bs4 import BeautifulSoup\n'), ((839, 865), 'requests.get', 'requests.get', (['url', 'headers'], {}), '(url, headers)\n', (851, 865), False, 'import requests\n'), ((188, 207), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (201, 207), False, 'import os\n'), ((233, 252), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (246, 252), False, 'import os\n')] |
"""Setup module for model_ensembler."""
import setuptools
from setuptools import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="model-ensembler",
version="0.5.2",
author="<NAME>",
author_email="<EMAIL>",
description="Model Ensemble for batch workflows on HPCs",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://www.github.com/jimcircadian/model-ensembler",
project_urls={
"Bug Tracker": "https://github.com/jimcircadian/model-ensembler/issues",
},
packages=setuptools.find_packages(),
keywords='slurm, hpc, tools, batch, model, ensemble',
classifiers=[
"Environment :: Console",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Intended Audience :: Science/Research",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Development Status :: 3 - Alpha",
"Topic :: System :: Distributed Computing",
],
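    # Console-script entry point: installing the package creates a
    # `model_ensemble` command that dispatches to model_ensembler.cli:main.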
entry_points={
'console_scripts': [
'model_ensemble=model_ensembler.cli:main',
],
},
python_requires='>=3.7, <4',
install_requires=[
"jinja2",
"jsonschema",
"pyyaml",
],
include_package_data=True,
)
| [
"setuptools.find_packages"
] | [((600, 626), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (624, 626), False, 'import setuptools\n')] |
# Generated by Django 4.0 on 2022-01-13 10:17
import uuid
import ckeditor_uploader.fields
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('funuser', '0004_alter_funuser_avatar'),
('auth', '0012_alter_user_first_name_max_length'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
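    # Creates the Fnotification model: a UUID-keyed notification with rich-text
    # content, optional per-group targeting, and a many-to-many "readers" set.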
operations = [
migrations.CreateModel(
name='Fnotification',
fields=[
('id',
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
unique=True)),
('title',
models.CharField(
max_length=64,
verbose_name='Title')),
('content',
ckeditor_uploader.fields.RichTextUploadingField(
max_length=2048,
verbose_name='Content')),
('additional_files',
models.FileField(
help_text='If you have more than one file, please package them and upload them.',
upload_to='',
verbose_name='Additional files')),
('DOC',
models.DateTimeField(
auto_now_add=True,
verbose_name='Date of creating')),
('DOU',
models.DateTimeField(
auto_now=True,
verbose_name='Date of updating')),
('comment',
models.TextField(
max_length=128,
verbose_name='Comment')),
('groups',
models.ManyToManyField(
blank=True,
help_text='The groups this notification belongs to. all user of specific groups will receive notification. for all users if groups is null',
related_name='notification_set',
related_query_name='notification',
to='auth.Group',
verbose_name='groups')),
('poster',
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to='funuser.funuser',
verbose_name='Author')),
('readers',
models.ManyToManyField(
blank=True,
related_name='reader_set',
related_query_name='reader',
to=settings.AUTH_USER_MODEL,
verbose_name='Reader')),
],
options={
'verbose_name': 'Notification',
'verbose_name_plural': 'Notifications',
},
),
]
| [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.FileField",
"django.db.models.CharField",
"django.db.models.DateTimeField",
"django.db.migrations.swappable_dependency",
"django.db.models.UUIDField"
] | [((399, 456), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (430, 456), False, 'from django.db import migrations, models\n'), ((611, 715), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)', 'unique': '(True)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False, unique=True)\n', (627, 715), False, 'from django.db import migrations, models\n'), ((863, 916), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'verbose_name': '"""Title"""'}), "(max_length=64, verbose_name='Title')\n", (879, 916), False, 'from django.db import migrations, models\n'), ((1195, 1349), 'django.db.models.FileField', 'models.FileField', ([], {'help_text': '"""If you have more than one file, please package them and upload them."""', 'upload_to': '""""""', 'verbose_name': '"""Additional files"""'}), "(help_text=\n 'If you have more than one file, please package them and upload them.',\n upload_to='', verbose_name='Additional files')\n", (1211, 1349), False, 'from django.db import migrations, models\n'), ((1448, 1520), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Date of creating"""'}), "(auto_now_add=True, verbose_name='Date of creating')\n", (1468, 1520), False, 'from django.db import migrations, models\n'), ((1607, 1675), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Date of updating"""'}), "(auto_now=True, verbose_name='Date of updating')\n", (1627, 1675), False, 'from django.db import migrations, models\n'), ((1766, 1822), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'verbose_name': '"""Comment"""'}), "(max_length=128, verbose_name='Comment')\n", (1782, 1822), False, 'from django.db import migrations, models\n'), ((1912, 2209), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'help_text': '"""The groups this notification belongs to. all user of specific groups will receive notification. for all users if groups is null"""', 'related_name': '"""notification_set"""', 'related_query_name': '"""notification"""', 'to': '"""auth.Group"""', 'verbose_name': '"""groups"""'}), "(blank=True, help_text=\n 'The groups this notification belongs to. all user of specific groups will receive notification. 
for all users if groups is null'\n , related_name='notification_set', related_query_name='notification',\n to='auth.Group', verbose_name='groups')\n", (1934, 2209), False, 'from django.db import migrations, models\n'), ((2369, 2481), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""funuser.funuser"""', 'verbose_name': '"""Author"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'funuser.funuser', verbose_name='Author')\n", (2386, 2481), False, 'from django.db import migrations, models\n'), ((2588, 2739), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'related_name': '"""reader_set"""', 'related_query_name': '"""reader"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Reader"""'}), "(blank=True, related_name='reader_set',\n related_query_name='reader', to=settings.AUTH_USER_MODEL, verbose_name=\n 'Reader')\n", (2610, 2739), False, 'from django.db import migrations, models\n')] |
"""
`bq25883`
====================================================
CircuitPython driver for the BQ25883 2-cell USB boost-mode charger.
* Author(s): <NAME>
Implementation Notes
--------------------
"""
from micropython import const
from adafruit_bus_device.i2c_device import I2CDevice
from adafruit_register.i2c_bits import ROBits, RWBits
from adafruit_register.i2c_bit import RWBit
# Registers
_BATV_LIM = const(0x00)
_CHRGI_LIM = const(0x01)
_VIN_LIM = const(0x02)
_IIN_LIM = const(0x03)
_TERM_CTRL = const(0x04)
_CHRGR_CRTL1 = const(0x05)
_CHRGR_CRTL2 = const(0x06)
_CHRGR_CRTL3 = const(0x07)
_CHRGR_CRTL4 = const(0x08)
_OTG_CTRL = const(0x09)
_ICO_LIM = const(0x0A)
_CHRG_STAT1 = const(0x0B)
_CHRG_STAT2 = const(0x0C)
_NTC_STAT = const(0x0D)
_FAULT_STAT = const(0x0E)
_CHRGR_FLAG1 = const(0x0F)
_CHRGR_FLAG2 = const(0x10)
_FAULT_FLAG = const(0x11)
_CHRGR_MSK1 = const(0x12)
_CHRGR_MSK2 = const(0x13)
_FAULT_MSK = const(0x14)
_ADC_CTRL = const(0x15)
_ADC_FN_CTRL = const(0x16)
_IBUS_ADC1 = const(0x17)
_IBUS_ADC0 = const(0x18)
_ICHG_ADC1 = const(0x19)
_ICHG_ADC0 = const(0x1A)
_VBUS_ADC1 = const(0x1B)
_VBUS_ADC0 = const(0x1C)
_VBAT_ADC1 = const(0x1D)
_VBAT_ADC0 = const(0x1E)
_VSYS_ADC1 = const(0x1F)
_VSYS_ADC0 = const(0x20)
_TS_ADC1 = const(0x21)
_TS_ADC0 = const(0x22)
_TDIE_ADC1 = const(0x23)
_TDIE_ADC0 = const(0x24)
_PART_INFO = const(0x25)
# Convert a raw 16-bit register value to a signed int (two's complement)
def _to_signed(num):
if num > 0x7FFF:
num -= 0x10000
return num
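# Usage sketch (a minimal example, not part of the original source: assumes a
# CircuitPython board exposing its I2C bus as board.I2C(); 0x6B is this
# driver's default address):
#   import board
#   charger = BQ25883(board.I2C())
#   charger.charging = True   # enable charging
#   charger.status             # print the raw status registers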
class BQ25883:
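    # Register bit-field descriptors (adafruit_register): each attribute below
    # reads or writes a slice of bits in one of the charger's I2C registers.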
    _pn = ROBits(4, _PART_INFO, 3, 1, False)
    _fault_status = ROBits(8, _FAULT_STAT, 0, 1, False)
    _chrgr_status1 = ROBits(8, _CHRG_STAT1, 0, 1, False)
    _chrgr_status2 = ROBits(8, _CHRG_STAT2, 0, 1, False)
    _chrg_status = ROBits(3, _CHRG_STAT1, 0, 1, False)
    _otg_ctrl = ROBits(8, _OTG_CTRL, 0, 1, False)
    _chrg_ctrl2 = ROBits(8, _CHRGR_CRTL2, 0, 1, False)
    _wdt = RWBits(2, _CHRGR_CRTL1, 4, 1, False)
    _ntc_stat = RWBits(3, _NTC_STAT, 0, 1, False)
    _pfm_dis = RWBit(_CHRGR_CRTL3, 7, 1, False)
    _en_chrg = RWBit(_CHRGR_CRTL2, 3, 1, False)
    _reg_rst = RWBit(_PART_INFO, 7, 1, False)
    _stat_dis = RWBit(_CHRGR_CRTL1, 6, 1, False)
    _inlim = RWBit(_CHRGI_LIM, 6, 1, False)
def __init__(self, i2c_bus, addr=0x6B):
self.i2c_device = I2CDevice(i2c_bus, addr)
self.i2c_addr = addr
assert self._pn == 3, "Unable to find BQ25883"
@property
def status(self):
        print('Fault:', bin(self._fault_status))
        print('Charger Status 1:', bin(self._chrgr_status1))
        print('Charger Status 2:', bin(self._chrgr_status2))
        print('Charge Status:', bin(self._chrg_status))
        print('Charge Control2:', bin(self._chrg_ctrl2))
        print('NTC Status:', bin(self._ntc_stat))
        print('OTG:', hex(self._otg_ctrl))
@property
def charging(self):
        print('Charge Control2:', bin(self._chrg_ctrl2))
@charging.setter
    def charging(self, value):
        assert isinstance(value, bool)
        self._en_chrg = value
@property
def wdt(self):
        print('Watchdog Timer:', bin(self._wdt))
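    # The setter below treats a falsy value as "disable": it writes 0 into the
    # two watchdog-timer bits of CHRGR_CTRL1.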
@wdt.setter
    def wdt(self, value):
        if not value:
            self._wdt = 0
        else:
            self._wdt = value
@property
def led(self):
        print('Status LED:', bin(self._stat_dis))
@led.setter
    def led(self, value):
        assert isinstance(value, bool)
        self._stat_dis = not value | [
"adafruit_register.i2c_bits.RWBits",
"adafruit_register.i2c_bit.RWBit",
"adafruit_bus_device.i2c_device.I2CDevice",
"micropython.const",
"adafruit_register.i2c_bits.ROBits"
] | [((517, 525), 'micropython.const', 'const', (['(0)'], {}), '(0)\n', (522, 525), False, 'from micropython import const\n'), ((548, 556), 'micropython.const', 'const', (['(1)'], {}), '(1)\n', (553, 556), False, 'from micropython import const\n'), ((580, 588), 'micropython.const', 'const', (['(2)'], {}), '(2)\n', (585, 588), False, 'from micropython import const\n'), ((612, 620), 'micropython.const', 'const', (['(3)'], {}), '(3)\n', (617, 620), False, 'from micropython import const\n'), ((644, 652), 'micropython.const', 'const', (['(4)'], {}), '(4)\n', (649, 652), False, 'from micropython import const\n'), ((676, 684), 'micropython.const', 'const', (['(5)'], {}), '(5)\n', (681, 684), False, 'from micropython import const\n'), ((708, 716), 'micropython.const', 'const', (['(6)'], {}), '(6)\n', (713, 716), False, 'from micropython import const\n'), ((740, 748), 'micropython.const', 'const', (['(7)'], {}), '(7)\n', (745, 748), False, 'from micropython import const\n'), ((772, 780), 'micropython.const', 'const', (['(8)'], {}), '(8)\n', (777, 780), False, 'from micropython import const\n'), ((804, 812), 'micropython.const', 'const', (['(9)'], {}), '(9)\n', (809, 812), False, 'from micropython import const\n'), ((836, 845), 'micropython.const', 'const', (['(10)'], {}), '(10)\n', (841, 845), False, 'from micropython import const\n'), ((868, 877), 'micropython.const', 'const', (['(11)'], {}), '(11)\n', (873, 877), False, 'from micropython import const\n'), ((900, 909), 'micropython.const', 'const', (['(12)'], {}), '(12)\n', (905, 909), False, 'from micropython import const\n'), ((932, 941), 'micropython.const', 'const', (['(13)'], {}), '(13)\n', (937, 941), False, 'from micropython import const\n'), ((964, 973), 'micropython.const', 'const', (['(14)'], {}), '(14)\n', (969, 973), False, 'from micropython import const\n'), ((995, 1004), 'micropython.const', 'const', (['(15)'], {}), '(15)\n', (1000, 1004), False, 'from micropython import const\n'), ((1027, 1036), 'micropython.const', 'const', (['(16)'], {}), '(16)\n', (1032, 1036), False, 'from micropython import const\n'), ((1059, 1068), 'micropython.const', 'const', (['(17)'], {}), '(17)\n', (1064, 1068), False, 'from micropython import const\n'), ((1091, 1100), 'micropython.const', 'const', (['(18)'], {}), '(18)\n', (1096, 1100), False, 'from micropython import const\n'), ((1123, 1132), 'micropython.const', 'const', (['(19)'], {}), '(19)\n', (1128, 1132), False, 'from micropython import const\n'), ((1155, 1164), 'micropython.const', 'const', (['(20)'], {}), '(20)\n', (1160, 1164), False, 'from micropython import const\n'), ((1187, 1196), 'micropython.const', 'const', (['(21)'], {}), '(21)\n', (1192, 1196), False, 'from micropython import const\n'), ((1218, 1227), 'micropython.const', 'const', (['(22)'], {}), '(22)\n', (1223, 1227), False, 'from micropython import const\n'), ((1250, 1259), 'micropython.const', 'const', (['(23)'], {}), '(23)\n', (1255, 1259), False, 'from micropython import const\n'), ((1282, 1291), 'micropython.const', 'const', (['(24)'], {}), '(24)\n', (1287, 1291), False, 'from micropython import const\n'), ((1314, 1323), 'micropython.const', 'const', (['(25)'], {}), '(25)\n', (1319, 1323), False, 'from micropython import const\n'), ((1346, 1355), 'micropython.const', 'const', (['(26)'], {}), '(26)\n', (1351, 1355), False, 'from micropython import const\n'), ((1378, 1387), 'micropython.const', 'const', (['(27)'], {}), '(27)\n', (1383, 1387), False, 'from micropython import const\n'), ((1410, 1419), 'micropython.const', 'const', 
(['(28)'], {}), '(28)\n', (1415, 1419), False, 'from micropython import const\n'), ((1441, 1450), 'micropython.const', 'const', (['(29)'], {}), '(29)\n', (1446, 1450), False, 'from micropython import const\n'), ((1473, 1482), 'micropython.const', 'const', (['(30)'], {}), '(30)\n', (1478, 1482), False, 'from micropython import const\n'), ((1505, 1514), 'micropython.const', 'const', (['(31)'], {}), '(31)\n', (1510, 1514), False, 'from micropython import const\n'), ((1537, 1546), 'micropython.const', 'const', (['(32)'], {}), '(32)\n', (1542, 1546), False, 'from micropython import const\n'), ((1569, 1578), 'micropython.const', 'const', (['(33)'], {}), '(33)\n', (1574, 1578), False, 'from micropython import const\n'), ((1601, 1610), 'micropython.const', 'const', (['(34)'], {}), '(34)\n', (1606, 1610), False, 'from micropython import const\n'), ((1633, 1642), 'micropython.const', 'const', (['(35)'], {}), '(35)\n', (1638, 1642), False, 'from micropython import const\n'), ((1664, 1673), 'micropython.const', 'const', (['(36)'], {}), '(36)\n', (1669, 1673), False, 'from micropython import const\n'), ((1695, 1704), 'micropython.const', 'const', (['(37)'], {}), '(37)\n', (1700, 1704), False, 'from micropython import const\n'), ((1839, 1873), 'adafruit_register.i2c_bits.ROBits', 'ROBits', (['(4)', '_PART_INFO', '(3)', '(1)', '(False)'], {}), '(4, _PART_INFO, 3, 1, False)\n', (1845, 1873), False, 'from adafruit_register.i2c_bits import ROBits, RWBits\n'), ((1898, 1933), 'adafruit_register.i2c_bits.ROBits', 'ROBits', (['(8)', '_FAULT_STAT', '(0)', '(1)', '(False)'], {}), '(8, _FAULT_STAT, 0, 1, False)\n', (1904, 1933), False, 'from adafruit_register.i2c_bits import ROBits, RWBits\n'), ((1958, 1993), 'adafruit_register.i2c_bits.ROBits', 'ROBits', (['(8)', '_CHRG_STAT1', '(0)', '(1)', '(False)'], {}), '(8, _CHRG_STAT1, 0, 1, False)\n', (1964, 1993), False, 'from adafruit_register.i2c_bits import ROBits, RWBits\n'), ((2018, 2053), 'adafruit_register.i2c_bits.ROBits', 'ROBits', (['(8)', '_CHRG_STAT2', '(0)', '(1)', '(False)'], {}), '(8, _CHRG_STAT2, 0, 1, False)\n', (2024, 2053), False, 'from adafruit_register.i2c_bits import ROBits, RWBits\n'), ((2078, 2113), 'adafruit_register.i2c_bits.ROBits', 'ROBits', (['(3)', '_CHRG_STAT1', '(0)', '(1)', '(False)'], {}), '(3, _CHRG_STAT1, 0, 1, False)\n', (2084, 2113), False, 'from adafruit_register.i2c_bits import ROBits, RWBits\n'), ((2138, 2171), 'adafruit_register.i2c_bits.ROBits', 'ROBits', (['(8)', '_OTG_CTRL', '(0)', '(1)', '(False)'], {}), '(8, _OTG_CTRL, 0, 1, False)\n', (2144, 2171), False, 'from adafruit_register.i2c_bits import ROBits, RWBits\n'), ((2196, 2232), 'adafruit_register.i2c_bits.ROBits', 'ROBits', (['(8)', '_CHRGR_CRTL2', '(0)', '(1)', '(False)'], {}), '(8, _CHRGR_CRTL2, 0, 1, False)\n', (2202, 2232), False, 'from adafruit_register.i2c_bits import ROBits, RWBits\n'), ((2257, 2293), 'adafruit_register.i2c_bits.RWBits', 'RWBits', (['(2)', '_CHRGR_CRTL1', '(4)', '(1)', '(False)'], {}), '(2, _CHRGR_CRTL1, 4, 1, False)\n', (2263, 2293), False, 'from adafruit_register.i2c_bits import ROBits, RWBits\n'), ((2318, 2351), 'adafruit_register.i2c_bits.RWBits', 'RWBits', (['(3)', '_NTC_STAT', '(0)', '(1)', '(False)'], {}), '(3, _NTC_STAT, 0, 1, False)\n', (2324, 2351), False, 'from adafruit_register.i2c_bits import ROBits, RWBits\n'), ((2377, 2409), 'adafruit_register.i2c_bit.RWBit', 'RWBit', (['_CHRGR_CRTL3', '(7)', '(1)', '(False)'], {}), '(_CHRGR_CRTL3, 7, 1, False)\n', (2382, 2409), False, 'from adafruit_register.i2c_bit import ROBit, RWBit\n'), ((2436, 
2468), 'adafruit_register.i2c_bit.RWBit', 'RWBit', (['_CHRGR_CRTL2', '(3)', '(1)', '(False)'], {}), '(_CHRGR_CRTL2, 3, 1, False)\n', (2441, 2468), False, 'from adafruit_register.i2c_bit import ROBit, RWBit\n'), ((2498, 2528), 'adafruit_register.i2c_bit.RWBit', 'RWBit', (['_PART_INFO', '(7)', '(1)', '(False)'], {}), '(_PART_INFO, 7, 1, False)\n', (2503, 2528), False, 'from adafruit_register.i2c_bit import ROBit, RWBit\n'), ((2558, 2590), 'adafruit_register.i2c_bit.RWBit', 'RWBit', (['_CHRGR_CRTL1', '(6)', '(1)', '(False)'], {}), '(_CHRGR_CRTL1, 6, 1, False)\n', (2563, 2590), False, 'from adafruit_register.i2c_bit import ROBit, RWBit\n'), ((2620, 2650), 'adafruit_register.i2c_bit.RWBit', 'RWBit', (['_CHRGI_LIM', '(6)', '(1)', '(False)'], {}), '(_CHRGI_LIM, 6, 1, False)\n', (2625, 2650), False, 'from adafruit_register.i2c_bit import ROBit, RWBit\n'), ((2725, 2749), 'adafruit_bus_device.i2c_device.I2CDevice', 'I2CDevice', (['i2c_bus', 'addr'], {}), '(i2c_bus, addr)\n', (2734, 2749), False, 'from adafruit_bus_device.i2c_device import I2CDevice\n')] |