code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import matplotlib.pyplot as plt
import networkx as nx
import numpy
plt.ion()  # interactive mode so plt.show() does not block
# test


def plot_neural_network(mek):
    """Draw mek's neural network as a directed graph.

    Nodes are labeled with their neuron description and index.  Only
    strong links are drawn: weights > 0.5 in blues, weights <= -0.5 in
    reds; links in between are omitted.
    """
    G = nx.DiGraph(numpy.transpose(mek.nn.links))
    labels = {ix: to_string(neuron) + '\n#' + str(ix)
              for ix, neuron in enumerate(mek.nn.neurons)}
    G = nx.relabel_nodes(G, labels)
    pos = nx.layout.spring_layout(G, k=2)
    strong_pos = [(u, v) for u, v, d in G.edges(data=True) if d['weight'] > 0.5]
    strong_neg = [(u, v) for u, v, d in G.edges(data=True) if d['weight'] <= -0.5]
    arrowsize = 50
    # Edge colors: evenly spaced values shifted so the colormap stays in
    # its upper (saturated) range.
    colors_pos = numpy.arange(len(strong_pos)) / 5.0 + 4.0 * len(strong_pos) / 5.0
    colors_neg = numpy.arange(len(strong_neg)) / 5.0 + 4.0 * len(strong_neg) / 5.0
    nx.draw_networkx_edges(G, pos, edgelist=strong_pos, edge_color=colors_pos,
                           width=3, arrowsize=arrowsize, alpha=1,
                           arrowstyle='->', edge_cmap=plt.cm.Blues)
    nx.draw_networkx_edges(G, pos, edgelist=strong_neg, width=2,
                           arrowsize=arrowsize, alpha=1,
                           edge_color=colors_neg, arrowstyle='->',
                           edge_cmap=plt.cm.Reds)
    nx.draw_networkx_nodes(G, pos, node_size=1500, node_color='gray', alpha=1)
    nx.draw_networkx_labels(G, pos, font_size=10,
                            font_family='sans-serif', font_weight='bold')
    plt.gca().set_axis_off()
    plt.show()
def to_string(name):
    """Concatenate the string forms of every item of *name*.

    Parameters
    ----------
    name : iterable
        Any iterable; each element is converted with ``str``.

    Returns
    -------
    str
        The concatenation of all elements.
    """
    # str.join runs in linear time; the old += loop was quadratic.
    return "".join(str(part) for part in name)
| [
"networkx.layout.spring_layout",
"networkx.relabel_nodes",
"matplotlib.pyplot.gca",
"networkx.draw_networkx_nodes",
"networkx.draw_networkx_labels",
"matplotlib.pyplot.ion",
"numpy.transpose",
"networkx.draw_networkx_edges",
"matplotlib.pyplot.show"
] | [((68, 77), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (75, 77), True, 'import matplotlib.pyplot as plt\n'), ((354, 383), 'networkx.relabel_nodes', 'nx.relabel_nodes', (['G', 'mylabels'], {}), '(G, mylabels)\n', (370, 383), True, 'import networkx as nx\n'), ((394, 425), 'networkx.layout.spring_layout', 'nx.layout.spring_layout', (['G'], {'k': '(2)'}), '(G, k=2)\n', (417, 425), True, 'import networkx as nx\n'), ((731, 882), 'networkx.draw_networkx_edges', 'nx.draw_networkx_edges', (['G', 'pos'], {'edgelist': 'epos', 'edge_color': 'colorspos', 'width': '(3)', 'arrowsize': 'arrowsize', 'alpha': '(1)', 'arrowstyle': '"""->"""', 'edge_cmap': 'plt.cm.Blues'}), "(G, pos, edgelist=epos, edge_color=colorspos, width=3,\n arrowsize=arrowsize, alpha=1, arrowstyle='->', edge_cmap=plt.cm.Blues)\n", (753, 882), True, 'import networkx as nx\n'), ((910, 1060), 'networkx.draw_networkx_edges', 'nx.draw_networkx_edges', (['G', 'pos'], {'edgelist': 'eneg', 'width': '(2)', 'arrowsize': 'arrowsize', 'alpha': '(1)', 'edge_color': 'colorsneg', 'arrowstyle': '"""->"""', 'edge_cmap': 'plt.cm.Reds'}), "(G, pos, edgelist=eneg, width=2, arrowsize=arrowsize,\n alpha=1, edge_color=colorsneg, arrowstyle='->', edge_cmap=plt.cm.Reds)\n", (932, 1060), True, 'import networkx as nx\n'), ((1097, 1171), 'networkx.draw_networkx_nodes', 'nx.draw_networkx_nodes', (['G', 'pos'], {'node_size': '(1500)', 'node_color': '"""gray"""', 'alpha': '(1)'}), "(G, pos, node_size=1500, node_color='gray', alpha=1)\n", (1119, 1171), True, 'import networkx as nx\n'), ((1185, 1280), 'networkx.draw_networkx_labels', 'nx.draw_networkx_labels', (['G', 'pos'], {'font_size': '(10)', 'font_family': '"""sans-serif"""', 'font_weight': '"""bold"""'}), "(G, pos, font_size=10, font_family='sans-serif',\n font_weight='bold')\n", (1208, 1280), True, 'import networkx as nx\n'), ((1314, 1323), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1321, 1323), True, 'import matplotlib.pyplot as plt\n'), ((1350, 1360), 
'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1358, 1360), True, 'import matplotlib.pyplot as plt\n'), ((136, 165), 'numpy.transpose', 'numpy.transpose', (['mek.nn.links'], {}), '(mek.nn.links)\n', (151, 165), False, 'import numpy\n')] |
import sys
import json
import asyncio
import aiohttp
from aiohttp import web
from urllib.parse import urlparse
class SimpleWebServer:
    """Minimal aiohttp server that stores a posted JSON body to disk."""

    def __init__(self):
        self.loop = asyncio.get_event_loop()
        self.app = web.Application(loop=self.loop)

    async def check_ack(self):
        # BUG FIX: the coroutine was never awaited, so the sleep was a no-op
        # and RuntimeWarning("coroutine was never awaited") was emitted.
        await asyncio.sleep(1)
        print("Closing WebServer")

    def save_post(self, data):
        """Write *data* as pretty-printed JSON to ./vnf-br.json.

        Returns True on success.  I/O errors propagate to the caller,
        which answers the request with HTTP 400.
        """
        print("Saving post data into vnf-br.json")
        filename = "./vnf-br.json"
        with open(filename, 'w') as outfile:
            json.dump(data, outfile, indent=4, sort_keys=True)
        return True  # the old trailing 'return False' was unreachable

    async def post(self, request):
        """Handle POST /{id}: decode the JSON body and persist it."""
        from_id = request.match_info['id']
        print("Received msg from id", from_id)
        reply = web.HTTPOk(text="Ack")
        try:
            raw_data = await request.read()
            payload = raw_data.decode(encoding='UTF-8')
            data = json.loads(payload)
            self.save_post(data)
        except Exception as e:
            print("Could not save data to file")
            print(e)
            reply = web.HTTPBadRequest()
        finally:
            # Schedule on the server's own loop; Application.loop is
            # deprecated in aiohttp.
            self.loop.create_task(self.check_ack())
        return reply

    def run(self, url):
        """Register the POST route and serve at the host/port from *url*."""
        self.app.add_routes([web.route("POST", "/{id}", self.post)])
        url_parsed = urlparse(url)
        host, port = url_parsed.hostname, url_parsed.port
        print("Waiting for Player VNF-BR")
        web.run_app(self.app, host=host, port=port)
if __name__ == "__main__":
app = SimpleWebServer()
app.run("http://127.0.0.1:7879") | [
"aiohttp.web.run_app",
"json.loads",
"urllib.parse.urlparse",
"aiohttp.web.Application",
"aiohttp.web.route",
"aiohttp.web.HTTPOk",
"asyncio.sleep",
"aiohttp.web.HTTPBadRequest",
"asyncio.get_event_loop",
"json.dump"
] | [((180, 204), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (202, 204), False, 'import asyncio\n'), ((224, 255), 'aiohttp.web.Application', 'web.Application', ([], {'loop': 'self.loop'}), '(loop=self.loop)\n', (239, 255), False, 'from aiohttp import web\n'), ((296, 312), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (309, 312), False, 'import asyncio\n'), ((761, 783), 'aiohttp.web.HTTPOk', 'web.HTTPOk', ([], {'text': '"""Ack"""'}), "(text='Ack')\n", (771, 783), False, 'from aiohttp import web\n'), ((1332, 1345), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (1340, 1345), False, 'from urllib.parse import urlparse\n'), ((1455, 1498), 'aiohttp.web.run_app', 'web.run_app', (['self.app'], {'host': 'host', 'port': 'port'}), '(self.app, host=host, port=port)\n', (1466, 1498), False, 'from aiohttp import web\n'), ((523, 573), 'json.dump', 'json.dump', (['data', 'outfile'], {'indent': '(4)', 'sort_keys': '(True)'}), '(data, outfile, indent=4, sort_keys=True)\n', (532, 573), False, 'import json\n'), ((924, 943), 'json.loads', 'json.loads', (['payload'], {}), '(payload)\n', (934, 943), False, 'import json\n'), ((1098, 1118), 'aiohttp.web.HTTPBadRequest', 'web.HTTPBadRequest', ([], {}), '()\n', (1116, 1118), False, 'from aiohttp import web\n'), ((1271, 1308), 'aiohttp.web.route', 'web.route', (['"""POST"""', '"""/{id}"""', 'self.post'], {}), "('POST', '/{id}', self.post)\n", (1280, 1308), False, 'from aiohttp import web\n')] |
import csv
import numpy as np
import os
infile = "dopamine_processed.csv"
outfile = "dopamine.csv"
# Start from a clean output file.
if os.path.exists(outfile):
    os.remove(outfile)
# Open both files once instead of reopening the output for every row
# (the original recreated the writer per row, which is O(n) opens).
with open(infile, 'r', newline='') as f, open(outfile, 'w', newline='') as w:
    reader = csv.reader(f, delimiter=',')
    writer = csv.writer(w, delimiter=',')
    next(reader, None)  # skip the header row
    for row in reader:
        # Column 1 holds a concentration; convert to pX = -log10(value).
        writer.writerow([row[0], str(-1 * np.log10(float(row[1])))])
| [
"os.path.exists",
"csv.writer",
"csv.reader",
"os.remove"
] | [((102, 125), 'os.path.exists', 'os.path.exists', (['outfile'], {}), '(outfile)\n', (116, 125), False, 'import os\n'), ((131, 149), 'os.remove', 'os.remove', (['outfile'], {}), '(outfile)\n', (140, 149), False, 'import os\n'), ((212, 240), 'csv.reader', 'csv.reader', (['f'], {'delimiter': '""","""'}), "(f, delimiter=',')\n", (222, 240), False, 'import csv\n'), ((405, 433), 'csv.writer', 'csv.writer', (['w'], {'delimiter': '""","""'}), "(w, delimiter=',')\n", (415, 433), False, 'import csv\n')] |
"""The sample file to be run in runmanager.
This is the minimal sample that you can load from runmanager to see if your
code is working properly.
"""
from labscript import *
# from user_devices.dummy_device.labscript_devices import DummyDevice
from labscript_devices.DummyPseudoclock.labscript_devices import DummyPseudoclock
from user_devices.DummyIntermediateDevices.labscript_devices import (
DummyIntermediateDevice,
)
# Build the minimal device hierarchy: pseudoclock -> clockline -> dummy
# intermediate device -> two analog outputs.
DummyPseudoclock("dummy_pseudoclock")
ClockLine(
    name="dummy_clockline",
    pseudoclock=dummy_pseudoclock.pseudoclock,
    connection="flag 0",
)
# DummyDevice(name="dummy_device_0", parent_device=dummy_clockline)
DummyIntermediateDevice(name="dummy_intermediate_device", parent_device=dummy_clockline)
# Two analog output channels on the dummy intermediate device.
AnalogOut(name="AO1", parent_device=dummy_intermediate_device, connection="ao0")
AnalogOut(name="AO2", parent_device=dummy_intermediate_device, connection="ao1")
# DigitalOut(name="dummy_DO1", parent_device=dummy_intermediate_device, connection="dummy_connection")
if __name__ == "__main__":
    # Empty 1-second shot: labscript requires start()/stop() to bracket
    # the experiment timeline even when no instructions are issued.
    start()
    stop(1)
| [
"user_devices.DummyIntermediateDevices.labscript_devices.DummyIntermediateDevice",
"labscript_devices.DummyPseudoclock.labscript_devices.DummyPseudoclock"
] | [((431, 468), 'labscript_devices.DummyPseudoclock.labscript_devices.DummyPseudoclock', 'DummyPseudoclock', (['"""dummy_pseudoclock"""'], {}), "('dummy_pseudoclock')\n", (447, 468), False, 'from labscript_devices.DummyPseudoclock.labscript_devices import DummyPseudoclock\n'), ((651, 744), 'user_devices.DummyIntermediateDevices.labscript_devices.DummyIntermediateDevice', 'DummyIntermediateDevice', ([], {'name': '"""dummy_intermediate_device"""', 'parent_device': 'dummy_clockline'}), "(name='dummy_intermediate_device', parent_device=\n dummy_clockline)\n", (674, 744), False, 'from user_devices.DummyIntermediateDevices.labscript_devices import DummyIntermediateDevice\n')] |
import torch
def reparametrize(mu, log_var, device):
    """
    Reparametrize based on input mean and log variance.

    Implements the VAE reparameterization trick
    z = mu + sigma * epsilon with epsilon ~ N(0, 1).

    Parameters
    ----------
    mu : torch.tensor
        The mean.
    log_var : torch.tensor
        The log variance.
    device : str
        The device on which to put the data.

    Returns
    -------
    z : torch.tensor
        The reparametrized value.
    """
    sigma = torch.exp(0.5*log_var)
    # BUG FIX: rand_like draws from U[0, 1); the reparameterization trick
    # requires standard-normal noise, i.e. randn_like.
    epsilon = torch.randn_like(sigma)
    z = mu + epsilon*sigma
    return z.to(device)
def adj_to_seq(adj, device='cpu'):
    """
    Convert a dense adjacency matrix into a sequence.

    The strictly lower triangle of each N x N matrix is flattened row by
    row into a vector of length N*(N-1)/2: row i occupies offsets
    i*(i-1)/2 .. i*(i-1)/2 + i - 1.

    Parameters
    ----------
    adj : torch.Tensor
        The dense adjacency tensor of shape (B, N, N).
    device : str, optional
        The device onto which to put the data. The default is 'cpu'.

    Returns
    -------
    adj_seq : torch.Tensor
        The sequence of shape (B, N*(N-1)/2) representing the input.
    """
    B, N = adj.shape[0], adj.shape[1]
    adj_seq = torch.zeros(B, int(((N-1)*N)/2)).to(device)
    for b in range(B):
        for i in range(1, N):
            for j in range(i):
                # BUG FIX: the original used index i+j, which collides for
                # N > 3 (e.g. (2,1) and (3,0) both mapped to 3) and never
                # wrote slot 0.  The correct row offset is i*(i-1)/2.
                adj_seq[b, i*(i-1)//2 + j] = adj[b, i, j]
    return adj_seq
def seq_to_adj(adj_seq, device='cpu'):
    """
    Convert an adjacency sequence to its corresponding dense representation.

    Inverse of ``adj_to_seq``: entry i*(i-1)/2 + j of the sequence becomes
    adj[i, j] (and, symmetrically, adj[j, i]); the diagonal stays zero.

    Parameters
    ----------
    adj_seq : torch.Tensor
        The sequence adjacency of shape (B, N*(N-1)/2).
    device : str, optional
        The device onto which to put the data. The default is 'cpu'.

    Returns
    -------
    adj : torch.Tensor
        The dense symmetric representation, shape (B, N, N).
    """
    # BUG FIX: the original indexed the 2-D sequence with three indices
    # (adj_seq[b, i, j], a runtime error) and treated the sequence length
    # as the matrix size.  Recover N from L = N*(N-1)/2 instead.
    B, L = adj_seq.shape[0], adj_seq.shape[1]
    N = int(round((1 + (1 + 8 * L) ** 0.5) / 2))
    adj = torch.zeros(B, N, N).to(device)
    for b in range(B):
        for i in range(1, N):
            for j in range(i):
                adj[b, i, j] = adj[b, j, i] = adj_seq[b, i*(i-1)//2 + j]
    return adj
def clipping_dist(delta):
    """
    Returns the average distance between residues i and j, based on experimental data.

    Parameters
    ----------
    delta : int
        The delta between residue indexes i and j.

    Returns
    -------
    float
        The average distance; 12.5 for any delta outside 1..8.
    """
    # Lookup table of experimentally derived average distances per
    # sequence separation.
    distances = {1: 4, 2: 6, 3: 7.5, 4: 8.5,
                 5: 10, 6: 10.5, 7: 11, 8: 12}
    return distances.get(delta, 12.5)
"torch.zeros",
"torch.rand_like",
"torch.exp"
] | [((403, 427), 'torch.exp', 'torch.exp', (['(0.5 * log_var)'], {}), '(0.5 * log_var)\n', (412, 427), False, 'import torch\n'), ((440, 462), 'torch.rand_like', 'torch.rand_like', (['sigma'], {}), '(sigma)\n', (455, 462), False, 'import torch\n'), ((1640, 1660), 'torch.zeros', 'torch.zeros', (['B', 'n', 'n'], {}), '(B, n, n)\n', (1651, 1660), False, 'import torch\n')] |
from django.shortcuts import render,redirect,get_object_or_404,HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.contrib.auth import login, authenticate
from .forms import *
from .models import NeighborHood,Profile,Post,Business
from django.contrib.sites.shortcuts import get_current_site
from django.utils.encoding import force_text
from django.contrib.auth.models import User
from django.db import IntegrityError
from django.utils.http import urlsafe_base64_decode
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
from django.template.loader import render_to_string
from .tokens import account_activation_token
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AuthenticationForm
from django.contrib import messages
from django.contrib.auth import login as auth_login
from django.core.mail import EmailMessage
# Create your views here.
###################### Landing
@login_required(login_url='/accounts/login/')
def index(request):
    """Landing page: all neighborhoods, newest first."""
    context = {
        'current_user': request.user,
        'neighborhoods': NeighborHood.objects.all().order_by('-created_at'),
    }
    return render(request, 'index.html', context)
###################### Login
def login(request):
    """Authenticate a user with ``AuthenticationForm``.

    On success the user is logged in and redirected home.  On failure the
    *bound* form is re-rendered so its validation errors are visible.

    BUG FIX: the original unconditionally rebuilt a blank form after
    handling the POST, discarding the bound form and its errors.
    """
    if request.method == 'POST':
        form = AuthenticationForm(request=request, data=request.POST)
        if form.is_valid():
            username = form.cleaned_data.get('username')
            password = form.cleaned_data.get('password')
            user = authenticate(username=username, password=password)
            if user is not None:
                auth_login(request, user)
                messages.info(request, f"You are now logged in as {username}")
                return redirect('home')
            else:
                messages.error(request, "Invalid username or password.")
        else:
            messages.error(request, "Invalid username or password.")
    else:
        form = AuthenticationForm()
    return render(request = request,template_name = "registration/login.html",context={"form":form})
###################### Account Activation
###################### Neighborhood
@login_required(login_url='/accounts/login/')
def create_neighborhood(request):
    """Create a neighborhood administered by the current user's profile."""
    form = CreateNeighborHoodForm()
    if request.method == 'POST':
        form = CreateNeighborHoodForm(request.POST, request.FILES)
        if form.is_valid():
            neighborhood = form.save(commit=False)
            neighborhood.admin = request.user.profile
            neighborhood.save()
            return redirect('home')
    return render(request, 'create_neighbor.html',
                  {'add_neighborhood_form': form})
@login_required(login_url='/accounts/login/')
def neighborhood(request, neighborhood_id):
    """Detail page for one neighborhood: members, businesses and posts."""
    hood = NeighborHood.objects.get(id=neighborhood_id)
    context = {
        'users': Profile.objects.filter(neighborhood=hood),
        'current_user': request.user,
        'neighborhood': hood,
        'business': Business.objects.filter(neighborhood=hood),
        'posts': Post.objects.filter(neighborhood=hood),
    }
    return render(request, 'neighbor.html', context)
@login_required(login_url='/accounts/login/')
def update_neighborhood(request, neighborhood_id):
    """Edit an existing neighborhood; redirects home on success."""
    hood = NeighborHood.objects.get(pk=neighborhood_id)
    if request.method == 'POST':
        form = UpdateNeighborhoodForm(request.POST, request.FILES,
                                      instance=hood)
        if form.is_valid():
            form.save()
            messages.success(request, 'Post updated!')
            return redirect('home')
    else:
        form = UpdateNeighborhoodForm(instance=hood)
    return render(request, 'update_neighbor.html',
                  {"update_neighborhood_form": form})
@login_required(login_url='/accounts/login/')
def delete_neighborhood(request, neighborhood_id):
    """Remove a neighborhood and return to the landing page."""
    hood = NeighborHood.objects.get(pk=neighborhood_id)
    if hood:
        hood.delete_neighborhood()
    return redirect('home')
@login_required(login_url='/accounts/login/')
def search(request):
    """Search businesses by the ?name= query parameter."""
    term = request.GET.get("name")
    if term:
        found = Business.search_businesses(term)
        message = f"{term}"
        return render(request, 'search.html',
                      {"message": message, "businesses": found})
    message = "You haven't searched for any term"
    return render(request, 'search.html', {"message": message})
@login_required(login_url='/accounts/login/')
def choose_neighborhood(request, neighborhood_id):
    """Make the given neighborhood the current user's home neighborhood."""
    profile = request.user.profile
    profile.neighborhood = get_object_or_404(NeighborHood, id=neighborhood_id)
    profile.save()
    return redirect('home')
def get_neighborhood_users(request, neighborhood_id):
    """List the profiles belonging to one neighborhood."""
    hood = NeighborHood.objects.get(id=neighborhood_id)
    members = Profile.objects.filter(neighborhood=hood)
    return render(request, 'neighborhood_users.html', {'users': members})
@login_required(login_url='/accounts/login/')
def leave_neighborhood(request, neighborhood_id):
    """Detach the current user's profile from its neighborhood."""
    get_object_or_404(NeighborHood, id=neighborhood_id)  # 404 if missing
    profile = request.user.profile
    profile.neighborhood = None
    profile.save()
    return redirect('home')
@login_required(login_url='/accounts/login/')
def create_business(request, neighborhood_id):
    """Add a business to a neighborhood, owned by the current user."""
    hood = NeighborHood.objects.get(id=neighborhood_id)
    form = CreateBusinessForm()
    if request.method == 'POST':
        form = CreateBusinessForm(request.POST, request.FILES)
        if form.is_valid():
            business = form.save(commit=False)
            business.neighborhood = hood
            business.user = request.user
            business.save()
            return redirect('neighborhood', hood.id)
    return render(request, 'create_business.html',
                  {'add_business_form': form, 'neighborhood': hood})
@login_required(login_url='/accounts/login/')
def delete_business(request, business_id):
    """Delete a business and return to the landing page."""
    target = Business.objects.get(pk=business_id)
    if target:
        target.delete_business()
    return redirect('home')
@login_required(login_url='/accounts/login/')
def update_business(request, business_id):
    """Edit an existing business; redirects home on success."""
    business = Business.objects.get(pk=business_id)
    if request.method == 'POST':
        form = UpdateBusinessForm(request.POST, request.FILES,
                                  instance=business)
        if form.is_valid():
            form.save()
            messages.success(request, 'Business updated!')
            return redirect('home')
    else:
        form = UpdateBusinessForm(instance=business)
    return render(request, 'update_business.html',
                  {"update_business_form": form})
@login_required(login_url='/accounts/login/')
def create_post(request, neighborhood_id):
    """Publish a post in the given neighborhood as the current user."""
    hood = NeighborHood.objects.get(id=neighborhood_id)
    form = CreatePostForm()
    if request.method == 'POST':
        form = CreatePostForm(request.POST, request.FILES)
        if form.is_valid():
            post = form.save(commit=False)
            post.neighborhood = hood
            post.user = request.user
            post.save()
            return redirect('neighborhood', hood.id)
    return render(request, 'create_post.html',
                  {'add_post_form': form, 'neighborhood': hood})
@login_required(login_url='/accounts/login/')
def delete_post(request, post_id):
    """Delete a post and return to the landing page."""
    target = Post.objects.get(pk=post_id)
    if target:
        target.delete_post()
    return redirect('home')
@login_required(login_url='/accounts/login/')
def update_post(request, post_id):
    """Edit an existing post; redirects home on success."""
    post = Post.objects.get(pk=post_id)
    if request.method == 'POST':
        form = UpdatePostForm(request.POST, request.FILES, instance=post)
        if form.is_valid():
            form.save()
            messages.success(request, 'Post updated!')
            return redirect('home')
    else:
        form = UpdatePostForm(instance=post)
    return render(request, 'update_post.html', {"update_post_form": form})
@login_required(login_url='/accounts/login/')
def profile(request):
    """Current user's own profile page with their posts."""
    user_posts = Post.objects.filter(user_id=request.user.id).all()
    return render(request, 'profile/profile.html',
                  {'user_posts': user_posts, "current_user": request.user})
@login_required(login_url='/accounts/login/')
def update_profile(request):
    """Edit the signed-in user's account and profile details."""
    if request.method == 'POST':
        user_form = UpdateUser(request.POST, instance=request.user)
        profile_form = UpdateProfile(request.POST, request.FILES,
                                     instance=request.user.profile)
        if user_form.is_valid() and profile_form.is_valid():
            user_form.save()
            profile_form.save()
            messages.success(request, 'Your Profile account has been updated successfully')
            return redirect('profile')
    else:
        user_form = UpdateUser(instance=request.user)
        profile_form = UpdateProfile(instance=request.user.profile)
    return render(request, 'profile/update.html',
                  {'user_form': user_form, 'profile_form': profile_form})
@login_required(login_url='/accounts/login/')
def users_profile(request, pk):
    """Public profile page for another user, with their posts."""
    viewed_user = User.objects.get(pk=pk)
    context = {
        'user_posts': Post.objects.filter(user_id=viewed_user.id).all(),
        "user": viewed_user,
        "current_user": request.user,
    }
    return render(request, 'profile/users_profile.html', context)
| [
"django.shortcuts.render",
"django.contrib.auth.authenticate",
"django.contrib.messages.error",
"django.shortcuts.get_object_or_404",
"django.contrib.auth.login",
"django.contrib.messages.info",
"django.contrib.auth.forms.AuthenticationForm",
"django.shortcuts.redirect",
"django.contrib.auth.decorat... | [((1012, 1056), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (1026, 1056), False, 'from django.contrib.auth.decorators import login_required\n'), ((2165, 2209), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (2179, 2209), False, 'from django.contrib.auth.decorators import login_required\n'), ((2743, 2787), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (2757, 2787), False, 'from django.contrib.auth.decorators import login_required\n'), ((3278, 3322), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (3292, 3322), False, 'from django.contrib.auth.decorators import login_required\n'), ((3945, 3989), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (3959, 3989), False, 'from django.contrib.auth.decorators import login_required\n'), ((4237, 4281), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (4251, 4281), False, 'from django.contrib.auth.decorators import login_required\n'), ((4732, 4776), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (4746, 4776), False, 'from django.contrib.auth.decorators import login_required\n'), ((5273, 5317), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (5287, 5317), False, 'from 
django.contrib.auth.decorators import login_required\n'), ((5557, 5601), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (5571, 5601), False, 'from django.contrib.auth.decorators import login_required\n'), ((6252, 6296), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (6266, 6296), False, 'from django.contrib.auth.decorators import login_required\n'), ((6512, 6556), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (6526, 6556), False, 'from django.contrib.auth.decorators import login_required\n'), ((7123, 7167), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (7137, 7167), False, 'from django.contrib.auth.decorators import login_required\n'), ((7763, 7807), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (7777, 7807), False, 'from django.contrib.auth.decorators import login_required\n'), ((7991, 8035), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (8005, 8035), False, 'from django.contrib.auth.decorators import login_required\n'), ((8534, 8578), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (8548, 8578), False, 'from django.contrib.auth.decorators import login_required\n'), ((8824, 8868), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (8838, 8868), False, 'from 
django.contrib.auth.decorators import login_required\n'), ((9581, 9625), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (9595, 9625), False, 'from django.contrib.auth.decorators import login_required\n'), ((1209, 1306), 'django.shortcuts.render', 'render', (['request', '"""index.html"""', "{'current_user': current_user, 'neighborhoods': neighborhoods}"], {}), "(request, 'index.html', {'current_user': current_user,\n 'neighborhoods': neighborhoods})\n", (1215, 1306), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((1961, 1981), 'django.contrib.auth.forms.AuthenticationForm', 'AuthenticationForm', ([], {}), '()\n', (1979, 1981), False, 'from django.contrib.auth.forms import AuthenticationForm\n'), ((1991, 2084), 'django.shortcuts.render', 'render', ([], {'request': 'request', 'template_name': '"""registration/login.html"""', 'context': "{'form': form}"}), "(request=request, template_name='registration/login.html', context={\n 'form': form})\n", (1997, 2084), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((2650, 2743), 'django.shortcuts.render', 'render', (['request', '"""create_neighbor.html"""', "{'add_neighborhood_form': add_neighborhood_form}"], {}), "(request, 'create_neighbor.html', {'add_neighborhood_form':\n add_neighborhood_form})\n", (2656, 2743), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((3133, 3289), 'django.shortcuts.render', 'render', (['request', '"""neighbor.html"""', "{'users': users, 'current_user': current_user, 'neighborhood': neighborhood,\n 'business': business, 'posts': posts}"], {}), "(request, 'neighbor.html', {'users': users, 'current_user':\n current_user, 'neighborhood': neighborhood, 'business': business,\n 'posts': posts})\n", (3139, 3289), False, 'from django.shortcuts 
import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((3847, 3946), 'django.shortcuts.render', 'render', (['request', '"""update_neighbor.html"""', "{'update_neighborhood_form': update_neighborhood_form}"], {}), "(request, 'update_neighbor.html', {'update_neighborhood_form':\n update_neighborhood_form})\n", (3853, 3946), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((4217, 4233), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (4225, 4233), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((4863, 4914), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['NeighborHood'], {'id': 'neighborhood_id'}), '(NeighborHood, id=neighborhood_id)\n', (4880, 4914), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((5005, 5021), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (5013, 5021), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((5209, 5269), 'django.shortcuts.render', 'render', (['request', '"""neighborhood_users.html"""', "{'users': users}"], {}), "(request, 'neighborhood_users.html', {'users': users})\n", (5215, 5269), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((5403, 5454), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['NeighborHood'], {'id': 'neighborhood_id'}), '(NeighborHood, id=neighborhood_id)\n', (5420, 5454), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((5537, 5553), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (5545, 5553), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((6138, 6253), 'django.shortcuts.render', 'render', (['request', 
'"""create_business.html"""', "{'add_business_form': add_business_form, 'neighborhood': neighborhood}"], {}), "(request, 'create_business.html', {'add_business_form':\n add_business_form, 'neighborhood': neighborhood})\n", (6144, 6253), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((6492, 6508), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (6500, 6508), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((7033, 7124), 'django.shortcuts.render', 'render', (['request', '"""update_business.html"""', "{'update_business_form': update_business_form}"], {}), "(request, 'update_business.html', {'update_business_form':\n update_business_form})\n", (7039, 7124), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((7661, 7764), 'django.shortcuts.render', 'render', (['request', '"""create_post.html"""', "{'add_post_form': add_post_form, 'neighborhood': neighborhood}"], {}), "(request, 'create_post.html', {'add_post_form': add_post_form,\n 'neighborhood': neighborhood})\n", (7667, 7764), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((7971, 7987), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (7979, 7987), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((8456, 8531), 'django.shortcuts.render', 'render', (['request', '"""update_post.html"""', "{'update_post_form': update_post_form}"], {}), "(request, 'update_post.html', {'update_post_form': update_post_form})\n", (8462, 8531), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((8729, 8830), 'django.shortcuts.render', 'render', (['request', '"""profile/profile.html"""', "{'user_posts': user_posts, 'current_user': current_user}"], {}), "(request, 
'profile/profile.html', {'user_posts': user_posts,\n 'current_user': current_user})\n", (8735, 8830), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((9533, 9579), 'django.shortcuts.render', 'render', (['request', '"""profile/update.html"""', 'params'], {}), "(request, 'profile/update.html', params)\n", (9539, 9579), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((9684, 9707), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (9700, 9707), False, 'from django.contrib.auth.models import User\n'), ((9812, 9933), 'django.shortcuts.render', 'render', (['request', '"""profile/users_profile.html"""', "{'user_posts': user_posts, 'user': user, 'current_user': current_user}"], {}), "(request, 'profile/users_profile.html', {'user_posts': user_posts,\n 'user': user, 'current_user': current_user})\n", (9818, 9933), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((1393, 1447), 'django.contrib.auth.forms.AuthenticationForm', 'AuthenticationForm', ([], {'request': 'request', 'data': 'request.POST'}), '(request=request, data=request.POST)\n', (1411, 1447), False, 'from django.contrib.auth.forms import AuthenticationForm\n'), ((4524, 4615), 'django.shortcuts.render', 'render', (['request', '"""search.html"""', "{'message': message, 'businesses': searched_businesses}"], {}), "(request, 'search.html', {'message': message, 'businesses':\n searched_businesses})\n", (4530, 4615), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((4678, 4730), 'django.shortcuts.render', 'render', (['request', '"""search.html"""', "{'message': message}"], {}), "(request, 'search.html', {'message': message})\n", (4684, 4730), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((1587, 1637), 
'django.contrib.auth.authenticate', 'authenticate', ([], {'username': 'username', 'password': 'password'}), '(username=username, password=password)\n', (1599, 1637), False, 'from django.contrib.auth import login, authenticate\n'), ((1895, 1951), 'django.contrib.messages.error', 'messages.error', (['request', '"""Invalid username or password."""'], {}), "(request, 'Invalid username or password.')\n", (1909, 1951), False, 'from django.contrib import messages\n'), ((2563, 2579), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (2571, 2579), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((3678, 3721), 'django.contrib.messages.success', 'messages.success', (['request', 'f"""Post updated!"""'], {}), "(request, f'Post updated!')\n", (3694, 3721), False, 'from django.contrib import messages\n'), ((3735, 3751), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (3743, 3751), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((6034, 6075), 'django.shortcuts.redirect', 'redirect', (['"""neighborhood"""', 'neighborhood.id'], {}), "('neighborhood', neighborhood.id)\n", (6042, 6075), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((6872, 6919), 'django.contrib.messages.success', 'messages.success', (['request', 'f"""Business updated!"""'], {}), "(request, f'Business updated!')\n", (6888, 6919), False, 'from django.contrib import messages\n'), ((6933, 6949), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (6941, 6949), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((7565, 7606), 'django.shortcuts.redirect', 'redirect', (['"""neighborhood"""', 'neighborhood.id'], {}), "('neighborhood', neighborhood.id)\n", (7573, 7606), False, 'from django.shortcuts import render, redirect, get_object_or_404, 
HttpResponseRedirect\n'), ((8311, 8354), 'django.contrib.messages.success', 'messages.success', (['request', 'f"""Post updated!"""'], {}), "(request, f'Post updated!')\n", (8327, 8354), False, 'from django.contrib import messages\n'), ((8368, 8384), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (8376, 8384), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((9213, 9292), 'django.contrib.messages.success', 'messages.success', (['request', '"""Your Profile account has been updated successfully"""'], {}), "(request, 'Your Profile account has been updated successfully')\n", (9229, 9292), False, 'from django.contrib import messages\n'), ((9305, 9324), 'django.shortcuts.redirect', 'redirect', (['"""profile"""'], {}), "('profile')\n", (9313, 9324), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((1673, 1698), 'django.contrib.auth.login', 'auth_login', (['request', 'user'], {}), '(request, user)\n', (1683, 1698), True, 'from django.contrib.auth import login as auth_login\n'), ((1707, 1769), 'django.contrib.messages.info', 'messages.info', (['request', 'f"""You are now logged in as {username}"""'], {}), "(request, f'You are now logged in as {username}')\n", (1720, 1769), False, 'from django.contrib import messages\n'), ((1785, 1801), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (1793, 1801), False, 'from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect\n'), ((1822, 1878), 'django.contrib.messages.error', 'messages.error', (['request', '"""Invalid username or password."""'], {}), "(request, 'Invalid username or password.')\n", (1836, 1878), False, 'from django.contrib import messages\n')] |
###############################################################################
#
# IBT: Isolated Build Tool
# Copyright (C) 2016, <NAME>. All rights reserved.
#
# Simple wrappers around Docker etc. for fully isolated build environments
#
###############################################################################
from __future__ import print_function
from ibt.util import get_user_info
class Context(object):
    """Per-invocation build context: a working directory plus the user
    information resolved for it."""

    def __init__(self, working_dir):
        self._working_dir = working_dir
        # Resolve once at construction so later lookups are free.
        self._user_info = get_user_info(working_dir)

    @property
    def working_dir(self):
        """The directory this context was created for."""
        return self._working_dir

    def user_info(self):
        """Return the user info resolved for the working directory."""
        return self._user_info
| [
"ibt.util.get_user_info"
] | [((521, 547), 'ibt.util.get_user_info', 'get_user_info', (['working_dir'], {}), '(working_dir)\n', (534, 547), False, 'from ibt.util import get_user_info\n')] |
"""This is a cog for a discord.py bot.
It hides the help command and adds these commands:
helpall show all commands (including all hidden ones)
The commands will output to the current channel or to a dm channel
according to the pm_help kwarg of the bot.
Only users that have an admin role can use the commands.
"""
import itertools
from discord import Embed
from discord.ext import commands
from discord.ext.commands import HelpCommand, DefaultHelpCommand
#pylint: disable=E1101
class myHelpCommand(HelpCommand):
    """Custom help command that renders help as a single green embed.

    Each help variant (bot / cog / group / command) fills ``self.paginator``
    with ``(category, entries)`` tuples and then calls ``send_pages`` to
    render them as embed fields.
    """
    def __init__(self, **options):
        super().__init__(**options)
        # List of (category, entries) tuples; reset per invocation in
        # prepare_help_command.
        self.paginator = None
        self.spacer = "\u1160 "  # Invisible Unicode Character to indent lines
    async def send_pages(self, header=False, footer=False):
        """Render the accumulated paginator entries as one embed.

        header adds the bot's description/avatar as the embed author;
        footer adds a usage hint line.
        """
        destination = self.get_destination()
        embed = Embed(
            color=0x2ECC71
        )
        if header:
            embed.set_author(
                name=self.context.bot.description,
                icon_url=self.context.bot.user.avatar_url
            )
        for category, entries in self.paginator:
            embed.add_field(
                name=category,
                value=entries,
                inline=False
            )
        if footer:
            embed.set_footer(
                text='Use felix help <command/category> for more information.'
            )
        await destination.send(embed=embed)
    async def send_bot_help(self, mapping):
        """Build the top-level help: one field per cog, commands grouped
        eight per line."""
        ctx = self.context
        bot = ctx.bot
        def get_category(command):
            # Group key: "<CogName>:" or "Help:" for cog-less commands.
            cog = command.cog
            return cog.qualified_name + ':' if cog is not None else 'Help:'
        # sort=True with the same key keeps the list ordered by category,
        # which groupby below requires to work correctly.
        filtered = await self.filter_commands(
            bot.commands,
            sort=True,
            key=get_category
        )
        to_iterate = itertools.groupby(filtered, key=get_category)
        for cog_name, command_grouper in to_iterate:
            cmds = sorted(command_grouper, key=lambda c: c.name)
            category = f'► {cog_name}'
            if len(cmds) == 1:
                # Single command: show its name and short description inline.
                entries = f'{self.spacer}{cmds[0].name} → {cmds[0].short_doc}'
            else:
                # Multiple commands: pack up to 8 names per line.
                entries = ''
                while len(cmds) > 0:
                    entries += self.spacer
                    entries += ' | '.join([cmd.name for cmd in cmds[0:8]])
                    cmds = cmds[8:]
                    entries += '\n' if cmds else ''
            self.paginator.append((category, entries))
        await self.send_pages(header=True, footer=True)
    async def send_cog_help(self, cog):
        """Build help for a single cog: one line per visible command."""
        filtered = await self.filter_commands(cog.get_commands(), sort=True)
        if not filtered:
            await self.context.send(
                'No public commands in this cog. Try again with felix helpall.'
            )
            return
        category = f'▼ {cog.qualified_name}'
        entries = '\n'.join(
            self.spacer +
            f'**{command.name}** → {command.short_doc or command.description}'
            for command in filtered
        )
        self.paginator.append((category, entries))
        await self.send_pages(footer=True)
    async def send_group_help(self, group):
        """Build help for a command group: one line per visible subcommand."""
        filtered = await self.filter_commands(group.commands, sort=True)
        if not filtered:
            await self.context.send(
                'No public commands in group. Try again with felix helpall.'
            )
            return
        category = f'**{group.name}** - {group.description or group.short_doc}'
        entries = '\n'.join(
            self.spacer + f'**{command.name}** → {command.short_doc}'
            for command in filtered
        )
        self.paginator.append((category, entries))
        await self.send_pages(footer=True)
    async def send_command_help(self, command):
        """Build help for one command: its signature plus full help text."""
        signature = self.get_command_signature(command)
        helptext = command.help or command.description or 'No help Text'
        self.paginator.append(
            (signature, helptext)
        )
        await self.send_pages()
    async def prepare_help_command(self, ctx, command=None):
        # Reset the accumulated pages before every help invocation.
        self.paginator = []
        await super().prepare_help_command(ctx, command)
class Help(commands.Cog):
    """Cog that installs the custom embed help command and adds `helpall`."""

    def __init__(self, client):
        self.client = client
        custom_help = myHelpCommand(
            command_attrs={
                'aliases': ['halp'],
                'help': 'Shows help about the bot, a command, or a category'
            }
        )
        self.client.help_command = custom_help

    async def cog_check(self, ctx):
        # Every command in this cog is admin-only.
        return self.client.user_is_admin(ctx.author)

    def cog_unload(self):
        # Restore discord.py's stock help command when this cog is removed.
        self.client.get_command('help').hidden = False
        self.client.help_command = DefaultHelpCommand()

    @commands.command(aliases=['halpall'], hidden=True)
    async def helpall(self, ctx, *, text=None):
        """Print bot help including all hidden commands"""
        # Temporarily swap in a help command that shows hidden commands.
        self.client.help_command = myHelpCommand(show_hidden=True)
        if text:
            await ctx.send_help(text)
        else:
            await ctx.send_help()
        # Swap the normal (hidden-filtering) help command back in.
        self.client.help_command = myHelpCommand()
def setup(client):
    """Extension entry point used by discord.py's load_extension."""
    cog = Help(client)
    client.add_cog(cog)
| [
"itertools.groupby",
"discord.Embed",
"discord.ext.commands.command",
"discord.ext.commands.DefaultHelpCommand"
] | [((4723, 4773), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['halpall']", 'hidden': '(True)'}), "(aliases=['halpall'], hidden=True)\n", (4739, 4773), False, 'from discord.ext import commands\n'), ((838, 858), 'discord.Embed', 'Embed', ([], {'color': '(3066993)'}), '(color=3066993)\n', (843, 858), False, 'from discord import Embed\n'), ((1816, 1861), 'itertools.groupby', 'itertools.groupby', (['filtered'], {'key': 'get_category'}), '(filtered, key=get_category)\n', (1833, 1861), False, 'import itertools\n'), ((4696, 4716), 'discord.ext.commands.DefaultHelpCommand', 'DefaultHelpCommand', ([], {}), '()\n', (4714, 4716), False, 'from discord.ext.commands import HelpCommand, DefaultHelpCommand\n')] |
import sys, statistics
def solution(N):
    """Return half the minimum over integer targets i of
    sum((n - i)**2 + |n - i|) for n in N.

    The squared term alone is minimized near the mean and the absolute
    term alone at the median, so the combined optimum lies in the closed
    interval between them; only those candidates need to be scanned.

    Fixes over the original one-liner:
      * the candidate range is now inclusive of BOTH endpoints (the
        exclusive ``range(m, d, ...)`` could skip the true minimizer);
      * no ``ValueError`` from a zero range step when the rounded mean
        equals the median (e.g. ``solution([1, 2, 3])`` used to crash).

    Args:
        N: non-empty iterable of integers.

    Returns:
        int: the minimal combined cost, floor-divided by 2.
    """
    mean_i = int(round(statistics.mean(N)))    # minimizer of the squared term
    median_i = int(statistics.median(N))       # minimizer of the absolute term
    lo, hi = min(mean_i, median_i), max(mean_i, median_i)
    return min(
        sum((n - i) ** 2 + abs(n - i) for n in N)
        for i in range(lo, hi + 1)
    ) // 2
# Script entry: read one comma-separated line of integers from stdin and
# print the computed answer.
print(solution([int(n) for n in sys.stdin.read().split(',')]))
| [
"statistics.median",
"sys.stdin.read",
"statistics.mean"
] | [((118, 138), 'statistics.median', 'statistics.median', (['N'], {}), '(N)\n', (135, 138), False, 'import sys, statistics\n'), ((59, 77), 'statistics.mean', 'statistics.mean', (['N'], {}), '(N)\n', (74, 77), False, 'import sys, statistics\n'), ((305, 321), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (319, 321), False, 'import sys, statistics\n')] |
# Copyright 2021 The T5 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Hugging Face Transformers T5 Model.
This model API is fully functional but should be treated as experimental and
subject to change. Due to implementation details, if you are interested in
exactly replicating the results in ``Exploring the Limits of Transfer Learning
with a Unified Text-to-Text Transformer'' you should use the MtfModel API
instead.
Usage example for fine-tuning and evaluating on CoLA:
```Python
import functools
import t5
import t5.models
import torch
import transformers
if torch.cuda.is_available():
device = torch.device("cuda")
else:
device = torch.device("cpu")
model = t5.models.HfPyTorchModel("t5-base", "/tmp/hft5/", device)
# Evaluate the pre-trained checkpoint, before further fine-tuning
model.eval(
"glue_cola_v002",
sequence_length={"inputs": 64, "targets": 4},
batch_size=128,
)
# Run 1000 steps of fine-tuning
model.train(
mixture_or_task_name="glue_cola_v002",
steps=1000,
save_steps=100,
sequence_length={"inputs": 64, "targets": 4},
split="train",
batch_size=32,
optimizer=functools.partial(transformers.AdamW, lr=1e-4),
)
# Evaluate after fine-tuning
model.eval(
"glue_cola_v002",
checkpoint_steps="all",
sequence_length={"inputs": 64, "targets": 4},
batch_size=128,
)
# Generate some predictions
inputs = [
"cola sentence: This is a totally valid sentence.",
"cola sentence: A doggy detail was walking famously.",
]
model.predict(
inputs,
sequence_length={"inputs": 32},
batch_size=2,
output_file="/tmp/hft5/example_predictions.txt",
)
```
"""
import functools
import itertools
import os
import re
import time
from absl import logging
import mesh_tensorflow.transformer.dataset as transformer_dataset
import t5.data
from t5.models import utils
from t5.models.t5_model import T5Model
import tensorflow.compat.v1 as tf
import tensorflow_datasets as tfds
import torch
import torch.utils.tensorboard
CHECKPOINT_FILE_FORMAT = "model-{}.checkpoint"
def tokens_to_batches(dataset,
                      sequence_length,
                      batch_size,
                      output_features,
                      mixture_or_task=None):
  """Convert a dataset of token sequences to batches of padded/masked examples.

  Every requested feature is padded (never packed) to `sequence_length`, and a
  companion "<feature>_mask" tensor marking the non-padding positions is added
  before batching.

  Args:
    dataset: tf.data.Dataset containing examples with token sequences.
    sequence_length: dict of int, a dict mapping feature name to length.
    batch_size: int, the number of padded sequences in each batch.
    output_features: list of str, features to include in the dataset.
    mixture_or_task: a Task or Mixture object, used to correctly specify eos if
      provided. If none, eos is always added at the end of the sequence.

  Returns:
    A generator that produces batches of numpy examples.
  """
  # With a task, only ensure EOS on the features that request it; without
  # one, EOS is unconditionally ensured on every feature.
  eos_keys = True
  if mixture_or_task:
    eos_keys = {
        key for key, feat in mixture_or_task.output_features.items()
        if feat.add_eos
    }

  padded = transformer_dataset.pack_or_pad(
      dataset,
      sequence_length,
      pack=False,
      feature_keys=output_features,
      ensure_eos=eos_keys,
  )

  def _add_masks(example):
    # A position counts as real (mask == 1) whenever its token id is > 0.
    for feature_name in output_features:
      tokens = example[feature_name]
      example[feature_name + "_mask"] = tf.cast(
          tf.greater(tokens, 0), tokens.dtype)
    return example

  masked = padded.map(
      _add_masks, num_parallel_calls=tf.data.experimental.AUTOTUNE)
  batched = masked.batch(batch_size, drop_remainder=False)
  return tfds.as_numpy(batched)
def _get_dataset(mixture_or_task_or_name,
                 sequence_length,
                 split,
                 shuffle=True):
  """Get a tf.data.Dataset for a given Task or Mixture.

  Args:
    mixture_or_task_or_name: Task or Mixture or str, the name of the Mixture or
      Task to train on or the Tasks or Mixture object itself.
      Must be pre-registered in the global `t5.data.TaskRegistry` or
      `t5.data.MixtureRegistry.`
    sequence_length: dict of int, a dict mapping feature name to length.
    split: str or `tensorflow_datasets.Split`, the data split to load.
    shuffle: boolean, whether to shuffle the dataset.

  Returns:
    A generator that produces batches of numpy examples.
  """
  # Accept either a registered name or an already-constructed object.
  mixture_or_task = (
      t5.data.get_mixture_or_task(mixture_or_task_or_name)
      if isinstance(mixture_or_task_or_name, str)
      else mixture_or_task_or_name)
  return mixture_or_task.get_dataset(sequence_length, split, shuffle=shuffle)
class HfPyTorchModel(T5Model):
  """Wrapper class for Hugging Face Transformers PyTorch T5 model."""

  def __init__(self, model_spec, model_dir, device):
    """Constructor for HfModel class.

    Args:
      model_spec: A str to pass into the `pretrained_model_name_or_path`
        argument of `transformers.T5ForConditionalGeneration.from_pretrained`
        (e.g. `"t5-base"` or a path to a previously trained model) or an
        instance of the `transformers.configuration_t5.T5Config` class to use
        to directly construct the `transformers.T5ForConditionalGeneration`
        object.
      model_dir: str, directory to save and load model checkpoints.
      device: `torch.device` on which the model should be run.

    Raises:
      ValueError: if `model_spec` is neither a str nor a T5Config.
    """
    # We have to import transformers here because it has a side effect of
    # creating a TensorFlow graph, which prevents eager execution from being
    # enabled in files that import hf_model.py
    import transformers  # pylint: disable=import-outside-toplevel,g-import-not-at-top
    if isinstance(model_spec, str):
      self._model = transformers.T5ForConditionalGeneration.from_pretrained(
          model_spec
      )
    elif isinstance(model_spec, transformers.T5Config):
      self._model = transformers.T5ForConditionalGeneration(model_spec)
    else:
      raise ValueError("model_spec should be a string or T5Config.")

    tf.io.gfile.makedirs(model_dir)
    self._writer = torch.utils.tensorboard.writer.SummaryWriter(model_dir)
    self._model_dir = model_dir
    self._device = device
    if self._device.type == "cuda":
      self._model.cuda()
    self._step = 0
    # Resume from the most recent checkpoint in model_dir, if one exists.
    self.load_latest_checkpoint()
    # Helper that moves numpy batches onto the model's device as int64.
    self.to_tensor = functools.partial(
        torch.as_tensor, device=self._device, dtype=torch.long)

  @property
  def model(self):
    return self._model

  @property
  def step(self):
    return self._step

  def save_checkpoint(self, step):
    """Save the current model parameters to the `model_dir`.

    Args:
      step: int, the current training step.
    """
    path = os.path.join(self._model_dir, CHECKPOINT_FILE_FORMAT.format(step))
    torch.save(self._model.state_dict(), path)

  def load_checkpoint(self, step, model_dir=None):
    """Load the model parameters from a checkpoint at a given step.

    Args:
      step: int, load the checkpoint from this training step.
      model_dir: str, the directory of the checkpoint to load or None to use
        this model's directory.
    """
    model_dir = model_dir or self._model_dir
    path = os.path.join(model_dir, CHECKPOINT_FILE_FORMAT.format(step))
    logging.info("Loading from %s", path)
    self._model.load_state_dict(torch.load(path))
    self._step = step

  def get_all_checkpoint_steps(self, model_dir=None):
    """Retrieve the steps corresponding to all checkpoints in `model_dir`.

    Args:
      model_dir: str, the directory of the checkpoints or None to use this
        model's directory.

    Returns:
      A list of ints corresponding to all checkpoint steps, or None if there
      are no checkpoints in the model directory.
    """
    model_dir = model_dir or self._model_dir
    checkpoint_files = tf.io.gfile.glob(
        os.path.join(model_dir, CHECKPOINT_FILE_FORMAT.format("*"))
    )
    if not checkpoint_files:
      return
    # Extract the integer step embedded in each checkpoint filename.
    step_regex = re.compile(".*" + CHECKPOINT_FILE_FORMAT.format(r"(\d+)"))
    steps = [int(step_regex.match(path).group(1)) for path in checkpoint_files]
    return sorted(steps)

  def get_latest_checkpoint_step(self, model_dir=None):
    """Retrieve the step corresponding to the most recent checkpoint.

    Args:
      model_dir: str, the directory of the checkpoints or None to use this
        model's directory.

    Returns:
      An integer corresponding to the most recent step, or None if there are no
      checkpoints in the model directory.
    """
    steps = self.get_all_checkpoint_steps(model_dir)
    if steps is not None:
      return max(steps)

  def load_latest_checkpoint(self):
    """Load the most recent checkpoint and update the model's current step."""
    latest_step = self.get_latest_checkpoint_step()
    if latest_step is not None:
      self.load_checkpoint(latest_step)

  def train(
      self,
      mixture_or_task_name,
      steps,
      save_steps,
      sequence_length,
      split,
      batch_size,
      optimizer,
      learning_rate_scheduler=None,
  ):
    """Train the model on the given Mixture or Task.

    Args:
      mixture_or_task_name: str, the name of the Mixture or Task to train on.
        Must be pre-registered in the global `t5.data.TaskRegistry` or
        `t5.data.MixtureRegistry.`
      steps: int, the total number of steps to train for.
      save_steps: int, the number of steps between checkpoint saves.
      sequence_length: dict of int, a dict mapping feature name to length.
      split: str or `tensorflow_datasets.Split`, the data split to load.
      batch_size: int, the number of padded sequences in each batch.
      optimizer: function that takes the model parameters as its sole argument.
        For example, to use an AdamW optimizer with a learning rate of 1e-4,
        you could pass in `functools.partial(transformers.AdamW, lr=1e-4)`.
      learning_rate_scheduler: optional function that takes in an optimizer as
        its sole argument. For example, to use a schedule that warms up the
        optimizer's learning rate after 100 steps, you could pass in
        `functools.partial(transformers.get_constant_schedule_with_warmup,
       num_warmup_steps=100)`.
    """
    self._model.train()
    ds = _get_dataset(mixture_or_task_name, sequence_length, split)
    task = t5.data.get_mixture_or_task(mixture_or_task_name)
    ds = tokens_to_batches(ds, sequence_length, batch_size,
                           tuple(task.output_features), task)
    # Repeat dataset forever so islice below controls the step count.
    ds = itertools.cycle(ds)
    optimizer = optimizer(self._model.parameters())
    if learning_rate_scheduler:
      learning_rate_scheduler = learning_rate_scheduler(optimizer)
    now = time.time()
    for train_step, batch in enumerate(itertools.islice(ds, steps)):

      if not train_step % save_steps:
        # TODO(craffel): Consider saving optimizer and scheduler state.
        logging.info("Saving checkpoint for step %s", self._step)
        self.save_checkpoint(self._step)

      self._model.zero_grad()
      outputs = self._model(
          input_ids=self.to_tensor(batch["inputs"]),
          attention_mask=self.to_tensor(batch["inputs_mask"]),
          decoder_attention_mask=self.to_tensor(batch["targets_mask"]),
          labels=self.to_tensor(batch["targets"]),
      )
      loss = outputs[0]
      loss.backward()
      optimizer.step()
      if learning_rate_scheduler:
        learning_rate_scheduler.step()

      self._writer.add_scalar(
          "loss", loss.detach().cpu().numpy(), self._step
      )
      # Log instantaneous steps-per-second based on wall-clock time.
      self._writer.add_scalar("step/s", 1 / (time.time() - now), self._step)
      now = time.time()
      self._step += 1

    logging.info("Saving final checkpoint for step %s", self._step)
    self.save_checkpoint(self._step)

  def eval(
      self,
      mixture_or_task_name,
      sequence_length,
      batch_size,
      checkpoint_steps=None,
      summary_dir=None,
      split="validation",
      compute_sequence_length=False,
      **generate_kwargs,
  ):
    """Evaluate the model on the given Mixture or Task.

    *Note*: If a checkpoint step is provided (i.e. `checkpoint_steps is not
    None`), the model's state will be replaced by the state in those
    checkpoints. If you have not saved your model before calling `eval`, you
    should call `save_checkpoint` before `eval` to avoid losing its parameter
    values and state.

    Args:
      mixture_or_task_name: str, the name of the Mixture or Task to evaluate
        on. Must be pre-registered in the global `t5.data.TaskRegistry` or
        `t5.data.MixtureRegistry.`
      sequence_length: dict of int, a dict mapping feature name to length.
      batch_size: int, the number of padded sequences in each batch.
      checkpoint_steps: int, list of ints, "all", or None. If None, eval in the
        model in its current state without loading any checkpoints. If an int
        or list of ints, evaluation will be run on the checkpoint files in
        `model_dir` whose global steps are those provided. If -1, eval on the
        latest checkpoint from the model directory. If "all", evaluate all
        checkpoints in the model directory.
      summary_dir: str, path to write TensorBoard events file summaries for
        eval. If None, use model_dir/{split}_eval.
      split: str, the mixture/task split to evaluate on.
      compute_sequence_length: bool, automatically compute sequence length
        during eval mode.
      **generate_kwargs: Additional keyword arguments to pass to
        `transformers.PretrainedModel.generate()`, for example to change the
        decoding strategy. See the documentation for
        `transformers.PretrainedModel.generate()` for options.

    Raises:
      ValueError: if `checkpoint_steps` is not None, an int, a list/tuple of
        ints, or the string "all".
    """

    def _predict_from_tasks(tasks, vocabulary, checkpoint_step, sequence_length,
                            datasets, **unused_kwargs):

      if isinstance(vocabulary, tuple):
        # (inputs_vocab, targets_vocab) pair: decode with the targets vocab.
        vocab = vocabulary[1]
      else:
        # Fix: `vocab` was previously left unbound when a single shared
        # vocabulary was supplied, raising NameError at decode time below.
        vocab = vocabulary

      if checkpoint_step != self._step:
        self.load_checkpoint(checkpoint_step)
      self._model.eval()
      outputs = []
      for task in tasks:
        if compute_sequence_length:
          ds = _get_dataset(task.name, sequence_length, split, shuffle=False)
        else:
          ds = datasets[task.name]

        ds = list(tokens_to_batches(
            ds, sequence_length, batch_size, tuple(task.output_features), task))
        for batch in ds:
          predicted_tokens = self._model.generate(
              input_ids=self.to_tensor(batch["inputs"]), **generate_kwargs
          )
          predicted_tokens = predicted_tokens.cpu().numpy().tolist()
          predictions = [vocab.decode(p) for p in predicted_tokens]

          outputs.extend(predictions)

      return outputs

    # Normalize checkpoint_steps to a list of ints.
    if checkpoint_steps is None:
      checkpoint_steps = [self._step]
    elif isinstance(checkpoint_steps, int):
      checkpoint_steps = [checkpoint_steps]
    elif checkpoint_steps == "all":
      checkpoint_steps = self.get_all_checkpoint_steps()
    elif not isinstance(checkpoint_steps, (list, tuple)):
      raise ValueError(
          f"checkpoint_steps must be None, int or list; got {checkpoint_steps}"
      )

    summary_dir = summary_dir or os.path.join(self._model_dir, f"{split}_eval")
    tf.io.gfile.makedirs(summary_dir)

    utils.run_eval(
        mixture_or_task_name=mixture_or_task_name,
        predict_or_score_fn=_predict_from_tasks,
        checkpoint_steps=checkpoint_steps,
        dataset_fn=functools.partial(_get_dataset, shuffle=False),
        summary_dir=summary_dir,
        split=split,
        sequence_length=None if compute_sequence_length else sequence_length,
        batch_size=batch_size)

  def predict(
      self,
      inputs,
      sequence_length,
      batch_size,
      output_file=None,
      vocabulary=None,
      **generate_kwargs,
  ):
    """Evaluate the model on the given Mixture or Task.

    *Note*: If a checkpoint step is provided (i.e. `checkpoint_steps is not
    None`), the model's state will be replaced by the state in those
    checkpoints. If you have not saved your model before calling `eval`, you
    should call `save_checkpoint` before `eval` to avoid losing its parameter
    values and state.

    Args:
      inputs: list of str or str, either a list of inputs to feed into the
        model or the path to a text file that contains a single input on each
        line.
      sequence_length: dict of int, a dict mapping feature name to length.
      batch_size: int, the number of padded sequences in each batch.
      output_file: str or None, path to write out predictions or None to skip
        writing.
      vocabulary: t5.data.vocabularies.Vocabulary or dict or None. Either the
        Vocabulary to use for processing inputs and targets, a dict mapping
        "inputs" to a Vocabulary for encoding the inputs and "targets" for
        decoding the predictions, or None (default) to use a
        t5.data.SentencePieceVocabulary with the provided
        sentencepiece_model_path (as was used in all pre-trained T5 models).
      **generate_kwargs: Additional keyword arguments to pass to
        `transformers.PretrainedModel.generate()`, for example to change the
        decoding strategy. See the documentation for
        `transformers.PretrainedModel.generate()` for options.

    Raises:
      ValueError: if `inputs` is a path that does not exist, or if
        `vocabulary` is not a dict, a Vocabulary, or None.
    """
    if isinstance(inputs, str):
      if not tf.io.gfile.exists(inputs):
        # Fix: the second and third message segments previously lacked the
        # `f` prefix, so "{inputs}" was shown literally instead of the path.
        raise ValueError(
            f"A str was provided for `inputs`, but the path {inputs} does not "
            f"exist. If you want the model's output for {inputs}, you should "
            f"feed in inputs=['{inputs}']"
        )
      with tf.io.gfile.GFile(inputs) as f:
        inputs = [l.strip() for l in f]

    if vocabulary is None:
      vocab = t5.data.get_default_vocabulary()
      vocabs = {"inputs": vocab, "targets": vocab}
    elif isinstance(vocabulary, t5.data.vocabularies.Vocabulary):
      vocabs = {"inputs": vocabulary, "targets": vocabulary}
    elif isinstance(vocabulary, dict):
      vocabs = vocabulary
    else:
      raise ValueError("vocabulary must be a dict, a Vocabulary, or None")

    dataset = tf.data.Dataset.from_tensor_slices(inputs)
    dataset = dataset.map(
        lambda x: {"inputs": tf.cast(vocabs["inputs"].encode_tf(x), tf.int64)},
        num_parallel_calls=tf.data.experimental.AUTOTUNE,
    )
    dataset = tokens_to_batches(
        dataset, sequence_length, batch_size, ["inputs"]
    )

    predictions = []
    for batch in dataset:
      predicted_tokens = self._model.generate(
          input_ids=self.to_tensor(batch["inputs"]), **generate_kwargs
      )
      predicted_tokens = predicted_tokens.cpu().numpy().tolist()
      predictions.extend(
          [vocabs["targets"].decode(p) for p in predicted_tokens]
      )

    for inp, pred in zip(inputs, predictions):
      logging.info("%s\n -> %s", inp, pred)

    if output_file is not None:
      utils.write_lines_to_file(predictions, output_file)

  def finetune(
      self,
      mixture_or_task_name,
      finetune_steps,
      pretrained_model_dir,
      pretrained_checkpoint_step=-1,
      **train_kwargs,
  ):
    """Trains model after loading from any existing checkpoint.

    Note that if you have initialized the model using a pre-trained model
    specification (e.g. by passing "t5-base" for `model_spec`) then you can
    just call `train` directly. This function is only provided for convenience
    for loading a pre-trained model checkpoint from an arbitrary model
    directory before calling `train`.

    Args:
      mixture_or_task_name: str, the name of the Mixture or Task to evaluate
        on. Must be pre-registered in the global `t5.data.TaskRegistry` or
        `t5.data.MixtureRegistry.`
      finetune_steps: int, the number of additional steps to train for.
      pretrained_model_dir: str, directory with pretrained model checkpoints.
      pretrained_checkpoint_step: int, checkpoint to initialize weights from.
        If -1 (default), use the latest checkpoint from the pretrained model
        directory.
      **train_kwargs: Additional keyword arguments to pass to `train`. See the
        docstring for `train` for more details.
    """
    if pretrained_checkpoint_step == -1:
      pretrained_checkpoint_step = self.get_latest_checkpoint_step(
          pretrained_model_dir
      )
    self.load_checkpoint(pretrained_checkpoint_step, pretrained_model_dir)
    self.train(mixture_or_task_name, finetune_steps, **train_kwargs)
| [
"itertools.cycle",
"itertools.islice",
"t5.models.utils.write_lines_to_file",
"tensorflow.compat.v1.io.gfile.makedirs",
"torch.utils.tensorboard.writer.SummaryWriter",
"torch.load",
"absl.logging.info",
"os.path.join",
"tensorflow.compat.v1.io.gfile.exists",
"tensorflow.compat.v1.data.Dataset.from... | [((3534, 3658), 'mesh_tensorflow.transformer.dataset.pack_or_pad', 'transformer_dataset.pack_or_pad', (['dataset', 'sequence_length'], {'pack': '(False)', 'feature_keys': 'output_features', 'ensure_eos': 'eos_keys'}), '(dataset, sequence_length, pack=False,\n feature_keys=output_features, ensure_eos=eos_keys)\n', (3565, 3658), True, 'import mesh_tensorflow.transformer.dataset as transformer_dataset\n'), ((4039, 4061), 'tensorflow_datasets.as_numpy', 'tfds.as_numpy', (['dataset'], {}), '(dataset)\n', (4052, 4061), True, 'import tensorflow_datasets as tfds\n'), ((6378, 6409), 'tensorflow.compat.v1.io.gfile.makedirs', 'tf.io.gfile.makedirs', (['model_dir'], {}), '(model_dir)\n', (6398, 6409), True, 'import tensorflow.compat.v1 as tf\n'), ((6429, 6484), 'torch.utils.tensorboard.writer.SummaryWriter', 'torch.utils.tensorboard.writer.SummaryWriter', (['model_dir'], {}), '(model_dir)\n', (6473, 6484), False, 'import torch\n'), ((6678, 6751), 'functools.partial', 'functools.partial', (['torch.as_tensor'], {'device': 'self._device', 'dtype': 'torch.long'}), '(torch.as_tensor, device=self._device, dtype=torch.long)\n', (6695, 6751), False, 'import functools\n'), ((7585, 7622), 'absl.logging.info', 'logging.info', (['"""Loading from %s"""', 'path'], {}), "('Loading from %s', path)\n", (7597, 7622), False, 'from absl import logging\n'), ((10871, 10890), 'itertools.cycle', 'itertools.cycle', (['ds'], {}), '(ds)\n', (10886, 10890), False, 'import itertools\n'), ((11053, 11064), 'time.time', 'time.time', ([], {}), '()\n', (11062, 11064), False, 'import time\n'), ((12027, 12090), 'absl.logging.info', 'logging.info', (['"""Saving final checkpoint for step %s"""', 'self._step'], {}), "('Saving final checkpoint for step %s', self._step)\n", (12039, 12090), False, 'from absl import logging\n'), ((15587, 15620), 'tensorflow.compat.v1.io.gfile.makedirs', 'tf.io.gfile.makedirs', (['summary_dir'], {}), '(summary_dir)\n', (15607, 15620), True, 
'import tensorflow.compat.v1 as tf\n'), ((18472, 18514), 'tensorflow.compat.v1.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['inputs'], {}), '(inputs)\n', (18506, 18514), True, 'import tensorflow.compat.v1 as tf\n'), ((6080, 6147), 'transformers.T5ForConditionalGeneration.from_pretrained', 'transformers.T5ForConditionalGeneration.from_pretrained', (['model_spec'], {}), '(model_spec)\n', (6135, 6147), False, 'import transformers\n'), ((7655, 7671), 'torch.load', 'torch.load', (['path'], {}), '(path)\n', (7665, 7671), False, 'import torch\n'), ((11104, 11131), 'itertools.islice', 'itertools.islice', (['ds', 'steps'], {}), '(ds, steps)\n', (11120, 11131), False, 'import itertools\n'), ((11988, 11999), 'time.time', 'time.time', ([], {}), '()\n', (11997, 11999), False, 'import time\n'), ((15536, 15582), 'os.path.join', 'os.path.join', (['self._model_dir', 'f"""{split}_eval"""'], {}), "(self._model_dir, f'{split}_eval')\n", (15548, 15582), False, 'import os\n'), ((19175, 19213), 'absl.logging.info', 'logging.info', (['"""%s\n -> %s"""', 'inp', 'pred'], {}), "('%s\\n -> %s', inp, pred)\n", (19187, 19213), False, 'from absl import logging\n'), ((19253, 19304), 't5.models.utils.write_lines_to_file', 'utils.write_lines_to_file', (['predictions', 'output_file'], {}), '(predictions, output_file)\n', (19278, 19304), False, 'from t5.models import utils\n'), ((3786, 3807), 'tensorflow.compat.v1.greater', 'tf.greater', (['tensor', '(0)'], {}), '(tensor, 0)\n', (3796, 3807), True, 'import tensorflow.compat.v1 as tf\n'), ((6242, 6293), 'transformers.T5ForConditionalGeneration', 'transformers.T5ForConditionalGeneration', (['model_spec'], {}), '(model_spec)\n', (6281, 6293), False, 'import transformers\n'), ((11253, 11310), 'absl.logging.info', 'logging.info', (['"""Saving checkpoint for step %s"""', 'self._step'], {}), "('Saving checkpoint for step %s', self._step)\n", (11265, 11310), False, 'from absl import logging\n'), ((15804, 15850), 
'functools.partial', 'functools.partial', (['_get_dataset'], {'shuffle': '(False)'}), '(_get_dataset, shuffle=False)\n', (15821, 15850), False, 'import functools\n'), ((17707, 17733), 'tensorflow.compat.v1.io.gfile.exists', 'tf.io.gfile.exists', (['inputs'], {}), '(inputs)\n', (17725, 17733), True, 'import tensorflow.compat.v1 as tf\n'), ((17982, 18007), 'tensorflow.compat.v1.io.gfile.GFile', 'tf.io.gfile.GFile', (['inputs'], {}), '(inputs)\n', (17999, 18007), True, 'import tensorflow.compat.v1 as tf\n'), ((11944, 11955), 'time.time', 'time.time', ([], {}), '()\n', (11953, 11955), False, 'import time\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 17 17:59:36 2018
@author: avelinojaver
"""
import numpy as np
import cv2
from functools import partial
import json
from pathlib import Path
import pandas as pd
from tierpsy.analysis.ske_create.helperIterROI import generateMoviesROI
mask_file = Path('/Users/avelinojaver/OneDrive - Nexus365/worms/Bertie_movies/CX11314_Ch1_04072017_103259.hdf5')
root_dir = '/Users/avelinojaver/OneDrive - Nexus365/worms/Bertie_movies/'

for mask_file in Path(root_dir).glob('*.hdf5'):
    skeletons_file = mask_file.parent / 'Results' / (mask_file.stem + '_skeletons.hdf5')
    with pd.HDFStore(str(skeletons_file), "r") as ske_file_id:
        # Per-worm trajectory table; needed by generateMoviesROI below.
        trajectories_data = ske_file_id['/trajectories_data']
        # attribute useful to understand if we are dealing with dark or light worms
        bgnd_param = ske_file_id.get_node('/plate_worms')._v_attrs['bgnd_param']
        bgnd_param = json.loads(bgnd_param.decode("utf-8"))
    print(bgnd_param)
#%%
# BUG FIX: the original referenced the undefined names `masked_image_file`
# and `trajectories_data` (NameError). The masked video path is the last
# `mask_file` of the loop above and the trajectory table is read from the
# corresponding skeletons file inside the loop.
ROIs_generator = generateMoviesROI(str(mask_file),
                                  trajectories_data,
                                  bgnd_param=bgnd_param,
                                  progress_prefix='')
for frame_props in ROIs_generator:
    break
| [
"tierpsy.analysis.ske_create.helperIterROI.generateMoviesROI",
"pathlib.Path"
] | [((320, 430), 'pathlib.Path', 'Path', (['"""/Users/avelinojaver/OneDrive - Nexus365/worms/Bertie_movies/CX11314_Ch1_04072017_103259.hdf5"""'], {}), "(\n '/Users/avelinojaver/OneDrive - Nexus365/worms/Bertie_movies/CX11314_Ch1_04072017_103259.hdf5'\n )\n", (324, 430), False, 'from pathlib import Path\n'), ((981, 1084), 'tierpsy.analysis.ske_create.helperIterROI.generateMoviesROI', 'generateMoviesROI', (['masked_image_file', 'trajectories_data'], {'bgnd_param': 'bgnd_param', 'progress_prefix': '""""""'}), "(masked_image_file, trajectories_data, bgnd_param=\n bgnd_param, progress_prefix='')\n", (998, 1084), False, 'from tierpsy.analysis.ske_create.helperIterROI import generateMoviesROI\n'), ((520, 534), 'pathlib.Path', 'Path', (['root_dir'], {}), '(root_dir)\n', (524, 534), False, 'from pathlib import Path\n')] |
import time
class Wait(object):
    """Small polling helper: wait until a status differs from a reference."""

    def __init__(self):
        pass

    def wait_for_state_change(self, expected_status, from_status):
        """Poll up to 20 times, sleeping 1 second between checks.

        Returns as soon as the two statuses differ; otherwise gives up
        after 20 one-second sleeps.
        """
        attempts = 0
        while attempts < 20:
            if expected_status != from_status:
                break
            time.sleep(1)
            attempts += 1
| [
"time.sleep"
] | [((251, 264), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (261, 264), False, 'import time\n')] |
"""
Overview
--------
general info about this module
Classes and Inheritance Structure
----------------------------------------------
.. inheritance-diagram::
Summary
---------
.. autosummary::
list of the module you want
Module API
----------
"""
from __future__ import absolute_import, division, print_function
from builtins import (bytes, str, open, super, range,
zip, round, input, int, pow, object, map, zip)
__author__ = "<NAME>"
import ast
import decorator
from datetime import datetime, date, time
from astropy.time import Time as astropyTime
from astropy.time import TimeDelta as astropyTimeDelta
from astropy.coordinates import Angle as astropyAngle
from .catalog import BasicCatalog
import numpy as np
@decorator.decorator
def check_par_list(func,par_list,*args, **kwargs):
    """Decorator: require the wrapped callable's first positional argument
    to be a list whose items are all Parameter instances.

    Note: with ``decorator.decorator`` the first positional argument of
    the decorated call is bound to ``par_list`` here and passed through
    unchanged to ``func``.
    """
    for par in par_list:
        if isinstance(par,Parameter):
            pass
        else:
            raise RuntimeError('each parameter in the par_list has to be an instance of Parameters')

    return func(par_list, *args, **kwargs)
class ParameterGroup(object):
    """A named group of Parameters and/or ParameterRanges.

    When ``exclusive`` is True at most one member may be selected at a
    time; the current selection is tracked by the boolean mask
    ``self.msk`` (aligned with ``par_list``).
    """

    def __init__(self, par_list, name, exclusive=True, def_selected=None, selected=None):
        self.name = name
        self._par_list = par_list
        self._check_pars(par_list)
        # BUG FIX: the ``exclusive`` argument was previously ignored and
        # the attribute was hard-coded to True.
        self.exclusive = exclusive
        # BUG FIX: np.bool was removed from numpy (>=1.24); it aliased
        # the builtin bool used here.
        self.msk = np.ones(len(par_list), dtype=bool)
        if exclusive is True:
            self.msk[::] = False
            if def_selected is None:
                # BUG FIX: this was ``self.msk[0] == True``, a no-op
                # comparison; the first member is now really selected.
                self.msk[0] = True
        if def_selected is not None:
            self.select(def_selected)
        if selected is not None:
            self.select(selected)

    @property
    def par_list(self):
        return self._par_list

    @property
    def names(self):
        return [p.name for p in self._par_list]

    def select(self, name):
        """Mark the member whose name equals ``name`` as selected.

        ``name`` may also be a Parameter (e.g. one built by
        ``build_selector``), in which case its *value* is the member
        name to select.
        """
        if isinstance(name, Parameter):
            # BUG FIX: previously read ``Parameter.value`` (the property
            # object on the class) instead of the instance's value.
            name = name.value
        for ID, p in enumerate(self._par_list):
            if p.name == name:
                self.msk[ID] = True
                self._selected = self._par_list[ID].name
        if self.msk.sum() > 1 and self.exclusive is True:
            raise RuntimeError('only one paramter can be selected in mutually exclusive groups')

    def _check_pars(self, par_list):
        # Only Parameter and ParameterRange members are accepted.
        for p in par_list:
            if isinstance(p, (Parameter, ParameterRange)):
                continue
            raise RuntimeError('you can group Paramters or ParamtersRanges found', type(p))

    def to_list(self):
        """Flatten the group into a list of Parameters (ranges expanded)."""
        _l = []
        for p in self._par_list:
            if isinstance(p, Parameter):
                _l.append(p)
            elif isinstance(p, ParameterRange):
                _l.extend(p.to_list())
        return _l

    def add_par(self, par):
        """Append a member; it starts unselected."""
        self.par_list.append(par)
        self.msk = np.append(self.msk, False)

    def build_selector(self, name):
        """Return a Parameter whose allowed values are this group's member names."""
        return Parameter(name, allowed_values=self.names)
class ParameterRange(object):
    """An interval defined by two Parameters of the same concrete type."""

    def __init__(self, p1, p2, name):
        self._check_pars(p1, p2)
        self.name = name
        self.p1 = p1
        self.p2 = p2

    def _check_pars(self, p1, p2):
        # Both boundaries must be Parameter instances of the same class.
        if type(p1) != type(p2):
            # BUG FIX: the message previously read "same time".
            raise RuntimeError('pars must be of the same type')
        for p in (p1, p2):
            if not isinstance(p, Parameter):
                raise RuntimeError('both p1 and p2 must be Parameters objects, found', type(p))

    def to_list(self):
        """Return the boundaries as ``[p1, p2]``."""
        return [self.p1, self.p2]
class ParameterTuple(object):
    """An immutable tuple of Parameters of the same concrete type."""

    def __init__(self, p_list, name):
        self._check_pars(p_list)
        self.name = name
        self.p_list = tuple(p_list)

    def _check_pars(self, p_list):
        # All members must share the first member's class (an empty list
        # passes trivially: the generator never evaluates p_list[0]).
        if any(type(x) != type(p_list[0]) for x in p_list):
            # BUG FIX: the message previously read "same time".
            raise RuntimeError('pars must be of the same type')
        for p in p_list:
            if not isinstance(p, Parameter):
                raise RuntimeError('both p1 and p2 must be Parameters objects, found', type(p))

    def to_list(self):
        """Return the members as a tuple."""
        return self.p_list
class Parameter(object):
    """Base class for a named, unit-aware request parameter.

    Holds a value together with its units, optionally validating the
    value with a ``check_value`` callback and/or restricting it to
    ``allowed_values`` / ``allowed_units``.
    """

    def __init__(self, value=None, units=None, name=None, allowed_units=[],
                 check_value=None, allowed_values=None, units_name=None):
        # Order matters: units must be set before value because the
        # value setter may pass self.units to the validation callback.
        self.check_value = check_value
        self._allowed_units = allowed_units
        self._allowed_values = allowed_values
        self.name = name
        self.units = units
        self.value = value
        self.units_name = units_name

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, v):
        if v is not None:
            if self.check_value is not None:
                self.check_value(v, units=self.units, name=self.name)
            if self._allowed_values is not None:
                if v not in self._allowed_values:
                    raise RuntimeError('value', v, 'not allowed, allowed=', self._allowed_values)
            # BUG FIX: the old test ``type(v)==str or type(v)==unicode``
            # raised NameError under Python 3 for any non-str value,
            # because ``unicode`` no longer exists.
            if isinstance(v, str):
                self._value = v.strip()
            else:
                self._value = v
        else:
            self._value = None

    @property
    def units(self):
        return self._units

    @units.setter
    def units(self, units):
        if self._allowed_units != [] and self._allowed_units is not None:
            self.chekc_units(units, self._allowed_units, self.name)
        self._units = units

    def set_from_form(self, form, verbose=False):
        """Set value (and units) from a dict-like form keyed by this
        parameter's ``name`` and ``units_name``."""
        par_name = self.name
        units_name = self.units_name
        v = None
        u = None
        in_dictionary = False
        if units_name is not None:
            if units_name in form.keys():
                u = form[units_name]
        if par_name in form.keys():
            v = form[par_name]
            in_dictionary = True
        if in_dictionary is True:
            self.set_par(value=v, units=u)
        else:
            if verbose is True:
                print('setting par:', par_name, 'not in dictionary')

    def set_par(self, value, units=None):
        """Set units (if given) and value, running the usual validation."""
        if units is not None:
            self.units = units
        self.value = value

    def get_form(self, wtform_cls, key, validators, defaults):
        # BUG FIX: the field label was the literal string 'key' instead
        # of the ``key`` argument.
        return wtform_cls(key, validators=validators, default=defaults)

    @staticmethod
    def chekc_units(units, allowed, name):
        # (sic) historical spelling kept because callers may rely on it.
        if units not in allowed:
            raise RuntimeError('wrong units for par: %s' % name, ' found: ', units, ' allowed:', allowed)

    @staticmethod
    def check_value(val, units, par_name):
        # Default hook: no validation; subclasses override this or pass
        # a callback to __init__.
        pass

    def reprJSON(self):
        """Return a JSON-serializable dict describing the parameter."""
        return dict(name=self.name, units=self.units, value=self.value)
#class Instrument(Parameter):
# def __init__(self,T_format,name,value=None):
#wtform_dict = {'iso': SelectField}
class Name(Parameter):
    """A plain string-valued Parameter (units are always 'str')."""

    def __init__(self, value=None, name_format='str', name=None):
        super(Name, self).__init__(value=value,
                                   units=name_format,
                                   name=name,
                                   check_value=self.check_name_value,
                                   allowed_units=['str'])

    @staticmethod
    def check_name_value(value, units=None, name=None):
        # Any value is accepted; no validation is performed.
        pass
class Float(Parameter):
    """A Parameter whose value is coerced to a Python float."""

    def __init__(self, value=None, units=None, name=None):
        super(Float, self).__init__(value=value,
                                    units=units,
                                    check_value=self.check_float_value,
                                    name=name,
                                    allowed_units=None)
        self.value = value

    @property
    def value(self):
        return self._v

    @value.setter
    def value(self, v):
        if v is not None and v != '':
            self.check_float_value(v, name=self.name)
            # BUG FIX: np.float was removed from numpy (>=1.24); it was
            # an alias of the builtin float used here.
            self._v = float(v)
        else:
            self._v = None

    @staticmethod
    def check_float_value(value, units=None, name=None):
        """Raise unless *value* is (or parses to) a number.

        None and '' are accepted as "no value".
        """
        if value is None or value == '':
            return
        try:
            value = ast.literal_eval(value)
        except Exception:
            pass
        # The conversion raises ValueError/TypeError for non-numeric
        # input, exactly as the former np.float(value) call did.
        float(value)
class Integer(Parameter):
    """A Parameter whose value is coerced to a Python int."""

    def __init__(self, value=None, units=None, name=None):
        super(Integer, self).__init__(value=value,
                                      units=units,
                                      check_value=self.check_int_value,
                                      name=name,
                                      allowed_units=None)
        self.value = value

    @property
    def value(self):
        return self._v

    @value.setter
    def value(self, v):
        if v is not None and v != '':
            self.check_int_value(v, name=self.name)
            # BUG FIX: np.int was removed from numpy (>=1.24); it was an
            # alias of the builtin int used here.
            self._v = int(v)
        else:
            self._v = None

    @staticmethod
    def check_int_value(value, units=None, name=None):
        """Raise unless *value* is (or parses to) an integer.

        None and '' are accepted as "no value".
        """
        if value is None or value == '':
            return
        try:
            value = ast.literal_eval(value)
        except Exception:
            pass
        # Raises ValueError/TypeError for non-integer input, exactly as
        # the former np.int(value) call did.
        int(value)
class Time(Parameter):
    """A Parameter wrapping an ``astropy.time.Time`` value.

    ``T_format`` is an astropy Time format string (e.g. 'iso', 'mjd').
    The raw value is kept in ``self._value`` and the parsed astropy
    object in ``self._astropy_time``.
    """
    def __init__(self,value=None,T_format=None,name=None,Time_format_name=None):
        super(Time,self).__init__(value=value,
                                  units=T_format,
                                  units_name=Time_format_name,
                                  name=name,
                                  allowed_units=None)
        # NOTE: the base __init__ has already routed ``value`` through the
        # property setter below; this call re-parses it with an explicit
        # format argument.
        self._set_time(value,format=T_format)

    @property
    def value(self):
        return self._astropy_time.value

    @value.setter
    def value(self, v):
        # Units (the Time format) are taken from the current setting.
        units=self.units
        self._set_time(v, format=units)

    def _set_time(self,value,format):
        # Numeric strings (e.g. "55000") are converted to numbers first so
        # that numeric formats such as 'mjd' accept them; anything that
        # literal_eval rejects is passed through unchanged.
        try:
            value=ast.literal_eval(value)
        except:
            pass

        self._astropy_time = astropyTime(value, format=format)

        self._value =value
class TimeDelta(Parameter):
    """A Parameter wrapping an ``astropy.time.TimeDelta`` value.

    ``delta_T_format`` is an astropy TimeDelta format (default 'sec').
    """
    def __init__(self, value=None, delta_T_format='sec', name=None, delta_T_format_name=None):
        super(TimeDelta, self).__init__(value=value,
                                        units=delta_T_format,
                                        units_name=delta_T_format_name,
                                        name=name,
                                        allowed_units=None)
        # NOTE: the base __init__ has already routed ``value`` through the
        # property setter below; this call re-parses it with an explicit
        # format argument.
        self._set_time(value, format=delta_T_format)

    @property
    def value(self):
        return self._astropy_time_delta.value

    @value.setter
    def value(self, v):
        # Units (the TimeDelta format) are taken from the current setting.
        units = self.units
        self._set_time(v, format=units)

    def _set_time(self, value, format):
        # Numeric strings are converted first so that numeric formats
        # accept them; anything literal_eval rejects passes through as-is.
        try:
            value = ast.literal_eval(value)
        except:
            pass

        self._astropy_time_delta = astropyTimeDelta(value, format=format)

        self._value = value
class InputProdList(Parameter):
    """A Parameter holding a list of product names.

    The value may be a list, a comma-separated string or a
    whitespace-separated string; it is normalised to a list of tokens.
    """

    def __init__(self, value=None, _format='names_list', name=None):
        _allowed_units = ['names_list']
        if value is None:
            value = []
        super(InputProdList, self).__init__(value=value,
                                            units=_format,
                                            check_value=self.check_list_value,
                                            name=name,
                                            allowed_units=_allowed_units)
        self._split(value)

    def _split(self, str_list):
        """Normalise *str_list* to a list of name tokens."""
        if type(str_list) == list:
            pass
        elif isinstance(str_list, str):
            # BUG FIX: the original fallback condition
            # ``type(str(str_list))`` was always truthy, so the format
            # error below was unreachable and non-string scalars crashed
            # later with an unrelated TypeError.
            if ',' in str_list:
                str_list = str_list.split(',')
            else:
                str_list = str_list.split(' ')
        else:
            raise RuntimeError('parameter format is not correct')
        if str_list == ['']:
            str_list = []
        return str_list

    @property
    def value(self):
        # An unset/empty marker value is reported as an empty list.
        if self._value == [''] or self._value is None:
            return []
        else:
            return self._value

    @value.setter
    def value(self, v):
        if v is not None:
            if self.check_value is not None:
                self.check_value(v, units=self.units, name=self.name)
            if self._allowed_values is not None:
                if v not in self._allowed_values:
                    raise RuntimeError('value', v, 'not allowed, allowed=', self._allowed_values)
            if v == [''] or v is None or str(v) == '':
                self._value = ['']
            else:
                self._value = v
        else:
            self._value = ['']
        # Whatever was assigned is re-normalised to a list of tokens.
        self._value = self._split(self._value)

    @staticmethod
    def check_list_value(value, units, name='par'):
        if units == 'names_list':
            # BUG FIX: the original assertion ended with the tautology
            # ``type(str(value)) == str`` and therefore could never fail.
            if not isinstance(value, (list, str)):
                raise RuntimeError('par:', name, ', value is not product list format : list of strings', 'it is', type(value), value)
        else:
            raise RuntimeError(name, 'units not valid', units)
class Angle(Parameter):
    """A Parameter wrapping an ``astropy.coordinates.Angle``.

    ``units`` is an angular unit understood by astropy (e.g. 'deg').
    """
    def __init__(self,value=None, units=None,name=None):
        super(Angle, self).__init__(value=value,
                                    units=units,
                                    name=name,
                                    allowed_units=None)

        self._set_angle(value, units=units)

    @property
    def value(self):
        return self._astropy_angle.value

    @value.setter
    def value(self, v, units=None):
        # NOTE(review): a property setter is only ever called with the
        # assigned value, so ``units`` always falls back to self.units.
        if units is None:
            units = self.units

        self._set_angle(v, units=units)

    def _set_angle(self, value, units):
        # NOTE(review): for '' or None nothing is stored, so reading
        # ``.value`` before a real assignment raises AttributeError --
        # confirm this is the intended behaviour.
        if value=='' or value is None:
            pass
        else:
            self._astropy_angle = astropyAngle(value, unit=units)
            self._value = self._astropy_angle.value
# class AngularDistance(Parameter):
# def __init__(self, angular_units,name, value=None):
# _allowed_units = ['deg']
# super(AngularDistance, self).__init__(value=value,
# units=angular_units,
# check_value=self.check_angle_value,
# name=name,
# allowed_units=_allowed_units)
#
#
#
# @staticmethod
# def check_angle_value(value, units=None, name=None):
# print('check type of ', name, 'value', value, 'type', type(value))
# pass
#
class SpectralBoundary(Parameter):
    """An energy/frequency boundary Parameter.

    Allowed units cover energies (eV..TeV) and frequencies (Hz..GHz).
    """

    def __init__(self, value=None, E_units='keV', name=None):
        _allowed_units = ['keV', 'eV', 'MeV', 'GeV', 'TeV', 'Hz', 'MHz', 'GHz']
        super(SpectralBoundary, self).__init__(value=value,
                                               units=E_units,
                                               check_value=self.check_energy_value,
                                               name=name,
                                               allowed_units=_allowed_units)

    @staticmethod
    def check_energy_value(value, units=None, name=None):
        """Raise RuntimeError unless *value* is (or parses to) a number."""
        try:
            value = ast.literal_eval(value)
        except Exception:
            pass
        # BUG FIX: np.int/np.float were removed from numpy (>=1.24);
        # they were plain aliases of the builtins compared here.
        if not (type(value) is int or type(value) is float):
            raise RuntimeError('type of ', name, 'not valid', type(value))
class Energy(Parameter):
    """An energy-valued Parameter (eV..TeV)."""

    def __init__(self, value=None, E_units=None, name=None):
        _allowed_units = ['keV', 'eV', 'MeV', 'GeV', 'TeV']
        super(Energy, self).__init__(value=value,
                                     units=E_units,
                                     check_value=self.check_energy_value,
                                     name=name,
                                     allowed_units=_allowed_units)

    @staticmethod
    def check_energy_value(value, units=None, name=None):
        """Raise RuntimeError unless *value* is (or parses to) a number."""
        try:
            value = ast.literal_eval(value)
        except Exception:
            pass
        # BUG FIX: np.int/np.float were removed from numpy (>=1.24);
        # they were plain aliases of the builtins compared here.
        if not (type(value) is int or type(value) is float):
            raise RuntimeError('type of ', name, 'not valid', type(value))
class DetectionThreshold(Parameter):
    """A significance-threshold Parameter ('sigma' units only)."""

    def __init__(self, value=None, units='sigma', name=None):
        _allowed_units = ['sigma']
        super(DetectionThreshold, self).__init__(value=value,
                                                 units=units,
                                                 check_value=self.check_value,
                                                 name=name,
                                                 allowed_units=_allowed_units)

    @staticmethod
    def check_value(value, units=None, name=None):
        """Raise RuntimeError unless *value* is (or parses to) a number."""
        try:
            value = ast.literal_eval(value)
        except Exception:
            pass
        # BUG FIX: np.int/np.float were removed from numpy (>=1.24);
        # they were plain aliases of the builtins compared here.
        if not (type(value) is int or type(value) is float):
            raise RuntimeError('type of ', name, 'not valid', type(value))
class UserCatalog(Parameter):
    """A Parameter holding a user-supplied catalog, stored as a string."""

    def __init__(self, value=None, name_format='str', name=None):
        super(UserCatalog, self).__init__(value=value,
                                          units=name_format,
                                          name=name,
                                          check_value=self.check_name_value,
                                          allowed_units=['str'])

    @staticmethod
    def check_name_value(value, units=None, name=None):
        # Any value is accepted; no validation is performed.
        pass
| [
"numpy.float",
"astropy.coordinates.Angle",
"astropy.time.TimeDelta",
"builtins.super",
"builtins.str",
"numpy.append",
"astropy.time.Time",
"ast.literal_eval",
"numpy.int"
] | [((2855, 2881), 'numpy.append', 'np.append', (['self.msk', '(False)'], {}), '(self.msk, False)\n', (2864, 2881), True, 'import numpy as np\n'), ((12208, 12241), 'astropy.time.Time', 'astropyTime', (['value'], {'format': 'format'}), '(value, format=format)\n', (12219, 12241), True, 'from astropy.time import Time as astropyTime\n'), ((13351, 13389), 'astropy.time.TimeDelta', 'astropyTimeDelta', (['value'], {'format': 'format'}), '(value, format=format)\n', (13367, 13389), True, 'from astropy.time import TimeDelta as astropyTimeDelta\n'), ((9105, 9116), 'numpy.float', 'np.float', (['v'], {}), '(v)\n', (9113, 9116), True, 'import numpy as np\n'), ((9496, 9511), 'numpy.float', 'np.float', (['value'], {}), '(value)\n', (9504, 9511), True, 'import numpy as np\n'), ((10517, 10526), 'numpy.int', 'np.int', (['v'], {}), '(v)\n', (10523, 10526), True, 'import numpy as np\n'), ((10904, 10917), 'numpy.int', 'np.int', (['value'], {}), '(value)\n', (10910, 10917), True, 'import numpy as np\n'), ((12113, 12136), 'ast.literal_eval', 'ast.literal_eval', (['value'], {}), '(value)\n', (12129, 12136), False, 'import ast\n'), ((13227, 13250), 'ast.literal_eval', 'ast.literal_eval', (['value'], {}), '(value)\n', (13243, 13250), False, 'import ast\n'), ((16626, 16657), 'astropy.coordinates.Angle', 'astropyAngle', (['value'], {'unit': 'units'}), '(value, unit=units)\n', (16638, 16657), True, 'from astropy.coordinates import Angle as astropyAngle\n'), ((18093, 18116), 'ast.literal_eval', 'ast.literal_eval', (['value'], {}), '(value)\n', (18109, 18116), False, 'import ast\n'), ((19097, 19120), 'ast.literal_eval', 'ast.literal_eval', (['value'], {}), '(value)\n', (19113, 19120), False, 'import ast\n'), ((20092, 20115), 'ast.literal_eval', 'ast.literal_eval', (['value'], {}), '(value)\n', (20108, 20115), False, 'import ast\n'), ((8010, 8027), 'builtins.super', 'super', (['Name', 'self'], {}), '(Name, self)\n', (8015, 8027), False, 'from builtins import bytes, str, open, super, range, zip, 
round, input, int, pow, object, map, zip\n'), ((8534, 8552), 'builtins.super', 'super', (['Float', 'self'], {}), '(Float, self)\n', (8539, 8552), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((9413, 9436), 'ast.literal_eval', 'ast.literal_eval', (['value'], {}), '(value)\n', (9429, 9436), False, 'import ast\n'), ((9948, 9968), 'builtins.super', 'super', (['Integer', 'self'], {}), '(Integer, self)\n', (9953, 9968), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((10821, 10844), 'ast.literal_eval', 'ast.literal_eval', (['value'], {}), '(value)\n', (10837, 10844), False, 'import ast\n'), ((11484, 11501), 'builtins.super', 'super', (['Time', 'self'], {}), '(Time, self)\n', (11489, 11501), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((12599, 12621), 'builtins.super', 'super', (['TimeDelta', 'self'], {}), '(TimeDelta, self)\n', (12604, 12621), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((13616, 13642), 'builtins.super', 'super', (['InputProdList', 'self'], {}), '(InputProdList, self)\n', (13621, 13642), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((15906, 15924), 'builtins.super', 'super', (['Angle', 'self'], {}), '(Angle, self)\n', (15911, 15924), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((17562, 17591), 'builtins.super', 'super', (['SpectralBoundary', 'self'], {}), '(SpectralBoundary, self)\n', (17567, 17591), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((18576, 18595), 'builtins.super', 'super', (['Energy', 'self'], {}), '(Energy, self)\n', (18581, 18595), False, 'from builtins import bytes, str, 
open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((19575, 19606), 'builtins.super', 'super', (['DetectionThreshold', 'self'], {}), '(DetectionThreshold, self)\n', (19580, 19606), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((20519, 20543), 'builtins.super', 'super', (['UserCatalog', 'self'], {}), '(UserCatalog, self)\n', (20524, 20543), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((14147, 14160), 'builtins.str', 'str', (['str_list'], {}), '(str_list)\n', (14150, 14160), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((15102, 15108), 'builtins.str', 'str', (['v'], {}), '(v)\n', (15105, 15108), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((15558, 15568), 'builtins.str', 'str', (['value'], {}), '(value)\n', (15561, 15568), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n')] |
#!/usr/bin/python
# R2D2 Python source code to control the Sphero R2D2 droid
# Author: <NAME>
# Version: 1.0
# Date: Sept, 2019
# License: LGPL 3.0
#
# Based on the reverse engineering work
# "Scripting Sphero's Star Wars Droids"
# by ~bbraun.
#
# Thanks to <NAME> who inspired the live chroma key
# methodological approach with openCV
# <http://www.synack.net/~bbraun/spherodroid/>
#
# Credits:
#
# * Sphero (Sphero/inc) for the precious documentation on
# low-level APIs for their Sphero robots
# <https://github.com/sphero-inc/DeveloperResources>
#
# * <NAME> for the pygatt Python library
# <https://github.com/peplin/pygatt>
#
# * Pimoroni for the complete and exhaustive documentation on their
# PiCade HAT <https://github.com/pimoroni/picade-hat>
#
# * <NAME> and <NAME> by Element14.com for providing the
# hardware
import pygatt
import time
import sys
import tty
import getopt
import ctypes
# Import specific BLE libraries. Pygatt should be installed in your
# Python environment.
from pygatt.backends import BLEBackend, Characteristic, BLEAddressType
# Initial command status
command = None
# Specify the Bluetooth address of the droid. Can be changed during
# the call to a command.
address = 'FD:F9:CA:74:DC:DA'
sendbytes = None
# If the flag is set the droid is set to sleep when the program exits
sleeponexit = False
# Commands dictionary in human readable form. Every entry is the RAW
# command payload (no 0x8D header, CRC or 0xD8 trailer): BuildPacket()
# adds that framing, so storing pre-framed packets here would get them
# double-wrapped.
# BUG FIX: "rot+" and "rot0" were previously stored as complete packets
# (0x8D ... CRC 0xD8); they have been unwrapped to their payloads so
# BuildPacket() reproduces exactly the original valid packets.
commandmap = dict([
    ("laugh", [0x0A,0x18,0x00,0x1F,0x00,0x32,0x00,0x00,0x00,0x00,0x00]),
    ("yes", [0x0A,0x17,0x05,0x41,0x00,0x0F]),
    ("no", [0x0A,0x17,0x05,0x3F,0x00,0x10]),
    ("alarm", [0x0A,0x17,0x05,0x17,0x00,0x07]),
    ("angry", [0x0A,0x17,0x05,0x18,0x00,0x08]),
    ("annoyed", [0x0A,0x17,0x05,0x19,0x00,0x09]),
    ("ionblast", [0x0A,0x17,0x05,0x1A,0x00,0x0E]),
    ("sad", [0x0A,0x17,0x05,0x1C,0x00,0x11]),
    ("scared", [0x0A,0x17,0x05,0x1D,0x00,0x13]),
    ("chatty", [0x0A,0x17,0x05,0x17,0x00,0x0A]),
    ("confident", [0x0A,0x17,0x05,0x18,0x00,0x12]),
    ("excited", [0x0A,0x17,0x05,0x19,0x00,0x0C]),
    ("happy", [0x0A,0x17,0x05,0x1A,0x00,0x0D]),
    ("laugh2", [0x0A,0x17,0x05,0x1B,0x00,0x0F]),
    ("surprise", [0x0A,0x17,0x05,0x1C,0x00,0x18]),
    ("tripod", [0x0A,0x17,0x0D,0x1D,0x01]),
    ("bipod", [0x0A,0x17,0x0D,0x1C,0x02]),
    ("rot+", [0x0A,0x17,0x0F,0x1C,0x42,0xB4,0x00,0x00]),
    ("rot0", [0x0A,0x17,0x0F,0x1E,0x00,0x00,0x00,0x00])
    ])
# Generate the CRC 256 modulus sum of all the bytes bitwise inverted
def GenCrc(bytes):
    """Return the droid packet checksum for the given payload bytes.

    The checksum is the bitwise complement of the byte sum modulo 256
    (equivalently: 255 minus the modular sum), truncated to one byte.
    """
    return ~(sum(bytes) % 256) % 256
# Create the data packet to send to the droid
def BuildPacket(bytes):
    """Frame a raw command payload as a complete droid packet.

    Layout: 0x8D start marker, the payload bytes, the CRC (second-to-last
    byte), then the 0xD8 end marker.
    """
    return [0x8D] + list(bytes) + [GenCrc(bytes), 0xD8]
# Initialize the communication with the droid. If sleeping awake it
def r2d2_init():
    """Connect to the droid over BLE and wake it up.

    Returns the tuple ``(device, adapter)`` so the caller can both write
    characteristics and later shut the adapter down.
    """
    # Initialize the BLE GATT adapter and start the connection.
    # The droid advertises with a random (non-public) BLE address type.
    adapter = pygatt.GATTToolBackend()
    adapter.start()
    device = adapter.connect(address = address, address_type = BLEAddressType.random)
    # The ASCII bytes spell 'usetheforce...band': this tells the droid
    # we're a controller and prevents disconnection.
    device.char_write_handle(0x15, [0x75,0x73,0x65,0x74,0x68,0x65,0x66,0x6F,0x72,0x63,0x65,0x2E,0x2E,0x2E,0x62,0x61,0x6E,0x64], True)
    # Wake from sleep: droid becomes responsive and front led flashes blue/red.
    device.char_write_handle(0x1c, [0x8D,0x0A,0x13,0x0D,0x00,0xD5,0xD8], True)
    # Turn on holoprojector led, 0xff (max) intensity
    device.char_write_handle(0x1c, [0x8D,0x0A,0x1A,0x0E,0x1C,0x00,0x80,0xFF,0x32,0xD8], True)
    return device, adapter
###############
# Main program
###############
def main():
sequences = []
pexit = False # Exit condition, never set to true. For future devel.
# Init the connection
r2d2, ble = r2d2_init()
# Start reading the pad
tty.setcbreak(sys.stdin)
# Control loop
while pexit == False:
# Get the scancode from the mapped pad
scancode = ord(sys.stdin.read(1))
# In case of wrong scancode, command is set to False
valid_command = True
# Create the commands sequence
if scancode == 65: # Up
sequences.append(commandmap["tripod"])
sequences.append(commandmap["yes"])
elif scancode == 66: # Down
sequences.append(commandmap["bipod"])
sequences.append(commandmap["yes"])
elif scancode == 67: # Rot left
sequences.append(commandmap["rot0"])
sequences.append(commandmap["no"])
elif scancode == 68: # Rot right
sequences.append(commandmap["rot+"])
sequences.append(commandmap["surprise"])
elif scancode == 122: # Button 3, 4
sequences.append(commandmap["laugh"])
sequences.append(commandmap["happy"])
sequences.append(commandmap["excited"])
elif scancode == 32: # Button 5
sequences.append(commandmap["surprise"])
sequences.append(commandmap["sad"])
sequences.append(commandmap["scared"])
elif scancode == 120: # Button 6
sequences.append(commandmap["confident"])
sequences.append(commandmap["laugh2"])
elif scancode == 105: # Front left
sequences.append(commandmap["angry"])
sequences.append(commandmap["alarm"])
elif scancode == 111: # Front right
sequences.append(commandmap["chatty"])
sequences.append(commandmap["ionblast"])
else:
valid_command = False
# Executes the command sequence
if valid_command == True:
for seq in sequences:
#device.char_write_handle(0x1c, commandmap[command], True)
r2d2.char_write_handle(0x1c, BuildPacket(seq), True)
# Empty the sequence list
del sequences[:]
ble.stop()
if __name__ == '__main__':
main()
| [
"sys.stdin.read",
"tty.setcbreak",
"pygatt.GATTToolBackend"
] | [((2982, 3006), 'pygatt.GATTToolBackend', 'pygatt.GATTToolBackend', ([], {}), '()\n', (3004, 3006), False, 'import pygatt\n'), ((3867, 3891), 'tty.setcbreak', 'tty.setcbreak', (['sys.stdin'], {}), '(sys.stdin)\n', (3880, 3891), False, 'import tty\n'), ((3990, 4007), 'sys.stdin.read', 'sys.stdin.read', (['(1)'], {}), '(1)\n', (4004, 4007), False, 'import sys\n')] |
# -*- coding: utf-8 -*-
import numpy as np
from scipy.signal import fftconvolve
def energy(traces, duration, dt=1):
"""
Compute an mean-squared energy measurement for each point of a
seismic section.
:param traces: The data array to use for calculating MS energy.
Must be 1D or 2D numpy array.
:param duration: the time duration of the window (in seconds), or
samples if dt=1.
:param dt: the sample interval of the data (in seconds). Defaults
to 1 so duration can be in samples.
:returns: An array the same dimensions as the input array.
"""
energy_data = np.zeros(traces.shape)
signal = traces * traces
n_samples = int(duration / dt)
window = np.ones(n_samples)
if np.ndim(signal) == 1:
# Compute the sliding average using a convolution
energy_data = fftconvolve(signal, window, mode='same') \
/ n_samples
elif np.ndim(signal) == 2:
for trace in range(signal.shape[1]):
energy_data[:, trace] = (fftconvolve(signal[:, trace],
window,
mode='same'))
else:
raise ValueError('Array must be 1D or 2D')
return energy_data
| [
"numpy.zeros",
"scipy.signal.fftconvolve",
"numpy.ones",
"numpy.ndim"
] | [((651, 673), 'numpy.zeros', 'np.zeros', (['traces.shape'], {}), '(traces.shape)\n', (659, 673), True, 'import numpy as np\n'), ((752, 770), 'numpy.ones', 'np.ones', (['n_samples'], {}), '(n_samples)\n', (759, 770), True, 'import numpy as np\n'), ((779, 794), 'numpy.ndim', 'np.ndim', (['signal'], {}), '(signal)\n', (786, 794), True, 'import numpy as np\n'), ((881, 921), 'scipy.signal.fftconvolve', 'fftconvolve', (['signal', 'window'], {'mode': '"""same"""'}), "(signal, window, mode='same')\n", (892, 921), False, 'from scipy.signal import fftconvolve\n'), ((967, 982), 'numpy.ndim', 'np.ndim', (['signal'], {}), '(signal)\n', (974, 982), True, 'import numpy as np\n'), ((1071, 1121), 'scipy.signal.fftconvolve', 'fftconvolve', (['signal[:, trace]', 'window'], {'mode': '"""same"""'}), "(signal[:, trace], window, mode='same')\n", (1082, 1121), False, 'from scipy.signal import fftconvolve\n')] |
import smtplib
from flask_babel import gettext as _
from flask_mail import Mail
from .app import app
from .errors import ViewError
mail = Mail(app)
def try_send_mail(msg):
try:
mail.send(msg)
except smtplib.SMTPServerDisconnected:
raise ViewError(_("Der Server ist nicht korrekt konfiguriert"))
except smtplib.SMTPRecipientsRefused as e:
messages = [
"{}: {} {}".format(r, errno, msg.decode())
for r, (errno, msg) in e.recipients.items()
]
raise ViewError(
_("Konnte E-Mail nicht versenden an:") + " " + ", ".join(messages)
)
| [
"flask_mail.Mail",
"flask_babel.gettext"
] | [((142, 151), 'flask_mail.Mail', 'Mail', (['app'], {}), '(app)\n', (146, 151), False, 'from flask_mail import Mail\n'), ((277, 323), 'flask_babel.gettext', '_', (['"""Der Server ist nicht korrekt konfiguriert"""'], {}), "('Der Server ist nicht korrekt konfiguriert')\n", (278, 323), True, 'from flask_babel import gettext as _\n'), ((551, 589), 'flask_babel.gettext', '_', (['"""Konnte E-Mail nicht versenden an:"""'], {}), "('Konnte E-Mail nicht versenden an:')\n", (552, 589), True, 'from flask_babel import gettext as _\n')] |
#!/usr/bin/env python3
from setuptools import setup
setup(
name='savedump',
version="0.1.0",
packages=[
"savedump",
],
entry_points={
'console_scripts': ['savedump=savedump.savedump:main'],
},
author='Delphix Platform Team',
author_email='<EMAIL>',
description='Archive linux crash dumps and cores',
license='Apache-2.0',
url='https://github.com/sdimitro/savedump',
)
| [
"setuptools.setup"
] | [((54, 380), 'setuptools.setup', 'setup', ([], {'name': '"""savedump"""', 'version': '"""0.1.0"""', 'packages': "['savedump']", 'entry_points': "{'console_scripts': ['savedump=savedump.savedump:main']}", 'author': '"""Delphix Platform Team"""', 'author_email': '"""<EMAIL>"""', 'description': '"""Archive linux crash dumps and cores"""', 'license': '"""Apache-2.0"""', 'url': '"""https://github.com/sdimitro/savedump"""'}), "(name='savedump', version='0.1.0', packages=['savedump'], entry_points\n ={'console_scripts': ['savedump=savedump.savedump:main']}, author=\n 'Delphix Platform Team', author_email='<EMAIL>', description=\n 'Archive linux crash dumps and cores', license='Apache-2.0', url=\n 'https://github.com/sdimitro/savedump')\n", (59, 380), False, 'from setuptools import setup\n')] |
import re
import pandas as pd
from constant import shenzhen_data_csv_path
def shape_data(header, body):
"""
该方法用于从pdf获取数据的时候塑造dataframe形式的数据
:param header:
:param body:
:return:
"""
pd.set_option('display.max_rows', None)
df = pd.DataFrame(body)
df.columns = header
return df
def to_csv(dataframe):
'''
该方法用于保存从PDF提取的数据到csv
:param dataframe:
:return:
'''
print("开始写入数据到csv文件")
dataframe.to_csv(shenzhen_data_csv_path, index=False)
def read_data_from_csv(path):
"""
该方法用于从csv文件中获取数据
:param path:
:return:
"""
pd.set_option('display.max_rows', None)
return pd.read_csv(path)
def fuzzy_finder(user_input, collection):
"""
该方法参考于https://www.cnblogs.com/weiman3389/p/6047017.html
:param user_input:
:param collection:
:return:
"""
suggestions = []
pattern = '.*'.join(user_input)
regex = re.compile(pattern)
for item in collection:
match = regex.search(item)
if match:
suggestions.append(item)
return suggestions
| [
"pandas.DataFrame",
"re.compile",
"pandas.read_csv",
"pandas.set_option"
] | [((214, 253), 'pandas.set_option', 'pd.set_option', (['"""display.max_rows"""', 'None'], {}), "('display.max_rows', None)\n", (227, 253), True, 'import pandas as pd\n'), ((263, 281), 'pandas.DataFrame', 'pd.DataFrame', (['body'], {}), '(body)\n', (275, 281), True, 'import pandas as pd\n'), ((608, 647), 'pandas.set_option', 'pd.set_option', (['"""display.max_rows"""', 'None'], {}), "('display.max_rows', None)\n", (621, 647), True, 'import pandas as pd\n'), ((659, 676), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (670, 676), True, 'import pandas as pd\n'), ((925, 944), 're.compile', 're.compile', (['pattern'], {}), '(pattern)\n', (935, 944), False, 'import re\n')] |
import sys
sys.path.append('..')
import external_test
external_test.runExternalTest()
| [
"external_test.runExternalTest",
"sys.path.append"
] | [((11, 32), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (26, 32), False, 'import sys\n'), ((54, 85), 'external_test.runExternalTest', 'external_test.runExternalTest', ([], {}), '()\n', (83, 85), False, 'import external_test\n')] |
#
# This file is part of LUNA.
#
# Copyright (c) 2020 <NAME> <<EMAIL>>
# SPDX-License-Identifier: BSD-3-Clause
""" Integrated logic analysis helpers. """
import io
import os
import sys
import math
import unittest
import tempfile
import subprocess
from abc import ABCMeta, abstractmethod
from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer
from nmigen.hdl.ast import Rose
from nmigen.lib.cdc import FFSynchronizer
from nmigen.lib.fifo import AsyncFIFOBuffered
from vcd import VCDWriter
from vcd.gtkw import GTKWSave
from ..stream import StreamInterface
from ..interface.uart import UARTMultibyteTransmitter
from ..interface.spi import SPIDeviceInterface, SPIBus, SPIGatewareTestCase
from ..test.utils import LunaGatewareTestCase, sync_test_case
class IntegratedLogicAnalyzer(Elaboratable):
""" Super-simple integrated-logic-analyzer generator class for LUNA.
Attributes
----------
trigger: Signal(), input
A strobe that determines when we should start sampling.
sampling: Signal(), output
Indicates when sampling is in progress.
complete: Signal(), output
Indicates when sampling is complete and ready to be read.
captured_sample_number: Signal(), input
Selects which sample the ILA will output. Effectively the address for the ILA's
sample buffer.
captured_sample: Signal(), output
The sample corresponding to the relevant sample number.
Can be broken apart by using Cat(*signals).
Parameters
----------
signals: iterable of Signals
An iterable of signals that should be captured by the ILA.
sample_depth: int
The depth of the desired buffer, in samples.
domain: string
The clock domain in which the ILA should operate.
sample_rate: float
Cosmetic indication of the sample rate. Used to format output.
samples_pretrigger: int
The number of our samples which should be captured _before_ the trigger.
This also can act like an implicit synchronizer; so asynchronous inputs
are allowed if this number is >= 2. Note that the trigger strobe is read
on the rising edge of the clock.
"""
def __init__(self, *, signals, sample_depth, domain="sync", sample_rate=60e6, samples_pretrigger=1):
self.domain = domain
self.signals = signals
self.inputs = Cat(*signals)
self.sample_width = len(self.inputs)
self.sample_depth = sample_depth
self.samples_pretrigger = samples_pretrigger
self.sample_rate = sample_rate
self.sample_period = 1 / sample_rate
#
# Create a backing store for our samples.
#
self.mem = Memory(width=self.sample_width, depth=sample_depth, name="ila_buffer")
#
# I/O port
#
self.trigger = Signal()
self.sampling = Signal()
self.complete = Signal()
self.captured_sample_number = Signal(range(0, self.sample_depth))
self.captured_sample = Signal(self.sample_width)
def elaborate(self, platform):
m = Module()
# TODO: switch this to a single-port RAM
# Memory ports.
write_port = self.mem.write_port()
read_port = self.mem.read_port(domain='comb')
m.submodules += [write_port, read_port]
# If necessary, create synchronized versions of the relevant signals.
if self.samples_pretrigger >= 2:
delayed_inputs = Signal.like(self.inputs)
m.submodules += FFSynchronizer(self.inputs, delayed_inputs,
stages=self.samples_pretrigger)
elif self.samples_pretrigger == 1:
delayed_inputs = Signal.like(self.inputs)
m.d.sync += delayed_inputs.eq(self.inputs)
else:
delayed_inputs = self.inputs
# Counter that keeps track of our write position.
write_position = Signal(range(0, self.sample_depth))
# Set up our write port to capture the input signals,
# and our read port to provide the output.
m.d.comb += [
write_port.data .eq(delayed_inputs),
write_port.addr .eq(write_position),
self.captured_sample .eq(read_port.data),
read_port.addr .eq(self.captured_sample_number)
]
self.test = Signal()
m.d.comb += self.test.eq(read_port.addr)
# Don't sample unless our FSM asserts our sample signal explicitly.
m.d.sync += write_port.en.eq(0)
with m.FSM() as fsm:
m.d.comb += self.sampling.eq(~fsm.ongoing("IDLE"))
# IDLE: wait for the trigger strobe
with m.State('IDLE'):
with m.If(self.trigger):
m.next = 'SAMPLE'
# Grab a sample as our trigger is asserted.
m.d.sync += [
write_port.en .eq(1),
write_position .eq(0),
self.complete .eq(0),
]
# SAMPLE: do our sampling
with m.State('SAMPLE'):
# Sample until we run out of samples.
m.d.sync += [
write_port.en .eq(1),
write_position .eq(write_position + 1),
]
# If this is the last sample, we're done. Finish up.
with m.If(write_position + 1 == self.sample_depth):
m.next = "IDLE"
m.d.sync += [
self.complete .eq(1),
write_port.en .eq(0)
]
# Convert our sync domain to the domain requested by the user, if necessary.
if self.domain != "sync":
m = DomainRenamer({"sync": self.domain})(m)
return m
class IntegratedLogicAnalyzerTest(LunaGatewareTestCase):
def instantiate_dut(self):
self.input_a = Signal()
self.input_b = Signal(30)
self.input_c = Signal()
return IntegratedLogicAnalyzer(
signals=[self.input_a, self.input_b, self.input_c],
sample_depth = 32
)
def initialize_signals(self):
yield self.input_a .eq(0)
yield self.input_b .eq(0)
yield self.input_c .eq(0)
def provide_all_signals(self, value):
all_signals = Cat(self.input_a, self.input_b, self.input_c)
yield all_signals.eq(value)
def assert_sample_value(self, address, value):
""" Helper that asserts a ILA sample has a given value. """
yield self.dut.captured_sample_number.eq(address)
yield
try:
self.assertEqual((yield self.dut.captured_sample), value)
return
except AssertionError:
pass
# Generate an appropriate exception.
actual_value = (yield self.dut.captured_sample)
message = "assertion failed: at address 0x{:08x}: {:08x} != {:08x} (expected)".format(address, actual_value, value)
raise AssertionError(message)
@sync_test_case
def test_sampling(self):
# Quick helper that generates simple, repetitive samples.
def sample_value(i):
return i | (i << 8) | (i << 16) | (0xFF << 24)
yield from self.provide_all_signals(0xDEADBEEF)
yield
# Before we trigger, we shouldn't be capturing any samples,
# and we shouldn't be complete.
self.assertEqual((yield self.dut.sampling), 0)
self.assertEqual((yield self.dut.complete), 0)
# Advance a bunch of cycles, and ensure we don't start sampling.
yield from self.advance_cycles(10)
self.assertEqual((yield self.dut.sampling), 0)
# Set a new piece of data for a couple of cycles.
yield from self.provide_all_signals(0x01234567)
yield
yield from self.provide_all_signals(0x89ABCDEF)
yield
# Finally, trigger the capture.
yield from self.provide_all_signals(sample_value(0))
yield from self.pulse(self.dut.trigger, step_after=False)
yield from self.provide_all_signals(sample_value(1))
yield
# After we pulse our trigger strobe, we should be sampling.
self.assertEqual((yield self.dut.sampling), 1)
# Populate the memory with a variety of interesting signals;
# and continue afterwards for a couple of cycles to make sure
# these don't make it into our sample buffer.
for i in range(2, 34):
yield from self.provide_all_signals(sample_value(i))
yield
# We now should be done with our sampling.
self.assertEqual((yield self.dut.sampling), 0)
self.assertEqual((yield self.dut.complete), 1)
# Validate the memory values that were captured.
for i in range(32):
yield from self.assert_sample_value(i, sample_value(i))
# All of those reads shouldn't change our completeness.
self.assertEqual((yield self.dut.sampling), 0)
self.assertEqual((yield self.dut.complete), 1)
class SyncSerialILA(Elaboratable):
""" Super-simple ILA that reads samples out over a simple unidirectional SPI.
Create a receiver for this object by calling apollo.ila_receiver_for(<this>).
This protocol is simple: every time CS goes low, we begin sending out a bit of
sample on each rising edge. Once a new sample is complete, the next sample begins
on the next 32-bit boundary.
Attributes
----------
trigger: Signal(), input
A strobe that determines when we should start sampling.
sampling: Signal(), output
Indicates when sampling is in progress.
complete: Signal(), output
Indicates when sampling is complete and ready to be read.
sck: Signal(), input
Serial clock for the SPI lines.
sdo: Signal(), output
Serial data out for the SPI lines.
cs: Signal(), input
Chip select for the SPI lines.
Parameters
----------
signals: iterable of Signals
An iterable of signals that should be captured by the ILA.
sample_depth: int
The depth of the desired buffer, in samples.
domain: string
The clock domain in which the ILA should operate.
samples_pretrigger: int
The number of our samples which should be captured _before_ the trigger.
This also can act like an implicit synchronizer; so asynchronous inputs
are allowed if this number is >= 2.
clock_polarity: int, 0 or 1
Clock polarity for the output SPI transciever. Optional.
clock_phase: int, 0 or 1
Clock phase for the output SPI transciever. Optional.
cs_idles_high: bool, optional
If True, the CS line will be assumed to be asserted when cs=0.
If False or not provided, the CS line will be assumed to be asserted when cs=1.
This can be used to share a simple two-device SPI bus, so two internal endpoints
can use the same CS line, with two opposite polarities.
"""
def __init__(self, *, signals, sample_depth, clock_polarity=0, clock_phase=1, cs_idles_high=False, **kwargs):
#
# I/O port
#
self.spi = SPIBus()
#
# Init
#
self.clock_phase = clock_phase
self.clock_polarity = clock_polarity
# Extract the domain from our keyword arguments, and then translate it to sync
# before we pass it back below. We'll use a DomainRenamer at the boundary to
# handle non-sync domains.
self.domain = kwargs.get('domain', 'sync')
kwargs['domain'] = 'sync'
# Create our core integrated logic analyzer.
self.ila = IntegratedLogicAnalyzer(
signals=signals,
sample_depth=sample_depth,
**kwargs)
# Copy some core parameters from our inner ILA.
self.signals = signals
self.sample_width = self.ila.sample_width
self.sample_depth = self.ila.sample_depth
self.sample_rate = self.ila.sample_rate
self.sample_period = self.ila.sample_period
# Figure out how many bytes we'll send per sample.
# We'll always send things squished into 32-bit chunks, as this is what the SPI engine
# on our Debug Controller likes most.
words_per_sample = (self.ila.sample_width + 31) // 32
# Bolster our bits_per_word up to a power of two...
self.bits_per_word = words_per_sample * 4 * 8
self.bits_per_word = 2 ** ((self.bits_per_word - 1).bit_length())
# ... and compute how many bits should be used.
self.bytes_per_sample = self.bits_per_word // 8
# Expose our ILA's trigger and status ports directly.
self.trigger = self.ila.trigger
self.sampling = self.ila.sampling
self.complete = self.ila.complete
def elaborate(self, platform):
m = Module()
m.submodules.ila = self.ila
transaction_start = Rose(self.spi.cs)
# Connect up our SPI transciever to our public interface.
interface = SPIDeviceInterface(
word_size=self.bits_per_word,
clock_polarity=self.clock_polarity,
clock_phase=self.clock_phase
)
m.submodules.spi = interface
m.d.comb += [
interface.spi .connect(self.spi),
# Always output the captured sample.
interface.word_out .eq(self.ila.captured_sample)
]
# Count where we are in the current transmission.
current_sample_number = Signal(range(0, self.ila.sample_depth))
# Our first piece of data is latched in when the transaction
# starts, so we'll move on to sample #1.
with m.If(self.spi.cs):
with m.If(transaction_start):
m.d.sync += current_sample_number.eq(1)
# From then on, we'll move to the next sample whenever we're finished
# scanning out a word (and thus our current samples are latched in).
with m.Elif(interface.word_accepted):
m.d.sync += current_sample_number.eq(current_sample_number + 1)
# Whenever CS is low, we should be providing the very first sample,
# so reset our sample counter to 0.
with m.Else():
m.d.sync += current_sample_number.eq(0)
# Ensure our ILA module outputs the right sample.
m.d.sync += [
self.ila.captured_sample_number .eq(current_sample_number)
]
# Convert our sync domain to the domain requested by the user, if necessary.
if self.domain != "sync":
m = DomainRenamer({"sync": self.domain})(m)
return m
class SyncSerialReadoutILATest(SPIGatewareTestCase):
def instantiate_dut(self):
self.input_signal = Signal(12)
return SyncSerialILA(
signals=[self.input_signal],
sample_depth=16,
clock_polarity=1,
clock_phase=0
)
def initialize_signals(self):
yield self.input_signal.eq(0xF00)
@sync_test_case
def test_spi_readout(self):
input_signal = self.input_signal
# Trigger the test while offering our first sample.
yield
yield from self.pulse(self.dut.trigger, step_after=False)
# Provide the remainder of our samples.
for i in range(1, 16):
yield input_signal.eq(0xF00 | i)
yield
# Wait a few cycles to account for delays in
# the sampling pipeline.
yield from self.advance_cycles(5)
# We've now captured a full set of samples.
# We'll test reading them out.
self.assertEqual((yield self.dut.complete), 1)
# Start the transaction, and exchange 16 bytes of data.
yield self.dut.spi.cs.eq(1)
yield
# Read our our result over SPI...
data = yield from self.spi_exchange_data(b"\0" * 32)
# ... and ensure it matches what was sampled.
i = 0
while data:
datum = data[0:4]
del data[0:4]
expected = b"\x00\x00\x0f" + bytes([i])
self.assertEqual(datum, expected)
i += 1
class StreamILA(Elaboratable):
""" Super-simple ILA that outputs its samples over a Stream.
Create a receiver for this object by calling apollo.ila_receiver_for(<this>).
This protocol is simple: we wait for a trigger; and then broadcast our samples.
We broadcast one buffer of samples per each subsequent trigger.
Attributes
----------
trigger: Signal(), input
A strobe that determines when we should start sampling.
sampling: Signal(), output
Indicates when sampling is in progress.
complete: Signal(), output
Indicates when sampling is complete and ready to be read.
stream: output stream
Stream output for the ILA.
Parameters
----------
signals: iterable of Signals
An iterable of signals that should be captured by the ILA.
sample_depth: int
The depth of the desired buffer, in samples.
domain: string
The clock domain in which the ILA should operate.
o_domain: string
The clock domain in which the output stream will be generated.
If omitted, defaults to the same domain as the core ILA.
samples_pretrigger: int
The number of our samples which should be captured _before_ the trigger.
This also can act like an implicit synchronizer; so asynchronous inputs
are allowed if this number is >= 2.
"""
def __init__(self, *, signals, sample_depth, o_domain=None, **kwargs):
# Extract the domain from our keyword arguments, and then translate it to sync
# before we pass it back below. We'll use a DomainRenamer at the boundary to
# handle non-sync domains.
self.domain = kwargs.get('domain', 'sync')
kwargs['domain'] = 'sync'
self._o_domain = o_domain if o_domain else self.domain
# Create our core integrated logic analyzer.
self.ila = IntegratedLogicAnalyzer(
signals=signals,
sample_depth=sample_depth,
**kwargs)
# Copy some core parameters from our inner ILA.
self.signals = signals
self.sample_width = self.ila.sample_width
self.sample_depth = self.ila.sample_depth
self.sample_rate = self.ila.sample_rate
self.sample_period = self.ila.sample_period
# Bolster our bits per sample "word" up to a power of two.
self.bits_per_sample = 2 ** ((self.ila.sample_width - 1).bit_length())
self.bytes_per_sample = self.bits_per_sample // 8
#
# I/O port
#
self.stream = StreamInterface(payload_width=self.bits_per_sample)
self.trigger = Signal()
# Expose our ILA's trigger and status ports directly.
self.sampling = self.ila.sampling
self.complete = self.ila.complete
def elaborate(self, platform):
m = Module()
m.submodules.ila = ila = self.ila
if self._o_domain == self.domain:
in_domain_stream = self.stream
else:
in_domain_stream = StreamInterface(payload_width=self.bits_per_sample)
# Count where we are in the current transmission.
current_sample_number = Signal(range(0, ila.sample_depth))
# Always present the current sample number to our ILA, and the current
# sample value to the UART.
m.d.comb += [
ila.captured_sample_number .eq(current_sample_number),
in_domain_stream.payload .eq(ila.captured_sample)
]
with m.FSM():
# IDLE -- we're currently waiting for a trigger before capturing samples.
with m.State("IDLE"):
# Always allow triggering, as we're ready for the data.
m.d.comb += self.ila.trigger.eq(self.trigger)
# Once we're triggered, move onto the SAMPLING state.
with m.If(self.trigger):
m.next = "SAMPLING"
# SAMPLING -- the internal ILA is sampling; we're now waiting for it to
# complete. This state is similar to IDLE; except we block triggers in order
# to cleanly avoid a race condition.
with m.State("SAMPLING"):
# Once our ILA has finished sampling, prepare to read out our samples.
with m.If(self.ila.complete):
m.d.sync += [
current_sample_number .eq(0),
in_domain_stream.first .eq(1)
]
m.next = "SENDING"
# SENDING -- we now have a valid buffer of samples to send up to the host;
# we'll transmit them over our stream interface.
with m.State("SENDING"):
m.d.comb += [
# While we're sending, we're always providing valid data to the UART.
in_domain_stream.valid .eq(1),
# Indicate when we're on the last sample.
in_domain_stream.last .eq(current_sample_number == (self.sample_depth - 1))
]
# Each time the UART accepts a valid word, move on to the next one.
with m.If(in_domain_stream.ready):
m.d.sync += [
current_sample_number .eq(current_sample_number + 1),
in_domain_stream.first .eq(0)
]
# If this was the last sample, we're done! Move back to idle.
with m.If(self.stream.last):
m.next = "IDLE"
# If we're not streaming out of the same domain we're capturing from,
# we'll add some clock-domain crossing hardware.
if self._o_domain != self.domain:
in_domain_signals = Cat(
in_domain_stream.first,
in_domain_stream.payload,
in_domain_stream.last
)
out_domain_signals = Cat(
self.stream.first,
self.stream.payload,
self.stream.last
)
# Create our async FIFO...
m.submodules.cdc = fifo = AsyncFIFOBuffered(
width=len(in_domain_signals),
depth=16,
w_domain="sync",
r_domain=self._o_domain
)
m.d.comb += [
# ... fill it from our in-domain stream...
fifo.w_data .eq(in_domain_signals),
fifo.w_en .eq(in_domain_stream.valid),
in_domain_stream.ready .eq(fifo.w_rdy),
# ... and output it into our outupt stream.
out_domain_signals .eq(fifo.r_data),
self.stream.valid .eq(fifo.r_rdy),
fifo.r_en .eq(self.stream.ready)
]
# Convert our sync domain to the domain requested by the user, if necessary.
if self.domain != "sync":
m = DomainRenamer({"sync": self.domain})(m)
return m
class AsyncSerialILA(Elaboratable):
""" Super-simple ILA that reads samples out over a UART connection.
Create a receiver for this object by calling apollo.ila_receiver_for(<this>).
This protocol is simple: we wait for a trigger; and then broadcast our samples.
We broadcast one buffer of samples per each subsequent trigger.
Attributes
----------
trigger: Signal(), input
A strobe that determines when we should start sampling.
sampling: Signal(), output
Indicates when sampling is in progress.
complete: Signal(), output
Indicates when sampling is complete and ready to be read.
tx: Signal(), output
Serial output for the ILA.
Parameters
----------
signals: iterable of Signals
An iterable of signals that should be captured by the ILA.
sample_depth: int
The depth of the desired buffer, in samples.
divisor: int
The number of `sync` clock cycles per bit period.
domain: string
The clock domain in which the ILA should operate.
samples_pretrigger: int
The number of our samples which should be captured _before_ the trigger.
This also can act like an implicit synchronizer; so asynchronous inputs
are allowed if this number is >= 2.
"""
def __init__(self, *, signals, sample_depth, divisor, **kwargs):
self.divisor = divisor
#
# I/O port
#
self.tx = Signal()
# Extract the domain from our keyword arguments, and then translate it to sync
# before we pass it back below. We'll use a DomainRenamer at the boundary to
# handle non-sync domains.
self.domain = kwargs.get('domain', 'sync')
kwargs['domain'] = 'sync'
# Create our core integrated logic analyzer.
self.ila = StreamILA(
signals=signals,
sample_depth=sample_depth,
**kwargs)
# Copy some core parameters from our inner ILA.
self.signals = signals
self.sample_width = self.ila.sample_width
self.sample_depth = self.ila.sample_depth
self.sample_rate = self.ila.sample_rate
self.sample_period = self.ila.sample_period
self.bits_per_sample = self.ila.bits_per_sample
self.bytes_per_sample = self.ila.bytes_per_sample
# Expose our ILA's trigger and status ports directly.
self.trigger = self.ila.trigger
self.sampling = self.ila.sampling
self.complete = self.ila.complete
def elaborate(self, platform):
m = Module()
m.submodules.ila = ila = self.ila
# Create our UART transmitter, and connect it to our stream interface.
m.submodules.uart = uart = UARTMultibyteTransmitter(
byte_width=self.bytes_per_sample,
divisor=self.divisor
)
m.d.comb +=[
uart.stream .stream_eq(ila.stream),
self.tx .eq(uart.tx)
]
# Convert our sync domain to the domain requested by the user, if necessary.
if self.domain != "sync":
m = DomainRenamer({"sync": self.domain})(m)
return m
class ILAFrontend(metaclass=ABCMeta):
    """ Class that communicates with an ILA module and emits useful output. """

    def __init__(self, ila):
        """
        Parameters:
            ila -- The ILA object to work with.
        """
        self.ila = ila
        self.samples = None

    @abstractmethod
    def _read_samples(self):
        """ Read samples from the target ILA. Should return an iterable of samples. """

    def _parse_sample(self, raw_sample):
        """ Converts a single binary sample to a dictionary of names -> sample values. """
        position = 0
        sample   = {}

        # Split our raw, bits(0) signal into smaller slices, and associate them with their names.
        for signal in self.ila.signals:
            signal_width = len(signal)
            signal_bits  = raw_sample[position : position + signal_width]
            position += signal_width

            sample[signal.name] = signal_bits

        return sample

    def _parse_samples(self, raw_samples):
        """ Converts raw, binary samples to dictionaries of name -> sample. """
        return [self._parse_sample(sample) for sample in raw_samples]

    def refresh(self):
        """ Fetches the latest set of samples from the target ILA. """
        self.samples = self._parse_samples(self._read_samples())

    def enumerate_samples(self):
        """ Returns an iterator that returns pairs of (timestamp, sample). """

        # If we don't have any samples, fetch samples from the ILA.
        if self.samples is None:
            self.refresh()

        timestamp = 0

        # Iterate over each sample...
        for sample in self.samples:
            yield timestamp, sample

            # ... and advance the timestamp by the relevant interval.
            timestamp += self.ila.sample_period

    def print_samples(self):
        """ Simple method that prints each of our samples; for simple CLI debugging."""
        for timestamp, sample in self.enumerate_samples():
            timestamp_scaled = 1000000 * timestamp
            print(f"{timestamp_scaled:08f}us: {sample}")

    def emit_vcd(self, filename, *, gtkw_filename=None, add_clock=True):
        """ Emits a VCD file containing the ILA samples.

        Parameters:
            filename      -- The filename to write to, or '-' to write to stdout.
            gtkw_filename -- If provided, a gtkwave save file will be generated that
                             automatically displays all of the relevant signals in the
                             order provided to the ILA.
            add_clock     -- If true or not provided, adds a replica of the ILA's sample
                             clock to make change points easier to see.
        """

        # Select the file-like object we're working with.
        if filename == "-":
            stream      = sys.stdout
            close_after = False
        else:
            stream      = open(filename, 'w')
            close_after = True

        try:
            # Create our basic VCD.
            with VCDWriter(stream, timescale=f"1 ns", date='today') as writer:
                signals = {}

                # If we're adding a clock, register a replica clock signal for it.
                if add_clock:
                    clock_value  = 1
                    clock_signal = writer.register_var('ila', 'ila_clock', 'integer', size=1, init=clock_value ^ 1)

                # Create named values for each of our signals.
                for signal in self.ila.signals:
                    signals[signal.name] = writer.register_var('ila', signal.name, 'integer', size=len(signal))

                # Dump each of our samples into the VCD.
                clock_time = 0
                for timestamp, sample in self.enumerate_samples():

                    # If we're adding a clock signal, emit every clock edge due since the
                    # previous sample (two edges per sample period).
                    if add_clock:
                        while clock_time < timestamp:
                            writer.change(clock_signal, clock_time / 1e-9, clock_value)
                            clock_value ^= 1
                            clock_time  += (self.ila.sample_period / 2)

                    # Register each signal change.
                    for signal_name, signal_value in sample.items():
                        writer.change(signals[signal_name], timestamp / 1e-9, signal_value.to_int())
        finally:
            # FIX: the original computed `close_after` but never closed the file,
            # leaking the handle for every non-stdout dump.
            if close_after:
                stream.close()

        # If we're generating a GTKW, delegate that to our helper function.
        if gtkw_filename:
            # FIX: was a bare `assert`, which is silently stripped under `python -O`.
            if filename == '-':
                raise ValueError("cannot generate a GTKW save when dumping the VCD to stdout")
            self._emit_gtkw(gtkw_filename, filename, add_clock=add_clock)

    def _emit_gtkw(self, filename, dump_filename, *, add_clock=True):
        """ Emits a GTKWave save file to accompany a generated VCD.

        Parameters:
            filename      -- The filename to write the GTKW save to.
            dump_filename -- The filename of the VCD that should be opened with this save.
            add_clock     -- True iff a clock signal should be added to the GTKW save.
        """
        with open(filename, 'w') as f:
            gtkw = GTKWSave(f)

            # Comments / context.
            gtkw.comment("Generated by the LUNA ILA.")

            # Add a reference to the dumpfile we're working with.
            gtkw.dumpfile(dump_filename)

            # If we're adding a clock, add it to the top of the view.
            # FIX: the original added the clock trace unconditionally, ignoring add_clock.
            if add_clock:
                gtkw.trace('ila.ila_clock')

            # Add each of our signals to the file.
            for signal in self.ila.signals:
                gtkw.trace(f"ila.{signal.name}")

    def interactive_display(self, *, add_clock=True):
        """ Attempts to spawn a GTKWave instance to display the ILA results interactively. """

        # Hack: generate files in a way that doesn't trip macOS's fancy guards.
        vcd_filename  = os.path.join(tempfile.gettempdir(), os.urandom(24).hex() + '.vcd')
        gtkw_filename = os.path.join(tempfile.gettempdir(), os.urandom(24).hex() + '.gtkw')

        try:
            # FIX: propagate add_clock (it was previously accepted and ignored).
            self.emit_vcd(vcd_filename, gtkw_filename=gtkw_filename, add_clock=add_clock)
            subprocess.run(["gtkwave", "-f", vcd_filename, "-a", gtkw_filename])
        finally:
            # FIX: only remove files that were actually created; the original
            # raised FileNotFoundError (masking the real error) when VCD
            # generation failed before the files existed.
            for path in (vcd_filename, gtkw_filename):
                if os.path.exists(path):
                    os.remove(path)
class AsyncSerialILAFrontend(ILAFrontend):
    """ UART-based ILA transport.

    Parameters
    ------------
    port: string
        The serial port to use to connect. This is typically a path on *nix systems.
    ila: IntegratedLogicAnalyzer
        The ILA object to work with.
    """

    def __init__(self, *args, ila, **kwargs):
        """ Opens the given serial port (arguments are forwarded to serial.Serial)
        and prepares to read samples for the given ILA. """
        import serial

        self._port = serial.Serial(*args, **kwargs)
        self._port.reset_input_buffer()

        super().__init__(ila)

    def _split_samples(self, all_samples):
        """ Returns an iterator that iterates over each sample in the raw binary of samples. """
        from apollo.support.bits import bits

        sample_width_bytes = self.ila.bytes_per_sample

        # The bit-length of a sample is fixed by the ILA's signal set; compute it
        # once instead of re-concatenating the signals for every sample (the
        # original rebuilt Cat(...) inside the loop).
        sample_length = len(Cat(self.ila.signals))

        # Iterate over each sample, and yield its value as a bits object.
        for i in range(0, len(all_samples), sample_width_bytes):
            raw_sample = all_samples[i:i + sample_width_bytes]
            yield bits.from_bytes(raw_sample, length=sample_length, byteorder='big')

    def _read_samples(self):
        """ Reads a set of ILA samples, and returns them. """
        sample_width_bytes = self.ila.bytes_per_sample
        total_to_read      = self.ila.sample_depth * sample_width_bytes

        # Fetch all of our samples from the given device.
        all_samples = self._port.read(total_to_read)

        return list(self._split_samples(all_samples))
# When run directly, execute this module's unit tests.
if __name__ == "__main__":
    unittest.main()
| [
"vcd.gtkw.GTKWSave",
"nmigen.Signal",
"nmigen.DomainRenamer",
"apollo.support.bits.bits.from_bytes",
"os.urandom",
"subprocess.run",
"nmigen.hdl.ast.Rose",
"nmigen.Signal.like",
"nmigen.Module",
"nmigen.lib.cdc.FFSynchronizer",
"nmigen.Cat",
"serial.Serial",
"tempfile.gettempdir",
"unittes... | [((34447, 34462), 'unittest.main', 'unittest.main', ([], {}), '()\n', (34460, 34462), False, 'import unittest\n'), ((2492, 2505), 'nmigen.Cat', 'Cat', (['*signals'], {}), '(*signals)\n', (2495, 2505), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((2843, 2913), 'nmigen.Memory', 'Memory', ([], {'width': 'self.sample_width', 'depth': 'sample_depth', 'name': '"""ila_buffer"""'}), "(width=self.sample_width, depth=sample_depth, name='ila_buffer')\n", (2849, 2913), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((2979, 2987), 'nmigen.Signal', 'Signal', ([], {}), '()\n', (2985, 2987), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((3012, 3020), 'nmigen.Signal', 'Signal', ([], {}), '()\n', (3018, 3020), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((3045, 3053), 'nmigen.Signal', 'Signal', ([], {}), '()\n', (3051, 3053), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((3167, 3192), 'nmigen.Signal', 'Signal', (['self.sample_width'], {}), '(self.sample_width)\n', (3173, 3192), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((3243, 3251), 'nmigen.Module', 'Module', ([], {}), '()\n', (3249, 3251), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((4500, 4508), 'nmigen.Signal', 'Signal', ([], {}), '()\n', (4506, 4508), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((6107, 6115), 'nmigen.Signal', 'Signal', ([], {}), '()\n', (6113, 6115), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((6139, 6149), 'nmigen.Signal', 'Signal', (['(30)'], {}), '(30)\n', (6145, 6149), False, 'from 
nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((6173, 6181), 'nmigen.Signal', 'Signal', ([], {}), '()\n', (6179, 6181), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((6531, 6576), 'nmigen.Cat', 'Cat', (['self.input_a', 'self.input_b', 'self.input_c'], {}), '(self.input_a, self.input_b, self.input_c)\n', (6534, 6576), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((13157, 13165), 'nmigen.Module', 'Module', ([], {}), '()\n', (13163, 13165), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((13231, 13248), 'nmigen.hdl.ast.Rose', 'Rose', (['self.spi.cs'], {}), '(self.spi.cs)\n', (13235, 13248), False, 'from nmigen.hdl.ast import Rose\n'), ((15070, 15080), 'nmigen.Signal', 'Signal', (['(12)'], {}), '(12)\n', (15076, 15080), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((19100, 19108), 'nmigen.Signal', 'Signal', ([], {}), '()\n', (19106, 19108), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((19307, 19315), 'nmigen.Module', 'Module', ([], {}), '()\n', (19313, 19315), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((24981, 24989), 'nmigen.Signal', 'Signal', ([], {}), '()\n', (24987, 24989), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((26123, 26131), 'nmigen.Module', 'Module', ([], {}), '()\n', (26129, 26131), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((33335, 33365), 'serial.Serial', 'serial.Serial', (['*args'], {}), '(*args, **kwargs)\n', (33348, 33365), False, 'import serial\n'), ((3622, 3646), 'nmigen.Signal.like', 'Signal.like', (['self.inputs'], {}), '(self.inputs)\n', (3633, 
3646), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((3675, 3750), 'nmigen.lib.cdc.FFSynchronizer', 'FFSynchronizer', (['self.inputs', 'delayed_inputs'], {'stages': 'self.samples_pretrigger'}), '(self.inputs, delayed_inputs, stages=self.samples_pretrigger)\n', (3689, 3750), False, 'from nmigen.lib.cdc import FFSynchronizer\n'), ((22234, 22310), 'nmigen.Cat', 'Cat', (['in_domain_stream.first', 'in_domain_stream.payload', 'in_domain_stream.last'], {}), '(in_domain_stream.first, in_domain_stream.payload, in_domain_stream.last)\n', (22237, 22310), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((22406, 22467), 'nmigen.Cat', 'Cat', (['self.stream.first', 'self.stream.payload', 'self.stream.last'], {}), '(self.stream.first, self.stream.payload, self.stream.last)\n', (22409, 22467), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((29746, 29796), 'vcd.VCDWriter', 'VCDWriter', (['stream'], {'timescale': 'f"""1 ns"""', 'date': '"""today"""'}), "(stream, timescale=f'1 ns', date='today')\n", (29755, 29796), False, 'from vcd import VCDWriter\n'), ((31810, 31821), 'vcd.gtkw.GTKWSave', 'GTKWSave', (['f'], {}), '(f)\n', (31818, 31821), False, 'from vcd.gtkw import GTKWSave\n'), ((32793, 32861), 'subprocess.run', 'subprocess.run', (["['gtkwave', '-f', vcd_filename, '-a', gtkw_filename]"], {}), "(['gtkwave', '-f', vcd_filename, '-a', gtkw_filename])\n", (32807, 32861), False, 'import subprocess\n'), ((32891, 32914), 'os.remove', 'os.remove', (['vcd_filename'], {}), '(vcd_filename)\n', (32900, 32914), False, 'import os\n'), ((32927, 32951), 'os.remove', 'os.remove', (['gtkw_filename'], {}), '(gtkw_filename)\n', (32936, 32951), False, 'import os\n'), ((3840, 3864), 'nmigen.Signal.like', 'Signal.like', (['self.inputs'], {}), '(self.inputs)\n', (3851, 3864), False, 'from nmigen import Signal, Module, Cat, Elaboratable, 
Memory, ClockDomain, DomainRenamer\n'), ((5935, 5971), 'nmigen.DomainRenamer', 'DomainRenamer', (["{'sync': self.domain}"], {}), "({'sync': self.domain})\n", (5948, 5971), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((14897, 14933), 'nmigen.DomainRenamer', 'DomainRenamer', (["{'sync': self.domain}"], {}), "({'sync': self.domain})\n", (14910, 14933), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((23451, 23487), 'nmigen.DomainRenamer', 'DomainRenamer', (["{'sync': self.domain}"], {}), "({'sync': self.domain})\n", (23464, 23487), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((26659, 26695), 'nmigen.DomainRenamer', 'DomainRenamer', (["{'sync': self.domain}"], {}), "({'sync': self.domain})\n", (26672, 26695), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((32561, 32582), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (32580, 32582), False, 'import tempfile\n'), ((32656, 32677), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (32675, 32677), False, 'import tempfile\n'), ((33918, 33939), 'nmigen.Cat', 'Cat', (['self.ila.signals'], {}), '(self.ila.signals)\n', (33921, 33939), False, 'from nmigen import Signal, Module, Cat, Elaboratable, Memory, ClockDomain, DomainRenamer\n'), ((33960, 34026), 'apollo.support.bits.bits.from_bytes', 'bits.from_bytes', (['raw_sample'], {'length': 'sample_length', 'byteorder': '"""big"""'}), "(raw_sample, length=sample_length, byteorder='big')\n", (33975, 34026), False, 'from apollo.support.bits import bits\n'), ((32584, 32598), 'os.urandom', 'os.urandom', (['(24)'], {}), '(24)\n', (32594, 32598), False, 'import os\n'), ((32679, 32693), 'os.urandom', 'os.urandom', (['(24)'], {}), '(24)\n', (32689, 32693), False, 'import os\n')] |
import os.path
import time
from moler.config import load_config
from moler.device.device import DeviceFactory
from moler.util.moler_test import MolerTest
def outage_callback(device_name, ping_times):
    """ Event callback: log the outage and record the moment connectivity was lost. """
    message = "Network outage on {}".format(device_name)
    MolerTest.info(message)
    ping_times["lost_connection_time"] = time.time()
def ping_is_on_callback(ping_times):
    """ Event callback: on the first ping success after a loss, log the outage duration. """
    MolerTest.info("Ping works")
    lost_at = ping_times["lost_connection_time"]
    if lost_at <= 0:
        return  # ping operable, but no network loss has been observed yet
    if ping_times["reconnection_time"] != 0:
        return  # this outage has already been accounted for
    ping_times["reconnection_time"] = time.time()
    outage_time = ping_times["reconnection_time"] - lost_at
    MolerTest.info("Network outage time is {}".format(outage_time))
def test_network_outage():
    """End-to-end scenario: ping localhost from one device while a second device
    takes the loopback interface down and back up; the ping_no_response /
    ping_response event callbacks measure the outage duration."""
    load_config(config=os.path.abspath('config/my_devices.yml'))
    unix1 = DeviceFactory.get_device(name='MyMachine1')
    unix2 = DeviceFactory.get_device(name='MyMachine2')
    # test setup
    # Shared mutable state updated by the event callbacks registered below.
    ping_times = {"lost_connection_time": 0,
                  "reconnection_time": 0}
    # ensure network is up before running test
    net_up = unix2.get_cmd(cmd_name="ifconfig", cmd_params={"options": "lo up"})
    sudo_ensure_net_up = unix2.get_cmd(cmd_name="sudo", cmd_params={"password": "<PASSWORD>", "cmd_object": net_up})
    sudo_ensure_net_up()
    # run event observing "network down/up"
    no_ping = unix1.get_event(event_name="ping_no_response")
    no_ping.add_event_occurred_callback(callback=outage_callback,
                                        callback_params={'device_name': 'MyMachine1',
                                                         'ping_times': ping_times})
    no_ping.start()
    ping_is_on = unix1.get_event(event_name="ping_response")
    ping_is_on.add_event_occurred_callback(callback=ping_is_on_callback,
                                           callback_params={'ping_times': ping_times})
    ping_is_on.start()
    # run test
    # -O makes ping report unanswered probes, which drives the events above.
    ping = unix1.get_cmd(cmd_name="ping", cmd_params={"destination": "localhost", "options": "-O"})
    ping.start(timeout=120)
    time.sleep(3)
    # Simulate the outage: drop the loopback interface for ~5 seconds.
    ifconfig_down = unix2.get_cmd(cmd_name="ifconfig", cmd_params={"options": "lo down"})
    sudo_ifconfig_down = unix2.get_cmd(cmd_name="sudo", cmd_params={"password": "<PASSWORD>", "cmd_object": ifconfig_down})
    sudo_ifconfig_down()
    time.sleep(5)
    # Restore connectivity and let ping observe the recovery.
    ifconfig_up = unix2.get_cmd(cmd_name="ifconfig", cmd_params={"options": "lo up"})
    sudo_ifconfig_up = unix2.get_cmd(cmd_name="sudo", cmd_params={"password": "<PASSWORD>", "cmd_object": ifconfig_up})
    sudo_ifconfig_up()
    time.sleep(3)
    # test teardown
    ping.cancel()
    no_ping.cancel()
# Allow running this workshop scenario directly as a script.
if __name__ == '__main__':
    test_network_outage()
"""
copy this file into workshop1/network_outage.py
*** calculating network outage time ***
1. run it
2. see logs - look for "Network outage" and "Ping works"
   - be careful in logs analysis - what's wrong?
3. fix incorrect calculation by exchanging:
no_ping = unix1.get_event(event_name="ping_no_response")
into:
no_ping = unix1.get_event(event_name="ping_no_response", event_params={"till_occurs_times": 1})
"""
| [
"time.sleep",
"time.time",
"moler.util.moler_test.MolerTest.info",
"moler.device.device.DeviceFactory.get_device"
] | [((306, 317), 'time.time', 'time.time', ([], {}), '()\n', (315, 317), False, 'import time\n'), ((361, 389), 'moler.util.moler_test.MolerTest.info', 'MolerTest.info', (['"""Ping works"""'], {}), "('Ping works')\n", (375, 389), False, 'from moler.util.moler_test import MolerTest\n'), ((857, 900), 'moler.device.device.DeviceFactory.get_device', 'DeviceFactory.get_device', ([], {'name': '"""MyMachine1"""'}), "(name='MyMachine1')\n", (881, 900), False, 'from moler.device.device import DeviceFactory\n'), ((913, 956), 'moler.device.device.DeviceFactory.get_device', 'DeviceFactory.get_device', ([], {'name': '"""MyMachine2"""'}), "(name='MyMachine2')\n", (937, 956), False, 'from moler.device.device import DeviceFactory\n'), ((2086, 2099), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2096, 2099), False, 'import time\n'), ((2345, 2358), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (2355, 2358), False, 'import time\n'), ((2594, 2607), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2604, 2607), False, 'import time\n'), ((568, 579), 'time.time', 'time.time', ([], {}), '()\n', (577, 579), False, 'import time\n')] |
import django_filters
from snippets.models import File, Snippet, Label, SnippetLabel
class FileFilter(django_filters.FilterSet):
    """Filter set for File objects; filterable by owning snippet and by language."""

    class Meta:
        model = File
        fields = ['snippet', 'language']
class SnippetFilter(django_filters.FilterSet):
    """Filter set for Snippet objects.

    Besides the declarative field filters, exposes three computed filters:
    ``favorite``, ``labeled`` and ``team_is_null``.
    """

    favorite = django_filters.BooleanFilter(method='filter_is_favorite', label="Is favorite?")
    labeled = django_filters.BooleanFilter(method='filter_is_labeled', label="Is labeled?")
    team_is_null = django_filters.BooleanFilter(method='filter_team_is_null', label="Team is None")
    # ToDo: Add after shares app
    # shared_to = django_filters.NumberFilter(field_name="shared__user")
    # shared_from = django_filters.NumberFilter(field_name="shared__user")

    class Meta:
        model = Snippet
        fields = [
            'labels',
            'visibility',
            'files__language',
            'user',
            'team',
        ]

    def filter_is_favorite(self, queryset, name, value):
        # FIX: the original body was a bare `pass`, so the method returned None
        # and broke django-filter's queryset chaining whenever `favorite` was
        # supplied. Until favorites are implemented, behave as a no-op filter.
        # TODO: implement real favorite filtering.
        return queryset

    def filter_is_labeled(self, queryset, name, value):
        """Keep snippets with at least one label when value is truthy, else only unlabeled ones."""
        if value:
            return queryset.exclude(labels=None)
        return queryset.filter(labels=None)

    def filter_team_is_null(self, queryset, name, value):
        """Filter on team ownership: value=True keeps snippets without a team."""
        return queryset.filter(
            team__isnull=value,
        )
class LabelFilter(django_filters.FilterSet):
    """Filter set for Label objects; the `user` filter matches personal (non-team) labels only."""

    user = django_filters.NumberFilter(method='filter_user', label="User")

    class Meta:
        model = Label
        fields = ['user', 'team']

    def filter_user(self, queryset, name, value):
        """Return only the given user's personal labels (those not attached to any team)."""
        return queryset.filter(user=value, team=None)
class SnippetLabelFilter(django_filters.FilterSet):
    """Filter set for SnippetLabel join rows; filterable by snippet and by label."""

    class Meta:
        model = SnippetLabel
        fields = ['snippet', 'label']
| [
"django_filters.BooleanFilter",
"django_filters.NumberFilter"
] | [((311, 390), 'django_filters.BooleanFilter', 'django_filters.BooleanFilter', ([], {'method': '"""filter_is_favorite"""', 'label': '"""Is favorite?"""'}), "(method='filter_is_favorite', label='Is favorite?')\n", (339, 390), False, 'import django_filters\n'), ((408, 485), 'django_filters.BooleanFilter', 'django_filters.BooleanFilter', ([], {'method': '"""filter_is_labeled"""', 'label': '"""Is labeled?"""'}), "(method='filter_is_labeled', label='Is labeled?')\n", (436, 485), False, 'import django_filters\n'), ((508, 593), 'django_filters.BooleanFilter', 'django_filters.BooleanFilter', ([], {'method': '"""filter_team_is_null"""', 'label': '"""Team is None"""'}), "(method='filter_team_is_null', label='Team is None'\n )\n", (536, 593), False, 'import django_filters\n'), ((1394, 1457), 'django_filters.NumberFilter', 'django_filters.NumberFilter', ([], {'method': '"""filter_user"""', 'label': '"""User"""'}), "(method='filter_user', label='User')\n", (1421, 1457), False, 'import django_filters\n')] |
import json
import demistomock as demisto
from MyToDoTasksWidget import get_open_to_do_tasks_of_current_user
def test_open_to_do_tasks_of_current_user(mocker):
    '''
    Given:
        - Mock response of 'internalHttpRequest' to '/v2/statistics/widgets/query' that includes an open task and
          a close task
    When:
        - Running the MyToDoTasksWidget script
    Then:
        - Ensure the markdown table was generated correctly and includes only the open task
    '''
    open_task = {
        'assignee': 'admin',
        'completed': '0001-01-01T00:00:00Z',
        'dbotCreatedBy': 'admin',
        'description': 'test_open_task',
        'dueDate': '2021-11-30T15:49:11+02:00',
        'id': '1@2',
        'incidentId': '2',
        'status': 'open',
        'title': 'test open'
    }
    closed_task = {
        'assignee': 'admin',
        'dbotCreatedBy': 'admin',
        'description': 'test_close_task',
        'dueDate': '2021-11-30T15:49:11+02:00',
        'id': '1@3',
        'incidentId': '3',
        'status': 'close',
        'title': 'test close'
    }
    mocked_response = {
        'statusCode': 200,
        'body': json.dumps({'data': [open_task, closed_task]})
    }
    mocker.patch.object(demisto, 'internalHttpRequest', return_value=mocked_response)

    table = get_open_to_do_tasks_of_current_user()

    expected_table = [
        {
            'Task Name': 'test open',
            'Task Description': 'test_open_task',
            'Task ID': '1@2',
            'SLA': '2021-11-30 15:49:11+0200',
            'Opened By': 'admin',
            'Incident ID': '[2](#/Custom/caseinfoid/2)'
        }
    ]
    assert len(table) == 1
    assert table == expected_table
| [
"json.dumps",
"MyToDoTasksWidget.get_open_to_do_tasks_of_current_user"
] | [((1790, 1828), 'MyToDoTasksWidget.get_open_to_do_tasks_of_current_user', 'get_open_to_do_tasks_of_current_user', ([], {}), '()\n', (1826, 1828), False, 'from MyToDoTasksWidget import get_open_to_do_tasks_of_current_user\n'), ((1435, 1455), 'json.dumps', 'json.dumps', (['res_body'], {}), '(res_body)\n', (1445, 1455), False, 'import json\n')] |
from Interface.StudentCommandLineInterface import CLI
from HelperLibrary.StorageFunctions import StorageFunctions
from HelperLibrary.MarkSheet import MarkSheet
from datetime import datetime
class StudentController:
    def __init__(self, student, table_name):
        """Bind the Student model object and the storage table name this controller works with."""
        self.student = student
        self.table_name = table_name
    def retrieve_data(self):
        """Fetch the student's row plus, for every school term, the mark-sheet values
        for the student's current year group; returns a single flat list.
        Terms without a stored mark sheet contribute [None, None, None]."""
        # Normalise the name the same way it is stored (lower-cased, then capitalised).
        self.student.name = (self.student.name.lower()).capitalize()
        student_data = StorageFunctions("students").retrieve(["name"], [self.student.name])
        student_data = list(student_data[0])
        # Row layout: index 0 is the student id, index 3 the year group id
        # (matches the column order written by create_student) — TODO confirm against schema.
        student_id = student_data[0]
        year_group_id = student_data[3]
        del student_data[0]
        term_id_list = StorageFunctions("terms").list("id")
        for term_id in term_id_list:
            mark_sheet_data = StorageFunctions("mark_sheets").retrieve(["student_id", "term_id", "year_group_id"], [student_id, term_id, year_group_id])
            term_data = StorageFunctions("terms").retrieve(["id"], [term_id])
            # Column 1 of the terms row is the term name.
            term = [(term_data[0])[1]]
            if mark_sheet_data:
                # Append the term name followed by the three subject marks (columns 1..3).
                student_data = student_data + term + list((mark_sheet_data[0])[1:4])
            else:
                # No mark sheet recorded yet for this term / year group.
                student_data = student_data + term + [None, None, None]
        return student_data
def validate_if_student_exists(self):
student_data = StorageFunctions(self.table_name).retrieve(["name"], [self.student.name])
if not student_data:
return False
else:
return True
def check_archive_status(self):
if self.student.leave_date:
return True
else:
return False
def list_students(self):
list_of_students = StorageFunctions(self.table_name).list("name")
print("List of students:")
if list_of_students:
counter = 1
for student_name in sorted(list_of_students):
print(counter, ":", student_name, end="\n")
counter += 1
else:
print("No available students")
def list_archived_students(self):
archived_students_data = StorageFunctions(self.table_name).retrieve(["leave_date"], [None], negative=True)
print("List of old students:")
if archived_students_data:
counter = 1
for student_data in archived_students_data:
print(counter, ":", student_data[1], end="\n")
counter += 1
else:
print("No available students")
def create_student(self):
StorageFunctions(self.table_name).append("(name, age, current_year_group, date_of_birth, address, father_name, mother_name, leave_date)", [self.student.name, self.student.age, self.student.year_group, self.student.date_of_birth, self.student.address, self.student.father_name, self.student.mother_name, self.student.leave_date])
student_data = StorageFunctions(self.table_name).retrieve(["name"], [self.student.name])
student_data = student_data[0]
StorageFunctions("mark_sheets").append("(math_mark, science_mark, english_mark, student_id, term_id, year_group_id)", [self.student.summer_mark_sheet.math_grade, self.student.summer_mark_sheet.science_grade, self.student.summer_mark_sheet.english_grade, student_data[0], (StorageFunctions("terms").retrieve(["term"], ["Summer"])[0])[0], self.student.year_group])
StorageFunctions("mark_sheets").append("(math_mark, science_mark, english_mark, student_id, term_id, year_group_id)", [self.student.spring_mark_sheet.math_grade, self.student.spring_mark_sheet.science_grade, self.student.spring_mark_sheet.english_grade, student_data[0], (StorageFunctions("terms").retrieve(["term"], ["Spring"])[0])[0], self.student.year_group])
StorageFunctions("mark_sheets").append("(math_mark, science_mark, english_mark, student_id, term_id, year_group_id)", [self.student.autumn_mark_sheet.math_grade, self.student.autumn_mark_sheet.science_grade, self.student.autumn_mark_sheet.english_grade, student_data[0], (StorageFunctions("terms").retrieve(["term"], ["Autumn"])[0])[0], self.student.year_group])
    def create_mark_sheets(self):
        """Ensure a mark-sheet row exists for every term of the student's current
        year group (seeding missing rows with zero marks), and attach a fresh
        MarkSheet object to the student for each term."""
        student_id = StorageFunctions("students").retrieve(["name"], [self.student.name])[0][0]
        term_id_list = StorageFunctions("terms").list("id")
        term_name_list = StorageFunctions("terms").list("term")
        for term_id in term_id_list:
            mark_sheets_data = StorageFunctions("mark_sheets").retrieve(["student_id", "term_id", "year_group_id"], [student_id, term_id, self.student.year_group])
            if not mark_sheets_data:
                # No row yet for this term / year group: seed it with zero marks.
                StorageFunctions("mark_sheets").append("(math_mark, science_mark, english_mark, student_id, term_id, year_group_id)", [0, 0, 0, student_id, term_id, self.student.year_group])
            # Attach e.g. self.student.summer_mark_sheet for the term named "Summer".
            self.student.__setattr__(term_name_list[term_id_list.index(term_id)].lower() + "_mark_sheet", MarkSheet(self.student.name, term_name_list[term_id_list.index(term_id)].lower(), self.student.year_group))
def validate_student_details(self):
if self.student.age < 1:
return False, "Invalid age!"
elif self.student.year_group < 1:
self.student.year_group = 1
return True, "Age too low!\nSetting year group to 1"
elif self.student.year_group > 13:
self.student.year_group = 13
return True, "Age too high!\nSetting year group to 13"
else:
return True, None
@staticmethod
def _choose_mark_sheet(activity):
mark_sheet_choice_dictionary = {'1': 'Summer', '2': 'Spring', '3': 'Autumn'}
while True:
print("Enter 1 to", activity, "the summer term mark sheet, 2 for spring term mark sheet and 3 for autumn term mark sheet", end='')
mark_sheet_choice = mark_sheet_choice_dictionary.get(input())
if not mark_sheet_choice:
print("Please enter a valid choice")
else:
return mark_sheet_choice
def get_student_details(self):
print("Student name:", self.student.name)
print("Student age:", self.student.age)
print("Student year group:", self.student.year_group)
print("Student date of birth:", self.student.date_of_birth)
print("Student address:", self.student.address)
print("Student father's name:", self.student.father_name)
print("Student mother's name:", self.student.mother_name)
print("Student leave date:", self.student.leave_date)
def get_mark_sheet_details(self):
mark_sheet_choice = self._choose_mark_sheet("get details of")
MarkSheet.get_details(getattr(self.student, mark_sheet_choice.lower() + "_mark_sheet"))
    def archive_get_mark_sheet_detail(self):
        """Ask the user for a term and a past year group, then print the details of
        the matching archived mark sheet (or a message when none exists)."""
        mark_sheet_choice = self._choose_mark_sheet("get details of")
        term_id = StorageFunctions("terms").retrieve(["term"], [mark_sheet_choice])[0][0]
        year_group = input("Enter year group to get data of:")
        year_group_id = StorageFunctions("year_groups").retrieve(["year_group"], [year_group])[0][0]
        student_id = StorageFunctions("students").retrieve(["name"], [self.student.name])[0][0]
        mark_sheet_data = StorageFunctions("mark_sheets").retrieve(["student_id", "term_id", "year_group_id"], [student_id, term_id, year_group_id])
        if mark_sheet_data:
            mark_sheet_data = mark_sheet_data[0]
            # Columns 1..3 of the row are the math/science/english marks.
            MarkSheet.get_details(MarkSheet(self.student.name, mark_sheet_choice, year_group, mark_sheet_data[1], mark_sheet_data[2], mark_sheet_data[3]))
        else:
            print("No data available!")
def get_mark_sheet_marks(self):
mark_sheet_choice = self._choose_mark_sheet("get marks of")
MarkSheet.get_marks(getattr(self.student, mark_sheet_choice.lower() + "_mark_sheet"))
def archive_get_mark_sheet_marks(self):
mark_sheet_choice = self._choose_mark_sheet("get details of")
term_id = StorageFunctions("terms").retrieve(["term"], [mark_sheet_choice])[0][0]
year_group = input("Enter year group to get data of:")
year_group_id = StorageFunctions("year_groups").retrieve(["year_group"], [year_group])[0][0]
student_id = StorageFunctions("students").retrieve(["name"], [self.student.name])[0][0]
mark_sheet_data = StorageFunctions("mark_sheets").retrieve(["student_id", "term_id", "year_group_id"], [student_id, term_id, year_group_id])
if mark_sheet_data:
mark_sheet_data = mark_sheet_data[0]
MarkSheet.get_marks(MarkSheet(self.student.name, mark_sheet_choice, year_group, mark_sheet_data[1], mark_sheet_data[2], mark_sheet_data[3]))
else:
print("No data available!")
    def get_all_student_data(self):
        """Print the student's personal details followed by, for every year group
        the student has mark sheets for, the details and marks of each term's sheet."""
        self.get_student_details()
        print()
        student_id = StorageFunctions("students").retrieve(["name"], [self.student.name])[0][0]
        all_mark_sheets_data = StorageFunctions("mark_sheets").retrieve(['student_id'], [student_id])
        # Collect the distinct year-group ids present among the student's mark sheets
        # (column 6 of each mark-sheet row).
        all_year_group_ids = []
        for student_mark_sheet_data in all_mark_sheets_data:
            mark_sheet_year_group_id = student_mark_sheet_data[6]
            if mark_sheet_year_group_id not in all_year_group_ids:
                all_year_group_ids.append(mark_sheet_year_group_id)
        all_year_group_ids.sort()
        term_ids_list = StorageFunctions("terms").list("id")
        term_names_list = StorageFunctions("terms").list("term")
        for year_group_id in all_year_group_ids:
            year_group = StorageFunctions("year_groups").retrieve(['id'], [year_group_id])[0][1]
            print("Year", year_group, ":")
            for term_id in term_ids_list:
                mark_sheet_data = StorageFunctions("mark_sheets").retrieve(['student_id', 'year_group_id', 'term_id'], [student_id, year_group_id, term_id])[0]
                # Columns 1..3 of the row hold the math/science/english marks.
                mark_sheet = MarkSheet(self.student.name, term_names_list[term_ids_list.index(term_id)], year_group, mark_sheet_data[1], mark_sheet_data[2], mark_sheet_data[3])
                mark_sheet.get_details()
                mark_sheet.get_marks()
            print()
def edit_mark_sheet(self):
mark_sheet_choice = self._choose_mark_sheet("edit")
self.student.__getattribute__(mark_sheet_choice.lower() + "_mark_sheet").edit_mark_sheet()
def edit_student_details(self):
attributes = {
'1': self.edit_name,
'2': self.edit_year_group,
'3': self.edit_date_of_birth,
'4': self.edit_address,
'5': self.edit_father_name,
'6': self.edit_mother_name,
}
exit_initiated = False
while not exit_initiated:
edit_option = input("Enter 1 to edit name, 2 to edit year group, 3 to edit date of birth, 4 to edit address, 5 to edit father name, 6 to edit mother name and 7 to exit:")
if edit_option == str(len(attributes) + 1):
exit_initiated = True
elif (edit_option > str(len(attributes) + 1)) or (edit_option < '1'):
print("Please enter a valid choice!")
else:
attributes.get(edit_option)()
    def edit_name(self):
        """Prompt for a new, not-yet-taken name; on success persist under the old name's row."""
        valid_name = False
        while not valid_name:
            original_name = self.student.name
            print("Student's current name is", self.student.name)
            self.student.name = input("Enter new name for student:")
            if not self.validate_if_student_exists():
                valid_name = True
                # Persist using the old name so the existing row is updated, not duplicated.
                self.save_student_data(original_name)
            else:
                print("Student already exists!")
                # Roll back the in-memory rename before re-prompting.
                self.student.name = original_name
def edit_year_group(self):
print("Student's current year group is", self.student.year_group)
self.student.year_group = int(input("Enter new year group for student:"))
self.create_mark_sheets()
def edit_date_of_birth(self):
print("Student's current date of birth is", self.student.date_of_birth)
birth_year = int(input("Enter new year of birth:"))
birth_month = int(input("Enter new month of birth:"))
birth_date = int(input("Enter new date of birth:"))
self.student.date_of_birth = datetime(birth_year, birth_month, birth_date)
self.student.age = self.student.calculate_age()
def edit_address(self):
print("Student's current address is", self.student.address)
self.student.address = input("Enter new address of student:")
def edit_father_name(self):
print("Student's father's current name is", self.student.father_name)
self.student.father_name = input("Enter new name for student's father:")
def edit_mother_name(self):
print("Student's father's current name is", self.student.mother_name)
self.student.mother_name = input("Enter new name for student's mother:")
    def save_student_data(self, old_name=None, save_mark_sheet_data=True):
        """Persist the in-memory student (and optionally the mark sheets) to storage.

        :param old_name: previous name to look the record up by when the
            student has just been renamed; falls back to the current name.
        :param save_mark_sheet_data: when True, also write the per-term
            mark-sheet grades back to the "mark_sheets" table.
        """
        # Locate the stored row either by the old name (rename case) or by
        # the current name.
        if not old_name:
            student_data = StorageFunctions("students").retrieve(["name"], [self.student.name])
        else:
            student_data = StorageFunctions("students").retrieve(["name"], [old_name])
        # First column of the first matching row is the primary key.
        student_id = (student_data[0])[0]
        StorageFunctions("students").update(["name", "age", "current_year_group", "date_of_birth", "address", "father_name", "mother_name", "leave_date"], [self.student.name, self.student.age, self.student.year_group, self.student.date_of_birth, self.student.address, self.student.father_name, self.student.mother_name, self.student.leave_date], student_id)
        if save_mark_sheet_data:
            # One mark sheet exists per (student, term, year group); update
            # the grades of each term's sheet from the in-memory objects.
            term_id_list = StorageFunctions("terms").list("id")
            for term_id in term_id_list:
                mark_sheet_data = StorageFunctions("mark_sheets").retrieve(["student_id", "term_id", "year_group_id"], [student_id, term_id, self.student.year_group])
                mark_sheet_id = (mark_sheet_data[0])[0]
                term_data = StorageFunctions("terms").retrieve(["id"], [term_id])
                term = (term_data[0])[1]
                # The attribute name ("summer_mark_sheet", ...) is derived
                # from the term's name.
                StorageFunctions("mark_sheets").update(["math_mark", "science_mark", "english_mark"], [getattr(self.student, term.lower() + "_mark_sheet").math_grade, getattr(self.student, term.lower() + "_mark_sheet").science_grade, getattr(self.student, term.lower() + "_mark_sheet").english_grade], mark_sheet_id)
def delete_student(self):
student_data = StorageFunctions("students").retrieve(["name"], [self.student.name])
student_id = (student_data[0])[0]
StorageFunctions("mark_sheets").delete(student_id, "student_id")
StorageFunctions("students").delete(student_id)
def archive(self):
leave_year = int(input("Enter leave year:"))
leave_month = int(input("Enter leave month:"))
leave_date = int(input("Enter leave date:"))
self.student.leave_date = datetime(leave_year, leave_month, leave_date)
self.student.year_group = None
self.save_student_data(save_mark_sheet_data=False)
def unarchive(self):
self.student.recreate_student()
new_year_group = int(input("Enter new year group for student:"))
new_year_group_data = StorageFunctions("year_groups").retrieve(["year_group"], [new_year_group])
if new_year_group_data:
self.student.year_group = new_year_group_data[0][0]
self.student.leave_date = None
self.create_mark_sheets()
self.save_student_data()
return "Student successfully added back"
else:
return "Invalid year group"
class Student:
    """A pupil's record: identity, family details, year group, per-term mark
    sheets, and the menu dispatch tables used by the command-line interface."""

    def __init__(self, name, date_of_birth, address, father_name, mother_name, table_name="students"):
        """Build a student.

        :param name: full name of the student
        :param date_of_birth: datetime of birth, or None when the record will
            be re-hydrated later via :meth:`recreate_student`
        :param address: home address
        :param father_name: father's name
        :param mother_name: mother's name
        :param table_name: storage table backing the student controller
        """
        self.name = name
        self.date_of_birth = date_of_birth
        if self.date_of_birth is not None:
            self.age = self.calculate_age()
            # The year group is looked up from the age (school entry at 4).
            self.year_group = (StorageFunctions("year_groups").retrieve(["year_group"], [self.age-4])[0])[0]
        else:
            self.age = None
            self.year_group = None
        self.address = address
        self.father_name = father_name
        self.mother_name = mother_name
        self.leave_date = None
        self.summer_mark_sheet = MarkSheet(self.name, "Summer", self.year_group)
        self.spring_mark_sheet = MarkSheet(self.name, "Spring", self.year_group)
        # Bug fix: the autumn mark sheet was previously created with the term
        # "Spring" (copy-paste error).
        self.autumn_mark_sheet = MarkSheet(self.name, "Autumn", self.year_group)
        self.student_controller = StudentController(self, table_name)
        # Menu dispatch tables: option string -> handler callable.
        self.student_menu_dict = {'1': self.student_controller.edit_mark_sheet,
                                  '2': self.student_controller.get_student_details,
                                  '3': self.student_controller.get_mark_sheet_details,
                                  '4': self.student_controller.get_mark_sheet_marks,
                                  '5': self.student_controller.get_all_student_data,
                                  }
        self.admin_student_menu_dict = {'1': self.student_controller.edit_mark_sheet,
                                        '2': self.student_controller.get_student_details,
                                        '3': self.student_controller.get_mark_sheet_details,
                                        '4': self.student_controller.get_mark_sheet_marks,
                                        '5': self.student_controller.edit_student_details,
                                        '6': self.student_controller.get_all_student_data,
                                        '7': self.student_controller.archive,
                                        '8': self.delete,
                                        }
        self.archive_student_menu_dict = {'1': self.student_controller.get_student_details,
                                          '2': self.student_controller.archive_get_mark_sheet_detail,
                                          '3': self.student_controller.archive_get_mark_sheet_marks,
                                          '4': self.student_controller.get_all_student_data,
                                          }
        self.admin_archive_student_menu_dict = {'1': self.student_controller.get_student_details,
                                                '2': self.student_controller.archive_get_mark_sheet_detail,
                                                '3': self.student_controller.archive_get_mark_sheet_marks,
                                                '4': self.student_controller.get_all_student_data,
                                                '5': self.delete,
                                                }

    def recreate_student(self):
        """Re-hydrate this object (and its mark sheets) from stored data.

        The field order matches what the controller's retrieve_data returns.
        """
        student_data = self.student_controller.retrieve_data()
        self.name = student_data[0]
        self.age = student_data[1]
        self.year_group = student_data[2]
        self.date_of_birth = student_data[3]
        self.address = student_data[4]
        self.father_name = student_data[5]
        self.mother_name = student_data[6]
        self.leave_date = student_data[7]
        self.summer_mark_sheet.student = self.name
        self.summer_mark_sheet.term = student_data[8]
        self.summer_mark_sheet.year_group = self.year_group
        self.summer_mark_sheet.math_grade = student_data[9]
        self.summer_mark_sheet.science_grade = student_data[10]
        self.summer_mark_sheet.english_grade = student_data[11]
        self.spring_mark_sheet.student = self.name
        self.spring_mark_sheet.term = student_data[12]
        self.spring_mark_sheet.year_group = self.year_group
        self.spring_mark_sheet.math_grade = student_data[13]
        self.spring_mark_sheet.science_grade = student_data[14]
        self.spring_mark_sheet.english_grade = student_data[15]
        self.autumn_mark_sheet.student = self.name
        self.autumn_mark_sheet.term = student_data[16]
        self.autumn_mark_sheet.year_group = self.year_group
        self.autumn_mark_sheet.math_grade = student_data[17]
        self.autumn_mark_sheet.science_grade = student_data[18]
        self.autumn_mark_sheet.english_grade = student_data[19]

    def calculate_age(self):
        """Return the student's age in whole years as of today."""
        current_date = datetime.now()
        age = current_date.year - self.date_of_birth.year
        # Subtract one year if this year's birthday has not happened yet.
        if current_date.month < self.date_of_birth.month or ((current_date.month == self.date_of_birth.month) and (current_date.day < self.date_of_birth.day)):
            age -= 1
        return age

    def create_new_student(self):
        """Persist this student if the name is not already taken.

        :return: a human-readable status string
        """
        if not self.student_controller.validate_if_student_exists():
            self.student_controller.create_student()
            return "Student successfully created"
        else:
            return "Student already exists"

    def create_old_student(self):
        """Interactively re-admit a previously archived student.

        :return: a human-readable status string
        """
        choice_list_of_students = bool(int(input("Enter 1 to get a list of all old students and 0 to continue without a list of students:")))
        if choice_list_of_students:
            self.student_controller.list_archived_students()
        self.name = input("Enter student name to add back into school:").capitalize()
        if self.student_controller.validate_if_student_exists():
            return self.student_controller.unarchive()
        else:
            return "Student does not exist"

    def manage(self, admin):
        """Interactively select a student and open the management CLI.

        :param admin: whether the caller has administrator privileges
        :return: a human-readable status string
        """
        choice_list_of_students = bool(int(input("Enter 1 to get a list of all students and 0 to continue without a list of students:")))
        if choice_list_of_students:
            self.student_controller.list_students()
        self.name = input("Enter student name to manage student:").capitalize()
        if self.student_controller.validate_if_student_exists():
            self.recreate_student()
            archive = self.student_controller.check_archive_status()
            CLI(self, admin, archive).initiate()
            return "Exiting..."
        else:
            return "Student does not exist"

    def delete(self):
        """Ask for confirmation, then delete the student from storage.

        :return: True when the record was deleted, False otherwise
        """
        confirm_deletion = bool(int(input("Enter 1 to confirm deletion of student and 0 to cancel deletion:")))
        if confirm_deletion:
            self.student_controller.delete_student()
            return True
        else:
            return False
| [
"datetime.datetime",
"HelperLibrary.StorageFunctions.StorageFunctions",
"HelperLibrary.MarkSheet.MarkSheet",
"Interface.StudentCommandLineInterface.CLI",
"datetime.datetime.now"
] | [((12241, 12286), 'datetime.datetime', 'datetime', (['birth_year', 'birth_month', 'birth_date'], {}), '(birth_year, birth_month, birth_date)\n', (12249, 12286), False, 'from datetime import datetime\n'), ((14906, 14951), 'datetime.datetime', 'datetime', (['leave_year', 'leave_month', 'leave_date'], {}), '(leave_year, leave_month, leave_date)\n', (14914, 14951), False, 'from datetime import datetime\n'), ((16249, 16296), 'HelperLibrary.MarkSheet.MarkSheet', 'MarkSheet', (['self.name', '"""Summer"""', 'self.year_group'], {}), "(self.name, 'Summer', self.year_group)\n", (16258, 16296), False, 'from HelperLibrary.MarkSheet import MarkSheet\n'), ((16330, 16377), 'HelperLibrary.MarkSheet.MarkSheet', 'MarkSheet', (['self.name', '"""Spring"""', 'self.year_group'], {}), "(self.name, 'Spring', self.year_group)\n", (16339, 16377), False, 'from HelperLibrary.MarkSheet import MarkSheet\n'), ((16411, 16458), 'HelperLibrary.MarkSheet.MarkSheet', 'MarkSheet', (['self.name', '"""Spring"""', 'self.year_group'], {}), "(self.name, 'Spring', self.year_group)\n", (16420, 16458), False, 'from HelperLibrary.MarkSheet import MarkSheet\n'), ((20203, 20217), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (20215, 20217), False, 'from datetime import datetime\n'), ((453, 481), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""students"""'], {}), "('students')\n", (469, 481), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((695, 720), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (711, 720), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((1340, 1373), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['self.table_name'], {}), '(self.table_name)\n', (1356, 1373), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((1699, 1732), 'HelperLibrary.StorageFunctions.StorageFunctions', 
'StorageFunctions', (['self.table_name'], {}), '(self.table_name)\n', (1715, 1732), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((2110, 2143), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['self.table_name'], {}), '(self.table_name)\n', (2126, 2143), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((2534, 2567), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['self.table_name'], {}), '(self.table_name)\n', (2550, 2567), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((2886, 2919), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['self.table_name'], {}), '(self.table_name)\n', (2902, 2919), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((3007, 3038), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (3023, 3038), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((3378, 3409), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (3394, 3409), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((3749, 3780), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (3765, 3780), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((4266, 4291), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (4282, 4291), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((4328, 4353), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (4344, 4353), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((7211, 7242), 'HelperLibrary.StorageFunctions.StorageFunctions', 
'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (7227, 7242), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((7445, 7569), 'HelperLibrary.MarkSheet.MarkSheet', 'MarkSheet', (['self.student.name', 'mark_sheet_choice', 'year_group', 'mark_sheet_data[1]', 'mark_sheet_data[2]', 'mark_sheet_data[3]'], {}), '(self.student.name, mark_sheet_choice, year_group, mark_sheet_data\n [1], mark_sheet_data[2], mark_sheet_data[3])\n', (7454, 7569), False, 'from HelperLibrary.MarkSheet import MarkSheet\n'), ((8310, 8341), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (8326, 8341), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((8542, 8666), 'HelperLibrary.MarkSheet.MarkSheet', 'MarkSheet', (['self.student.name', 'mark_sheet_choice', 'year_group', 'mark_sheet_data[1]', 'mark_sheet_data[2]', 'mark_sheet_data[3]'], {}), '(self.student.name, mark_sheet_choice, year_group, mark_sheet_data\n [1], mark_sheet_data[2], mark_sheet_data[3])\n', (8551, 8666), False, 'from HelperLibrary.MarkSheet import MarkSheet\n'), ((8932, 8963), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (8948, 8963), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((9355, 9380), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (9371, 9380), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((9418, 9443), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (9434, 9443), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((13242, 13270), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""students"""'], {}), "('students')\n", (13258, 13270), False, 'from HelperLibrary.StorageFunctions import 
StorageFunctions\n'), ((14447, 14475), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""students"""'], {}), "('students')\n", (14463, 14475), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((14566, 14597), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (14582, 14597), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((14639, 14667), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""students"""'], {}), "('students')\n", (14655, 14667), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((15219, 15250), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""year_groups"""'], {}), "('year_groups')\n", (15235, 15250), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((799, 830), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (815, 830), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((946, 971), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (962, 971), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((4435, 4466), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (4451, 4466), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((13022, 13050), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""students"""'], {}), "('students')\n", (13038, 13050), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((13132, 13160), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""students"""'], {}), "('students')\n", (13148, 13160), False, 'from HelperLibrary.StorageFunctions import 
StorageFunctions\n'), ((13652, 13677), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (13668, 13677), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((21797, 21822), 'Interface.StudentCommandLineInterface.CLI', 'CLI', (['self', 'admin', 'archive'], {}), '(self, admin, archive)\n', (21800, 21822), False, 'from Interface.StudentCommandLineInterface import CLI\n'), ((4168, 4196), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""students"""'], {}), "('students')\n", (4184, 4196), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((4621, 4652), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (4637, 4652), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((6853, 6878), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (6869, 6878), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((7012, 7043), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""year_groups"""'], {}), "('year_groups')\n", (7028, 7043), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((7110, 7138), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""students"""'], {}), "('students')\n", (7126, 7138), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((7952, 7977), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (7968, 7977), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((8111, 8142), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""year_groups"""'], {}), "('year_groups')\n", (8127, 8142), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((8209, 8237), 
'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""students"""'], {}), "('students')\n", (8225, 8237), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((8826, 8854), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""students"""'], {}), "('students')\n", (8842, 8854), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((13764, 13795), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (13780, 13795), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((13981, 14006), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (13997, 14006), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((14092, 14123), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (14108, 14123), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((9531, 9562), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""year_groups"""'], {}), "('year_groups')\n", (9547, 9562), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((9722, 9753), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""mark_sheets"""'], {}), "('mark_sheets')\n", (9738, 9753), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((15921, 15952), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""year_groups"""'], {}), "('year_groups')\n", (15937, 15952), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((3279, 3304), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (3295, 3304), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((3650, 3675), 
'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (3666, 3675), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n'), ((4021, 4046), 'HelperLibrary.StorageFunctions.StorageFunctions', 'StorageFunctions', (['"""terms"""'], {}), "('terms')\n", (4037, 4046), False, 'from HelperLibrary.StorageFunctions import StorageFunctions\n')] |
"""Greedy sentence generator driven by a 2-gram language model.

Reads Model/label2.2gram.lm (lines formatted as "word1|word2|probability"),
then, for a user-chosen seed and sentence count, repeatedly follows the
highest-probability unseen successor of the current word until "</s>".
"""
import os
import random
from decimal import Decimal

# The script lives two directories below the project root.
os.chdir('..')
os.chdir('..')
parent = os.getcwd()

model_path = os.path.join(parent, "Model/label2.2gram.lm")
with open(model_path, 'r', encoding='utf-8') as model_file:
    lines = model_file.read().split('\n')

# bigram ("w1 w2") -> probability string.  Only probabilities written as
# "0...." (i.e. strictly below 1) are kept, matching the original filter.
dict_val = {}
start_bigrams = []  # bigrams that begin a sentence ("<s> word")
for line in lines[:-2]:
    fields = line.split("|")
    word1, word2, value = fields[0], fields[1], fields[2]
    if len(value) > 0 and value[0] == '0':
        dict_val[word1 + " " + word2] = value.replace('\n', '')
        if word1 == "<s>":
            start_bigrams.append(word1 + " " + word2)

seed = input("Enter seed : ")
random.seed(int(seed))
n = input("Enter n : ")

output = ''
for _ in range(int(n)):
    start = random.choice(start_bigrams)
    word = start.split(" ")[1]
    sentence = [start]
    while word != "</s>":
        best_prob = Decimal(0)
        next_word = ""
        # Pick the most probable successor not already used in this sentence.
        for bigram, prob in dict_val.items():
            prev, cur = bigram.split(" ")
            if prev == word and cur not in sentence:
                p = Decimal(prob)
                if p > best_prob:
                    best_prob = p
                    next_word = cur
        if not next_word:
            # Bug fix: the original looped forever here, appending empty
            # strings, when no unseen successor exists.
            break
        word = next_word
        sentence.append(next_word)
    output += " ".join(sentence) + " \n"

out_path = os.path.join(parent, 'TextGen/label2.2gram.gen')
with open(out_path, 'w') as out_file:
    out_file.write(output)
| [
"os.chdir",
"random.choice",
"os.path.join",
"os.getcwd"
] | [((46, 57), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (55, 57), False, 'import os\n'), ((58, 72), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (66, 72), False, 'import os\n'), ((73, 87), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (81, 87), False, 'import os\n'), ((95, 106), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (104, 106), False, 'import os\n'), ((161, 188), 'os.path.join', 'os.path.join', (['parent', 'path1'], {}), '(parent, path1)\n', (173, 188), False, 'import os\n'), ((1429, 1489), 'os.path.join', 'os.path.join', (['parent', '"""\u202b\u202aTextGen/label2.2gram.gen"""'], {}), "(parent, '\\u202b\\u202aTextGen/label2.2gram.gen')\n", (1441, 1489), False, 'import os\n'), ((780, 800), 'random.choice', 'random.choice', (['words'], {}), '(words)\n', (793, 800), False, 'import random\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Project: Azimuthal integration
# https://github.com/silx-kit/pyFAI
#
# Copyright (C) 2003-2018 European Synchrotron Radiation Facility, Grenoble,
# France
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Utilities, mainly for image treatment
"""
__authors__ = ["<NAME>", "<NAME>"]
__contact__ = "<EMAIL>"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "19/02/2019"
__status__ = "production"
import logging
import numpy
import fabio
import weakref
from scipy import ndimage
from scipy.interpolate import interp1d
from scipy.optimize.optimize import fmin
from scipy.optimize.optimize import fminbound
from .third_party import six
from .utils import stringutil
from .utils import header_utils
from ._version import calc_hexversion
# Backward compatibility: fabio.factory does not exist on fabio < 0.4.0.dev5,
# so alias it to the FabioImage classmethod it shortcuts.
if ("hexversion" not in dir(fabio)) or (fabio.hexversion < calc_hexversion(0, 4, 0, "dev", 5)):
    fabio.factory = fabio.fabioimage.FabioImage.factory

logger = logging.getLogger(__name__)
class ImageReductionFilter(object):
    """Base class for filters reducing a set of images to a single one."""

    def init(self, max_images=None):
        """Prepare the filter for a new reduction.

        :param int max_images: maximum number of images the filter must
            support
        """
        pass

    def add_image(self, image):
        """Feed one image into the filter.

        :param numpy.ndarray image: image to accumulate
        """
        raise NotImplementedError()

    def get_parameters(self):
        """Return the filter parameters as a dictionary.

        :rtype: dict
        """
        return {"cutoff": None, "quantiles": None}

    def get_result(self):
        """Return the reduced image.

        :return: result filter
        """
        raise NotImplementedError()
class ImageAccumulatorFilter(ImageReductionFilter):
    """Reduce images one at a time with a single running accumulator.

    Suited to reductions (max, min, sum...) that can be folded step by step
    into one merged image without keeping the whole stack in memory.
    """

    def init(self, max_images=None):
        self._count = 0
        self._accumulated_image = None

    def add_image(self, image):
        """Fold one more image into the running accumulator.

        :param numpy.ndarray image: image to add
        """
        self._accumulated_image = self._accumulate(self._accumulated_image, image)
        self._count += 1

    def _accumulate(self, accumulated_image, added_image):
        """Combine the accumulator with a new image (subclass hook).

        :param numpy.ndarray accumulated_image: current accumulator
            (None before the first image)
        :param numpy.ndarray added_image: image to fold in
        """
        raise NotImplementedError()

    def get_result(self):
        """Return the accumulated image and release the internal reference.

        :rtype: numpy.ndarray
        """
        result, self._accumulated_image = self._accumulated_image, None
        return result
class MaxAveraging(ImageAccumulatorFilter):
    """Keep the pixel-wise maximum across all images."""
    name = "max"

    def _accumulate(self, accumulated_image, added_image):
        # The first image becomes the accumulator as-is.
        return added_image if accumulated_image is None else numpy.maximum(accumulated_image, added_image)
class MinAveraging(ImageAccumulatorFilter):
    """Keep the pixel-wise minimum across all images."""
    name = "min"

    def _accumulate(self, accumulated_image, added_image):
        # The first image becomes the accumulator as-is.
        return added_image if accumulated_image is None else numpy.minimum(accumulated_image, added_image)
class SumAveraging(ImageAccumulatorFilter):
    """Accumulate the pixel-wise sum of all images."""
    name = "sum"

    def _accumulate(self, accumulated_image, added_image):
        # The first image becomes the accumulator as-is.
        return added_image if accumulated_image is None else accumulated_image + added_image
class MeanAveraging(SumAveraging):
    """Average: accumulate the sum, then divide by the number of images."""
    name = "mean"

    def get_result(self):
        total = super(MeanAveraging, self).get_result()
        return total / numpy.float32(self._count)
class ImageStackFilter(ImageReductionFilter):
    """Store every image in a 3D stack and compute the reduction at the end."""

    def init(self, max_images=None):
        self._stack = None
        self._max_stack_size = max_images
        self._count = 0

    def add_image(self, image):
        """Append one image to the stack.

        :param numpy.ndarray image: image to add
        """
        if self._stack is None:
            # Allocate the full stack lazily, sized from the first image.
            dims = (self._max_stack_size, image.shape[0], image.shape[1])
            self._stack = numpy.zeros(dims, dtype=numpy.float32)
        self._stack[self._count] = image
        self._count += 1

    def _compute_stack_reduction(self, stack):
        """Reduce the filled stack to a single image (subclass hook)."""
        raise NotImplementedError()

    def get_result(self):
        if self._stack is None:
            raise Exception("No data to reduce")
        # Shrink the stack in place to the number of frames actually added.
        self._stack.resize((self._count, self._stack.shape[1], self._stack.shape[2]))
        result = self._compute_stack_reduction(self._stack)
        # Release the allocated memory.
        self._stack = None
        return result
class AverageDarkFilter(ImageStackFilter):
    """Stack-based filter delegating the reduction to ``average_dark``.

    TODO: Must be split according to each filter_name, and removed
    """

    def __init__(self, filter_name, cut_off, quantiles):
        super(AverageDarkFilter, self).__init__()
        self._filter_name = filter_name
        self._cut_off = cut_off
        self._quantiles = quantiles

    @property
    def name(self):
        return self._filter_name

    def get_parameters(self):
        """Return a dictionary containing filter parameters"""
        return {"cutoff": self._cut_off, "quantiles": self._quantiles}

    def _compute_stack_reduction(self, stack):
        """Reduce the stack through ``average_dark``.

        :param numpy.ndarray stack: stack to reduce
        :return: result filter
        :rtype: numpy.ndarray
        """
        return average_dark(stack, self._filter_name, self._cut_off, self._quantiles)
# Accumulator-style reduction filters, registered by their ``name`` attribute.
_FILTERS = [
    MaxAveraging,
    MinAveraging,
    MeanAveraging,
    SumAveraging,
]

_FILTER_NAME_MAPPING = {}
for _f in _FILTERS:
    _FILTER_NAME_MAPPING[_f.name] = _f

# Names accepted by the stack-based ``average_dark`` reduction.
_AVERAGE_DARK_FILTERS = {"min", "max", "sum", "mean", "std", "quantiles", "median"}
def is_algorithm_name_exists(filter_name):
    """Return True if *filter_name* designates a known filter algorithm."""
    return filter_name in _FILTER_NAME_MAPPING or filter_name in _AVERAGE_DARK_FILTERS
class AlgorithmCreationError(RuntimeError):
    """Raised when an ImageReductionFilter cannot be created."""
def create_algorithm(filter_name, cut_off=None, quantiles=None):
    """Factory creating the reduction algorithm matching the parameters.

    :param cutoff: keep all data where (I-center)/std < cutoff
    :type cutoff: float or None
    :param quantiles: 2-tuple of floats average out data between the two
        quantiles
    :type quantiles: tuple(float, float) or None
    :return: An algorithm
    :rtype: ImageReductionFilter
    :raise AlgorithmCreationError: If it is not possible to create the
        algorithm
    """
    if filter_name in _FILTER_NAME_MAPPING and cut_off is None:
        # Accumulator filters fold images one by one: less memory.
        return _FILTER_NAME_MAPPING[filter_name]()
    if filter_name in _AVERAGE_DARK_FILTERS:
        # Stack-based filters keep every frame in memory at once.
        if filter_name == "quantiles" and quantiles is None:
            raise AlgorithmCreationError("Quantiles algorithm expect quantiles parameters")
        return AverageDarkFilter(filter_name, cut_off, quantiles)
    raise AlgorithmCreationError("No algorithm available for the expected parameters")
def bounding_box(img):
    """
    Tries to guess the bounding box around a valid massif

    :param img: 2D array like
    :return: 4-tuple (d0_min, d1_min, d0_max, d1_max)
    """
    # Fix: ``numpy.int`` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin ``int`` is the equivalent dtype alias.
    img = img.astype(int)
    # Binary profiles: which rows / columns contain any signal.
    img0 = (img.sum(axis=1) > 0).astype(int)
    img1 = (img.sum(axis=0) > 0).astype(int)
    # Rising (+1) and falling (-1) edges locate the massif boundaries.
    dimg0 = img0[1:] - img0[:-1]
    min0 = dimg0.argmax()
    max0 = dimg0.argmin() + 1
    dimg1 = img1[1:] - img1[:-1]
    min1 = dimg1.argmax()
    max1 = dimg1.argmin() + 1
    if max0 == 1:
        # No falling edge found: the massif extends to the last row.
        max0 = img0.size
    if max1 == 1:
        max1 = img1.size
    return (min0, min1, max0, max1)
def remove_saturated_pixel(ds, threshold=0.1, minimum=None, maximum=None):
    """
    Remove saturated pixels from an array inplace.

    :param ds: a dataset as ndarray
    :param float threshold: what is the upper limit?
        all pixel > max*(1-threshold) are discarded.
    :param float minimum: minimum valid value (or True for auto-guess)
    :param float maximum: maximum valid value
    :return: the input dataset
    """
    shape = ds.shape
    # Saturation level: either the dtype's physical maximum, or the data /
    # user-provided maximum, scaled down by the threshold margin.
    if ds.dtype == numpy.uint16:
        maxt = (1.0 - threshold) * 65535.0
    elif ds.dtype == numpy.int16:
        maxt = (1.0 - threshold) * 32767.0
    elif ds.dtype == numpy.uint8:
        maxt = (1.0 - threshold) * 255.0
    elif ds.dtype == numpy.int8:
        maxt = (1.0 - threshold) * 127.0
    else:
        if maximum is None:
            maxt = (1.0 - threshold) * ds.max()
        else:
            maxt = maximum
    if maximum is not None:
        maxt = min(maxt, maximum)
    invalid = (ds > maxt)
    if minimum:
        if minimum is True:
            # Automatic guess of the best minimum from the log-histogram:
            # locate the valley between the two main peaks.
            # TODO: use the HWHM to guess the minimum...
            data_min = ds.min()
            x, y = numpy.histogram(numpy.log(ds - data_min + 1.0), bins=100)
            # Maximize -x (i.e. find histogram peaks) near both ends.
            f = interp1d((y[1:] + y[:-1]) / 2.0, -x, bounds_error=False, fill_value=-x.min())
            max_low = fmin(f, y[1], disp=0)
            max_hi = fmin(f, y[-1], disp=0)
            if max_hi > max_low:
                # Valley between the two peaks = histogram minimum.
                f = interp1d((y[1:] + y[:-1]) / 2.0, x, bounds_error=False)
                min_center = fminbound(f, max_low, max_hi)
            else:
                min_center = max_hi
            # Convert the log-histogram position back to a data value.
            minimum = float(numpy.exp(y[((min_center / y) > 1).sum() - 1])) - 1.0 + data_min
            logger.debug("removeSaturatedPixel: best minimum guessed is %s", minimum)
        ds[ds < minimum] = minimum
        ds -= minimum  # - 1.0
    if invalid.sum(dtype=int) == 0:
        logger.debug("No saturated area where found")
        return ds
    # Label connected saturated zones (dilated by one pixel).
    gi = ndimage.morphology.binary_dilation(invalid)
    lgi, nc = ndimage.label(gi)
    if nc > 100:
        logger.warning("More than 100 saturated zones were found on this image !!!!")
    for zone in range(nc + 1):
        dzone = (lgi == zone)
        # Skip "zones" covering more than half the image (background label).
        if dzone.sum(dtype=int) > ds.size // 2:
            continue
        min0, min1, max0, max1 = bounding_box(dzone)
        ksize = min(max0 - min0, max1 - min1)
        # Median-filter a neighbourhood around the zone until no pixel
        # exceeds the saturation level, then write it back.
        subset = ds[max(0, min0 - 4 * ksize):min(shape[0], max0 + 4 * ksize), max(0, min1 - 4 * ksize):min(shape[1], max1 + 4 * ksize)]
        while subset.max() > maxt:
            subset = ndimage.median_filter(subset, ksize)
        ds[max(0, min0 - 4 * ksize):min(shape[0], max0 + 4 * ksize), max(0, min1 - 4 * ksize):min(shape[1], max1 + 4 * ksize)] = subset
    return ds
def average_dark(lstimg, center_method="mean", cutoff=None, quantiles=(0.5, 0.5)):
    """
    Averages a series of dark (or flat) images.
    Centers the result on the mean or the median ...
    but averages all frames within  cutoff*std

    :param lstimg: list of 2D images or a 3D stack
    :param str center_method: is the center calculated by a "mean", "median",
        "quantile", "std"
    :param cutoff: keep all data where (I-center)/std < cutoff
    :type cutoff:  float or None
    :param quantiles: 2-tuple of floats average out data between the two
        quantiles
    :type quantiles: tuple(float, float) or None
    :return: 2D image averaged
    """
    # Accept either a ready-made 3D stack or a list of 2D frames.
    if "ndim" in dir(lstimg) and lstimg.ndim == 3:
        stack = lstimg.astype(numpy.float32)
        shape = stack.shape[1:]
        length = stack.shape[0]
    else:
        shape = lstimg[0].shape
        length = len(lstimg)
        if length == 1:
            # Single frame: nothing to average, just normalize the dtype.
            return lstimg[0].astype(numpy.float32)
        stack = numpy.zeros((length, shape[0], shape[1]), dtype=numpy.float32)
        for i, img in enumerate(lstimg):
            stack[i] = img
    # Use the ndarray's own reduction when it exists ("mean", "std", "min", ...).
    if center_method in dir(stack):
        center = stack.__getattribute__(center_method)(axis=0)
    elif center_method == "median":
        logger.info("Filtering data (median)")
        center = numpy.median(stack, axis=0)
    elif center_method.startswith("quantil"):
        logger.info("Filtering data (quantiles: %s)", quantiles)
        sorted_ = numpy.sort(stack, axis=0)
        # Convert the quantile fractions into frame indices in the sorted stack.
        lower = max(0, int(numpy.floor(min(quantiles) * length)))
        upper = min(length, int(numpy.ceil(max(quantiles) * length)))
        if (upper == lower):
            # Degenerate selection: widen it to keep at least one frame.
            if upper < length:
                upper += 1
            elif lower > 0:
                lower -= 1
            else:
                logger.warning("Empty selection for quantil %s, would keep points from %s to %s", quantiles, lower, upper)
        center = sorted_[lower:upper].mean(axis=0)
    else:
        raise RuntimeError("Cannot understand method: %s in average_dark" % center_method)
    if cutoff is None or cutoff <= 0:
        output = center
    else:
        std = stack.std(axis=0)
        # Give std and center a fake leading axis with stride 0 so they act as
        # (length, h, w) arrays against the stack without copying any data.
        strides = 0, std.strides[0], std.strides[1]
        std.shape = 1, shape[0], shape[1]
        std.strides = strides
        center.shape = 1, shape[0], shape[1]
        center.strides = strides
        # Discard outliers further than cutoff standard deviations from center.
        mask = ((abs(stack - center) / std) > cutoff)
        stack[numpy.where(mask)] = 0.0
        summed = stack.sum(axis=0)
        # Divide by the number of frames actually kept per pixel (at least 1).
        output = summed / numpy.float32(numpy.maximum(1, (length - mask.sum(axis=0))))
    return output
def _normalize_image_stack(image_stack):
    """
    Convert input data to a list of 2D numpy arrays or a stack
    of numpy array (3D array).

    :param image_stack: slice of images
    :type image_stack: list or numpy.ndarray
    :return: A stack of image (list of 2D array or a single 3D array)
    :rtype: list or numpy.ndarray
    """
    if image_stack is None:
        return None

    if isinstance(image_stack, numpy.ndarray) and image_stack.ndim == 3:
        # already a single 3D stack: nothing to convert
        return image_stack

    if not isinstance(image_stack, list):
        raise Exception("Unsupported type '%s' for image_stack" % type(image_stack))

    # list input: load file names with fabio, keep 2D arrays as they are
    converted = []
    for item in image_stack:
        if isinstance(item, six.string_types):
            converted.append(fabio.open(item).data)
        elif isinstance(item, numpy.ndarray) and item.ndim == 2:
            converted.append(item)
        else:
            raise Exception("Unsupported image type '%s' in image_stack" % type(item))
    return converted
class AverageWriter():
    """Interface for using writer in `Average` process."""

    def write_header(self, merged_files, nb_frames, monitor_name):
        """Record global information about the averaging run.

        :param list merged_files: files merged to produce this output
        :param int nb_frames: total number of frames used
        :param str monitor_name: monitor used for normalization, or None
        """
        raise NotImplementedError()

    def write_reduction(self, algorithm, data):
        """Store the outcome of a single reduction.

        :param ImageReductionFilter algorithm: algorithm which produced *data*
        :param object data: result of this reduction
        """
        raise NotImplementedError()

    def close(self):
        """Release the writer; it must not be used afterwards."""
        raise NotImplementedError()
class MultiFilesAverageWriter(AverageWriter):
    """Write reductions into multi files. File headers are duplicated."""

    def __init__(self, file_name_pattern, file_format, dry_run=False):
        """
        :param str file_name_pattern: File name pattern for the output files.
            If it contains "{method_name}", it is updated for each
            reduction writing with the name of the reduction.
        :param str file_format: File format used. It is the default
            extension file.
        :param bool dry_run: If dry_run, the file is created on memory but not
            saved on the file system at the end
        """
        self._file_name_pattern = file_name_pattern
        self._global_header = {}
        # Weak keys: storing images must not keep their algorithm alive.
        self._fabio_images = weakref.WeakKeyDictionary()
        self._dry_run = dry_run

        # in case "edf.gz": keep only the first extension to pick the fabio class
        if "." in file_format:
            file_format = file_format.split(".")[0]

        self._fabio_class = fabio.factory(file_format + "image")

    def write_header(self, merged_files, nb_frames, monitor_name):
        """Record global metadata shared by every output file.

        :param list merged_files: fabio images merged to produce the output
        :param int nb_frames: total number of frames used
        :param str monitor_name: monitor used for normalization, or None
        """
        self._global_header["nfiles"] = len(merged_files)
        self._global_header["nframes"] = nb_frames
        if monitor_name is not None:
            self._global_header["monitor_name"] = monitor_name

        # Zero-pad the index so header keys sort naturally.
        pattern = "merged_file_%%0%ii" % len(str(len(merged_files)))
        for i, f in enumerate(merged_files):
            name = pattern % i
            self._global_header[name] = f.filename

    def _get_file_name(self, reduction_name):
        """Return the output file name for the given reduction method."""
        keys = {"method_name": reduction_name}
        return stringutil.safe_format(self._file_name_pattern, keys)

    def write_reduction(self, algorithm, data):
        """Write one reduction result into its own file.

        :param ImageReductionFilter algorithm: Algorithm used
        :param object data: Data of this reduction
        """
        file_name = self._get_file_name(algorithm.name)
        # overwrite the method
        header = fabio.fabioimage.OrderedDict()
        header["method"] = algorithm.name
        for name, value in self._global_header.items():
            header[name] = str(value)
        filter_parameters = algorithm.get_parameters()
        for name, value in filter_parameters.items():
            header[name] = str(value)
        image = self._fabio_class.__class__(data=data, header=header)
        if not self._dry_run:
            image.write(file_name)
            logger.info("Wrote %s", file_name)
        self._fabio_images[algorithm] = image

    def get_fabio_image(self, algorithm):
        """Get the constructed fabio image

        :rtype: fabio.fabioimage.FabioImage
        """
        return self._fabio_images[algorithm]

    def close(self):
        """Close the writer. Must not be used anymore."""
        # Fix: the original assigned the never-read attribute `_header`, which
        # left the writer fully usable after close(). Invalidating the real
        # shared header makes any further write fail loudly instead.
        self._global_header = None
def common_prefix(string_list):
    """Return the common prefix of a list of strings

    TODO: move it into utils package

    :param list(str) string_list: List of strings
    :rtype: str
    """
    prefix = ""
    # zip(*string_list) walks the aligned characters and stops at the shortest
    # string. The original code used zip(string_list), which iterates over the
    # strings themselves and wrongly concatenated every input string.
    for chars in zip(*string_list):
        if any(c != chars[0] for c in chars):
            break
        prefix += chars[0]
    return prefix
class AverageObserver(object):
    """Callbacks fired while an `Average` process runs.

    Subclass it and override the hooks of interest; every default
    implementation does nothing.
    """

    def image_loaded(self, fabio_image, image_index, images_count):
        """Called when an input image is loaded"""

    def process_started(self):
        """Called when the full processing is started"""

    def algorithm_started(self, algorithm):
        """Called when an algorithm is started"""

    def frame_processed(self, algorithm, frame_index, frames_count):
        """Called after providing a frame to an algorithm"""

    def result_processing(self, algorithm):
        """Called before the result of an algorithm is computed"""

    def algorithm_finished(self, algorithm):
        """Called when an algorithm is finished"""

    def process_finished(self):
        """Called when the full process is finished"""
class Average(object):
    """Process images to generate an average using different algorithms."""
    def __init__(self):
        """Constructor"""
        self._dark = None  # 2D dark frame subtracted from every image, or None
        self._raw_flat = None  # flat as provided, before optional dark correction
        self._flat = None  # flat actually used, refreshed by _update_flat()
        self._monitor_key = None  # header key of the normalization monitor
        self._threshold = None  # saturation threshold (see set_pixel_filter)
        self._minimum = None
        self._maximum = None
        self._fabio_images = []  # input images as fabio objects
        self._writer = None  # AverageWriter receiving the results
        self._algorithms = []  # ImageReductionFilter instances to compute
        self._nb_frames = 0  # total frame count over all input images
        self._correct_flat_from_dark = False
        # Weak keys: a stored result must not keep its algorithm alive.
        self._results = weakref.WeakKeyDictionary()
        self._observer = None  # optional AverageObserver for progress callbacks
    def set_observer(self, observer):
        """Set an observer to the average process.

        :param AverageObserver observer: An observer
        """
        self._observer = observer
    def set_dark(self, dark_list):
        """Defines images used as dark.

        :param list dark_list: List of dark used
        """
        if dark_list is None:
            self._dark = None
            return
        darks = _normalize_image_stack(dark_list)
        # The reference dark is itself an outlier-filtered average.
        self._dark = average_dark(darks, center_method="mean", cutoff=4)
    def set_flat(self, flat_list):
        """Defines images used as flat.

        :param list flat_list: List of flat used
        """
        if flat_list is None:
            self._raw_flat = None
            return
        flats = _normalize_image_stack(flat_list)
        self._raw_flat = average_dark(flats, center_method="mean", cutoff=4)
    def set_correct_flat_from_dark(self, correct_flat_from_dark):
        """Defines if the dark must be applied on the flat.

        :param bool correct_flat_from_dark: If true, the dark is applied.
        """
        self._correct_flat_from_dark = correct_flat_from_dark
    def get_counter_frames(self):
        """Returns the number of frames used for the process.

        :rtype: int
        """
        return self._nb_frames
    def get_fabio_images(self):
        """Returns source images as fabio images.

        :rtype: list(fabio.fabioimage.FabioImage)"""
        return self._fabio_images
    def set_images(self, image_list):
        """Defines the set of source images used to process an average.

        :param list image_list: List of filename, numpy arrays, fabio images
            used as source for the computation.
        """
        self._fabio_images = []
        self._nb_frames = 0
        if len(image_list) > 100:
            # if too many files are opened, it may crash. The hard limit is 1024
            copy_data = True
        else:
            copy_data = False
        for image_index, image in enumerate(image_list):
            if isinstance(image, six.string_types):
                logger.info("Reading %s", image)
                fabio_image = fabio.open(image)
                if copy_data and fabio_image.nframes == 1:
                    # copy the data so that we can close the file right now.
                    fimg = fabio_image.convert(fabio_image.__class__)
                    fimg.filename = image
                    fabio_image.close()
                    fabio_image = fimg
            elif isinstance(image, fabio.fabioimage.fabioimage):
                fabio_image = image
            else:
                # 262148 presumably corresponds to fabio 0.4 (see message below)
                if fabio.hexversion < 262148:
                    logger.error("Old version of fabio detected, upgrade to 0.4 or newer")
                # Assume this is a numpy array like
                if not isinstance(image, numpy.ndarray):
                    raise RuntimeError("Not good type for input, got %s, expected numpy array" % type(image))
                fabio_image = fabio.numpyimage.NumpyImage(data=image)
            if self._observer:
                self._observer.image_loaded(fabio_image, image_index, len(image_list))
            self._fabio_images.append(fabio_image)
            self._nb_frames += fabio_image.nframes
    def set_monitor_name(self, monitor_name):
        """Defines the monitor name used to correct images before processing
        the average. This monitor must be part of the file header, else the
        image is skipped.

        :param str monitor_name: Name of the monitor available on the header
            file
        """
        self._monitor_key = monitor_name
    def set_pixel_filter(self, threshold, minimum, maximum):
        """Defines the filter applied on each pixels of the images before
        processing the average.

        :param threshold: what is the upper limit?
            all pixel > max*(1-threshold) are discarded.
        :param minimum: minimum valid value or True
        :param maximum: maximum valid value
        """
        self._threshold = threshold
        self._minimum = minimum
        self._maximum = maximum
    def set_writer(self, writer):
        """Defines the object writer which will be used to store the result.

        :param AverageWriter writer: The writer to use."""
        self._writer = writer
    def add_algorithm(self, algorithm):
        """Defines another algorithm which will be computed on the source.

        :param ImageReductionFilter algorithm: An averaging algorithm.
        """
        self._algorithms.append(algorithm)
    def _get_corrected_image(self, fabio_image, image):
        """Returns an image corrected by pixel filter, saturation, flat, dark,
        and monitor correction. The internal computation is done in float
        64bits. The result is provided as float 32 bits.

        :param fabio.fabioimage.FabioImage fabio_image: Object containing the
            header of the data to process
        :param numpy.ndarray image: Data to process
        :rtype: numpy.ndarray
        """
        # Work in float64 to limit rounding errors across the chained corrections.
        corrected_image = numpy.ascontiguousarray(image, numpy.float64)
        if self._threshold or self._minimum or self._maximum:
            corrected_image = remove_saturated_pixel(corrected_image, self._threshold, self._minimum, self._maximum)
        if self._dark is not None:
            corrected_image -= self._dark
        if self._flat is not None:
            corrected_image /= self._flat
        if self._monitor_key is not None:
            try:
                monitor = header_utils.get_monitor_value(fabio_image, self._monitor_key)
                corrected_image /= monitor
            except header_utils.MonitorNotFound as e:
                logger.warning("Monitor not found in filename '%s', data skipped. Cause: %s", fabio_image.filename, str(e))
                # None tells the caller to skip this frame.
                return None
        return numpy.ascontiguousarray(corrected_image, numpy.float32)
    def _get_image_reduction(self, algorithm):
        """Returns the result of an averaging algorithm using all over
        parameters defined in this object.

        :param ImageReductionFilter algorithm: Averaging algorithm
        :rtype: numpy.ndarray
        """
        algorithm.init(max_images=self._nb_frames)
        frame_index = 0
        for fabio_image in self._fabio_images:
            for frame in range(fabio_image.nframes):
                if fabio_image.nframes == 1:
                    data = fabio_image.data
                else:
                    data = fabio_image.getframe(frame).data
                logger.debug("Intensity range for %s#%i is %s --> %s", fabio_image.filename, frame, data.min(), data.max())
                corrected_image = self._get_corrected_image(fabio_image, data)
                # corrected_image is None when the monitor was missing: skip it.
                if corrected_image is not None:
                    algorithm.add_image(corrected_image)
                if self._observer:
                    self._observer.frame_processed(algorithm, frame_index, self._nb_frames)
                frame_index += 1
        if self._observer:
            self._observer.result_processing(algorithm)
        return algorithm.get_result()
    def _update_flat(self):
        """
        Update the flat according to the last process parameters

        :rtype: numpy.ndarray
        """
        if self._raw_flat is not None:
            flat = numpy.array(self._raw_flat)
            if self._correct_flat_from_dark:
                if self._dark is not None:
                    flat -= self._dark
                else:
                    logger.debug("No dark. Flat correction using dark skipped")
            # Clamp non-positive pixels so _get_corrected_image never divides
            # by zero or a negative flat value.
            flat[numpy.where(flat <= 0)] = 1.0
        else:
            flat = None
        self._flat = flat
    def process(self):
        """Process source images to all defined averaging algorithms defined
        using defined parameters. To access to the results you have to define
        a writer (`AverageWriter`). To follow the process forward you have to
        define an observer (`AverageObserver`).
        """
        self._update_flat()
        writer = self._writer
        if self._observer:
            self._observer.process_started()
        if writer is not None:
            writer.write_header(self._fabio_images, self._nb_frames, self._monitor_key)
        for algorithm in self._algorithms:
            if self._observer:
                self._observer.algorithm_started(algorithm)
            image_reduction = self._get_image_reduction(algorithm)
            logger.debug("Intensity range in merged dataset : %s --> %s", image_reduction.min(), image_reduction.max())
            if writer is not None:
                writer.write_reduction(algorithm, image_reduction)
            self._results[algorithm] = image_reduction
            if self._observer:
                self._observer.algorithm_finished(algorithm)
        if self._observer:
            self._observer.process_finished()
        if writer is not None:
            writer.close()
    def get_image_reduction(self, algorithm):
        """Returns the result of an algorithm. The `process` must be already
        done.

        :param ImageReductionFilter algorithm: An averaging algorithm
        :rtype: numpy.ndarray
        """
        return self._results[algorithm]
def average_images(listImages, output=None, threshold=0.1, minimum=None,
                   maximum=None, darks=None, flats=None, filter_="mean",
                   correct_flat_from_dark=False, cutoff=None, quantiles=None,
                   fformat="edf", monitor_key=None):
    """
    Average a list of images into a single frame, discarding all saturated
    pixels.

    :param listImages: list of string representing the filenames
    :param output: name of the optional output file
    :param threshold: what is the upper limit? all pixel > max*(1-threshold)
        are discarded.
    :param minimum: minimum valid value or True
    :param maximum: maximum valid value
    :param darks: list of dark current images for subtraction
    :param flats: list of flat field images for division
    :param filter_: can be "min", "max", "median", "mean", "sum", "quantiles"
        (default='mean')
    :param correct_flat_from_dark: shall the flat be re-corrected ?
    :param cutoff: keep all data where (I-center)/std < cutoff
    :param quantiles: 2-tuple containing the lower and upper quantile (0<q<1)
        to average out.
    :param fformat: file format of the output image, default: edf
    :param monitor_key str: Key containing the monitor. Can be none.
    :return: filename with the data or the data ndarray in case format=None
    """
    # input sanitization: fall back to a known algorithm name
    if not is_algorithm_name_exists(filter_):
        logger.warning("Filter %s not understood. switch to mean filter", filter_)
        filter_ = "mean"
    if quantiles is not None and filter_ != "quantiles":
        logger.warning("Set method to quantiles as quantiles parameters is defined.")
        filter_ = "quantiles"

    engine = Average()
    engine.set_images(listImages)
    engine.set_dark(darks)
    engine.set_flat(flats)
    engine.set_correct_flat_from_dark(correct_flat_from_dark)
    engine.set_monitor_name(monitor_key)
    engine.set_pixel_filter(threshold, minimum, maximum)

    reducer = create_algorithm(filter_, cutoff, quantiles)
    engine.add_algorithm(reducer)

    # derive a default output name from the common prefix of the inputs
    if fformat is not None:
        if fformat.startswith("."):
            fformat = fformat.lstrip(".")
        if output is None:
            prefix = common_prefix([i.filename for i in engine.get_fabio_images()])
            output = "{method_name}" + "filt%02i-%s.%s" % (engine.get_counter_frames(), prefix, fformat)

    file_writer = None
    if output is not None:
        file_writer = MultiFilesAverageWriter(output, fformat)
        engine.set_writer(file_writer)

    engine.process()

    if file_writer is None:
        return engine.get_image_reduction(reducer)
    return file_writer.get_fabio_image(reducer).filename
| [
"logging.getLogger",
"scipy.optimize.optimize.fmin",
"numpy.log",
"numpy.ascontiguousarray",
"scipy.interpolate.interp1d",
"numpy.array",
"scipy.optimize.optimize.fminbound",
"fabio.open",
"fabio.factory",
"numpy.where",
"numpy.sort",
"scipy.ndimage.label",
"numpy.maximum",
"weakref.WeakKe... | [((2111, 2138), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2128, 2138), False, 'import logging\n'), ((11662, 11705), 'scipy.ndimage.morphology.binary_dilation', 'ndimage.morphology.binary_dilation', (['invalid'], {}), '(invalid)\n', (11696, 11705), False, 'from scipy import ndimage\n'), ((11720, 11737), 'scipy.ndimage.label', 'ndimage.label', (['gi'], {}), '(gi)\n', (11733, 11737), False, 'from scipy import ndimage\n'), ((4292, 4337), 'numpy.maximum', 'numpy.maximum', (['accumulated_image', 'added_image'], {}), '(accumulated_image, added_image)\n', (4305, 4337), False, 'import numpy\n'), ((4545, 4590), 'numpy.minimum', 'numpy.minimum', (['accumulated_image', 'added_image'], {}), '(accumulated_image, added_image)\n', (4558, 4590), False, 'import numpy\n'), ((13443, 13505), 'numpy.zeros', 'numpy.zeros', (['(length, shape[0], shape[1])'], {'dtype': 'numpy.float32'}), '((length, shape[0], shape[1]), dtype=numpy.float32)\n', (13454, 13505), False, 'import numpy\n'), ((17755, 17782), 'weakref.WeakKeyDictionary', 'weakref.WeakKeyDictionary', ([], {}), '()\n', (17780, 17782), False, 'import weakref\n'), ((17955, 17991), 'fabio.factory', 'fabio.factory', (["(file_format + 'image')"], {}), "(file_format + 'image')\n", (17968, 17991), False, 'import fabio\n'), ((18782, 18812), 'fabio.fabioimage.OrderedDict', 'fabio.fabioimage.OrderedDict', ([], {}), '()\n', (18810, 18812), False, 'import fabio\n'), ((21491, 21518), 'weakref.WeakKeyDictionary', 'weakref.WeakKeyDictionary', ([], {}), '()\n', (21516, 21518), False, 'import weakref\n'), ((26651, 26696), 'numpy.ascontiguousarray', 'numpy.ascontiguousarray', (['image', 'numpy.float64'], {}), '(image, numpy.float64)\n', (26674, 26696), False, 'import numpy\n'), ((27442, 27497), 'numpy.ascontiguousarray', 'numpy.ascontiguousarray', (['corrected_image', 'numpy.float32'], {}), '(corrected_image, numpy.float32)\n', (27465, 27497), False, 'import numpy\n'), ((4993, 5019), 
'numpy.float32', 'numpy.float32', (['self._count'], {}), '(self._count)\n', (5006, 5019), False, 'import numpy\n'), ((5573, 5612), 'numpy.zeros', 'numpy.zeros', (['shape'], {'dtype': 'numpy.float32'}), '(shape, dtype=numpy.float32)\n', (5584, 5612), False, 'import numpy\n'), ((11011, 11032), 'scipy.optimize.optimize.fmin', 'fmin', (['f', 'y[1]'], {'disp': '(0)'}), '(f, y[1], disp=0)\n', (11015, 11032), False, 'from scipy.optimize.optimize import fmin\n'), ((11054, 11076), 'scipy.optimize.optimize.fmin', 'fmin', (['f', 'y[-1]'], {'disp': '(0)'}), '(f, y[-1], disp=0)\n', (11058, 11076), False, 'from scipy.optimize.optimize import fmin\n'), ((12262, 12298), 'scipy.ndimage.median_filter', 'ndimage.median_filter', (['subset', 'ksize'], {}), '(subset, ksize)\n', (12283, 12298), False, 'from scipy import ndimage\n'), ((13773, 13800), 'numpy.median', 'numpy.median', (['stack'], {'axis': '(0)'}), '(stack, axis=0)\n', (13785, 13800), False, 'import numpy\n'), ((14901, 14918), 'numpy.where', 'numpy.where', (['mask'], {}), '(mask)\n', (14912, 14918), False, 'import numpy\n'), ((28913, 28940), 'numpy.array', 'numpy.array', (['self._raw_flat'], {}), '(self._raw_flat)\n', (28924, 28940), False, 'import numpy\n'), ((10853, 10883), 'numpy.log', 'numpy.log', (['(ds - data_min + 1.0)'], {}), '(ds - data_min + 1.0)\n', (10862, 10883), False, 'import numpy\n'), ((11130, 11185), 'scipy.interpolate.interp1d', 'interp1d', (['((y[1:] + y[:-1]) / 2.0)', 'x'], {'bounds_error': '(False)'}), '((y[1:] + y[:-1]) / 2.0, x, bounds_error=False)\n', (11138, 11185), False, 'from scipy.interpolate import interp1d\n'), ((11215, 11244), 'scipy.optimize.optimize.fminbound', 'fminbound', (['f', 'max_low', 'max_hi'], {}), '(f, max_low, max_hi)\n', (11224, 11244), False, 'from scipy.optimize.optimize import fminbound\n'), ((13930, 13955), 'numpy.sort', 'numpy.sort', (['stack'], {'axis': '(0)'}), '(stack, axis=0)\n', (13940, 13955), False, 'import numpy\n'), ((23726, 23743), 'fabio.open', 'fabio.open', 
(['image'], {}), '(image)\n', (23736, 23743), False, 'import fabio\n'), ((29187, 29209), 'numpy.where', 'numpy.where', (['(flat <= 0)'], {}), '(flat <= 0)\n', (29198, 29209), False, 'import numpy\n'), ((15821, 15838), 'fabio.open', 'fabio.open', (['image'], {}), '(image)\n', (15831, 15838), False, 'import fabio\n'), ((24577, 24616), 'fabio.numpyimage.NumpyImage', 'fabio.numpyimage.NumpyImage', ([], {'data': 'image'}), '(data=image)\n', (24604, 24616), False, 'import fabio\n')] |
from django.shortcuts import render
import datetime
from datetime import date
import calendar
from schedule.models import Event, period_choices, cart_choice
from django.views.generic import UpdateView, TemplateView, ListView
from schedule.forms import ReservationForm
from django.http import HttpResponseRedirect, HttpResponse
from django import forms
import json
import calendar
from django.http import JsonResponse
from django.core.serializers.json import DjangoJSONEncoder
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from django.db import IntegrityError
from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
from django.contrib.auth.models import User
class Home(LoginRequiredMixin, ListView):
    """Weekly reservation grid.

    The ``adjuster`` URL argument selects the week: 0 is the current week,
    +/-n moves n weeks forward/backward.
    """
    model = Event
    template_name = "schedule/home.html"
    login_url = '/login/'
    redirect_field_name = 'redirect_to'

    def get_context_data(self, **kwargs):
        """Add the week boundaries, that week's slots and the current user's
        own reservations to the template context."""
        context = super(Home, self).get_context_data(**kwargs)
        q = self.kwargs['adjuster']  # signed number of weeks to shift
        adjust = int(q) * 7  # shift expressed in days (the old `if != 0` guard was redundant)
        # Use a single date.today() call: the original called it three times,
        # so around midnight the week bounds and 'the_day' could disagree.
        today = date.today()
        monday = today - datetime.timedelta(days=today.weekday()) + datetime.timedelta(days=adjust)
        friday = monday + datetime.timedelta(days=4)
        user = self.request.user
        context['monday'] = monday
        context['q'] = q
        context['this_week'] = Event.objects.filter(day__gte=monday, day__lte=friday)
        context['the_day'] = calendar.day_name[today.weekday()]
        context['my_reservation'] = Event.objects.filter(day__gte=monday, day__lte=friday, teacher=user.id)
        context['periods'] = [p[1] for p in period_choices]
        context['username'] = self.request.user.username
        return context
@ensure_csrf_cookie
def reserve(request):
    """Toggle or claim a reservation slot (AJAX only).

    Returns JSON ``{'result': code}`` where 1 = released own reservation,
    2 = slot reserved by the requester, 3 = refused (taken by someone else).
    """
    if not request.is_ajax():
        # The original fell through and crashed with a NameError on `data`;
        # reject non-AJAX access cleanly instead.
        return HttpResponse(status=400)
    pk = request.POST['pk']
    slot = Event.objects.get(pk=pk)
    user = request.user
    if slot.is_reserved:
        if user == slot.teacher:
            # Owner clicked again: free the slot.
            slot.is_reserved = False
            slot.teacher = None
            slot.save()
            result = 1
        elif user.is_superuser and user != slot.teacher:  # Override as admin
            # Fix: the original used `==` here (a no-op comparison) instead
            # of assignment.
            slot.is_reserved = True
            slot.teacher = user
            slot.save()
            result = 2
        else:
            result = 3
    else:
        slot.is_reserved = True
        slot.teacher = user
        slot.save()
        result = 2
    data = {'result': result}
    return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder))
class Dashboard(PermissionRequiredMixin, TemplateView):
    """Staff-only overview of today's and tomorrow's reservation slots."""
    template_name = "schedule/dashboard.html"
    permission_required = 'is_staff'

    def get_context_data(self, **kwargs):
        context = super(Dashboard, self).get_context_data(**kwargs)
        today = datetime.date.today()
        tomorrow = today + datetime.timedelta(days=1)
        # Human-readable period labels and raw cart identifiers for the grid.
        context['periods'] = [p[1] for p in period_choices]  # Make this a mixin
        context['this_day'] = Event.objects.filter(day=today)
        context['next_day'] = Event.objects.filter(day=tomorrow)
        context['cart_list'] = [c[0] for c in cart_choice]
        context['today'] = today
        context['tomorrow'] = tomorrow
        return context
@ensure_csrf_cookie
def create_week(request):
    """Create one Event slot per (day, period, cart) for the current
    Monday-Friday week (AJAX only).

    Returns JSON with the date range, how many slots were created and which
    ones already existed.
    """
    if not request.is_ajax():
        # The original fell through and crashed with a NameError on `data`.
        return HttpResponse(status=400)
    today = date.today()
    start = today - datetime.timedelta(days=today.weekday())  # back up to Monday
    day = start  # Day will change, start will not
    end = start + datetime.timedelta(days=4)  # One week, edit later for flexibility
    weekend = set([5, 6])  # Python week starts on Monday as 0
    dupe_list = []
    total = 0
    while day <= end:
        if day.weekday() not in weekend:  # defensive: the range is already Mon-Fri
            for period in period_choices:
                for cart in cart_choice:
                    # `slot` instead of shadowing the builtin `open`
                    slot = Event(day=day, period=period[0], cart=cart[0])
                    try:
                        slot.save()
                        total += 1
                    except IntegrityError:
                        # unique constraint hit: the slot already exists
                        dupe_list.append(str(slot))
        day += datetime.timedelta(days=1)  # Adds one day until the current day is past the end day
    data = {'start': start, 'end': end, 'dupe_list': dupe_list, 'total': total}
    return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder))
def redirect_root(request):
    """Redirect requests for the site root to the current week's schedule."""
    return HttpResponseRedirect('/week/0/')
@ensure_csrf_cookie
def create_month(request):
    """Create one Event slot per (weekday, period, cart) for the current
    calendar month (AJAX only).

    Returns JSON with the date range, how many slots were created and which
    ones already existed.
    """
    if not request.is_ajax():
        # The original fell through and crashed with a NameError on `data`.
        return HttpResponse(status=400)
    today = date.today()
    start = today.replace(day=1)  # first day of the month
    end = today.replace(day=calendar.monthrange(today.year, today.month)[1])  # last day
    weekend = set([5, 6])  # Python week starts on Monday as 0
    dupe_list = []
    total = 0
    day = start
    while day <= end:
        if day.weekday() not in weekend:
            for period in period_choices:
                for cart in cart_choice:
                    # `slot` instead of shadowing the builtin `open`
                    slot = Event(day=day, period=period[0], cart=cart[0])
                    try:
                        slot.save()
                        total += 1
                    except IntegrityError:
                        # unique constraint hit: the slot already exists
                        dupe_list.append(str(slot))
        day += datetime.timedelta(days=1)
    data = {'start': start, 'end': end, 'dupe_list': dupe_list, 'total': total}
    return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder))
@ensure_csrf_cookie
def create_twelve(request):
    """Create Event slots for every weekday of the next twelve weeks
    (AJAX only).

    Returns JSON with the date range, how many slots were created and which
    ones already existed.
    """
    if not request.is_ajax():
        # The original fell through and crashed with a NameError on `data`.
        return HttpResponse(status=400)
    today = date.today()
    start = today - datetime.timedelta(days=today.weekday())  # Monday of this week
    fake_end = start + datetime.timedelta(days=84)  # 12 weeks later
    if fake_end.weekday() != 4:
        # Snap to the Friday of that week (a negative delta moves forward).
        end = fake_end - datetime.timedelta(days=(fake_end.weekday() - 4))
    else:
        end = fake_end
    day = start
    total = 0
    dupe_list = []
    weekend = set([5, 6])  # Python week starts on Monday as 0
    while day <= end:
        if day.weekday() not in weekend:
            for period in period_choices:
                for cart in cart_choice:
                    # `slot` instead of shadowing the builtin `open`
                    slot = Event(day=day, period=period[0], cart=cart[0])
                    try:
                        slot.save()
                        total += 1
                    except IntegrityError:
                        # unique constraint hit: the slot already exists
                        dupe_list.append(str(slot))
        day += datetime.timedelta(days=1)
    data = {'start': start, 'end': end, 'dupe_list': dupe_list, 'total': total}
    return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder))
@ensure_csrf_cookie
def delete_all(request):
    """Remove every Event row. Only honoured for AJAX requests; a plain
    request gets the same empty 200 response without deleting anything."""
    if not request.is_ajax():
        return HttpResponse()
    Event.objects.all().delete()
    return HttpResponse()
| [
"django.http.HttpResponseRedirect",
"schedule.models.Event.objects.get",
"schedule.models.Event.objects.all",
"django.http.HttpResponse",
"json.dumps",
"datetime.timedelta",
"schedule.models.Event.objects.filter",
"calendar.monthrange",
"schedule.models.Event",
"datetime.date.today"
] | [((5040, 5072), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/week/0/"""'], {}), "('/week/0/')\n", (5060, 5072), False, 'from django.http import HttpResponseRedirect, HttpResponse\n'), ((7738, 7752), 'django.http.HttpResponse', 'HttpResponse', ([], {}), '()\n', (7750, 7752), False, 'from django.http import HttpResponseRedirect, HttpResponse\n'), ((1168, 1180), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1178, 1180), False, 'from datetime import date\n'), ((1568, 1622), 'schedule.models.Event.objects.filter', 'Event.objects.filter', ([], {'day__gte': 'Monday', 'day__lte': 'Friday'}), '(day__gte=Monday, day__lte=Friday)\n', (1588, 1622), False, 'from schedule.models import Event, period_choices, cart_choice\n'), ((1776, 1847), 'schedule.models.Event.objects.filter', 'Event.objects.filter', ([], {'day__gte': 'Monday', 'day__lte': 'Friday', 'teacher': 'user.id'}), '(day__gte=Monday, day__lte=Friday, teacher=user.id)\n', (1796, 1847), False, 'from schedule.models import Event, period_choices, cart_choice\n'), ((2145, 2169), 'schedule.models.Event.objects.get', 'Event.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (2162, 2169), False, 'from schedule.models import Event, period_choices, cart_choice\n'), ((2845, 2884), 'json.dumps', 'json.dumps', (['data'], {'cls': 'DjangoJSONEncoder'}), '(data, cls=DjangoJSONEncoder)\n', (2855, 2884), False, 'import json\n'), ((3239, 3260), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (3258, 3260), False, 'import datetime\n'), ((3493, 3524), 'schedule.models.Event.objects.filter', 'Event.objects.filter', ([], {'day': 'today'}), '(day=today)\n', (3513, 3524), False, 'from schedule.models import Event, period_choices, cart_choice\n'), ((3555, 3589), 'schedule.models.Event.objects.filter', 'Event.objects.filter', ([], {'day': 'tomorrow'}), '(day=tomorrow)\n', (3575, 3589), False, 'from schedule.models import Event, period_choices, cart_choice\n'), ((1198, 1210), 'datetime.date.today', 
'date.today', ([], {}), '()\n', (1208, 1210), False, 'from datetime import date\n'), ((1287, 1313), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(4)'}), '(days=4)\n', (1305, 1313), False, 'import datetime\n'), ((1340, 1371), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': 'adjust'}), '(days=adjust)\n', (1358, 1371), False, 'import datetime\n'), ((1398, 1429), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': 'adjust'}), '(days=adjust)\n', (1416, 1429), False, 'import datetime\n'), ((3288, 3314), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (3306, 3314), False, 'import datetime\n'), ((3858, 3870), 'datetime.date.today', 'date.today', ([], {}), '()\n', (3868, 3870), False, 'from datetime import date\n'), ((3873, 3907), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': 'to_monday'}), '(days=to_monday)\n', (3891, 3907), False, 'import datetime\n'), ((3997, 4023), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(4)'}), '(days=4)\n', (4015, 4023), False, 'import datetime\n'), ((4761, 4787), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4779, 4787), False, 'import datetime\n'), ((4958, 4997), 'json.dumps', 'json.dumps', (['data'], {'cls': 'DjangoJSONEncoder'}), '(data, cls=DjangoJSONEncoder)\n', (4968, 4997), False, 'import json\n'), ((5163, 5175), 'datetime.date.today', 'date.today', ([], {}), '()\n', (5173, 5175), False, 'from datetime import date\n'), ((5197, 5209), 'datetime.date.today', 'date.today', ([], {}), '()\n', (5207, 5209), False, 'from datetime import date\n'), ((6139, 6165), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (6157, 6165), False, 'import datetime\n'), ((6278, 6317), 'json.dumps', 'json.dumps', (['data'], {'cls': 'DjangoJSONEncoder'}), '(data, cls=DjangoJSONEncoder)\n', (6288, 6317), False, 'import json\n'), ((6453, 6465), 'datetime.date.today', 'date.today', ([], {}), '()\n', (6463, 6465), 
False, 'from datetime import date\n'), ((6468, 6502), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': 'to_monday'}), '(days=to_monday)\n', (6486, 6502), False, 'import datetime\n'), ((6545, 6572), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(84)'}), '(days=84)\n', (6563, 6572), False, 'import datetime\n'), ((7438, 7464), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (7456, 7464), False, 'import datetime\n'), ((7577, 7616), 'json.dumps', 'json.dumps', (['data'], {'cls': 'DjangoJSONEncoder'}), '(data, cls=DjangoJSONEncoder)\n', (7587, 7616), False, 'import json\n'), ((3819, 3831), 'datetime.date.today', 'date.today', ([], {}), '()\n', (3829, 3831), False, 'from datetime import date\n'), ((5291, 5303), 'datetime.date.today', 'date.today', ([], {}), '()\n', (5301, 5303), False, 'from datetime import date\n'), ((5366, 5378), 'datetime.date.today', 'date.today', ([], {}), '()\n', (5376, 5378), False, 'from datetime import date\n'), ((6414, 6426), 'datetime.date.today', 'date.today', ([], {}), '()\n', (6424, 6426), False, 'from datetime import date\n'), ((7698, 7717), 'schedule.models.Event.objects.all', 'Event.objects.all', ([], {}), '()\n', (7715, 7717), False, 'from schedule.models import Event, period_choices, cart_choice\n'), ((5392, 5424), 'calendar.monthrange', 'calendar.monthrange', (['year', 'month'], {}), '(year, month)\n', (5411, 5424), False, 'import calendar\n'), ((4381, 4427), 'schedule.models.Event', 'Event', ([], {'day': 'day', 'period': 'period[0]', 'cart': 'cart[0]'}), '(day=day, period=period[0], cart=cart[0])\n', (4386, 4427), False, 'from schedule.models import Event, period_choices, cart_choice\n'), ((5763, 5809), 'schedule.models.Event', 'Event', ([], {'day': 'day', 'period': 'period[0]', 'cart': 'cart[0]'}), '(day=day, period=period[0], cart=cart[0])\n', (5768, 5809), False, 'from schedule.models import Event, period_choices, cart_choice\n'), ((7062, 7108), 'schedule.models.Event', 
'Event', ([], {'day': 'day', 'period': 'period[0]', 'cart': 'cart[0]'}), '(day=day, period=period[0], cart=cart[0])\n', (7067, 7108), False, 'from schedule.models import Event, period_choices, cart_choice\n'), ((1237, 1249), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1247, 1249), False, 'from datetime import date\n')] |
# Emits an nginx default-site configuration to stdout; the listen port is
# taken from the PORT environment variable (substituted twice via %s below).
import os
# NOTE(review): os.environ.get("PORT") returns None when PORT is unset, which
# would render "listen None default_server;" -- confirm PORT is always set.
print("""
##
# You should look at the following URL's in order to grasp a solid understanding
# of Nginx configuration files in order to fully unleash the power of Nginx.
# https://www.nginx.com/resources/wiki/start/
# https://www.nginx.com/resources/wiki/start/topics/tutorials/config_pitfalls/
# https://wiki.debian.org/Nginx/DirectoryStructure
#
# In most cases, administrators will remove this file from sites-enabled/ and
# leave it as reference inside of sites-available where it will continue to be
# updated by the nginx packaging team.
#
# This file will automatically load configuration files provided by other
# applications, such as Drupal or Wordpress. These applications will be made
# available underneath a path with that package name, such as /drupal8.
#
# Please see /usr/share/doc/nginx-doc/examples/ for more detailed examples.
##
# Default server configuration
#
server {
	listen %s default_server;
	listen [::]:%s default_server;
	# SSL configuration
	#
	# listen 443 ssl default_server;
	# listen [::]:443 ssl default_server;
	#
	# Note: You should disable gzip for SSL traffic.
	# See: https://bugs.debian.org/773332
	#
	# Read up on ssl_ciphers to ensure a secure configuration.
	# See: https://bugs.debian.org/765782
	#
	# Self signed certs generated by the ssl-cert package
	# Don't use them in a production server!
	#
	# include snippets/snakeoil.conf;
	root /var/www/html;
	error_page 404 /notfound;
	# Add index.php to the list if you are using PHP
	index index.html index.htm index.nginx-debian.html;
	server_name _;
	location / {
		proxy_pass http://0.0.0.0:8000/;
	}
	location /terminal {
		proxy_pass http://0.0.0.0:8001/;
	}
	location /terminal/ws {
		proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
		proxy_set_header Host $host;
		proxy_http_version 1.1;
		proxy_set_header Upgrade $http_upgrade;
		proxy_set_header Connection "upgrade";
		proxy_pass http://0.0.0.0:8001/ws;
	}
	location /preview {
		proxy_pass http://0.0.0.0:5000/;
	}
	location /notfound {
		root /krypton/worker;
	}
	# pass PHP scripts to FastCGI server
	#
	#location ~ \.php$ {
	#	include snippets/fastcgi-php.conf;
	#
	#	# With php-fpm (or other unix sockets):
	#	fastcgi_pass unix:/run/php/php7.3-fpm.sock;
	#	# With php-cgi (or other tcp sockets):
	#	fastcgi_pass 127.0.0.1:9000;
	#}
	# deny access to .htaccess files, if Apache's document root
	# concurs with nginx's one
	#
	#location ~ /\.ht {
	#	deny all;
	#}
}
# Virtual Host configuration for example.com
#
# You can move that to a different file under sites-available/ and symlink that
# to sites-enabled/ to enable it.
#
#server {
#	listen 80;
#	listen [::]:80;
#
#	server_name example.com;
#
#	root /var/www/example.com;
#	index index.html;
#
#	location / {
#		try_files $uri $uri/ =404;
#	}
#}
"""%(os.environ.get("PORT"), os.environ.get("PORT"))) | [
"os.environ.get"
] | [((3338, 3360), 'os.environ.get', 'os.environ.get', (['"""PORT"""'], {}), "('PORT')\n", (3352, 3360), False, 'import os\n'), ((3362, 3384), 'os.environ.get', 'os.environ.get', (['"""PORT"""'], {}), "('PORT')\n", (3376, 3384), False, 'import os\n')] |
# -*- coding: utf-8 -*-
"""
This file is part of pyCMBS.
(c) 2012- <NAME>
For COPYING and LICENSE details, please refer to the LICENSE file
"""
import os
import glob
import unittest
class TestCodingStandards(unittest.TestCase):
    """
    test coding standards: check for license
    """

    def test_PythonFiles_HaveLicenseText(self):
        """Every python file of the installation must carry the license text."""
        pyfiles = find_python_files()
        # Collect all offenders so the failure message lists every file at once.
        files_missing_license = [f for f in pyfiles if license_missing(f)]
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # assertEqual is the supported spelling.
        self.assertEqual(len(files_missing_license), 0,
                         str(files_missing_license))
def find_python_files():
    """Collect every ``*.py`` and ``*.pyx`` file below the pyCMBS root.

    Returns a list containing all ``.py`` paths first, followed by all
    ``.pyx`` paths, mirroring the original traversal order.
    """
    # Installation root: two directories above the directory of this file.
    here = os.path.dirname(os.path.realpath(__file__))
    root = here + os.sep + '..' + os.sep + '..' + os.sep

    def _collect(extension):
        # One full tree walk per extension keeps the .py / .pyx grouping.
        found = []
        for dirpath, dirnames, files in os.walk(root):
            for fname in files:
                if fname.endswith(extension):
                    found.append(os.path.join(dirpath, fname))
        return found

    return _collect('.py') + _collect('.pyx')
def license_missing(filename):
    """Return True when *filename* lacks the pyCMBS license header.

    Files inside skipped directories, explicitly skipped files, and files
    whose path contains a skip tag are always treated as licensed.

    Parameters
    ----------
    filename : str
        Path of the python file to inspect.

    Returns
    -------
    bool
        True if the license text is missing and the file is not exempt.
    """
    license_string = \
        "This file is part of pyCMBS." + "\n" + \
        "(c) 2012- <NAME>" + "\n" + \
        "For COPYING and LICENSE details, please refer to the LICENSE file"

    skip_dirs = ['docsrc']    # directories exempt from the check
    skip_files = ['emd.py']   # individually exempt files
    skip_tags = ['cartopy']   # path fragments marking exempt files

    # Files inside skipped directories are never reported.
    if any(sd in os.path.dirname(filename) for sd in skip_dirs):
        return False
    # Exempt files count as licensed regardless of content.
    if os.path.basename(filename) in skip_files:
        return False
    if any(tag in filename for tag in skip_tags):
        return False

    # Context manager guarantees the handle is closed even if read() raises
    # (the original left the file open on a read error).
    with open(filename, 'r') as fh:
        file_contents = fh.read()
    return license_string not in file_contents
| [
"os.path.join",
"os.path.realpath",
"os.path.dirname",
"os.path.basename",
"os.walk"
] | [((1027, 1051), 'os.path.join', 'os.path.join', (['dirpath', 'f'], {}), '(dirpath, f)\n', (1039, 1051), False, 'import os\n'), ((1180, 1204), 'os.path.join', 'os.path.join', (['dirpath', 'f'], {}), '(dirpath, f)\n', (1192, 1204), False, 'import os\n'), ((1999, 2025), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (2015, 2025), False, 'import os\n'), ((473, 499), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (489, 499), False, 'import os\n'), ((1092, 1105), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (1099, 1105), False, 'import os\n'), ((1245, 1258), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (1252, 1258), False, 'import os\n'), ((1765, 1790), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (1780, 1790), False, 'import os\n'), ((931, 957), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (947, 957), False, 'import os\n')] |
from typing import Optional
import requests
from mstrio.connection import Connection
from mstrio.utils.error_handlers import ErrorHandler
@ErrorHandler(err_msg='Error while creating the package holder')
def create_package_holder(connection: Connection, project_id: Optional[str] = None,
                          error_msg: Optional[str] = None) -> requests.Response:
    """Create a new in-memory metadata package holder.

    Args:
        connection (Connection): MicroStrategy connection object.
        project_id (Optional[str]): Project ID; falls back to the project of
            `connection` when omitted.
        error_msg (Optional[str]): Custom message consumed by the decorator.

    Returns:
        requests.Response: Full HTTP response returned by the server.
    """
    if project_id is None:
        project_id = connection.project_id
    endpoint = f'{connection.base_url}/api/packages'
    return connection.post(url=endpoint, headers={'X-MSTR-ProjectID': project_id})
@ErrorHandler(err_msg='Error while updating the package holder with id: {id}')
def update_package_holder(connection: Connection, body: dict, id: str,
                          project_id: Optional[str] = None, prefer: str = "respond-async",
                          error_msg: Optional[str] = None) -> requests.Response:
    """Fill an (empty) in-memory package holder from a specification.

    Args:
        connection (Connection): MicroStrategy connection object.
        body (dict): Dictionarized PackageConfig object (via `to_dict()`).
        id (str): ID of the package holder to fill.
        project_id (Optional[str]): Project ID; falls back to the project of
            `connection` when omitted.
        prefer (str, optional): The REST API only supports asynchronous mode,
            so this must stay "respond-async".
        error_msg (Optional[str]): Custom message consumed by the decorator.

    Returns:
        requests.Response: Full HTTP response returned by the server.
    """
    if project_id is None:
        project_id = connection.project_id
    request_headers = {'X-MSTR-ProjectID': project_id, 'Prefer': prefer}
    return connection.put(url=f'{connection.base_url}/api/packages/{id}',
                          headers=request_headers, json=body)
@ErrorHandler(err_msg='Error while downloading the package with id: {id}')
def download_package(connection: Connection, id: str, project_id: Optional[str] = None,
                     error_msg: Optional[str] = None) -> requests.Response:
    """Download a package binary.

    Args:
        connection (Connection): MicroStrategy connection object.
        id (str): ID of the package to download.
        project_id (Optional[str]): Project ID; falls back to the project of
            `connection` when omitted.
        error_msg (Optional[str]): Custom message consumed by the decorator.

    Returns:
        requests.Response: Full HTTP response returned by the server.
    """
    if project_id is None:
        project_id = connection.project_id
    endpoint = f'{connection.base_url}/api/packages/{id}/binary'
    return connection.get(url=endpoint, headers={'X-MSTR-ProjectID': project_id})
@ErrorHandler(err_msg='Error while uploading the package with id: {id}')
def upload_package(connection: Connection, id: str, file: bytes, project_id: Optional[str] = None,
                   error_msg: Optional[str] = None) -> requests.Response:
    """Upload a package binary directly to the sandbox.

    Args:
        connection (Connection): MicroStrategy connection object.
        id (str): ID of the package to upload.
        file (bytes): Package content as a binary string.
        project_id (Optional[str]): Project ID; falls back to the project of
            `connection` when omitted.
        error_msg (Optional[str]): Custom message consumed by the decorator.

    Returns:
        requests.Response: Full HTTP response returned by the server.
    """
    if project_id is None:
        project_id = connection.project_id
    endpoint = f'{connection.base_url}/api/packages/{id}/binary'
    return connection.put(url=endpoint,
                          headers={'X-MSTR-ProjectID': project_id},
                          files={'file': file})
@ErrorHandler(err_msg='Error while getting the package holder with id: {id}')
def get_package_holder(connection: Connection, id: str, project_id: Optional[str] = None,
                       show_content: bool = True,
                       error_msg: Optional[str] = None) -> requests.Response:
    """Get a package definition, including its status and detail content.

    Args:
        connection (Connection): MicroStrategy connection object.
        id (str): ID of the package to retrieve.
        project_id (Optional[str]): Project ID; falls back to the project of
            `connection` when omitted.
        show_content (bool, optional): Whether to include the package content.
        error_msg (Optional[str]): Custom message consumed by the decorator.

    Returns:
        requests.Response: Full HTTP response returned by the server.
    """
    if project_id is None:
        project_id = connection.project_id
    return connection.get(url=f'{connection.base_url}/api/packages/{id}',
                          headers={'X-MSTR-ProjectID': project_id},
                          params={'showContent': show_content})
@ErrorHandler(err_msg='Error while deleting the package holder with id: {id}')
def delete_package_holder(connection: Connection, id: str, project_id: Optional[str] = None,
                          prefer: str = 'respond-async',
                          error_msg: Optional[str] = None) -> requests.Response:
    """Delete a package holder, releasing its Intelligence Server resources.

    Args:
        connection (Connection): MicroStrategy connection object.
        id (str): ID of the package to delete.
        project_id (Optional[str]): Project ID; falls back to the project of
            `connection` when omitted.
        prefer (str, optional): The REST API only supports asynchronous mode,
            so this must stay "respond-async".
        error_msg (Optional[str]): Custom message consumed by the decorator.

    Returns:
        requests.Response: Full HTTP response returned by the server.
    """
    if project_id is None:
        project_id = connection.project_id
    request_headers = {'X-MSTR-ProjectID': project_id, 'Prefer': prefer}
    return connection.delete(url=f'{connection.base_url}/api/packages/{id}',
                             headers=request_headers)
@ErrorHandler(err_msg='Error while creating the import for package holder with id: {id}')
def create_import(connection: Connection, id: str, project_id: Optional[str] = None,
                  generate_undo: bool = False,
                  error_msg: Optional[str] = None) -> requests.Response:
    """Create a package import process.

    Args:
        connection (Connection): MicroStrategy connection object.
        id (str): ID of the package for which the import is created.
        project_id (Optional[str]): Project ID; falls back to the project of
            `connection` when omitted.
        generate_undo (bool, optional): Whether to generate an undo package.
        error_msg (Optional[str]): Custom message consumed by the decorator.

    Returns:
        requests.Response: Full HTTP response returned by the server.
    """
    # TODO: Change to a parameter when any other values are supported
    prefer = 'respond-async'
    if project_id is None:
        project_id = connection.project_id
    request_headers = {'X-MSTR-ProjectID': project_id, 'Prefer': prefer}
    request_params = {'packageId': id, 'generateUndo': generate_undo}
    return connection.post(url=f'{connection.base_url}/api/packages/imports',
                           headers=request_headers,
                           params=request_params)
@ErrorHandler(err_msg='Error while getting the import with id: {id}')
def get_import(connection: Connection, id: str, project_id: Optional[str] = None,
               error_msg: Optional[str] = None) -> requests.Response:
    """Get the result of a package import process.

    Args:
        connection (Connection): MicroStrategy connection object.
        id (str): Import process ID.
        project_id (Optional[str]): Project ID; falls back to the project of
            `connection` when omitted.
        error_msg (Optional[str]): Custom message consumed by the decorator.

    Returns:
        requests.Response: Full HTTP response returned by the server.
    """
    if project_id is None:
        project_id = connection.project_id
    endpoint = f'{connection.base_url}/api/packages/imports/{id}'
    return connection.get(url=endpoint, headers={'X-MSTR-ProjectID': project_id})
@ErrorHandler(err_msg='Error while deleting the import with id: {id}')
def delete_import(connection: Connection, id: str, project_id: Optional[str] = None,
                  error_msg: Optional[str] = None) -> requests.Response:
    """Close a previously created import process.

    Args:
        connection (Connection): MicroStrategy connection object.
        id (str): Import process ID.
        project_id (Optional[str]): Project ID; falls back to the project of
            `connection` when omitted.
        error_msg (Optional[str]): Custom message consumed by the decorator.

    Returns:
        requests.Response: Full HTTP response returned by the server.
    """
    # TODO: Change to a parameter when any other values are supported
    prefer = 'respond-async'
    if project_id is None:
        project_id = connection.project_id
    request_headers = {'X-MSTR-ProjectID': project_id, 'Prefer': prefer}
    return connection.delete(url=f'{connection.base_url}/api/packages/imports/{id}',
                             headers=request_headers)
@ErrorHandler(err_msg='Error while creating the undo for import with id: {id}')
def create_undo(connection: Connection, id: str, project_id: Optional[str] = None,
                error_msg: Optional[str] = None) -> requests.Response:
    """Download the undo package binary for an import process.

    Args:
        connection (Connection): MicroStrategy connection object.
        id (str): Import process ID.
        project_id (Optional[str]): Project ID; falls back to the project of
            `connection` when omitted.
        error_msg (Optional[str]): Custom message consumed by the decorator.

    Returns:
        requests.Response: Full HTTP response returned by the server.
    """
    if project_id is None:
        project_id = connection.project_id
    endpoint = f'{connection.base_url}/api/packages/imports/{id}/undoPackage/binary'
    return connection.get(url=endpoint, headers={'X-MSTR-ProjectID': project_id})
| [
"mstrio.utils.error_handlers.ErrorHandler"
] | [((143, 206), 'mstrio.utils.error_handlers.ErrorHandler', 'ErrorHandler', ([], {'err_msg': '"""Error while creating the package holder"""'}), "(err_msg='Error while creating the package holder')\n", (155, 206), False, 'from mstrio.utils.error_handlers import ErrorHandler\n'), ((1041, 1118), 'mstrio.utils.error_handlers.ErrorHandler', 'ErrorHandler', ([], {'err_msg': '"""Error while updating the package holder with id: {id}"""'}), "(err_msg='Error while updating the package holder with id: {id}')\n", (1053, 1118), False, 'from mstrio.utils.error_handlers import ErrorHandler\n'), ((2561, 2634), 'mstrio.utils.error_handlers.ErrorHandler', 'ErrorHandler', ([], {'err_msg': '"""Error while downloading the package with id: {id}"""'}), "(err_msg='Error while downloading the package with id: {id}')\n", (2573, 2634), False, 'from mstrio.utils.error_handlers import ErrorHandler\n'), ((3512, 3583), 'mstrio.utils.error_handlers.ErrorHandler', 'ErrorHandler', ([], {'err_msg': '"""Error while uploading the package with id: {id}"""'}), "(err_msg='Error while uploading the package with id: {id}')\n", (3524, 3583), False, 'from mstrio.utils.error_handlers import ErrorHandler\n'), ((4569, 4645), 'mstrio.utils.error_handlers.ErrorHandler', 'ErrorHandler', ([], {'err_msg': '"""Error while getting the package holder with id: {id}"""'}), "(err_msg='Error while getting the package holder with id: {id}')\n", (4581, 4645), False, 'from mstrio.utils.error_handlers import ErrorHandler\n'), ((5766, 5843), 'mstrio.utils.error_handlers.ErrorHandler', 'ErrorHandler', ([], {'err_msg': '"""Error while deleting the package holder with id: {id}"""'}), "(err_msg='Error while deleting the package holder with id: {id}')\n", (5778, 5843), False, 'from mstrio.utils.error_handlers import ErrorHandler\n'), ((7140, 7233), 'mstrio.utils.error_handlers.ErrorHandler', 'ErrorHandler', ([], {'err_msg': '"""Error while creating the import for package holder with id: {id}"""'}), "(err_msg=\n 'Error while 
creating the import for package holder with id: {id}')\n", (7152, 7233), False, 'from mstrio.utils.error_handlers import ErrorHandler\n'), ((8532, 8600), 'mstrio.utils.error_handlers.ErrorHandler', 'ErrorHandler', ([], {'err_msg': '"""Error while getting the import with id: {id}"""'}), "(err_msg='Error while getting the import with id: {id}')\n", (8544, 8600), False, 'from mstrio.utils.error_handlers import ErrorHandler\n'), ((9463, 9532), 'mstrio.utils.error_handlers.ErrorHandler', 'ErrorHandler', ([], {'err_msg': '"""Error while deleting the import with id: {id}"""'}), "(err_msg='Error while deleting the import with id: {id}')\n", (9475, 9532), False, 'from mstrio.utils.error_handlers import ErrorHandler\n'), ((10570, 10648), 'mstrio.utils.error_handlers.ErrorHandler', 'ErrorHandler', ([], {'err_msg': '"""Error while creating the undo for import with id: {id}"""'}), "(err_msg='Error while creating the undo for import with id: {id}')\n", (10582, 10648), False, 'from mstrio.utils.error_handlers import ErrorHandler\n')] |
# This module deals with test data representation for the third task
#########################################################################################
import os
from dialent.common.util import normalize
from dialent.common.util import safeOpen
from dialent.objects.fact import Fact
#########################################################################################
class Test:
    """Test markup for the third track"""

    def __init__(self, name, dir='.'):
        """Load the data from the given document

        name - file to load the data from (without an extension)
        """
        try:
            self.name = name
            full_name = os.path.join(dir, name + '.task3')
            self.load(full_name)
        except Exception as err:
            print('Failed to load "{}"'.format(full_name))
            print(err)

    def load(self, filename):
        """Do the exception-prone loading"""
        self.facts = []
        # Facts are separated by blank lines; accumulate lines until one is hit.
        chunk = ''
        with safeOpen(filename) as f:
            for raw_line in f:
                line = normalize(raw_line)
                if line:
                    chunk += line + '\n'
                elif chunk:
                    self.facts.append(Fact.fromTest(chunk))
                    chunk = ''
        # A file that does not end with a blank line leaves a trailing fact.
        if chunk:
            self.facts.append(Fact.fromTest(chunk))
| [
"dialent.objects.fact.Fact.fromTest",
"dialent.common.util.normalize",
"os.path.join",
"dialent.common.util.safeOpen"
] | [((681, 715), 'os.path.join', 'os.path.join', (['dir', "(name + '.task3')"], {}), "(dir, name + '.task3')\n", (693, 715), False, 'import os\n'), ((979, 997), 'dialent.common.util.safeOpen', 'safeOpen', (['filename'], {}), '(filename)\n', (987, 997), False, 'from dialent.common.util import safeOpen\n'), ((1082, 1101), 'dialent.common.util.normalize', 'normalize', (['raw_line'], {}), '(raw_line)\n', (1091, 1101), False, 'from dialent.common.util import normalize\n'), ((1408, 1429), 'dialent.objects.fact.Fact.fromTest', 'Fact.fromTest', (['buffer'], {}), '(buffer)\n', (1421, 1429), False, 'from dialent.objects.fact import Fact\n'), ((1219, 1240), 'dialent.objects.fact.Fact.fromTest', 'Fact.fromTest', (['buffer'], {}), '(buffer)\n', (1232, 1240), False, 'from dialent.objects.fact import Fact\n')] |
import os

# Advent of Code 2021, day 3 part 2: life-support rating from a diagnostic
# report of binary numbers.
# Original built the path as dirname + "\\input", which only works on Windows;
# os.path.join is portable.
filename = os.path.join(os.path.dirname(__file__), "input")

report = []
with open(filename) as file:
    for line in file:
        report.append(line.rstrip())

width = len(report[0])
print(f'len {width}')


def _filter_by_bit(candidates, bit, keep_most_common):
    """Keep candidates matching the (most|least) common value at *bit*.

    Ties select '1' for the most-common rule and '0' for the least-common
    rule. A single remaining candidate is returned unchanged.
    """
    if len(candidates) == 1:
        return candidates
    ones = sum(1 for value in candidates if value[bit] == '1')
    majority = ones >= len(candidates) / 2
    keep = '1' if majority == keep_most_common else '0'
    return [value for value in candidates if value[bit] == keep]


# Oxygen generator keeps the most common bit; CO2 scrubber the least common.
# The original duplicated this loop (with per-round recounting) for each rating.
oxygen = report
co2 = report
for position in range(width):
    oxygen = _filter_by_bit(oxygen, position, True)
    co2 = _filter_by_bit(co2, position, False)

print(int(co2[0], 2) * int(oxygen[0], 2))
| [
"os.path.dirname"
] | [((22, 47), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (37, 47), False, 'import os\n')] |
# Bootstrap script: make the sibling 'Imagination' and 'xmode' checkouts
# importable before pulling in keymaster, then start the application.
import os, sys
# NOTE(review): paths are resolved relative to the current working directory,
# not this file's location -- running from another directory changes them.
sys.path.insert(0, os.path.join(os.getcwd(), '..', 'Imagination'))
sys.path.insert(0, os.path.join(os.getcwd(), '..', 'xmode'))
from keymaster.starter import activate
activate() | [
"keymaster.starter.activate",
"os.getcwd"
] | [((183, 193), 'keymaster.starter.activate', 'activate', ([], {}), '()\n', (191, 193), False, 'from keymaster.starter import activate\n'), ((47, 58), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (56, 58), False, 'import os, sys\n'), ((114, 125), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (123, 125), False, 'import os, sys\n')] |
# Generated by Django 3.0.11 on 2020-11-19 10:40
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the MovableDay field 'period' to 'season'."""
    dependencies = [
        ('dcodex_lectionary', '0030_auto_20201119_2131'),
    ]
    operations = [
        # Pure column rename; no data migration required.
        migrations.RenameField(
            model_name='movableday',
            old_name='period',
            new_name='season',
        ),
    ]
| [
"django.db.migrations.RenameField"
] | [((238, 328), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""movableday"""', 'old_name': '"""period"""', 'new_name': '"""season"""'}), "(model_name='movableday', old_name='period', new_name\n ='season')\n", (260, 328), False, 'from django.db import migrations\n')] |
import numpy as np
import os
from utils import *
from keras.preprocessing.image import ImageDataGenerator
from keras.preprocessing import image
from model import Deeplabv3
import keras
from tensorflow.python.keras.layers import *
from tensorflow.python.keras.layers.convolutional import Deconvolution2D
from numpy import random
from random import seed, sample, randrange
from tensorflow.python.keras.callbacks import ReduceLROnPlateau, ModelCheckpoint, CSVLogger
from tensorflow.python.keras.optimizers import Adam
######## Train Model on ISIC
EPOCHS = 10
BS = 2
runnr = randrange(1, 10000)
print("RUNNUMBER", runnr)
deeplab_model = Deeplabv3(input_shape=(256, 256, 3), classes=2)
test_percentage = 0.1
file_list = recursive_glob(
"/home/bijan/Workspace/Python/keras-deeplab-v3-plus/data/ISIC2018_Task1-2_Training_Input",
".jpg"
)
seed(123)
test_indices = sample(range(0, 2594), int(2594 * test_percentage))
test = [sorted(file_list)[k] for k in test_indices]
train = sorted([k for k in file_list if k not in test])
file_list = recursive_glob(
"/home/bijan/Workspace/Python/keras-deeplab-v3-plus/data/ISIC2018_Task1_Training_GroundTruth/",
".png"
)
test_labels = [sorted(file_list)[k] for k in test_indices]
train_labels = sorted(list(set(file_list) - set(test_labels)))
aug = ImageDataGenerator(zoom_range=0.15,
width_shift_range=0.2,
height_shift_range=0.2,
shear_range=0.15,
horizontal_flip=True,
fill_mode="nearest")
csv_logger = CSVLogger('./runs/'+str(runnr)+'_log.csv', append=True, separator=';')
reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, min_lr=0.001)
checkpointer = ModelCheckpoint(filepath='./runs/'+str(runnr)+'_model.hdf5',
verbose=1,
save_weights_only=True,
save_best_only=True)
deeplab_model.compile(optimizer=Adam(lr=0.0001), loss=keras.losses.binary_crossentropy, metrics=['binary_accuracy'])
H = deeplab_model.fit_generator(isic_generator(train, train_labels, batch_size=BS),
validation_data=isic_generator(test, test_labels, batch_size=BS),
validation_steps=len(test_labels),
steps_per_epoch=len(train) // BS,
epochs=EPOCHS,
max_queue_size=3,
callbacks=[checkpointer, csv_logger, reduce_lr])
model_json = deeplab_model.to_json()
with open("./runs/"+str(runnr)+"_model.json", "w") as json_file:
json_file.write(model_json)
print(H.history)
| [
"random.randrange",
"tensorflow.python.keras.callbacks.ReduceLROnPlateau",
"model.Deeplabv3",
"tensorflow.python.keras.optimizers.Adam",
"keras.preprocessing.image.ImageDataGenerator",
"random.seed"
] | [((575, 594), 'random.randrange', 'randrange', (['(1)', '(10000)'], {}), '(1, 10000)\n', (584, 594), False, 'from random import seed, sample, randrange\n'), ((638, 685), 'model.Deeplabv3', 'Deeplabv3', ([], {'input_shape': '(256, 256, 3)', 'classes': '(2)'}), '(input_shape=(256, 256, 3), classes=2)\n', (647, 685), False, 'from model import Deeplabv3\n'), ((846, 855), 'random.seed', 'seed', (['(123)'], {}), '(123)\n', (850, 855), False, 'from random import seed, sample, randrange\n'), ((1305, 1456), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'zoom_range': '(0.15)', 'width_shift_range': '(0.2)', 'height_shift_range': '(0.2)', 'shear_range': '(0.15)', 'horizontal_flip': '(True)', 'fill_mode': '"""nearest"""'}), "(zoom_range=0.15, width_shift_range=0.2,\n height_shift_range=0.2, shear_range=0.15, horizontal_flip=True,\n fill_mode='nearest')\n", (1323, 1456), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((1671, 1746), 'tensorflow.python.keras.callbacks.ReduceLROnPlateau', 'ReduceLROnPlateau', ([], {'monitor': '"""val_loss"""', 'factor': '(0.2)', 'patience': '(3)', 'min_lr': '(0.001)'}), "(monitor='val_loss', factor=0.2, patience=3, min_lr=0.001)\n", (1688, 1746), False, 'from tensorflow.python.keras.callbacks import ReduceLROnPlateau, ModelCheckpoint, CSVLogger\n'), ((2006, 2021), 'tensorflow.python.keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.0001)'}), '(lr=0.0001)\n', (2010, 2021), False, 'from tensorflow.python.keras.optimizers import Adam\n')] |
# Copyright 1997 - 2018 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class JoinPrune(Base):
"""The JoinPrune class encapsulates a user managed joinPrune node in the ixnetwork hierarchy.
An instance of the class can be obtained by accessing the JoinPrune property from a parent instance.
The internal properties list will be empty when the property is accessed and is populated from the server using the find method.
The internal properties list can be managed by the user by using the add and remove methods.
"""
_SDM_NAME = 'joinPrune'
def __init__(self, parent):
super(JoinPrune, self).__init__(parent)
@property
def LearnedMgrState(self):
"""An instance of the LearnedMgrState class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.pimsm.router.interface.joinprune.learnedmgrstate.learnedmgrstate.LearnedMgrState)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.pimsm.router.interface.joinprune.learnedmgrstate.learnedmgrstate import LearnedMgrState
return LearnedMgrState(self)
@property
def DiscardRegisterStates(self):
"""If checked, the Learned Join States sent by the RP (DUT) in response to this specific Register Message will be discarded - and will not be displayed in the table of the Register Range window.
Returns:
bool
"""
return self._get_attribute('discardRegisterStates')
@DiscardRegisterStates.setter
def DiscardRegisterStates(self, value):
self._set_attribute('discardRegisterStates', value)
@property
def Enabled(self):
"""Enables the use of this join/prune.
Returns:
bool
"""
return self._get_attribute('enabled')
@Enabled.setter
def Enabled(self, value):
self._set_attribute('enabled', value)
@property
def EnabledDataMdt(self):
"""If enabled, pimsmLearnedDataMdt will be available. (default = disabled)
Returns:
bool
"""
return self._get_attribute('enabledDataMdt')
@EnabledDataMdt.setter
def EnabledDataMdt(self, value):
self._set_attribute('enabledDataMdt', value)
@property
def FlapEnabled(self):
"""Enables emulated flapping of this multicast group range. NOTE: Flapping is not supported for the Switchover (*, G) -> (S, G) range type.
Returns:
bool
"""
return self._get_attribute('flapEnabled')
@FlapEnabled.setter
def FlapEnabled(self, value):
self._set_attribute('flapEnabled', value)
@property
def FlapInterval(self):
"""Defines the join/prune flapping interval.
Returns:
number
"""
return self._get_attribute('flapInterval')
@FlapInterval.setter
def FlapInterval(self, value):
self._set_attribute('flapInterval', value)
    @property
    def GroupAddress(self) -> str:
        """An IPv4 or IPv6 address used with the group mask to create a range
        of multicast addresses.

        Returns:
            str
        """
        return self._get_attribute('groupAddress')

    @GroupAddress.setter
    def GroupAddress(self, value: str) -> None:
        self._set_attribute('groupAddress', value)

    @property
    def GroupCount(self) -> int:
        """The number of multicast group addresses to be included in the
        multicast group range. The maximum number of valid possible addresses
        depends on the values for the group address and the group mask width.

        Returns:
            number
        """
        return self._get_attribute('groupCount')

    @GroupCount.setter
    def GroupCount(self, value: int) -> None:
        self._set_attribute('groupCount', value)

    @property
    def GroupMappingMode(self) -> str:
        """Sets the type of mapping that occurs when routes are advertised.
        This only applies for (S, G) and switchover types for MGR and is
        meaningful for RR.

        Returns:
            str(fullyMeshed|oneToOne)
        """
        return self._get_attribute('groupMappingMode')

    @GroupMappingMode.setter
    def GroupMappingMode(self, value: str) -> None:
        self._set_attribute('groupMappingMode', value)

    @property
    def GroupMaskWidth(self) -> int:
        """The number of bits in the mask applied to the group address. (The
        masked bits in the group address form the address prefix.) The default
        value is 32. The valid range is 1 to 128, depending on address family
        type.

        Returns:
            number
        """
        return self._get_attribute('groupMaskWidth')

    @GroupMaskWidth.setter
    def GroupMaskWidth(self, value: int) -> None:
        self._set_attribute('groupMaskWidth', value)

    @property
    def GroupRange(self) -> str:
        """The multicast group range type.

        Returns:
            str(rp|g|sg|sptSwitchOver|registerTriggeredSg)
        """
        return self._get_attribute('groupRange')

    @GroupRange.setter
    def GroupRange(self, value: str) -> None:
        self._set_attribute('groupRange', value)
    @property
    def NumRegToReceivePerSg(self) -> int:
        """If rangeType is set to pimsmJoinsPrunesTypeRegisterTriggeredSG,
        then this is the count of register messages received that will trigger
        transmission of a (S,G) message. (default = 10)

        Returns:
            number
        """
        return self._get_attribute('numRegToReceivePerSg')

    @NumRegToReceivePerSg.setter
    def NumRegToReceivePerSg(self, value: int) -> None:
        self._set_attribute('numRegToReceivePerSg', value)

    @property
    def PackGroupsEnabled(self) -> bool:
        """If enabled, multiple groups can be included within a single packet.

        Returns:
            bool
        """
        return self._get_attribute('packGroupsEnabled')

    @PackGroupsEnabled.setter
    def PackGroupsEnabled(self, value: bool) -> None:
        self._set_attribute('packGroupsEnabled', value)

    @property
    def PruneSourceAddress(self) -> str:
        """ONLY used for (*,G) Type to send (S,G,rpt) Prune Messages.
        (Multicast addresses are invalid.)

        Returns:
            str
        """
        return self._get_attribute('pruneSourceAddress')

    @PruneSourceAddress.setter
    def PruneSourceAddress(self, value: str) -> None:
        self._set_attribute('pruneSourceAddress', value)

    @property
    def PruneSourceCount(self) -> int:
        """The number of prune source addresses to be included. The maximum
        number of valid possible addresses depends on the values for the
        source address and the source mask width. The default value is 0.
        ONLY used for (*,G) type to send (S,G,rpt) prune messages.

        Returns:
            number
        """
        return self._get_attribute('pruneSourceCount')

    @PruneSourceCount.setter
    def PruneSourceCount(self, value: int) -> None:
        self._set_attribute('pruneSourceCount', value)

    @property
    def PruneSourceMaskWidth(self) -> int:
        """The number of bits in the mask applied to the prune source address.
        (The masked bits in the prune source address form the address prefix.)

        Returns:
            number
        """
        return self._get_attribute('pruneSourceMaskWidth')

    @PruneSourceMaskWidth.setter
    def PruneSourceMaskWidth(self, value: int) -> None:
        self._set_attribute('pruneSourceMaskWidth', value)
    @property
    def RpAddress(self) -> str:
        """The IP address of the Rendezvous Point (RP) router.

        Returns:
            str
        """
        return self._get_attribute('rpAddress')

    @RpAddress.setter
    def RpAddress(self, value: str) -> None:
        self._set_attribute('rpAddress', value)

    @property
    def SourceAddress(self) -> str:
        """The Multicast Source Address. Used for (S,G) Type and (S,G, rpt)
        only. (Multicast addresses are invalid.)

        Returns:
            str
        """
        return self._get_attribute('sourceAddress')

    @SourceAddress.setter
    def SourceAddress(self, value: str) -> None:
        self._set_attribute('sourceAddress', value)

    @property
    def SourceCount(self) -> int:
        """The number of multicast source addresses to be included. The
        maximum number of valid possible addresses depends on the values for
        the source address and the source mask width.

        Returns:
            number
        """
        return self._get_attribute('sourceCount')

    @SourceCount.setter
    def SourceCount(self, value: int) -> None:
        self._set_attribute('sourceCount', value)

    @property
    def SourceMaskWidth(self) -> int:
        """The number of bits in the mask applied to the source address. (The
        masked bits in the source address form the address prefix.) The
        default value is 32. The valid range is 1 to 128, depending on address
        family type. Used for (S,G) Type and (S,G, rpt) only.

        Returns:
            number
        """
        return self._get_attribute('sourceMaskWidth')

    @SourceMaskWidth.setter
    def SourceMaskWidth(self, value: int) -> None:
        self._set_attribute('sourceMaskWidth', value)

    @property
    def SptSwitchoverInterval(self) -> int:
        """The time interval (in seconds) allowed for the switch from using
        the RP tree to using a Source-specific tree - from (*,G) to (S,G).
        The default value is 0.

        Returns:
            number
        """
        return self._get_attribute('sptSwitchoverInterval')

    @SptSwitchoverInterval.setter
    def SptSwitchoverInterval(self, value: int) -> None:
        self._set_attribute('sptSwitchoverInterval', value)
    def add(self, DiscardRegisterStates=None, Enabled=None, EnabledDataMdt=None, FlapEnabled=None, FlapInterval=None, GroupAddress=None, GroupCount=None, GroupMappingMode=None, GroupMaskWidth=None, GroupRange=None, NumRegToReceivePerSg=None, PackGroupsEnabled=None, PruneSourceAddress=None, PruneSourceCount=None, PruneSourceMaskWidth=None, RpAddress=None, SourceAddress=None, SourceCount=None, SourceMaskWidth=None, SptSwitchoverInterval=None):
        """Adds a new joinPrune node on the server and retrieves it in this
        instance.

        Each argument configures the attribute of the same name; see the
        matching property on this class for its meaning and accepted values.

        Args:
            DiscardRegisterStates (bool), Enabled (bool), EnabledDataMdt (bool),
            FlapEnabled (bool), FlapInterval (number), GroupAddress (str),
            GroupCount (number), GroupMappingMode (str(fullyMeshed|oneToOne)),
            GroupMaskWidth (number),
            GroupRange (str(rp|g|sg|sptSwitchOver|registerTriggeredSg)),
            NumRegToReceivePerSg (number), PackGroupsEnabled (bool),
            PruneSourceAddress (str), PruneSourceCount (number),
            PruneSourceMaskWidth (number), RpAddress (str),
            SourceAddress (str), SourceCount (number),
            SourceMaskWidth (number), SptSwitchoverInterval (number)

        Returns:
            self: This instance with all currently retrieved joinPrune data
                using find and the newly added joinPrune data available
                through an iterator or index.

        Raises:
            ServerError: The server has encountered an uncategorized error
                condition.
        """
        # locals() deliberately captures self plus every keyword argument for
        # the underlying helper -- do not introduce additional local variables
        # before this call.
        return self._create(locals())
    def remove(self) -> None:
        """Deletes all the joinPrune data in this instance from the server.

        Raises:
            NotFoundError: The requested resource does not exist on the server.
            ServerError: The server has encountered an uncategorized error
                condition.
        """
        self._delete()
    def find(self, DiscardRegisterStates=None, Enabled=None, EnabledDataMdt=None, FlapEnabled=None, FlapInterval=None, GroupAddress=None, GroupCount=None, GroupMappingMode=None, GroupMaskWidth=None, GroupRange=None, NumRegToReceivePerSg=None, PackGroupsEnabled=None, PruneSourceAddress=None, PruneSourceCount=None, PruneSourceMaskWidth=None, RpAddress=None, SourceAddress=None, SourceCount=None, SourceMaskWidth=None, SptSwitchoverInterval=None):
        """Finds and retrieves joinPrune data from the server.

        All named parameters support regex and can be used to selectively
        retrieve joinPrune data from the server. By default the find method
        takes no parameters and will retrieve all joinPrune data from the
        server. Each argument matches the attribute of the same name; see the
        corresponding property on this class for its meaning and accepted
        values.

        Args:
            DiscardRegisterStates (bool), Enabled (bool), EnabledDataMdt (bool),
            FlapEnabled (bool), FlapInterval (number), GroupAddress (str),
            GroupCount (number), GroupMappingMode (str(fullyMeshed|oneToOne)),
            GroupMaskWidth (number),
            GroupRange (str(rp|g|sg|sptSwitchOver|registerTriggeredSg)),
            NumRegToReceivePerSg (number), PackGroupsEnabled (bool),
            PruneSourceAddress (str), PruneSourceCount (number),
            PruneSourceMaskWidth (number), RpAddress (str),
            SourceAddress (str), SourceCount (number),
            SourceMaskWidth (number), SptSwitchoverInterval (number)

        Returns:
            self: This instance with matching joinPrune data retrieved from
                the server available through an iterator or index.

        Raises:
            ServerError: The server has encountered an uncategorized error
                condition.
        """
        # locals() deliberately captures self plus every keyword argument for
        # the underlying helper -- do not introduce additional local variables
        # before this call.
        return self._select(locals())
    def read(self, href):
        """Retrieves a single instance of joinPrune data from the server.

        Args:
            href (str): An href to the instance to be retrieved.

        Returns:
            self: This instance with the joinPrune data from the server
                available through an iterator or index.

        Raises:
            NotFoundError: The requested resource does not exist on the server.
            ServerError: The server has encountered an uncategorized error
                condition.
        """
        return self._read(href)
| [
"ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.pimsm.router.interface.joinprune.learnedmgrstate.learnedmgrstate.LearnedMgrState"
] | [((2369, 2390), 'ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.pimsm.router.interface.joinprune.learnedmgrstate.learnedmgrstate.LearnedMgrState', 'LearnedMgrState', (['self'], {}), '(self)\n', (2384, 2390), False, 'from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.pimsm.router.interface.joinprune.learnedmgrstate.learnedmgrstate import LearnedMgrState\n')] |
# -*- coding: utf-8 -*-
import os
import sys
import time
import random
from subprocess import Popen, PIPE
from django.utils import termcolors
# Divisor controlling the simulated typing speed (higher = faster typing).
TYPING_SPEED = 50


def simulate_command_type(commands, shell=False):
    """Simulate typing each command of an '&& '-joined chain.

    Commands that start with 'source' are skipped (they would not be
    interesting to show in a demo).
    """
    for part in commands.split('&& '):
        if part.startswith('source'):
            continue
        simulate_type(part, shell=shell)


def simulate_type(command, shell=False):
    """Print a fake shell prompt and "type" *command* one character at a time.

    A short random delay follows each character to imitate human typing.
    The ``shell`` flag is accepted for interface compatibility but is not
    used here.
    """
    sys.stdout.write('breno@localhost: ~$ ')
    delay_scale = 10.0 / TYPING_SPEED
    for char in command:
        sys.stdout.write(char)
        sys.stdout.flush()
        time.sleep(random.random() * delay_scale)
    print('')
def bold(text):
    """Return *text* styled for bold black terminal output."""
    style = termcolors.make_style(fg='black', opts=('bold',))
    return style(text)


def info(text):
    """Return *text* styled in cyan for informational output."""
    style = termcolors.make_style(fg='cyan')
    return style(text)


def error(text):
    """Return *text* styled in bold red for error output."""
    style = termcolors.make_style(fg='red', opts=('bold',))
    return style(text)
class Terminal(object):
    """Drive a terminal session for scripted demos.

    Supports simulated typing of commands before executing them, and (on
    macOS, via osascript) minimising/restoring the Terminal window.
    """

    def __init__(self, verbose=True, python='python'):
        # NOTE(review): 'proccess' keeps the original (misspelled) public
        # attribute name for backward compatibility.
        self.proccess = None
        self.verbose = verbose
        self.python = python

    def execute(self, command, clear=True, base_dir=None):
        """Simulate typing *command* (when verbose) and run it with os.system.

        Args:
            command: Shell command line; an '&& '-joined chain is typed one
                command at a time.
            clear: Clear the screen and print a fresh prompt first.
            base_dir: If given, the command is run from this directory.
        """
        if clear:
            os.system('clear')
            # NOTE(review): indentation was ambiguous in the source dump; the
            # prompt redraw is grouped with the clear -- confirm upstream.
            simulate_type('', shell=True)
        if self.verbose:
            simulate_command_type(command, shell=True)
        if command.startswith('python'):
            # BUG FIX: str.replace returns a new string -- the original call
            # discarded the result, so the configured interpreter was never
            # substituted. (Note this replaces every occurrence of 'python'
            # in the command line, matching the original intent.)
            command = command.replace('python', self.python)
        if base_dir:
            command = 'cd {} && {}'.format(base_dir, command)
        if not self.verbose:
            command = '{} > /dev/null'.format(command)
        os.system(command)

    def show(self, visible=True):
        """(Un)minimise Terminal window 1 via osascript; no-op elsewhere."""
        if os.path.exists('/usr/bin/osascript'):
            minimize_terminal_script = '''
            tell application "Terminal"
            set miniaturized of window 1 to {}
            end tell
            '''.format('false' if visible else 'true')
            self.proccess = Popen(['osascript', '-'], stdin=PIPE, stdout=PIPE, stderr=PIPE)
            self.proccess.communicate(minimize_terminal_script.encode())

    def hide(self):
        """Minimise the Terminal window (macOS only)."""
        self.show(False)
| [
"os.path.exists",
"django.utils.termcolors.make_style",
"subprocess.Popen",
"random.random",
"os.system",
"sys.stdout.flush",
"sys.stdout.write"
] | [((395, 435), 'sys.stdout.write', 'sys.stdout.write', (['"""breno@localhost: ~$ """'], {}), "('breno@localhost: ~$ ')\n", (411, 435), False, 'import sys\n'), ((466, 485), 'sys.stdout.write', 'sys.stdout.write', (['c'], {}), '(c)\n', (482, 485), False, 'import sys\n'), ((494, 512), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (510, 512), False, 'import sys\n'), ((610, 659), 'django.utils.termcolors.make_style', 'termcolors.make_style', ([], {'fg': '"""black"""', 'opts': "('bold',)"}), "(fg='black', opts=('bold',))\n", (631, 659), False, 'from django.utils import termcolors\n'), ((695, 727), 'django.utils.termcolors.make_style', 'termcolors.make_style', ([], {'fg': '"""cyan"""'}), "(fg='cyan')\n", (716, 727), False, 'from django.utils import termcolors\n'), ((764, 811), 'django.utils.termcolors.make_style', 'termcolors.make_style', ([], {'fg': '"""red"""', 'opts': "('bold',)"}), "(fg='red', opts=('bold',))\n", (785, 811), False, 'from django.utils import termcolors\n'), ((1488, 1506), 'os.system', 'os.system', (['command'], {}), '(command)\n', (1497, 1506), False, 'import os\n'), ((1553, 1589), 'os.path.exists', 'os.path.exists', (['"""/usr/bin/osascript"""'], {}), "('/usr/bin/osascript')\n", (1567, 1589), False, 'import os\n'), ((1079, 1097), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (1088, 1097), False, 'import os\n'), ((1838, 1901), 'subprocess.Popen', 'Popen', (["['osascript', '-']"], {'stdin': 'PIPE', 'stdout': 'PIPE', 'stderr': 'PIPE'}), "(['osascript', '-'], stdin=PIPE, stdout=PIPE, stderr=PIPE)\n", (1843, 1901), False, 'from subprocess import Popen, PIPE\n'), ((532, 547), 'random.random', 'random.random', ([], {}), '()\n', (545, 547), False, 'import random\n')] |
import humps
import pytest
from django import test
from django.contrib.auth.models import User
from django.urls import reverse
def test_profile_updates_correctly(
    profile_admin_client: test.Client, user: User, update_profile_params
):
    """PATCHing the admin profile endpoint persists every submitted field."""
    endpoint = "{}?email={}".format(reverse("admin_update_profile"), user.email)
    response = profile_admin_client.patch(
        endpoint, humps.camelize(update_profile_params)
    )
    assert response.status_code == 200

    user.refresh_from_db()
    updated_profile = user.profile
    for field, expected in update_profile_params.items():
        assert getattr(updated_profile, field) == expected
@pytest.mark.parametrize(
    argnames="method, status",
    argvalues=[("get", 400), ("put", 400), ("post", 405), ("patch", 400)],
)
def test_requires_query_param(
    profile_admin_client: test.Client, method: str, status: int
):
    """Requests missing the ?email= query parameter are rejected."""
    endpoint = reverse("admin_update_profile")
    do_request = getattr(profile_admin_client, method)
    response = do_request(endpoint)
    assert response.status_code == status
def test_missing_profile_returns_404(profile_admin_client: test.Client):
    """An email that matches no stored profile yields a 404."""
    endpoint = reverse("admin_update_profile") + "?email=abc"
    response = profile_admin_client.get(endpoint)
    assert response.status_code == 404
@pytest.mark.parametrize(
    argnames="method, status", argvalues=[("get", 200), ("post", 405), ("patch", 200)]
)
def test_staff_user_has_access(
    authed_admin_client: test.Client, user: User, method: str, status: int
):
    """Staff users can reach the endpoint with the allowed HTTP methods."""
    endpoint = "{}?email={}".format(reverse("admin_update_profile"), user.email)
    do_request = getattr(authed_admin_client, method)
    response = do_request(endpoint)
    assert response.status_code == status
@pytest.mark.parametrize(
    argnames="method, status",
    argvalues=[("get", 403), ("put", 403), ("post", 405), ("patch", 403)],
)
def test_view_requires_profile_admin_group(
    authed_client: test.Client, user: User, method: str, status: int
):
    """Authenticated users outside the profile-admin group are forbidden."""
    endpoint = "{}?email={}".format(reverse("admin_update_profile"), user.email)
    do_request = getattr(authed_client, method)
    response = do_request(endpoint)
    assert response.status_code == status
| [
"pytest.mark.parametrize",
"humps.camelize",
"django.urls.reverse"
] | [((577, 702), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([], {'argnames': '"""method, status"""', 'argvalues': "[('get', 400), ('put', 400), ('post', 405), ('patch', 400)]"}), "(argnames='method, status', argvalues=[('get', 400),\n ('put', 400), ('post', 405), ('patch', 400)])\n", (600, 702), False, 'import pytest\n'), ((1192, 1303), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([], {'argnames': '"""method, status"""', 'argvalues': "[('get', 200), ('post', 405), ('patch', 200)]"}), "(argnames='method, status', argvalues=[('get', 200),\n ('post', 405), ('patch', 200)])\n", (1215, 1303), False, 'import pytest\n'), ((1611, 1736), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([], {'argnames': '"""method, status"""', 'argvalues': "[('get', 403), ('put', 403), ('post', 405), ('patch', 403)]"}), "(argnames='method, status', argvalues=[('get', 403),\n ('put', 403), ('post', 405), ('patch', 403)])\n", (1634, 1736), False, 'import pytest\n'), ((349, 386), 'humps.camelize', 'humps.camelize', (['update_profile_params'], {}), '(update_profile_params)\n', (363, 386), False, 'import humps\n'), ((254, 285), 'django.urls.reverse', 'reverse', (['"""admin_update_profile"""'], {}), "('admin_update_profile')\n", (261, 285), False, 'from django.urls import reverse\n'), ((880, 911), 'django.urls.reverse', 'reverse', (['"""admin_update_profile"""'], {}), "('admin_update_profile')\n", (887, 911), False, 'from django.urls import reverse\n'), ((1070, 1101), 'django.urls.reverse', 'reverse', (['"""admin_update_profile"""'], {}), "('admin_update_profile')\n", (1077, 1101), False, 'from django.urls import reverse\n'), ((1487, 1518), 'django.urls.reverse', 'reverse', (['"""admin_update_profile"""'], {}), "('admin_update_profile')\n", (1494, 1518), False, 'from django.urls import reverse\n'), ((1925, 1956), 'django.urls.reverse', 'reverse', (['"""admin_update_profile"""'], {}), "('admin_update_profile')\n", (1932, 1956), False, 'from django.urls import 
reverse\n')] |
#!/usr/bin/env python
__author__ = "<NAME>"
__copyrights__ = "Copyright 2018, the python-uds project"
__credits__ = ["<NAME>"]
__license__ = "MIT"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
import cProfile
import sys
from functools import reduce, wraps
# ----------------------------------------------------------------
# Profiler Code
# ----------------------------------------------------------------
def do_cprofile(func):
    """Decorator that profiles every call to *func* with cProfile.

    The profile stats are printed to stdout after each call -- even when
    *func* raises -- and the wrapped function's return value (or exception)
    is propagated unchanged.
    """
    @wraps(func)  # preserve __name__/__doc__ of the profiled function
    def profiled_func(*args, **kwargs):
        profile = cProfile.Profile()
        try:
            profile.enable()
            result = func(*args, **kwargs)
            profile.disable()
            return result
        finally:
            # Always report, even if the profiled call raised.
            profile.print_stats()
    return profiled_func
# ----------------------------------------------------------------
# buildIntFromList Tests
# ----------------------------------------------------------------
@do_cprofile
def buildIntFromListNonRecursiveFunc(aList):
    """Benchmark variant: fold a big-endian byte list into one int using an
    explicit index loop.

    Kept as-is deliberately -- this module exists to compare implementation
    styles under cProfile, so the loop must not be "optimised" away.
    """
    def buildIntFromList(aList):
        result = 0
        for i in range(0, len(aList)):
            # Element i contributes 8 bits per element that follows it,
            # i.e. the first element becomes the most significant byte.
            result += (aList[i] << (8 * (len(aList) - (i+1))))
        return result
    return buildIntFromList(aList)
@do_cprofile
def buildIntFromListRecursiveFunc(aList):
    """Benchmark variant: fold a big-endian byte list into one int
    recursively.

    Recursion depth equals len(aList); the __main__ driver raises the
    recursion limit to 4000 before calling this with 2500 elements. Each
    step slices aList[1:], so intermediate list copies dominate the cost.
    """
    def buildIntFromList(aList):
        if(len(aList) == 1):
            return aList[0]
        else:
            # Head byte shifted past all remaining bytes, plus the folded tail.
            return (aList[0] << (8 * (len(aList) - 1) )) + buildIntFromList(aList[1:])
    return buildIntFromList(aList)
@do_cprofile
def buildIntFromListReduceFunc(aList):
    """Benchmark variant: fold a big-endian byte list into one int with
    functools.reduce (Horner-style: shift the accumulator left 8 bits and
    add the next byte).
    """
    def buildIntFromList(aList):
        return reduce(lambda x, y: (x << 8) + y, aList)
    return buildIntFromList(aList)
# ----------------------------------------------------------------
# byteListToString Tests
# ----------------------------------------------------------------
@do_cprofile
def byteListToStringNonRecursiveFunc(aList):
    """Benchmark variant: build a string from a list of byte values with
    repeated += concatenation.

    ''.join(map(chr, aList)) would be the idiomatic form, but the point of
    this module is to measure the concatenation approach -- do not change it.
    """
    def byteListToString(aList):
        result = ""
        for i in aList:
            result += chr(i)
        return result
    return byteListToString(aList)
@do_cprofile
def byteListToStringRecursiveFunc(aList):
    """Benchmark variant: build a string from a list of byte values
    recursively (head char + folded tail).

    Recursion depth equals len(aList) and each step copies aList[1:]; the
    __main__ driver raises the recursion limit accordingly.
    """
    def byteListToString(aList):
        if(len(aList) == 1):
            return chr(aList[0])
        else:
            return chr(aList[0]) + byteListToString(aList[1:])
    return byteListToString(aList)
@do_cprofile
def byteListToStringReduceFunc(aList):
    """Benchmark variant: build a string from a list of byte values with
    functools.reduce over the chr-mapped list.
    """
    def byteListToString(aList):
        return reduce(lambda x, y: x + y, list(map(chr, aList)))
    return byteListToString(aList)
if __name__ == "__main__":
    # The recursive variants recurse once per element; 2500-element lists
    # would overflow the default (~1000-frame) limit, so raise it first.
    sys.setrecursionlimit(4000)

    # 2500 identical bytes for the int-building benchmarks.
    testListA = []
    for i in range(0, 2500):
        testListA.append(0x5a)
    # 2500 identical printable bytes ('0') for the string benchmarks.
    testListB = []
    for i in range(0, 2500):
        testListB.append(0x30)

    print("Testing the buildIntFromList methods")
    resultA = buildIntFromListNonRecursiveFunc(testListA)
    resultB = buildIntFromListRecursiveFunc(testListA)
    resultC = buildIntFromListReduceFunc(testListA)
    # All three implementations must agree on the result.
    assert(resultA == resultB == resultC)

    print("Testing the byteListToString methods")
    resultA = byteListToStringNonRecursiveFunc(testListB)
    resultB = byteListToStringRecursiveFunc(testListB)
    resultC = byteListToStringReduceFunc(testListB)
    assert (resultA == resultB == resultC)
    pass
| [
"functools.reduce",
"sys.setrecursionlimit",
"cProfile.Profile"
] | [((2512, 2539), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(4000)'], {}), '(4000)\n', (2533, 2539), False, 'import sys\n'), ((515, 533), 'cProfile.Profile', 'cProfile.Profile', ([], {}), '()\n', (531, 533), False, 'import cProfile\n'), ((1567, 1607), 'functools.reduce', 'reduce', (['(lambda x, y: (x << 8) + y)', 'aList'], {}), '(lambda x, y: (x << 8) + y, aList)\n', (1573, 1607), False, 'from functools import reduce\n')] |
import os
from fixtures import TempDir
from testtools import ExpectedException, TestCase, run_test_with
from testtools.assertions import assert_that
from testtools.matchers import (
Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure)
from testtools.twistedsupport import (
AsynchronousDeferredRunTest, flush_logged_errors)
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from twisted.internet.error import CannotListenError, ConnectionRefusedError
from txacme.urls import LETSENCRYPT_STAGING_DIRECTORY
from marathon_acme.cli import init_storage_dir, main, parse_listen_addr
# Make sure we always use the Let's Encrypt Staging endpoint for these tests
def main_t(reactor, **kwargs):
    """Run marathon-acme's main() pinned to the Let's Encrypt staging ACME
    directory, so these tests never touch the production endpoint.

    Accepts optional 'argv' and 'env' keyword arguments, defaulting to an
    empty argument list and an empty environment.
    """
    return main(
        reactor,
        acme_url=LETSENCRYPT_STAGING_DIRECTORY.asText(),
        argv=kwargs.get('argv', []),
        env=kwargs.get('env', {}),
    )
class TestCli(TestCase):
    """End-to-end tests for the marathon-acme command-line entry point."""
    # These are testtools-style tests so we can run asynchronous tests

    def test_storage_dir_required(self):
        """
        When the program is run with no arguments, it should exit with code 2
        because there is one required argument.
        """
        with ExpectedException(SystemExit, MatchesStructure(code=Equals(2))):
            main_t(reactor, argv=[])

    @inlineCallbacks
    @run_test_with(AsynchronousDeferredRunTest.make_factory(timeout=10.0))
    def test_storage_dir_provided(self):
        """
        When the program is run with an argument, it should start up and run.
        The program is expected to fail because it is unable to connect to
        Marathon.

        This test takes a while because we have to let txacme go through its
        initial sync (registration + issuing of 0 certificates) before things
        can be halted.
        """
        temp_dir = self.useFixture(TempDir())
        yield main_t(reactor, argv=[
            temp_dir.path,
            '--acme', LETSENCRYPT_STAGING_DIRECTORY.asText(),
            '--marathon', 'http://localhost:28080'  # An address we can't reach
        ])

        # Expect a 'certs' directory to be created
        self.assertThat(os.path.isdir(temp_dir.join('certs')), Equals(True))

        # Expect an 'unmanaged-certs' directory to be created
        self.assertThat(
            os.path.isdir(temp_dir.join('unmanaged-certs')), Equals(True))

        # Expect a default certificate to be created
        self.assertThat(os.path.isfile(temp_dir.join('default.pem')),
                        Equals(True))

        # Expect to be unable to connect to Marathon
        flush_logged_errors(ConnectionRefusedError)

    @inlineCallbacks
    @run_test_with(AsynchronousDeferredRunTest.make_factory(timeout=5.0))
    def test_storage_dir_provided_vault(self):
        """
        When the program is run with an argument and the --vault option, it
        should start up and run. The program is expected to fail because it is
        unable to connect to Vault.

        Unlike the above test, this crashes immediately and returns because we
        never actually start up txacme or marathon-acme if we can't get/store
        an ACME client key.
        """
        with ExpectedException(ConnectionRefusedError,
                               r'Connection was refused by other side'):
            yield main_t(
                reactor,
                env={
                    # An address we can't reach
                    'VAULT_ADDR': 'http://localhost:28080'
                },
                argv=[
                    'secret',
                    '--vault',
                    '--acme', LETSENCRYPT_STAGING_DIRECTORY.asText(),
                ]
            )

        flush_logged_errors(ConnectionRefusedError)

    @inlineCallbacks
    @run_test_with(AsynchronousDeferredRunTest.make_factory(timeout=5.0))
    def test_cannot_listen(self):
        """
        When the program is run with an argument and a listen address specified
        with an address that we can't listen on (e.g. 1.1.1.1), a
        CannotListenError is expected to be logged and the program should stop.
        """
        temp_dir = self.useFixture(TempDir())
        yield main_t(reactor, argv=[
            temp_dir.path,
            '--listen', '1.1.1.1:8080',  # An address we can't listen on
        ])

        # Expect a 'certs' directory to be created
        self.assertThat(os.path.isdir(temp_dir.join('certs')), Equals(True))

        # Expect a default certificate to be created
        self.assertThat(os.path.isfile(temp_dir.join('default.pem')),
                        Equals(True))

        # Expect to be unable to listen
        flush_logged_errors(CannotListenError)
class TestParseListenAddr(object):
    """Unit tests for marathon_acme.cli.parse_listen_addr, which turns an
    '[ipaddress]:port' string into a Twisted endpoint description."""

    def test_parse_no_colon(self):
        """
        When a listen address is parsed with no ':' character, an error is
        raised.
        """
        with ExpectedException(
                ValueError,
                r"'foobar' does not have the correct form for a listen address: "
                r'\[ipaddress\]:port'):
            parse_listen_addr('foobar')

    def test_parse_no_ip_address(self):
        """
        When a listen address is parsed with no IP address, an endpoint
        description with the listen address's port but no interface is
        returned.
        """
        assert_that(parse_listen_addr(':8080'), Equals('tcp:8080'))

    def test_parse_ipv4(self):
        """
        When a listen address is parsed with an IPv4 address, an appropriate
        interface is present in the returned endpoint description.
        """
        assert_that(parse_listen_addr('127.0.0.1:8080'),
                    Equals('tcp:8080:interface=127.0.0.1'))

    def test_parse_ipv6(self):
        """
        When a listen address is parsed with an IPv6 address, an appropriate
        interface is present in the returned endpoint description.
        """
        # ':' characters in the interface are escaped for endpoint syntax.
        assert_that(parse_listen_addr('[::]:8080'),
                    Equals('tcp6:8080:interface=\\:\\:'))

    def test_parse_invalid_ipaddress(self):
        """
        When a listen address is parsed with an invalid IP address, an error
        is raised.
        """
        # 'u?' keeps the message regex compatible across Python 2/3 reprs.
        with ExpectedException(
                ValueError,
                r"u?'hello' does not appear to be an IPv4 or IPv6 address"):
            parse_listen_addr('hello:8080')

    def test_parse_invalid_port(self):
        """
        When a listen address is parsed with an invalid port, an error is
        raised. Valid ports are 1-65535.
        """
        with ExpectedException(
                ValueError,
                r"'foo' does not appear to be a valid port number"):
            parse_listen_addr(':foo')

        with ExpectedException(
                ValueError,
                r"'0' does not appear to be a valid port number"):
            parse_listen_addr(':0')

        with ExpectedException(
                ValueError,
                r"'65536' does not appear to be a valid port number"):
            parse_listen_addr(':65536')

        with ExpectedException(
                ValueError,
                r"'' does not appear to be a valid port number"):
            parse_listen_addr(':')
class TestInitStorageDir(object):
    """Unit tests for marathon_acme.cli.init_storage_dir, which prepares the
    certificate storage directory."""

    def test_files_created_if_not_exist(self, tmpdir):
        """
        When the certificate directory does not contain a 'default.pem' file
        and a 'certs' directory, calling init_storage_dir() should create a
        'default.pem' file with x509 certificate data and create a 'certs'
        directory.
        """
        init_storage_dir(str(tmpdir))

        assert_that(str(tmpdir.join('default.pem')), FileExists())
        # Check that this *looks* like a x509 cert
        assert_that(str(tmpdir.join('default.pem')),
                    FileContains(matcher=Contains(
                        '-----BEGIN RSA PRIVATE KEY-----')))
        assert_that(str(tmpdir.join('certs')), DirExists())

    def test_files_not_created_if_exist(self, tmpdir):
        """
        When the certificate directory does contain a 'default.pem' file
        and a 'certs' directory, calling init_storage_dir() should not attempt
        to create those files.
        """
        # Pre-populate everything init_storage_dir() would otherwise create.
        tmpdir.join('default.pem').write('blah')
        tmpdir.join('certs').mkdir()
        tmpdir.join('unmanaged-certs').mkdir()

        init_storage_dir(str(tmpdir))

        assert_that(str(tmpdir.join('default.pem')), FileExists())
        # Check that the file hasn't changed
        assert_that(str(tmpdir.join('default.pem')), FileContains('blah'))
        assert_that(str(tmpdir.join('certs')), DirExists())
| [
"testtools.twistedsupport.flush_logged_errors",
"testtools.ExpectedException",
"testtools.matchers.FileExists",
"testtools.matchers.Equals",
"testtools.matchers.Contains",
"txacme.urls.LETSENCRYPT_STAGING_DIRECTORY.asText",
"testtools.matchers.FileContains",
"testtools.matchers.DirExists",
"marathon... | [((2630, 2673), 'testtools.twistedsupport.flush_logged_errors', 'flush_logged_errors', (['ConnectionRefusedError'], {}), '(ConnectionRefusedError)\n', (2649, 2673), False, 'from testtools.twistedsupport import AsynchronousDeferredRunTest, flush_logged_errors\n'), ((1379, 1433), 'testtools.twistedsupport.AsynchronousDeferredRunTest.make_factory', 'AsynchronousDeferredRunTest.make_factory', ([], {'timeout': '(10.0)'}), '(timeout=10.0)\n', (1419, 1433), False, 'from testtools.twistedsupport import AsynchronousDeferredRunTest, flush_logged_errors\n'), ((3740, 3783), 'testtools.twistedsupport.flush_logged_errors', 'flush_logged_errors', (['ConnectionRefusedError'], {}), '(ConnectionRefusedError)\n', (3759, 3783), False, 'from testtools.twistedsupport import AsynchronousDeferredRunTest, flush_logged_errors\n'), ((2715, 2768), 'testtools.twistedsupport.AsynchronousDeferredRunTest.make_factory', 'AsynchronousDeferredRunTest.make_factory', ([], {'timeout': '(5.0)'}), '(timeout=5.0)\n', (2755, 2768), False, 'from testtools.twistedsupport import AsynchronousDeferredRunTest, flush_logged_errors\n'), ((4698, 4736), 'testtools.twistedsupport.flush_logged_errors', 'flush_logged_errors', (['CannotListenError'], {}), '(CannotListenError)\n', (4717, 4736), False, 'from testtools.twistedsupport import AsynchronousDeferredRunTest, flush_logged_errors\n'), ((3825, 3878), 'testtools.twistedsupport.AsynchronousDeferredRunTest.make_factory', 'AsynchronousDeferredRunTest.make_factory', ([], {'timeout': '(5.0)'}), '(timeout=5.0)\n', (3865, 3878), False, 'from testtools.twistedsupport import AsynchronousDeferredRunTest, flush_logged_errors\n'), ((858, 896), 'txacme.urls.LETSENCRYPT_STAGING_DIRECTORY.asText', 'LETSENCRYPT_STAGING_DIRECTORY.asText', ([], {}), '()\n', (894, 896), False, 'from txacme.urls import LETSENCRYPT_STAGING_DIRECTORY\n'), ((1886, 1895), 'fixtures.TempDir', 'TempDir', ([], {}), '()\n', (1893, 1895), False, 'from fixtures import TempDir\n'), ((2229, 2241), 
'testtools.matchers.Equals', 'Equals', (['(True)'], {}), '(True)\n', (2235, 2241), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((2392, 2404), 'testtools.matchers.Equals', 'Equals', (['(True)'], {}), '(True)\n', (2398, 2404), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((2554, 2566), 'testtools.matchers.Equals', 'Equals', (['(True)'], {}), '(True)\n', (2560, 2566), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((3231, 3316), 'testtools.ExpectedException', 'ExpectedException', (['ConnectionRefusedError', '"""Connection was refused by other side"""'], {}), "(ConnectionRefusedError,\n 'Connection was refused by other side')\n", (3248, 3316), False, 'from testtools import ExpectedException, TestCase, run_test_with\n'), ((4199, 4208), 'fixtures.TempDir', 'TempDir', ([], {}), '()\n', (4206, 4208), False, 'from fixtures import TempDir\n'), ((4473, 4485), 'testtools.matchers.Equals', 'Equals', (['(True)'], {}), '(True)\n', (4479, 4485), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((4635, 4647), 'testtools.matchers.Equals', 'Equals', (['(True)'], {}), '(True)\n', (4641, 4647), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((4937, 5061), 'testtools.ExpectedException', 'ExpectedException', (['ValueError', '"""\'foobar\' does not have the correct form for a listen address: \\\\[ipaddress\\\\]:port"""'], {}), '(ValueError,\n "\'foobar\' does not have the correct form for a listen address: \\\\[ipaddress\\\\]:port"\n )\n', (4954, 5061), False, 'from testtools import ExpectedException, TestCase, run_test_with\n'), ((5110, 5137), 'marathon_acme.cli.parse_listen_addr', 'parse_listen_addr', (['"""foobar"""'], {}), "('foobar')\n", 
(5127, 5137), False, 'from marathon_acme.cli import init_storage_dir, main, parse_listen_addr\n'), ((5384, 5410), 'marathon_acme.cli.parse_listen_addr', 'parse_listen_addr', (['""":8080"""'], {}), "(':8080')\n", (5401, 5410), False, 'from marathon_acme.cli import init_storage_dir, main, parse_listen_addr\n'), ((5412, 5430), 'testtools.matchers.Equals', 'Equals', (['"""tcp:8080"""'], {}), "('tcp:8080')\n", (5418, 5430), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((5652, 5687), 'marathon_acme.cli.parse_listen_addr', 'parse_listen_addr', (['"""127.0.0.1:8080"""'], {}), "('127.0.0.1:8080')\n", (5669, 5687), False, 'from marathon_acme.cli import init_storage_dir, main, parse_listen_addr\n'), ((5709, 5747), 'testtools.matchers.Equals', 'Equals', (['"""tcp:8080:interface=127.0.0.1"""'], {}), "('tcp:8080:interface=127.0.0.1')\n", (5715, 5747), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((5969, 5999), 'marathon_acme.cli.parse_listen_addr', 'parse_listen_addr', (['"""[::]:8080"""'], {}), "('[::]:8080')\n", (5986, 5999), False, 'from marathon_acme.cli import init_storage_dir, main, parse_listen_addr\n'), ((6021, 6057), 'testtools.matchers.Equals', 'Equals', (['"""tcp6:8080:interface=\\\\:\\\\:"""'], {}), "('tcp6:8080:interface=\\\\:\\\\:')\n", (6027, 6057), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((6237, 6329), 'testtools.ExpectedException', 'ExpectedException', (['ValueError', '"""u?\'hello\' does not appear to be an IPv4 or IPv6 address"""'], {}), '(ValueError,\n "u?\'hello\' does not appear to be an IPv4 or IPv6 address")\n', (6254, 6329), False, 'from testtools import ExpectedException, TestCase, run_test_with\n'), ((6369, 6400), 'marathon_acme.cli.parse_listen_addr', 'parse_listen_addr', (['"""hello:8080"""'], {}), "('hello:8080')\n", (6386, 6400), 
False, 'from marathon_acme.cli import init_storage_dir, main, parse_listen_addr\n'), ((6568, 6653), 'testtools.ExpectedException', 'ExpectedException', (['ValueError', '"""\'foo\' does not appear to be a valid port number"""'], {}), '(ValueError, "\'foo\' does not appear to be a valid port number"\n )\n', (6585, 6653), False, 'from testtools import ExpectedException, TestCase, run_test_with\n'), ((6696, 6721), 'marathon_acme.cli.parse_listen_addr', 'parse_listen_addr', (['""":foo"""'], {}), "(':foo')\n", (6713, 6721), False, 'from marathon_acme.cli import init_storage_dir, main, parse_listen_addr\n'), ((6736, 6814), 'testtools.ExpectedException', 'ExpectedException', (['ValueError', '"""\'0\' does not appear to be a valid port number"""'], {}), '(ValueError, "\'0\' does not appear to be a valid port number")\n', (6753, 6814), False, 'from testtools import ExpectedException, TestCase, run_test_with\n'), ((6862, 6885), 'marathon_acme.cli.parse_listen_addr', 'parse_listen_addr', (['""":0"""'], {}), "(':0')\n", (6879, 6885), False, 'from marathon_acme.cli import init_storage_dir, main, parse_listen_addr\n'), ((6900, 6986), 'testtools.ExpectedException', 'ExpectedException', (['ValueError', '"""\'65536\' does not appear to be a valid port number"""'], {}), '(ValueError,\n "\'65536\' does not appear to be a valid port number")\n', (6917, 6986), False, 'from testtools import ExpectedException, TestCase, run_test_with\n'), ((7030, 7057), 'marathon_acme.cli.parse_listen_addr', 'parse_listen_addr', (['""":65536"""'], {}), "(':65536')\n", (7047, 7057), False, 'from marathon_acme.cli import init_storage_dir, main, parse_listen_addr\n'), ((7072, 7149), 'testtools.ExpectedException', 'ExpectedException', (['ValueError', '"""\'\' does not appear to be a valid port number"""'], {}), '(ValueError, "\'\' does not appear to be a valid port number")\n', (7089, 7149), False, 'from testtools import ExpectedException, TestCase, run_test_with\n'), ((7197, 7219), 
'marathon_acme.cli.parse_listen_addr', 'parse_listen_addr', (['""":"""'], {}), "(':')\n", (7214, 7219), False, 'from marathon_acme.cli import init_storage_dir, main, parse_listen_addr\n'), ((7674, 7686), 'testtools.matchers.FileExists', 'FileExists', ([], {}), '()\n', (7684, 7686), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((7952, 7963), 'testtools.matchers.DirExists', 'DirExists', ([], {}), '()\n', (7961, 7963), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((8454, 8466), 'testtools.matchers.FileExists', 'FileExists', ([], {}), '()\n', (8464, 8466), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((8566, 8586), 'testtools.matchers.FileContains', 'FileContains', (['"""blah"""'], {}), "('blah')\n", (8578, 8586), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((8636, 8647), 'testtools.matchers.DirExists', 'DirExists', ([], {}), '()\n', (8645, 8647), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((7833, 7876), 'testtools.matchers.Contains', 'Contains', (['"""-----BEGIN RSA PRIVATE KEY-----"""'], {}), "('-----BEGIN RSA PRIVATE KEY-----')\n", (7841, 7876), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((1288, 1297), 'testtools.matchers.Equals', 'Equals', (['(2)'], {}), '(2)\n', (1294, 1297), False, 'from testtools.matchers import Contains, DirExists, Equals, FileContains, FileExists, MatchesStructure\n'), ((1983, 2021), 'txacme.urls.LETSENCRYPT_STAGING_DIRECTORY.asText', 'LETSENCRYPT_STAGING_DIRECTORY.asText', ([], {}), '()\n', (2019, 2021), False, 'from txacme.urls import LETSENCRYPT_STAGING_DIRECTORY\n'), ((3659, 3697), 
'txacme.urls.LETSENCRYPT_STAGING_DIRECTORY.asText', 'LETSENCRYPT_STAGING_DIRECTORY.asText', ([], {}), '()\n', (3695, 3697), False, 'from txacme.urls import LETSENCRYPT_STAGING_DIRECTORY\n')] |
#!/usr/local/bin/python
# UUT is running continuous pre/post snapshots
# subscribe to the snapshots and save all the data.
import threading
import epics
import argparse
import time
import datetime
import os
# Number of analog input channels read from each UUT.
NCHAN = 16
# EPICS waveform record name template, raw binary (shorts); formatted with the
# 1-based channel number.
WFNAME = ":1:AI:WF:{:02d}"
# Alternative WF record scaled to VOLTS.  Kind of harder to store this in a
# portable way, so it stays disabled:
#WFNAME = ":1:AI:WF:{:02d}:V.VALA""
#1:AI:WF:08:V.VALA
class Uut:
    """Subscribes to one UUT's continuous snapshot updates and stores every
    snapshot to disk as a kst/dirfile-compatible data set."""

    # Output root directory; may be overridden per instance by the caller.
    root = "DATA"

    def make_file_name(self, upcount):
        """Return ROOT/YYYY/mm/dd/HH/MM/NNNNNN for snapshot number upcount."""
        timecode = datetime.datetime.now().strftime("%Y/%m/%d/%H/%M/")
        return self.root + "/" + timecode + "{:06d}".format(upcount)

    def store_format(self, path):
        """Create a kst / dirfile compatible "format" file in path."""
        # FIX: use a context manager so the file handle is closed even if a
        # write raises (the original open/close pair leaked on error).
        with open(path + "/format", "w") as fp:
            fp.write("# format file {}\n".format(path))
            # TODO enter start sample from event sample count
            fp.write("START_SAMPLE CONST UINT32 0\n")
            fp.writelines("CH{:02d} RAW s 1\n".format(ch)
                          for ch in range(1, NCHAN + 1))

    def on_update(self, **kws):
        """PV monitor callback: fetch every channel and store one snapshot."""
        self.upcount += 1
        fn = self.make_file_name(self.upcount)
        print(fn)
        if not os.path.isdir(fn):
            os.makedirs(fn)
        for ch in range(1, NCHAN + 1):
            yy = self.channels[ch - 1].get()
            yy.astype('int16').tofile(fn + "/CH{:02d}".format(ch))
        self.store_format(fn)
        print("{} {}".format(kws['pvname'], kws['value']))
        print(self.channels[1])

    def monitor(self):
        """Connect the channel PVs and subscribe to the UPDATES counter."""
        self.channels = [epics.PV(self.name + WFNAME.format(ch))
                         for ch in range(1, NCHAN + 1)]
        # FIX: keep a reference to the UPDATES PV on the instance.  It was
        # bound to a discarded local, so the subscription object could be
        # garbage-collected and the callback silently lost.
        self.updates = epics.PV(self.name + ":1:AI:WF:01:UPDATES",
                                auto_monitor=True, callback=self.on_update)

    def __init__(self, _name):
        self.name = _name
        self.upcount = 0
        threading.Thread(target=self.monitor).start()
def multivent(parser):
    """Start one snapshot monitor per UUT, then block forever."""
    uuts = [Uut(name) for name in parser.uuts]
    for uut in uuts:
        uut.root = parser.root
    # The per-UUT monitor threads do all the work; just keep the process up.
    while True:
        time.sleep(0.5)
def run_main():
    """Parse the command line and hand control to multivent()."""
    parser = argparse.ArgumentParser(description='acq400 multivent')
    parser.add_argument('--root', type=str, default="DATA",
                        help="output root path")
    parser.add_argument('uuts', nargs='+', help="uut names")
    multivent(parser.parse_args())


# execution starts here
if __name__ == '__main__':
    run_main()
| [
"epics.PV",
"os.makedirs",
"argparse.ArgumentParser",
"time.sleep",
"datetime.datetime.now",
"os.path.isdir",
"threading.Thread"
] | [((2149, 2204), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""acq400 multivent"""'}), "(description='acq400 multivent')\n", (2172, 2204), False, 'import argparse\n'), ((1703, 1795), 'epics.PV', 'epics.PV', (["(self.name + ':1:AI:WF:01:UPDATES')"], {'auto_monitor': '(True)', 'callback': 'self.on_update'}), "(self.name + ':1:AI:WF:01:UPDATES', auto_monitor=True, callback=\n self.on_update)\n", (1711, 1795), False, 'import epics\n'), ((2099, 2114), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2109, 2114), False, 'import time\n'), ((1177, 1194), 'os.path.isdir', 'os.path.isdir', (['fn'], {}), '(fn)\n', (1190, 1194), False, 'import os\n'), ((1208, 1223), 'os.makedirs', 'os.makedirs', (['fn'], {}), '(fn)\n', (1219, 1223), False, 'import os\n'), ((496, 519), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (517, 519), False, 'import datetime\n'), ((1890, 1927), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.monitor'}), '(target=self.monitor)\n', (1906, 1927), False, 'import threading\n')] |
from __future__ import annotations
from abc import abstractmethod
from logging import getLogger
from typing import (
TYPE_CHECKING,
Any,
Dict,
Generic,
List,
Optional,
Type,
TypeVar,
cast,
)
from django.conf import settings
from django.core.cache import cache
from django.db import models as m
from .model import DCFModel, IDCFModel, __implements__
LOG = getLogger(__name__)
if TYPE_CHECKING:
from ...serializers.serializer import DCFSerializer
D = TypeVar("D")
T = TypeVar("T", bound="ISerializable")
class ISerializable(IDCFModel[DCFModel], Generic[T, D]):
    """Interface for models that can be serialized to representation type D."""

    def to_serializable(self) -> Serializable[T, D]:
        """View this instance as the concrete Serializable mixin type."""
        return cast(Serializable[T, D], self)

    @classmethod
    @abstractmethod
    def get_serializer_class(
        cls, *, version: str, context: Dict[str, Any]
    ) -> Type[DCFSerializer[T, D]]:
        """Return the serializer class for the given API version/context."""
        ...

    @abstractmethod
    def get_serializer(
        self, *, version: str, context: Dict[str, Any], **kwargs: Any
    ) -> DCFSerializer[T, D]:
        """Return a serializer instance bound to this object."""
        ...

    @abstractmethod
    def json(
        self,
        *,
        version: str,
        context: Dict[str, Any] = {},
        serializer: Optional[DCFSerializer[T, D]] = None,
        ignore_cache: bool = False,
    ) -> D:
        """Serialize this instance, possibly through a cache."""
        ...

    @abstractmethod
    def get_json(
        self,
        *,
        version: str,
        context: Dict[str, Any] = {},
        serializer: Optional[DCFSerializer[T, D]] = None,
    ) -> D:
        """Serialize this instance without consulting any cache."""
        ...
class Serializable(__implements__, ISerializable[T, D]):
    """Mixin providing (optionally cached) JSON serialization for models.

    Subclasses must override :meth:`get_serializer_class`; the remaining
    methods are derived from it.
    """

    @classmethod
    def get_serializer_class(
        cls, *, version: str, context: Dict[str, Any]
    ) -> Type[DCFSerializer[T, D]]:
        """Return the serializer class for *version*; must be overridden."""
        raise NotImplementedError(
            f"{cls} must implement .get_serializer_class(version, context)"
        )

    def get_serializer(
        self, *, version: str, context: Dict[str, Any], **kwargs: Any
    ) -> DCFSerializer[T, D]:
        """Instantiate the configured serializer class for this instance."""
        return self.get_serializer_class(version=version, context=context)(
            instance=self, **kwargs
        )

    def json(
        self,
        *,
        version: str,
        context: Dict[str, Any] = {},
        serializer: Optional[DCFSerializer[T, D]] = None,
        ignore_cache: bool = False,
    ) -> D:
        """Serialize this instance, going through the cache when enabled.

        The cache is bypassed when *ignore_cache* is set or when
        :meth:`get_cache_timeout` returns 0.
        """
        if ignore_cache or self.get_cache_timeout() == 0:
            return self.get_json(
                version=version,
                context=context,
                serializer=serializer,
            )
        return self.cached_json(
            version=version,
            context=context,
            serializer=serializer,
        )

    def get_json(
        self: T,
        *,
        version: str,
        context: Dict[str, Any] = {},
        serializer: Optional[DCFSerializer[T, D]] = None,
    ) -> D:
        """Serialize this instance without consulting the cache."""
        if serializer is None:
            serializer = self.get_serializer(version=version, context=context)
        return serializer.to_representation(instance=self)

    def get_extra_content_to_hash(self) -> List[Any]:
        """Extra components folded into the cache key.

        Override to invalidate the serialization cache on additional state.
        """
        return []

    def values(self) -> Optional[T]:
        # Fetch the raw column values for this row; None if the row is gone.
        self._meta: Any
        return self._meta.model.objects.filter(pk=self.id).values().first()

    def __repr__(self) -> str:
        if settings.DEBUG:
            return f"<<{self.__class__.__name__}:{self.values()}>>"
        else:
            return f"<{self.__class__.__name__}:{self.id}>"

    def __str__(self) -> str:
        return f"<{self.__class__.__name__}:{self.id}>"

    def get_cache_timeout(self) -> int:
        """Return how long to cache the serialization in seconds (0 = off)."""
        return 0

    def cached_json(
        self,
        *,
        version: str,
        context: Dict[str, Any] = {},
        serializer: Optional[DCFSerializer[T, D]] = None,
    ) -> Any:
        """Serialize through the Django cache, honoring get_cache_timeout()."""
        timeout = self.get_cache_timeout()
        if timeout == 0:
            return self.get_json(
                version=version,
                context=context,
                serializer=serializer,
            )
        key = self.get_cache_key_for_serialization(version, context)
        # NOTE: a falsy cached value is treated as a miss (original behavior).
        if result := cache.get(key, None):
            return result
        data = self.get_json(
            version=version,
            context=context,
            serializer=serializer,
        )
        cache.add(key, data, timeout=timeout)
        return data

    def get_cache_key_for_serialization(
        self, version: str, context: Dict[str, Any]
    ) -> str:
        """Build the cache key; changing any component forces a cache miss.

        BUG FIX: the previous implementation called ``hash()`` directly on a
        list containing the ``context`` dict, which always raises
        ``TypeError`` (lists and dicts are unhashable).  The components are
        folded into a tuple of their reprs instead, which is hashable
        regardless of the component types.
        """
        components = [
            self._meta.model_name,
            self.id,
            version,
            context,
        ] + self.get_extra_content_to_hash()
        return "serialization_cache_" + str(
            hash(tuple(repr(c) for c in components))
        )
def check_integrity() -> None:
    """Sanity-check every Serializable subclass's base order and serializer."""
    from ...serializers import DelegateSerializer, Serializer

    for model in Serializable.__subclasses__():
        # NOTE(review): ``break`` stops checking *all remaining* subclasses as
        # soon as one model is skipped -- confirm ``continue`` was not intended.
        if model.__module__ == "__fake__":
            break
        if Serializable not in model.__bases__:
            break
        if m.Model not in model.__bases__:
            break
        serializable_pos = model.__bases__.index(Serializable)
        model_pos = model.__bases__.index(m.Model)
        if serializable_pos > model_pos:
            raise AssertionError(
                f"{model} must extend {Serializable} before {m.Model}, current order: {model.__bases__}"
            )

    for model in Serializable.__subclasses__():
        sercls: Type[Serializer] = model.get_serializer_class(
            version="default", context={}
        )
        valid = issubclass(sercls, Serializer) or issubclass(
            sercls, DelegateSerializer
        )
        if not valid:
            raise NotImplementedError(
                f"{model}.get_serializer_class() does not return a Serialzer class "
            )
| [
"logging.getLogger",
"typing.cast",
"typing.TypeVar"
] | [((395, 414), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (404, 414), False, 'from logging import getLogger\n'), ((496, 508), 'typing.TypeVar', 'TypeVar', (['"""D"""'], {}), "('D')\n", (503, 508), False, 'from typing import TYPE_CHECKING, Any, Dict, Generic, List, Optional, Type, TypeVar, cast\n'), ((513, 548), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {'bound': '"""ISerializable"""'}), "('T', bound='ISerializable')\n", (520, 548), False, 'from typing import TYPE_CHECKING, Any, Dict, Generic, List, Optional, Type, TypeVar, cast\n'), ((676, 706), 'typing.cast', 'cast', (['Serializable[T, D]', 'self'], {}), '(Serializable[T, D], self)\n', (680, 706), False, 'from typing import TYPE_CHECKING, Any, Dict, Generic, List, Optional, Type, TypeVar, cast\n')] |
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/gaogaotiantian/watchpoints/blob/master/NOTICE.txt
import unittest
import inspect
from watchpoints.util import getline, getargnodes
class TestUtil(unittest.TestCase):
    # NOTE: getline()/getargnodes() read the source text of the *call site*,
    # so the formatting of each watch(...) call below is significant and is
    # kept exactly as the expected strings require.

    def test_getline(self):
        def watch(*args):
            caller_frame = inspect.currentframe().f_back
            return getline(caller_frame)

        a = []
        b = {}

        line = watch(a)
        self.assertEqual(line, "line = watch ( a )")

        line = watch(
            a,
            b
        )
        self.assertEqual(line, "line = watch ( a , b )")

    def test_getargnodes(self):
        def watch(*args):
            caller_frame = inspect.currentframe().f_back
            return list(getargnodes(caller_frame))

        a = [0, 1]
        b = {}

        argnodes = watch(a)
        self.assertEqual(len(argnodes), 1)
        self.assertEqual(argnodes[0][1], "a")

        argnodes = watch(
            a,
            b
        )
        self.assertEqual(len(argnodes), 2)
        self.assertEqual(argnodes[0][1], "a")
        self.assertEqual(argnodes[1][1], "b")

        argnodes = watch(
            a[0],  # comments
            b
        )
        self.assertEqual(len(argnodes), 2)
        self.assertEqual(argnodes[0][1], "a[0]")
        self.assertEqual(argnodes[1][1], "b")

        with self.assertRaises(Exception):
            argnodes = [i for i in watch(a)]
| [
"watchpoints.util.getline",
"watchpoints.util.getargnodes",
"inspect.currentframe"
] | [((407, 421), 'watchpoints.util.getline', 'getline', (['frame'], {}), '(frame)\n', (414, 421), False, 'from watchpoints.util import getline, getargnodes\n'), ((358, 380), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (378, 380), False, 'import inspect\n'), ((763, 785), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (783, 785), False, 'import inspect\n'), ((817, 835), 'watchpoints.util.getargnodes', 'getargnodes', (['frame'], {}), '(frame)\n', (828, 835), False, 'from watchpoints.util import getline, getargnodes\n')] |
# -*- coding: utf-8 -*-
from amplify.agent.common.util.math import median
from unittest import TestCase
from hamcrest import *
__author__ = "<NAME>"
__copyright__ = "Copyright (C) Nginx, Inc. All rights reserved."
__license__ = ""
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
class MathTestCase(TestCase):
    def test_median(self):
        cases = [
            ([1, 3, 5, 7], 4.0),            # even length
            ([1, 5, 7, 3], 4.0),            # unsorted input
            ([1, 2, 3, 4, 5, 6, 7], 4.0),   # odd length
            ([], None),                     # empty input
        ]
        for values, expected in cases:
            assert_that(median(values), equal_to(expected))
| [
"amplify.agent.common.util.math.median"
] | [((384, 404), 'amplify.agent.common.util.math.median', 'median', (['[1, 3, 5, 7]'], {}), '([1, 3, 5, 7])\n', (390, 404), False, 'from amplify.agent.common.util.math import median\n'), ((460, 480), 'amplify.agent.common.util.math.median', 'median', (['[1, 5, 7, 3]'], {}), '([1, 5, 7, 3])\n', (466, 480), False, 'from amplify.agent.common.util.math import median\n'), ((538, 567), 'amplify.agent.common.util.math.median', 'median', (['[1, 2, 3, 4, 5, 6, 7]'], {}), '([1, 2, 3, 4, 5, 6, 7])\n', (544, 567), False, 'from amplify.agent.common.util.math import median\n'), ((604, 614), 'amplify.agent.common.util.math.median', 'median', (['[]'], {}), '([])\n', (610, 614), False, 'from amplify.agent.common.util.math import median\n')] |
import numpy as np
from scipy.misc import toimage
from scipy.ndimage.filters import gaussian_filter
from os import mkdir
from os.path import dirname, join
from time import time
from keras.models import Model
from keras.layers import Dense
from keras import backend as K
from keras.applications.vgg16 import VGG16
# output directory lives next to this script; create it if missing
output_path = join(dirname(__file__), 'output')
try:
    mkdir(output_path)
except FileExistsError:
    # the folder already exists, which is exactly what we wanted
    pass

# channel (color) axis position depends on the Keras backend's data format
if K.image_data_format() == 'channels_last':
    ch_dim = 3
else:
    ch_dim = 1

# VGG16-specific handling (mean subtraction / BGR ordering)
is_VGG16 = True
VGG16_MEAN_VALUES = np.array([103.939, 116.779, 123.68])

# gradient ascent step size
learning_rate = 2500.0
# number of gradient ascent updates applied to the image
no_of_iterations = 500

# L2-decay strength: keeps a few extreme pixel values from dominating the
# output image
l2_decay = 0.0001

# Gaussian blur settings: penalizes high-frequency content in the output.
# blur_interval controls how often blurring runs; blur_std is the kernel's
# standard deviation (values between 0.0 and 0.3 work poorly, per yosinski)
blur_interval = 4
blur_std = 1.0

# sparsity inducers: pixels below these percentiles are zeroed out
# (by absolute value, by norm, by contribution, by absolute contribution)
value_percentile = 0
norm_percentile = 0
contribution_percentile = 0
abs_contribution_percentile = 0

# toggle for the whole regularization ensemble
regularize = True
# utility: turn a visualization array into an image array suitable for saving
def deprocess(vis_array):
    # drop the batch dimension
    img_array = vis_array[0]

    if K.image_data_format() == 'channels_first':
        # (color, height, width) -> (height, width, color)
        img_array = img_array.transpose((1, 2, 0))

    if is_VGG16:
        # undo VGG16 preprocessing: add the mean values back ...
        img_array += VGG16_MEAN_VALUES.reshape((1, 1, 3))
        # ... and flip BGR back to RGB
        img_array = img_array[:, :, ::-1]

    # clip to [0, 255] and convert to uint8
    return np.clip(img_array, 0, 255).astype('uint8')
# build the model whose gradients we ascend
def create_model():
    """Return VGG16 with its softmax output replaced by a linear layer.

    The linear layer reuses the softmax layer's trained weights, so unit
    activations can be maximized without the softmax squashing.
    """
    base_model = VGG16(include_top=True, weights='imagenet')
    softmax_weights = base_model.layers[-1].get_weights()
    linear_out = Dense(1000, activation='linear',
                       weights=softmax_weights)(base_model.layers[-1].input)
    return Model(base_model.input, linear_out)
# saves the visualization and a text file describing its creation environment
def save_visualization(img, layer_no, unit_index, loss_value):
    # identify the image by regularization mode, unit location and timestamp
    prefix = 'regularized' if regularize else 'vanilla'
    img_name = prefix + '_{}_{}_{}'.format(layer_no, unit_index, time())

    # avoid scipy.misc.imsave: it would normalize pixel values into [0, 255]
    toimage(img).save(join(output_path, img_name + '.png'))

    # accompanying txt-file with the creation environment and obtained loss
    img_info = (
        'Image "{}.png" was created from unit {} in layer {}, using the following hyperparameters:\n\n'
        'Learning rate: {}\n'
        'Number of iterations: {}\n'
        '----------\n'
    ).format(img_name, unit_index, layer_no, learning_rate, no_of_iterations)
    if regularize:
        img_info += (
            'Regularization enabled\n\n'
            'L2-decay: {}\n'
            'Blur interval and std: {} & {}\n'
            'Value percentile: {}\n'
            'Norm percentile: {}\n'
            'Contribution percentile: {}\n'
            'Abs. contribution percentile: {}\n'
        ).format(l2_decay, blur_interval, blur_std, value_percentile,
                 norm_percentile, contribution_percentile,
                 abs_contribution_percentile)
    else:
        img_info += 'Regularization disabled\n'
    img_info += (
        '----------\n'
        'Obtained loss value: {}\n'
    ).format(loss_value)

    with open(join(output_path, img_name + '_info.txt'), 'w') as f:
        f.write(img_info)

    print('\nImage of unit {} from layer {} have been saved as {}.png\n'.format(unit_index, layer_no, img_name))
# returns a function computing loss and gradients w.r.t. the input image for
# the activation of the chosen unit in the given output tensor
def get_loss_and_gradient_function(input_tensor, output_tensor, unit_index):
    # promote a plain integer index to a 1-tuple
    if isinstance(unit_index, int):
        unit_index = (unit_index,)

    output_dims = output_tensor.shape[1:]
    if len(output_dims) != len(unit_index):
        raise ValueError('Index mismatch: Unit indices should be of length {}, not {}'
                         .format(len(output_dims), len(unit_index)))
    else:
        # validate that every index component lies inside the output tensor
        tensor_min = np.array([0 for _ in output_dims])
        tensor_max = np.array([int(dim) - 1 for dim in output_dims])
        index_arr = np.array(unit_index)
        if np.any(index_arr < tensor_min) or np.any(index_arr > tensor_max):
            raise ValueError('Invalid unit index {}: Unit indices should have values between {} and {}'
                             .format(index_arr, tensor_min, tensor_max))

    # prepend the batch index
    unit_index = (0,) + unit_index

    # loss is the activation of the selected unit in the chosen layer output
    loss = output_tensor[unit_index]
    # gradients of that loss w.r.t. the input image
    gradients = K.gradients(loss, input_tensor)[0]

    # the extra learning-phase input lets callers disable training-only layers
    return K.function([input_tensor, K.learning_phase()], [loss, gradients])
# creates a random initial image to be optimized into a visualization
def create_initial_image(model_input_shape):
    # prepend a batch dimension of one; pixel values drawn from N(0, 10)
    batch_shape = (1,) + model_input_shape[1:]
    return np.random.normal(0, 10, batch_shape)
# regularizes the visualization with an ensemble of techniques; each technique
# is switched on by a non-zero value of its corresponding global variable
def apply_ensemble_regularization(visualization, pixel_gradients, iteration_no):
    # regularizer #1: L2-decay, keeps a few extreme pixels from dominating
    if l2_decay > 0:
        visualization *= (1 - l2_decay)

    # regularizer #2: Gaussian blur, penalizes high-frequency information
    if blur_interval > 0 and blur_std > 0:
        # blurring is expensive, so only run it every blur_interval iterations
        if not iteration_no % blur_interval:
            # blur along height and width, never along the channel dimension
            kernel_std = [0, blur_std, blur_std, blur_std]
            kernel_std[ch_dim] = 0
            visualization = gaussian_filter(visualization, sigma=kernel_std)

    # regularizer #3: zero out pixels with small absolute value
    if value_percentile > 0:
        abs_vis = abs(visualization)
        keep = abs_vis >= np.percentile(abs_vis, value_percentile)
        visualization *= keep

    # regularizer #4: zero out pixels with small norm (over the channel axis)
    if norm_percentile > 0:
        pixel_norms = np.linalg.norm(visualization, axis=ch_dim)
        keep = pixel_norms >= np.percentile(pixel_norms, norm_percentile)
        visualization *= expand_for_color(keep)

    # regularizer #5: zero out pixels with small predicted contribution
    if contribution_percentile > 0:
        predicted = -visualization * pixel_gradients
        contribution = predicted.sum(ch_dim)
        keep = contribution >= np.percentile(contribution, contribution_percentile)
        visualization *= expand_for_color(keep)

    # regularizer #6: zero out pixels with small *absolute* contribution
    if abs_contribution_percentile > 0:
        predicted = -visualization * pixel_gradients
        abs_contribution = abs(predicted.sum(ch_dim))
        keep = abs_contribution >= np.percentile(
            abs_contribution, abs_contribution_percentile)
        visualization *= expand_for_color(keep)

    return visualization
# expand a (batch, height, width) numpy array with a channel (color) dimension
def expand_for_color(np_array):
    # insert the channel axis, then repeat the data three times along it
    expanded = np.expand_dims(np_array, axis=ch_dim)
    reps = [1, 1, 1, 1]
    reps[ch_dim] = 3
    return np.tile(expanded, reps)
def main():
    """Generate and save an activation-maximisation image for each selected unit."""
    model = create_model()
    # (layer number, unit index) pairs; the unit index is a tuple for layers
    # with 3D structured output (convolutional / pooling layers).
    units_to_visualize = [(1, (112, 112, ch)) for ch in range(1)]
    for layer_no, unit_index in units_to_visualize:
        print('\nProcessing unit {} in layer {}'.format(unit_index, layer_no))
        # Wall-clock timing for this single image.
        start_time = time()
        if layer_no < 0 or layer_no >= len(model.layers):
            raise ValueError('Invalid layer number {}: Layer numbers should be between {} and {}'.format(layer_no, 0, len(model.layers) - 1))
        # Build the loss/gradient function once per unit.
        compute_loss_and_gradients = get_loss_and_gradient_function(model.input, model.layers[layer_no].output, unit_index)
        visualization = create_initial_image(model.input_shape)
        # Gradient ascent, optionally regularised after every step.
        for step in range(1, no_of_iterations + 1):
            # Second list element 0 deactivates training-only layers (e.g. dropout).
            loss_value, pixel_gradients = compute_loss_and_gradients([visualization, 0])
            visualization += pixel_gradients * learning_rate
            if regularize:
                visualization = apply_ensemble_regularization(visualization, pixel_gradients, step)
            print('Round {} finished.'.format(step))
        # Convert back to standard image dimensions and persist with metadata.
        visualization_image = deprocess(visualization)
        save_visualization(visualization_image, layer_no, unit_index, loss_value)
        print('Visualization for unit {} from layer {} completed in {:.4f} seconds'.format(unit_index, layer_no, time() - start_time))
main()
| [
"numpy.clip",
"keras.applications.vgg16.VGG16",
"scipy.ndimage.filters.gaussian_filter",
"keras.backend.learning_phase",
"keras.backend.gradients",
"numpy.array",
"numpy.linalg.norm",
"keras.layers.Dense",
"keras.backend.image_data_format",
"os.mkdir",
"keras.models.Model",
"numpy.random.norma... | [((710, 746), 'numpy.array', 'np.array', (['[103.939, 116.779, 123.68]'], {}), '([103.939, 116.779, 123.68])\n', (718, 746), True, 'import numpy as np\n'), ((372, 389), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (379, 389), False, 'from os.path import dirname, join\n'), ((407, 425), 'os.mkdir', 'mkdir', (['output_path'], {}), '(output_path)\n', (412, 425), False, 'from os import mkdir\n'), ((572, 593), 'keras.backend.image_data_format', 'K.image_data_format', ([], {}), '()\n', (591, 593), True, 'from keras import backend as K\n'), ((2579, 2622), 'keras.applications.vgg16.VGG16', 'VGG16', ([], {'include_top': '(True)', 'weights': '"""imagenet"""'}), "(include_top=True, weights='imagenet')\n", (2584, 2622), False, 'from keras.applications.vgg16 import VGG16\n'), ((2922, 2950), 'keras.models.Model', 'Model', (['base_model.input', 'out'], {}), '(base_model.input, out)\n', (2927, 2950), False, 'from keras.models import Model\n'), ((6174, 6227), 'numpy.random.normal', 'np.random.normal', (['(0)', '(10)', '((1,) + model_input_shape[1:])'], {}), '(0, 10, (1,) + model_input_shape[1:])\n', (6190, 6227), True, 'import numpy as np\n'), ((9646, 9683), 'numpy.expand_dims', 'np.expand_dims', (['np_array'], {'axis': 'ch_dim'}), '(np_array, axis=ch_dim)\n', (9660, 9683), True, 'import numpy as np\n'), ((9851, 9879), 'numpy.tile', 'np.tile', (['np_array', 'tile_reps'], {}), '(np_array, tile_reps)\n', (9858, 9879), True, 'import numpy as np\n'), ((2067, 2088), 'keras.backend.image_data_format', 'K.image_data_format', ([], {}), '()\n', (2086, 2088), True, 'from keras import backend as K\n'), ((2826, 2883), 'keras.layers.Dense', 'Dense', (['(1000)'], {'activation': '"""linear"""', 'weights': 'softmax_weights'}), "(1000, activation='linear', weights=softmax_weights)\n", (2831, 2883), False, 'from keras.layers import Dense\n'), ((3268, 3274), 'time.time', 'time', ([], {}), '()\n', (3272, 3274), False, 'from time import time\n'), ((3425, 
3461), 'os.path.join', 'join', (['output_path', "(img_name + '.png')"], {}), "(output_path, img_name + '.png')\n", (3429, 3461), False, 'from os.path import dirname, join\n'), ((5137, 5185), 'numpy.array', 'np.array', (['[(0) for _ in output_tensor.shape[1:]]'], {}), '([(0) for _ in output_tensor.shape[1:]])\n', (5145, 5185), True, 'import numpy as np\n'), ((5780, 5811), 'keras.backend.gradients', 'K.gradients', (['loss', 'input_tensor'], {}), '(loss, input_tensor)\n', (5791, 5811), True, 'from keras import backend as K\n'), ((7562, 7604), 'numpy.linalg.norm', 'np.linalg.norm', (['visualization'], {'axis': 'ch_dim'}), '(visualization, axis=ch_dim)\n', (7576, 7604), True, 'import numpy as np\n'), ((10702, 10708), 'time.time', 'time', ([], {}), '()\n', (10706, 10708), False, 'from time import time\n'), ((2435, 2461), 'numpy.clip', 'np.clip', (['img_array', '(0)', '(255)'], {}), '(img_array, 0, 255)\n', (2442, 2461), True, 'import numpy as np\n'), ((3407, 3419), 'scipy.misc.toimage', 'toimage', (['img'], {}), '(img)\n', (3414, 3419), False, 'from scipy.misc import toimage\n'), ((4408, 4449), 'os.path.join', 'join', (['output_path', "(img_name + '_info.txt')"], {}), "(output_path, img_name + '_info.txt')\n", (4412, 4449), False, 'from os.path import dirname, join\n'), ((5975, 5993), 'keras.backend.learning_phase', 'K.learning_phase', ([], {}), '()\n', (5991, 5993), True, 'from keras import backend as K\n'), ((6980, 7033), 'scipy.ndimage.filters.gaussian_filter', 'gaussian_filter', (['visualization'], {'sigma': 'blur_kernel_std'}), '(visualization, sigma=blur_kernel_std)\n', (6995, 7033), False, 'from scipy.ndimage.filters import gaussian_filter\n'), ((7278, 7328), 'numpy.percentile', 'np.percentile', (['abs_visualization', 'value_percentile'], {}), '(abs_visualization, value_percentile)\n', (7291, 7328), True, 'import numpy as np\n'), ((7715, 7758), 'numpy.percentile', 'np.percentile', (['pixel_norms', 'norm_percentile'], {}), '(pixel_norms, norm_percentile)\n', (7728, 
7758), True, 'import numpy as np\n'), ((8358, 8410), 'numpy.percentile', 'np.percentile', (['contribution', 'contribution_percentile'], {}), '(contribution, contribution_percentile)\n', (8371, 8410), True, 'import numpy as np\n'), ((9143, 9203), 'numpy.percentile', 'np.percentile', (['abs_contribution', 'abs_contribution_percentile'], {}), '(abs_contribution, abs_contribution_percentile)\n', (9156, 9203), True, 'import numpy as np\n'), ((5271, 5291), 'numpy.array', 'np.array', (['unit_index'], {}), '(unit_index)\n', (5279, 5291), True, 'import numpy as np\n'), ((5316, 5336), 'numpy.array', 'np.array', (['unit_index'], {}), '(unit_index)\n', (5324, 5336), True, 'import numpy as np\n'), ((5463, 5483), 'numpy.array', 'np.array', (['unit_index'], {}), '(unit_index)\n', (5471, 5483), True, 'import numpy as np\n'), ((12215, 12221), 'time.time', 'time', ([], {}), '()\n', (12219, 12221), False, 'from time import time\n')] |
import json, urllib.parse
import requests as reqs

# Namespace prefixes shared by every SPARQL request sent to GraphDB.
PREFIXES = '''
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX noInferences: <http://www.ontotext.com/explicit>
PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
PREFIX : <http://www.di.uminho.pt/prc2021/mapa-virtual#>
'''

# GraphDB repository endpoints: one for read queries, one for updates.
QUERY_ENDPOINT = "http://localhost:7200/repositories/mava-virtual?query="
UPDATE_ENDPOINT = "http://localhost:7200/repositories/mava-virtual/statements?update="

# Query written in class: derive a direct :temLigação edge for every
# origin/destination pair of an existing link.
construct_query = '''CONSTRUCT { ?c1 :temLigação ?c2 . }
           WHERE { ?l :origem ?c1. ?l :destino ?c2. } '''

response = reqs.get(QUERY_ENDPOINT + urllib.parse.quote(PREFIXES + construct_query))
response.raise_for_status()

for triple_line in response.text.split('.\n'):
    tokens = triple_line.split()
    if len(tokens) != 3:
        continue
    subject_token, _, object_token = tokens
    # Keep only the local name: drop the namespace URI and the trailing '>'.
    c1 = subject_token.split('#')[1][:-1]
    c2 = object_token.split('#')[1][:-1]
    insert_stmt = f"INSERT DATA {{ :{c1} :temLigação :{c2} . }}"
    update_response = reqs.post(UPDATE_ENDPOINT + urllib.parse.quote(PREFIXES + insert_stmt))
    update_response.raise_for_status()
| [
"requests.post",
"requests.get"
] | [((771, 798), 'requests.get', 'reqs.get', (['(getLink + encoded)'], {}), '(getLink + encoded)\n', (779, 798), True, 'import requests as reqs\n'), ((1157, 1185), 'requests.post', 'reqs.post', (['(upLink + encoded_)'], {}), '(upLink + encoded_)\n', (1166, 1185), True, 'import requests as reqs\n')] |
'''
This is a sample class for a model. You may choose to use it as-is or make any changes to it.
This has been provided just to give you an idea of how to structure your model class.
'''
import cv2
import numpy as np
import logging as log
from openvino.inference_engine import IENetwork, IECore
import warnings
import math
warnings.filterwarnings("ignore")
class GazeEstimationClass:
    '''
    Class for the Gaze Estimation Model.
    '''
    def __init__(self, model_name, device='CPU', extensions=None):
        '''
        Store model paths, target device and extension, then read the network
        definition. Raises ValueError when the model files cannot be read.
        '''
        self.model_weights = model_name + '.bin'
        self.model_structure = model_name + '.xml'
        self.device = device
        self.extension = extensions
        try:
            self.model = IENetwork(self.model_structure, self.model_weights)
        except Exception:
            raise ValueError("Could not Initialise the network. Have you enterred the correct model path?")
        self.input_name = next(iter(self.model.inputs))
        self.output_name = next(iter(self.model.outputs))
        self.output_shape = self.model.outputs[self.output_name].shape

    def load_model(self):
        '''
        Load the network onto the requested device; if unsupported layers are
        found, add the user-supplied extension and re-check before giving up.
        '''
        self.model = IENetwork(self.model_structure, self.model_weights)
        self.core = IECore()
        if self._unsupported_layers():
            log.error("Unsupported layers found ...")
            log.error("Adding specified extension")
            self.core.add_extension(self.extension, self.device)
            if self._unsupported_layers():
                log.error("ERROR: There are still unsupported layers after adding extension...")
                exit(1)
        self.net = self.core.load_network(network=self.model, device_name=self.device, num_requests=1)

    def _unsupported_layers(self):
        '''
        Return the network layers the target device cannot run.
        '''
        supported = self.core.query_network(network=self.model, device_name=self.device)
        return [layer for layer in self.model.layers.keys() if layer not in supported]

    def predict(self, left_eye_image, right_eye_image, head_pose_output):
        '''
        Run gaze inference on both eye crops plus the head-pose angles and
        return the (x, y) mouse displacement together with the raw gaze vector.
        '''
        self.left_eye_pre_image, self.right_eye_pre_image = self.preprocess_input(left_eye_image, right_eye_image)
        self.results = self.net.infer(inputs={
            'left_eye_image': self.left_eye_pre_image,
            'right_eye_image': self.right_eye_pre_image,
            'head_pose_angles': head_pose_output,
        })
        self.mouse_coordinate, self.gaze_vector = self.preprocess_output(self.results, head_pose_output)
        return self.mouse_coordinate, self.gaze_vector

    def check_model(self):
        # No extra validation is performed for this model.
        pass

    @staticmethod
    def _to_nchw(eye_image):
        '''
        Resize one eye crop to 60x60 and reshape it to a 1xCxHxW batch.
        '''
        resized = cv2.resize(eye_image, (60, 60))
        chw = resized.transpose((2, 0, 1))
        return chw.reshape(1, *chw.shape)

    def preprocess_input(self, left_eye_image, right_eye_image):
        '''
        Prepare both eye crops for inference (resize + NCHW batch layout).
        '''
        return self._to_nchw(left_eye_image), self._to_nchw(right_eye_image)

    def preprocess_output(self, outputs, head_pose_estimation_output):
        '''
        Rotate the raw gaze vector by the head roll angle (in degrees) and
        return the corrected (x, y) pair plus the raw vector.
        '''
        gaze_vector = outputs[self.output_name][0]
        roll_radians = head_pose_estimation_output[2] * math.pi / 180
        cos_theta = math.cos(roll_radians)
        sin_theta = math.sin(roll_radians)
        x_value = gaze_vector[0] * cos_theta + gaze_vector[1] * sin_theta
        y_value = gaze_vector[1] * cos_theta - gaze_vector[0] * sin_theta
        return (x_value, y_value), gaze_vector
| [
"math.cos",
"openvino.inference_engine.IECore",
"logging.error",
"cv2.resize",
"math.sin",
"openvino.inference_engine.IENetwork",
"warnings.filterwarnings"
] | [((325, 358), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (348, 358), False, 'import warnings\n'), ((1486, 1537), 'openvino.inference_engine.IENetwork', 'IENetwork', (['self.model_structure', 'self.model_weights'], {}), '(self.model_structure, self.model_weights)\n', (1495, 1537), False, 'from openvino.inference_engine import IENetwork, IECore\n'), ((1558, 1566), 'openvino.inference_engine.IECore', 'IECore', ([], {}), '()\n', (1564, 1566), False, 'from openvino.inference_engine import IENetwork, IECore\n'), ((3399, 3435), 'cv2.resize', 'cv2.resize', (['left_eye_image', '(60, 60)'], {}), '(left_eye_image, (60, 60))\n', (3409, 3435), False, 'import cv2\n'), ((3622, 3659), 'cv2.resize', 'cv2.resize', (['right_eye_image', '(60, 60)'], {}), '(right_eye_image, (60, 60))\n', (3632, 3659), False, 'import cv2\n'), ((4247, 4283), 'math.cos', 'math.cos', (['(roll_value * math.pi / 180)'], {}), '(roll_value * math.pi / 180)\n', (4255, 4283), False, 'import math\n'), ((4304, 4340), 'math.sin', 'math.sin', (['(roll_value * math.pi / 180)'], {}), '(roll_value * math.pi / 180)\n', (4312, 4340), False, 'import math\n'), ((790, 841), 'openvino.inference_engine.IENetwork', 'IENetwork', (['self.model_structure', 'self.model_weights'], {}), '(self.model_structure, self.model_weights)\n', (799, 841), False, 'from openvino.inference_engine import IENetwork, IECore\n'), ((1813, 1854), 'logging.error', 'log.error', (['"""Unsupported layers found ..."""'], {}), "('Unsupported layers found ...')\n", (1822, 1854), True, 'import logging as log\n'), ((1867, 1906), 'logging.error', 'log.error', (['"""Adding specified extension"""'], {}), "('Adding specified extension')\n", (1876, 1906), True, 'import logging as log\n'), ((2233, 2318), 'logging.error', 'log.error', (['"""ERROR: There are still unsupported layers after adding extension..."""'], {}), "('ERROR: There are still unsupported layers after adding extension...'\n )\n", (2242, 2318), 
True, 'import logging as log\n')] |
from tumor_data.SYNDataLoader import *
from tensorboardX import SummaryWriter
from torchsummaryX import summary
from driver.helper.base_syn_helper import BaseTrainHelper
class SYNHelper_Kumar(BaseTrainHelper):
    """Training helper for the Kumar baseline.

    The generator is optimised with a plain GAN loss plus a pixel-wise loss;
    boundary/tumor/style/perceptual terms are not used and are reported as
    0/None purely for API compatibility with the other helpers.
    """

    def __init__(self, generator, discriminator, criterions, config):
        super().__init__(generator, discriminator, criterions, config)

    def out_put_shape(self):
        # Only the tensorboard writer is set up; model summaries are skipped.
        self.summary_writer = SummaryWriter(self.config.tensorboard_dir)

    def test_one_batch(self, batch_gen):
        """Evaluate one batch: synthesise fake_B and report the pixel loss only."""
        source = batch_gen['real_A']
        target = batch_gen['real_B']
        tumor = batch_gen['tumor_B']
        fake_B = self.generator(source, tumor)
        pixel_loss = self.criterions['criterion_pixelwise'](fake_B, target)
        return {
            "loss_pixel": pixel_loss.item(),
            "fake_B": fake_B,
            # Unused regularisation terms, kept for a uniform return schema.
            "loss_boundary": 0,
            "loss_tumor": 0,
            "loss_style": 0,
            "loss_percep": 0,
            "loss_pixel_coarse": 0,
            "loss_boundary_coarse": 0,
            "fake_B_coarse": None,
            "boundary": None,
            "boundary_coarse": None,
            "loss_tumor_coarse": 0,
        }

    def train_generator_one_batch(self, batch_gen):
        """One generator update: adversarial + pixel-wise loss, then backward."""
        source = batch_gen['real_A']
        target = batch_gen['real_B']
        tumor = batch_gen['tumor_B']
        fake_B = self.generator(source, tumor)
        # Adversarial term: score the (detached) fake paired with its tumour
        # mask against an all-ones "valid" target map.
        pred_fake = self.discriminator(torch.cat((fake_B.detach(), tumor), 1))
        out_shape = (pred_fake.size(0), 1, pred_fake.size(2), pred_fake.size(3), pred_fake.size(4))
        valid_label = self.FloatTensor(np.ones(out_shape))
        gan_loss = self.criterions['criterion_GAN'](pred_fake, valid_label)
        # Pixel-wise reconstruction term.
        pixel_loss = self.criterions['criterion_pixelwise'](fake_B, target)
        total_loss = gan_loss + pixel_loss
        total_loss.backward()
        return {
            "loss_GAN": gan_loss.item(),
            "loss_pixel": pixel_loss.item(),
            "loss_G": total_loss.item(),
            "fake_B": fake_B,
            # Unused regularisation terms, kept for a uniform return schema.
            "loss_boundary": 0,
            "loss_tumor": 0,
            "loss_percep": 0,
            "loss_style": 0,
            "loss_pixel_coarse": 0,
            "loss_boundary_coarse": 0,
            "loss_tumor_coarse": 0,
        }
| [
"tensorboardX.SummaryWriter"
] | [((497, 539), 'tensorboardX.SummaryWriter', 'SummaryWriter', (['self.config.tensorboard_dir'], {}), '(self.config.tensorboard_dir)\n', (510, 539), False, 'from tensorboardX import SummaryWriter\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
class mobilenet(nn.Module):
    """MobileNet-v1 style network for 32x32 inputs (e.g. CIFAR-10, 10 classes).

    Fixes relative to the original draft:
    * ``conv_dw`` is now a proper method that *returns* the depthwise-separable
      block (the draft built the ``nn.Sequential`` without returning it, then
      rebound ``self.conv_dw`` inside its own body and ended with a bare
      ``raise``, so the model could never be constructed).
    * The stem outputs 32 channels so it matches ``conv_dw2``'s 32 input
      channels (the draft produced 64).
    * The BatchNorm after the pointwise convolution now uses the output
      channel count.
    * ``forward`` uses ``self.conv_1`` (the draft referenced the undefined
      ``self.conv1``).
    """

    def __init__(self):
        super(mobilenet, self).__init__()
        # Stem: full 3x3 convolution, 3 -> 32 channels, stride 1 for 32x32 inputs.
        self.conv_1 = nn.Sequential(
            nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(32),
            nn.ReLU()
        )
        # Depthwise-separable stack; stride-2 blocks halve the spatial size.
        self.conv_dw2 = self.conv_dw(32, 32, 1)
        self.conv_dw3 = self.conv_dw(32, 64, 2)
        self.conv_dw4 = self.conv_dw(64, 64, 1)
        self.conv_dw5 = self.conv_dw(64, 128, 2)
        self.conv_dw6 = self.conv_dw(128, 128, 1)
        self.conv_dw7 = self.conv_dw(128, 256, 2)
        self.conv_dw8 = self.conv_dw(256, 256, 1)
        self.conv_dw9 = self.conv_dw(256, 512, 2)
        # Classifier head.
        self.fc = nn.Linear(512, 10)

    def conv_dw(self, in_channel, out_channel, stride):
        """Build one depthwise-separable block.

        A 3x3 depthwise convolution (``groups=in_channel``) followed by a
        1x1 pointwise convolution that maps to ``out_channel`` channels,
        each followed by BatchNorm + ReLU.
        """
        return nn.Sequential(
            nn.Conv2d(in_channel, in_channel, kernel_size=3, stride=stride,
                      padding=1, groups=in_channel, bias=False),
            nn.BatchNorm2d(in_channel),
            nn.ReLU(),
            nn.Conv2d(in_channel, out_channel, kernel_size=1, stride=1,
                      padding=0, bias=False),
            nn.BatchNorm2d(out_channel),
            nn.ReLU(),
        )

    def forward(self, x):
        """Forward pass; expects NCHW input of spatial size 32x32."""
        out = self.conv_1(x)
        out = self.conv_dw2(out)
        out = self.conv_dw3(out)
        out = self.conv_dw4(out)
        out = self.conv_dw5(out)
        out = self.conv_dw6(out)
        out = self.conv_dw7(out)
        out = self.conv_dw8(out)
        out = self.conv_dw9(out)
        # Four stride-2 stages turn 32x32 into 2x2; pool it down to 1x1.
        out = F.avg_pool2d(out, 2)
        out = out.view(-1, 512)
        out = self.fc(out)
        return out
def mobilenetv1_small():
    # Factory helper: construct and return the small MobileNet-v1 model defined above.
    return mobilenet() | [
"torch.nn.BatchNorm2d",
"torch.nn.ReLU",
"torch.nn.functional.avg_pool2d",
"torch.nn.Conv2d",
"torch.nn.Linear"
] | [((745, 763), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(10)'], {}), '(512, 10)\n', (754, 763), True, 'import torch.nn as nn\n'), ((1604, 1624), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['out', '(2)'], {}), '(out, 2)\n', (1616, 1624), True, 'import torch.nn.functional as F\n'), ((212, 264), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', '(64)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(3, 64, kernel_size=3, stride=1, padding=1)\n', (221, 264), True, 'import torch.nn as nn\n'), ((278, 296), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (292, 296), True, 'import torch.nn as nn\n'), ((310, 319), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (317, 319), True, 'import torch.nn as nn\n'), ((862, 972), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channel', 'out_channel'], {'kernel_size': '(3)', 'stride': 'stride', 'padding': '(1)', 'groups': 'in_channel', 'bias': '(False)'}), '(in_channel, out_channel, kernel_size=3, stride=stride, padding=1,\n groups=in_channel, bias=False)\n', (871, 972), True, 'import torch.nn as nn\n'), ((986, 1012), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['in_channel'], {}), '(in_channel)\n', (1000, 1012), True, 'import torch.nn as nn\n'), ((1030, 1039), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1037, 1039), True, 'import torch.nn as nn\n'), ((1058, 1145), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channel', 'out_channel'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(in_channel, out_channel, kernel_size=1, stride=1, padding=0, bias\n =False)\n', (1067, 1145), True, 'import torch.nn as nn\n'), ((1158, 1184), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['in_channel'], {}), '(in_channel)\n', (1172, 1184), True, 'import torch.nn as nn\n'), ((1202, 1211), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1209, 1211), True, 'import torch.nn as nn\n')] |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from azure.ai.ml.constants import SearchSpace
from marshmallow import fields, post_load, pre_dump, ValidationError
from azure.ai.ml._schema.core.fields import StringTransformedEnum
from azure.ai.ml._schema.core.schema import PatchedSchemaMeta
class RandintSchema(metaclass=PatchedSchemaMeta):
    """Schema for the ``Randint`` sweep distribution."""

    # Discriminator field restricted to the RANDINT search-space keyword.
    type = StringTransformedEnum(required=True, allowed_values=SearchSpace.RANDINT)
    upper = fields.Integer(required=True)

    @post_load
    def make(self, data, **kwargs):
        """Build a ``Randint`` object from the deserialized field values."""
        from azure.ai.ml.sweep import Randint

        return Randint(**data)

    @pre_dump
    def predump(self, data, **kwargs):
        """Reject any object that is not a ``Randint`` before dumping."""
        from azure.ai.ml.sweep import Randint

        if isinstance(data, Randint):
            return data
        raise ValidationError("Cannot dump non-Randint object into RandintSchema")
| [
"azure.ai.ml._schema.core.fields.StringTransformedEnum",
"marshmallow.ValidationError",
"marshmallow.fields.Integer",
"azure.ai.ml.sweep.Randint"
] | [((487, 559), 'azure.ai.ml._schema.core.fields.StringTransformedEnum', 'StringTransformedEnum', ([], {'required': '(True)', 'allowed_values': 'SearchSpace.RANDINT'}), '(required=True, allowed_values=SearchSpace.RANDINT)\n', (508, 559), False, 'from azure.ai.ml._schema.core.fields import StringTransformedEnum\n'), ((572, 601), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)'}), '(required=True)\n', (586, 601), False, 'from marshmallow import fields, post_load, pre_dump, ValidationError\n'), ((716, 731), 'azure.ai.ml.sweep.Randint', 'Randint', ([], {}), '(**data)\n', (723, 731), False, 'from azure.ai.ml.sweep import Randint\n'), ((893, 961), 'marshmallow.ValidationError', 'ValidationError', (['"""Cannot dump non-Randint object into RandintSchema"""'], {}), "('Cannot dump non-Randint object into RandintSchema')\n", (908, 961), False, 'from marshmallow import fields, post_load, pre_dump, ValidationError\n')] |
#!/usr/bin/env python3
"""
Advent of Code 2020 Day 20: Jurassic Jigsaw
https://adventofcode.com/2020/day/20
Solution by <NAME>
"""
import re
from functools import reduce
SEA_MONSTER_PROFILE = """
..................#.
#....##....##....###
.#..#..#..#..#..#..."""
class Tile:
    """One square puzzle tile.

    ``content`` is a square grid of 0/1 ints. ``orient_to``/``rotate_to``
    select one of the tile's flip/rotation states; the four edge signatures
    (``top``, ``bottom``, ``left``, ``right``) are recomputed after every
    change so tiles can be matched edge-to-edge.

    Fixes relative to the original:
    * ``orient_to`` now works on a row-wise copy of ``base_content``. The
      original flipped ``base_content``'s rows in place, so earlier flip
      states leaked into later ones (orientation 2 actually produced a
      combined h+v flip and orientation 3 reproduced the identity).
    * ``rotate_to`` now records ``current_rotation``, so its early-return
      shortcut can no longer serve stale content after a nonzero rotation.
    """
    def __init__(self, id: int, content):
        self.id = id
        self.base_content = content
        self.reoriented_base_content = content
        self.content = content
        self.corner_locations = None
        self.current_rotation = 0
        self.build_signatures()

    def build_signatures(self):
        """Recompute the four edge signatures from the current content."""
        self.top = self._build_signature(self.content[0])
        self.bottom = self._build_signature(self.content[-1])
        self.left = self._build_signature([entry[0] for entry in self.content])
        self.right = self._build_signature([entry[-1] for entry in self.content])

    def orient_to(self, orientation):
        """Select one of four flip states (bit 0: horizontal, bit 1: vertical)."""
        # Copy rows so the in-place flips never corrupt base_content.
        self.content = [row.copy() for row in self.base_content]
        if orientation & 1:
            self.flip_horizontally()
        if orientation & 2:
            self.flip_vertically()
        self.reoriented_base_content = self.content
        # A fresh orientation starts unrotated.
        self.current_rotation = 0
        self.build_signatures()

    def rotate_to(self, rotation: int):
        """Set content to the reoriented base rotated clockwise by `rotation` quarter turns."""
        rotation %= 4
        if self.current_rotation == rotation:
            return
        grid = [row.copy() for row in self.reoriented_base_content]
        for _ in range(rotation):
            # 90 degrees clockwise: new row i is column i of the grid, read bottom-up.
            grid = [[grid[-j - 1][i] for j in range(len(grid))]
                    for i in range(len(grid[0]))]
        self.content = grid
        self.current_rotation = rotation
        self.build_signatures()

    def flip_vertically(self):
        """Reverse the row order of the current content in place."""
        for i in range(len(self.content) // 2):
            self.content[i], self.content[-i - 1] = self.content[-i - 1], self.content[i]

    def flip_horizontally(self):
        """Reverse each row of the current content in place."""
        for i in range(len(self.content)):
            for j in range(len(self.content[i]) // 2):
                self.content[i][j], self.content[i][-j - 1] = self.content[i][-j - 1], self.content[i][j]

    def linkable_with_edges(self, available_edges):
        """Return True if any edge signature matches one of the open edges."""
        return (self.top in available_edges or
                self.bottom in available_edges or
                self.left in available_edges or
                self.right in available_edges)

    def _build_signature(self, elements: list):
        """Encode a 0/1 edge as an integer (first element = most significant bit)."""
        total = 0
        for i, value in enumerate(reversed(list(elements))):
            if value:
                total += 2 ** i
        return total
class PuzzleBuilder:
    """Backtracking solver that arranges Tiles into a square grid.

    Tiles are placed on an integer (x, y) plane starting from (0, 0);
    `tile_for_location` / `location_for_tile` hold the two-way mapping.
    """
    def __init__(self, tiles: set):
        self.initial_tiles = tiles
        self.build_success = False
        # Cached (min_x, min_y, max_x, max_y) of placed tiles; None = stale.
        self.corner_locations = None
        self.side_length = int(len(tiles) ** 0.5)
        if self.side_length ** 2 != len(tiles):
            raise Exception("Number of pieces must be square")
    def build(self):
        """Attempt to assemble the full puzzle; returns True on success."""
        # Seed the grid with the lowest-id tile at the origin for determinism.
        initial_tile = reduce(lambda a, b: a if a.id < b.id else b, self.initial_tiles)
        unlinked_tiles = self.initial_tiles.copy()
        unlinked_tiles.remove(initial_tile)
        self.tile_for_location = {}
        self.tile_for_location[(0, 0)] = initial_tile
        self.location_for_tile = {}
        self.location_for_tile[initial_tile] = (0, 0)
        if self._build(set([initial_tile]), unlinked_tiles):
            self.build_success = True
            return True
        return False
    def ordered_tiles(self):
        """Return the solved grid as rows of Tiles (top row first)."""
        if not self.build_success:
            raise Exception("Cannot fetched ordered tiles without build success")
        min_x, min_y, _, _ = self._find_corner_locations()
        ordered_tiles = []
        for y in range(min_y, min_y + self.side_length):
            row = []
            for x in range(min_x, min_x + self.side_length):
                row.append(self.tile_for_location[(x, y)])
            ordered_tiles.append(row)
        # y increases upwards, so reverse to get top row first.
        return list(reversed(ordered_tiles))
    def can_be_placed(self, candidate_tile: 'Tile', candidate_location: tuple):
        """True if the candidate's edges agree with every already-placed neighbour."""
        hits = 0
        x, y = candidate_location
        above = x, y + 1
        if above in self.tile_for_location:
            hits += 1
            above_tile = self.tile_for_location[above]
            if above_tile.bottom != candidate_tile.top:
                return False
        below = x, y - 1
        if below in self.tile_for_location:
            hits += 1
            below_tile = self.tile_for_location[below]
            if below_tile.top != candidate_tile.bottom:
                return False
        to_the_left = x - 1, y
        if to_the_left in self.tile_for_location:
            hits += 1
            to_the_left_tile = self.tile_for_location[to_the_left]
            if to_the_left_tile.right != candidate_tile.left:
                return False
        to_the_right = x + 1, y
        if to_the_right in self.tile_for_location:
            hits += 1
            to_the_right_tile = self.tile_for_location[to_the_right]
            if to_the_right_tile.left != candidate_tile.right:
                return False
        if hits == 0:
            raise Exception("Location {} should link to at least one tile".format(candidate_location))
        return True
    def corner_tile_ids_product(self):
        """Product of the ids of the four corner tiles (part 1 answer)."""
        if not self.build_success:
            raise Exception("Corners can only be queried once built")
        min_x, min_y, max_x, max_y = self._find_corner_locations()
        corner_tiles = [\
            self.tile_for_location[(min_x, min_y)], \
            self.tile_for_location[(min_x, max_y)], \
            self.tile_for_location[(max_x, min_y)], \
            self.tile_for_location[(max_x, max_y)], \
            ]
        return reduce(lambda a, b: a * b, map(lambda a: a.id, corner_tiles))
    def _calculate_available_edges(self, linked_tiles: set):
        """Collect edge signatures of placed tiles that face an empty cell."""
        available_edges = set()
        for tile in linked_tiles:
            x, y = self.location_for_tile[tile]
            if (x, y + 1) not in self.tile_for_location:
                available_edges.add(tile.top)
            if (x, y - 1) not in self.tile_for_location:
                available_edges.add(tile.bottom)
            if (x - 1, y) not in self.tile_for_location:
                available_edges.add(tile.left)
            if (x + 1, y) not in self.tile_for_location:
                available_edges.add(tile.right)
        return available_edges
    def _build(self, linked_tiles: set, unlinked_tiles: set):
        """Recursive backtracking: try every tile/orientation/rotation/side.

        Returns True when all tiles are placed; None when this branch fails
        (callers only test truthiness).
        """
        if len(unlinked_tiles) == 0:
            return True
        available_edges = self._calculate_available_edges(linked_tiles)
        for linked_tile in linked_tiles:
            linked_location = self.location_for_tile[linked_tile]
            for unlinked_tile in unlinked_tiles:
                for orientation in range(4):
                    unlinked_tile.orient_to(orientation)
                    for rotation in range(4):
                        unlinked_tile.rotate_to(rotation)
                        # Cheap pre-filter before trying concrete placements.
                        if not unlinked_tile.linkable_with_edges(available_edges):
                            continue
                        for side in range(4):
                            new_location = self._side_available(linked_location, side)
                            if new_location:
                                if self.can_be_placed(unlinked_tile, new_location):
                                    # Tentatively place, recurse, undo on failure.
                                    self.location_for_tile[unlinked_tile] = new_location
                                    self.tile_for_location[new_location] = unlinked_tile
                                    self._reset_corner_locations()
                                    linked_tiles_copy = linked_tiles.copy()
                                    unlinked_tiles_copy = unlinked_tiles.copy()
                                    linked_tiles_copy.add(unlinked_tile)
                                    unlinked_tiles_copy.remove(unlinked_tile)
                                    rv = self._build(linked_tiles_copy, unlinked_tiles_copy)
                                    if rv:
                                        return True
                                    else:
                                        del self.location_for_tile[unlinked_tile]
                                        del self.tile_for_location[new_location]
                                        self._reset_corner_locations()
        return None
    def _reset_corner_locations(self):
        # Invalidate the cached bounding box after any placement change.
        self.corner_locations = None
    def _find_corner_locations(self):
        """Return (min_x, min_y, max_x, max_y) over placed tiles, cached."""
        if self.corner_locations:
            return self.corner_locations
        min_x, min_y, max_x, max_y = 0, 0, 0, 0
        for location in self.tile_for_location.keys():
            x, y = location
            min_x, min_y = min(min_x, x), min(min_y, y)
            max_x, max_y = max(max_x, x), max(max_y, y)
        self.corner_locations = min_x, min_y, max_x, max_y
        return self.corner_locations
    def _side_available(self, location, side):
        """Return the empty neighbour cell on the given side, or None.

        side 0 = above, 1 = right, 2 = below, 3 = left.
        """
        x, y = location
        if side == 0:
            candidate = x, y + 1
        elif side == 1:
            candidate = x + 1, y
        elif side == 2:
            candidate = x, y - 1
        elif side == 3:
            candidate = x - 1, y
        else:
            raise Exception("Illegal side: '{}'".format(side))
        min_x, min_y, max_x, max_y = self._find_corner_locations()
        # NOTE(review): bounds check is asymmetric (`min_x + x`, `min_y + y`)
        # and never rejects `candidate` itself — verify the intended invariant.
        if max_x - x > self.side_length or \
           min_x + x > self.side_length or \
           max_y - y > self.side_length or \
           min_y + y > self.side_length:
            return None
        return None if candidate in self.tile_for_location else candidate
def generate_combined_tile(ordered_tiles: list):
    """Stitch a grid of Tiles into one Tile (id 0), dropping each tile's 1-cell border."""
    combined_rows = []
    for tile_row in ordered_tiles:
        interior_end = len(tile_row[0].content) - 1
        for y in range(1, interior_end):
            merged_line = []
            for tile in tile_row:
                # Keep only the interior of each tile row (strip first/last cell).
                merged_line.extend(tile.content[y][1:-1])
            combined_rows.append(merged_line)
    return Tile(0, combined_rows)
def parse_sea_monster():
    """Convert SEA_MONSTER_PROFILE into a 0/1 grid, skipping its leading blank line."""
    profile_lines = SEA_MONSTER_PROFILE.split("\n")[1:]
    return [[1 if char == '#' else 0 for char in row] for row in profile_lines]
def count_non_sea_monster_positives(tile: Tile):
    """Return the minimum count of '#' cells not belonging to any sea monster,
    taken over all flip/rotation states of the tile (part 2 answer)."""
    sea_monster = parse_sea_monster()
    # -1 sentinel: no orientation evaluated yet.
    non_sea_monster_count_low_watermark = -1
    for orientation in range(4):
        tile.orient_to(orientation)
        for rotation in range(4):
            tile.rotate_to(rotation)
            sm_counter = 0
            # (y, x) cells covered by at least one sea monster match.
            sm_marked = set()
            # Slide the monster template over every anchor position.
            for y in range(len(tile.content) - len(sea_monster)):
                tile_row = tile.content[y]
                for x in range(len(tile_row) - len(sea_monster[0])):
                    sm_found = True
                    for sm_y in range(len(sea_monster)):
                        for sm_x in range(len(sea_monster[sm_y])):
                            # Every '#' of the template must be set in the tile.
                            if sea_monster[sm_y][sm_x] == 1:
                                if not tile.content[y + sm_y][x + sm_x] == 1:
                                    sm_found = False
                                    break
                        if not sm_found:
                            break
                    if sm_found:
                        sm_counter += 1
                        # Mark the matched cells so they are excluded below.
                        for sm_y in range(len(sea_monster)):
                            for sm_x in range(len(sea_monster[sm_y])):
                                if sea_monster[sm_y][sm_x] == 1:
                                    sm_marked.add((y + sm_y, x + sm_x))
            # Count set cells not covered by any monster in this orientation.
            non_sea_monster_count = 0
            for y in range(len(tile.content)):
                for x in range(len(tile.content[y])):
                    if tile.content[y][x] and not (y, x) in sm_marked:
                        non_sea_monster_count += 1
            if non_sea_monster_count_low_watermark > 0:
                non_sea_monster_count_low_watermark = min(non_sea_monster_count_low_watermark, non_sea_monster_count)
            else:
                non_sea_monster_count_low_watermark = non_sea_monster_count
    return non_sea_monster_count_low_watermark
def parse_input(input):
    """Parse the puzzle input stream into a set of Tile objects.

    Expects blocks of the form "Tile <id>:" followed by rows of '#'/'.'
    characters, separated by blank lines.  Raises Exception on a blank
    line outside a tile block or on any other unexpected content.
    """
    tiles = set()
    current_tile_id, current_tile_contents = None, None
    line_count = -1
    for line in input:
        line = line.rstrip()
        line_count += 1
        header_match = re.search(r'Tile (\d+):', line)
        if header_match:
            current_tile_id = int(header_match.group(1))
            current_tile_contents = []
            continue
        elif line == '':
            # BUGFIX: identity check instead of truthiness — a tile id of 0
            # is valid and must not be treated as "no tile in progress".
            if current_tile_id is None:
                raise Exception("unexpected empty line")
            tiles.add(Tile(current_tile_id, current_tile_contents))
            current_tile_id, current_tile_contents = None, None
        else:
            content_match = re.fullmatch(r'([\#\.]+)', line)
            if content_match:
                current_tile_contents.append([1 if c == '#' else 0 for c in line])
            else:
                raise Exception("unexpected content: '{}' on line {}".format(line, line_count))
    # Flush a trailing tile when the input does not end with a blank line.
    if current_tile_id is not None and current_tile_contents:
        tiles.add(Tile(current_tile_id, current_tile_contents))
    return tiles
if __name__ == '__main__':
    # Input file lives next to this script as <script>_input.txt.
    # NOTE(review): rstrip('.py') strips any trailing '.', 'p' or 'y'
    # characters, not the literal suffix; it happens to work for typical
    # script names but str.removesuffix would be safer.
    input_filename = __file__.rstrip('.py') + '_input.txt'
    with open(input_filename, 'r') as file:
        data = parse_input(file)
    puzzle_builder = PuzzleBuilder(data)
    puzzle_builder.build()
    # Part 1: product of the four corner tile ids.
    part_1 = puzzle_builder.corner_tile_ids_product()
    assert part_1 == 32287787075651  # regression guard for the known answer
    print("The solution to Part 1 is {}".format(part_1))
    # Part 2: stitch the tiles together and count '#' cells not covered
    # by any sea monster.
    ordered_tiles = puzzle_builder.ordered_tiles()
    combined_tile = generate_combined_tile(ordered_tiles)
    part_2 = count_non_sea_monster_positives(combined_tile)
    assert part_2 == 1939  # regression guard for the known answer
    print("The solution to Part 2 is {}".format(part_2))
| [
"functools.reduce",
"re.fullmatch",
"re.search"
] | [((2941, 3005), 'functools.reduce', 'reduce', (['(lambda a, b: a if a.id < b.id else b)', 'self.initial_tiles'], {}), '(lambda a, b: a if a.id < b.id else b, self.initial_tiles)\n', (2947, 3005), False, 'from functools import reduce\n'), ((12452, 12483), 're.search', 're.search', (['"""Tile (\\\\d+):"""', 'line'], {}), "('Tile (\\\\d+):', line)\n", (12461, 12483), False, 'import re\n'), ((12918, 12951), 're.fullmatch', 're.fullmatch', (['"""([\\\\#\\\\.]+)"""', 'line'], {}), "('([\\\\#\\\\.]+)', line)\n", (12930, 12951), False, 'import re\n')] |
import json
import logging
import os
import sys
from argparse import ArgumentParser
import re
import numpy as np
import pandas as pd
import torch
from transformers import GPT2Tokenizer
from src.data.cleaning import mask_not_na, inds_unique, mask_long_enough
from src.data.nli import TransformersSeqPairDataset
from src.models.pg_trainer import AutoregressivePGTrainer
# CLI configuration for the paraphrase-generation fine-tuning run.
parser = ArgumentParser()
parser.add_argument("--experiment_dir", type=str, default="debug")
# CSV of paraphrase pairs (columns sequence1/sequence2/label, see __main__).
parser.add_argument("--paraphrase_path", type=str,
                    default="/home/matej/Documents/paraphrase-nli/experiments/SciTail_NLI/PARAPHRASE_IDENTIFICATION/id-scitail-roberta-base-argmax/all_para_id.csv")
parser.add_argument("--pretrained_name_or_path", type=str, default="gpt2")
parser.add_argument("--model_type", type=str, default="gpt2",
                    choices=["gpt2"])
parser.add_argument("--num_epochs", type=int, default=10)
# Max tokenized length of the "<src> <PARA> <tgt> <EOS>" sequence.
parser.add_argument("--max_seq_len", type=int, default=79)
parser.add_argument("--batch_size", type=int, default=8)
parser.add_argument("--learning_rate", type=float, default=2e-5)
parser.add_argument("--early_stopping_rounds", type=int, default=5)
parser.add_argument("--validate_every_n_examples", type=int, default=5000)
parser.add_argument("--random_seed", type=int, default=17)
parser.add_argument("--use_cpu", action="store_true")
if __name__ == "__main__":
args = parser.parse_args()
DEVICE = torch.device("cpu") if args.use_cpu else torch.device("cuda")
if not os.path.exists(args.experiment_dir):
os.makedirs(args.experiment_dir)
if args.random_seed is not None:
np.random.seed(args.random_seed)
torch.manual_seed(args.random_seed)
with open(os.path.join(args.experiment_dir, "experiment_config.json"), "w") as f:
json.dump(vars(args), fp=f, indent=4)
# Set up logging to file and stdout
logger = logging.getLogger()
logger.setLevel(logging.INFO)
for curr_handler in [logging.StreamHandler(sys.stdout),
logging.FileHandler(os.path.join(args.experiment_dir, "experiment.log"))]:
curr_handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-5.5s] %(message)s"))
logger.addHandler(curr_handler)
for k, v in vars(args).items():
v_str = str(v)
v_str = f"...{v_str[-(50 - 3):]}" if len(v_str) > 50 else v_str
logging.info(f"|{k:30s}|{v_str:50s}|")
# No AutoTokenizerFast at the moment?
if args.model_type == "gpt2":
tokenizer_cls = GPT2Tokenizer
else:
raise NotImplementedError(f"Model_type '{args.model_type}' is not supported")
tokenizer = tokenizer_cls.from_pretrained(args.pretrained_name_or_path)
tokenizer.add_special_tokens({
"eos_token": "<EOS>",
"pad_token": "<PAD>",
"additional_special_tokens": ["<PARA>"]
})
tokenizer.save_pretrained(args.experiment_dir)
SEPARATOR_ID = int(tokenizer.encode("<PARA>", add_special_tokens=False)[0])
df = pd.read_csv(args.paraphrase_path)
# Basic data cleaning - remove NAs (?), duplicate pairs, pairs with one sequence very short
df = df.loc[mask_not_na(df["sequence1"], df["sequence2"])]
df = df.iloc[inds_unique(df["sequence1"], df["sequence2"])]
df = df.loc[mask_long_enough(df["sequence1"], df["sequence2"])]
df = df.loc[df["label"] == 1].reset_index(drop=True)
df["formatted"] = list(map(
lambda pair: f"{pair[0]} <PARA> {pair[1]} {tokenizer.eos_token}",
zip(df["sequence1"].tolist(), df["sequence2"].tolist())
))
num_ex = df.shape[0]
indices = np.random.permutation(num_ex)
train_df = df.iloc[indices[:int(0.7 * num_ex)]]
dev_df = df.iloc[indices[int(0.7 * num_ex): int(0.85 * num_ex)]]
test_df = df.iloc[indices[int(0.85 * num_ex):]]
train_df.drop("formatted", axis=1).to_csv(os.path.join(args.experiment_dir, "train.csv"), sep=",", index=False)
dev_df.drop("formatted", axis=1).to_csv(os.path.join(args.experiment_dir, "dev.csv"), sep=",", index=False)
test_df.drop("formatted", axis=1).to_csv(os.path.join(args.experiment_dir, "test.csv"), sep=",", index=False)
_encoded_train = tokenizer.batch_encode_plus(
train_df["formatted"].tolist(),
max_length=args.max_seq_len, padding="max_length", truncation="longest_first", return_tensors="pt"
)
_train_labels = _encoded_train["input_ids"].clone()
for idx_ex in range(_train_labels.shape[0]):
for idx_token in range(args.max_seq_len):
_train_labels[idx_ex, idx_token] = -100
if _encoded_train["input_ids"][idx_ex, idx_token] == SEPARATOR_ID:
break
_encoded_train["labels"] = _train_labels
_encoded_dev = tokenizer.batch_encode_plus(
dev_df["formatted"].tolist(),
max_length=args.max_seq_len, padding="max_length", truncation="longest_first", return_tensors="pt"
)
_dev_labels = _encoded_dev["input_ids"].clone()
for idx_ex in range(_dev_labels.shape[0]):
for idx_token in range(args.max_seq_len):
_dev_labels[idx_ex, idx_token] = -100
if _encoded_dev["input_ids"][idx_ex, idx_token] == SEPARATOR_ID:
break
_encoded_dev["labels"] = _dev_labels
_encoded_test = tokenizer.batch_encode_plus(
test_df["formatted"].tolist(),
max_length=args.max_seq_len, padding="max_length", truncation="longest_first", return_tensors="pt"
)
_test_labels = _encoded_test["input_ids"].clone()
for idx_ex in range(_test_labels.shape[0]):
for idx_token in range(args.max_seq_len):
_test_labels[idx_ex, idx_token] = -100
if _encoded_test["input_ids"][idx_ex, idx_token] == SEPARATOR_ID:
break
_encoded_test["labels"] = _test_labels
train_set = TransformersSeqPairDataset(**_encoded_train)
dev_set = TransformersSeqPairDataset(**_encoded_dev)
test_set = TransformersSeqPairDataset(**_encoded_test)
logging.info(f"Loaded {len(train_set)} training examples, {len(dev_set)} dev examples and "
f"{len(test_set)} test examples")
pg_trainer = AutoregressivePGTrainer(args.experiment_dir,
pretrained_model_name_or_path=args.pretrained_name_or_path,
tokenizer_path=args.experiment_dir,
batch_size=args.batch_size,
learning_rate=args.learning_rate,
validate_every_n_steps=args.validate_every_n_examples,
early_stopping_tol=args.early_stopping_rounds,
device=("cuda" if not args.use_cpu else "cpu"))
pg_trainer.run(train_dataset=train_set, val_dataset=dev_set, num_epochs=args.num_epochs)
# Reload best model
pg_trainer = AutoregressivePGTrainer.from_pretrained(args.experiment_dir)
dev_prompts = dev_df["sequence1"].apply(lambda s: f"{s} <PARA>")
test_prompts = test_df["sequence1"].apply(lambda s: f"{s} <PARA>")
dev_df["sequence2"].to_csv(os.path.join(args.experiment_dir, "dev_ref.txt"), sep=",", index=False, header=False)
test_df["sequence2"].to_csv(os.path.join(args.experiment_dir, "test_ref.txt"), sep=",", index=False, header=False)
dev_df["sequence1"].to_csv(os.path.join(args.experiment_dir, "dev_input_copy.txt"), sep=",", index=False, header=False)
test_df["sequence1"].to_csv(os.path.join(args.experiment_dir, "test_input_copy.txt"), sep=",", index=False, header=False)
strategies = {
"greedy": {},
"beam": {"num_beams": 5, "early_stopping": True},
"top_p": {"do_sample": True, "top_p": 0.9, "top_k": 0},
"top_k": {"do_sample": True, "top_k": 10}
}
for curr_strat, strat_kwargs in strategies.items():
dev_pred_para = pg_trainer.generate(dev_prompts.tolist(), max_seq_len=args.max_seq_len, strategy=strat_kwargs)
with open(os.path.join(args.experiment_dir, f"dev_{curr_strat}_hyp.txt"), "w", encoding="utf-8") as f:
for _txt in dev_pred_para:
print(re.sub(r"(\n)+", " ", _txt.strip()), file=f)
test_pred_para = pg_trainer.generate(test_prompts.tolist(), max_seq_len=args.max_seq_len, strategy=strat_kwargs)
with open(os.path.join(args.experiment_dir, f"test_{curr_strat}_hyp.txt"), "w", encoding="utf-8") as f:
for _txt in test_pred_para:
print(re.sub(r"(\n)+", " ", _txt.strip()), file=f)
| [
"logging.getLogger",
"src.data.nli.TransformersSeqPairDataset",
"logging.StreamHandler",
"pandas.read_csv",
"src.data.cleaning.inds_unique",
"logging.info",
"os.path.exists",
"argparse.ArgumentParser",
"src.data.cleaning.mask_not_na",
"numpy.random.seed",
"numpy.random.permutation",
"src.data.... | [((380, 396), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (394, 396), False, 'from argparse import ArgumentParser\n'), ((1810, 1829), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (1827, 1829), False, 'import logging\n'), ((2809, 2842), 'pandas.read_csv', 'pd.read_csv', (['args.paraphrase_path'], {}), '(args.paraphrase_path)\n', (2820, 2842), True, 'import pandas as pd\n'), ((3370, 3399), 'numpy.random.permutation', 'np.random.permutation', (['num_ex'], {}), '(num_ex)\n', (3391, 3399), True, 'import numpy as np\n'), ((5367, 5411), 'src.data.nli.TransformersSeqPairDataset', 'TransformersSeqPairDataset', ([], {}), '(**_encoded_train)\n', (5393, 5411), False, 'from src.data.nli import TransformersSeqPairDataset\n'), ((5423, 5465), 'src.data.nli.TransformersSeqPairDataset', 'TransformersSeqPairDataset', ([], {}), '(**_encoded_dev)\n', (5449, 5465), False, 'from src.data.nli import TransformersSeqPairDataset\n'), ((5478, 5521), 'src.data.nli.TransformersSeqPairDataset', 'TransformersSeqPairDataset', ([], {}), '(**_encoded_test)\n', (5504, 5521), False, 'from src.data.nli import TransformersSeqPairDataset\n'), ((5669, 6040), 'src.models.pg_trainer.AutoregressivePGTrainer', 'AutoregressivePGTrainer', (['args.experiment_dir'], {'pretrained_model_name_or_path': 'args.pretrained_name_or_path', 'tokenizer_path': 'args.experiment_dir', 'batch_size': 'args.batch_size', 'learning_rate': 'args.learning_rate', 'validate_every_n_steps': 'args.validate_every_n_examples', 'early_stopping_tol': 'args.early_stopping_rounds', 'device': "('cuda' if not args.use_cpu else 'cpu')"}), "(args.experiment_dir, pretrained_model_name_or_path=\n args.pretrained_name_or_path, tokenizer_path=args.experiment_dir,\n batch_size=args.batch_size, learning_rate=args.learning_rate,\n validate_every_n_steps=args.validate_every_n_examples,\n early_stopping_tol=args.early_stopping_rounds, device='cuda' if not\n args.use_cpu else 'cpu')\n", (5692, 6040), False, 
'from src.models.pg_trainer import AutoregressivePGTrainer\n'), ((6225, 6285), 'src.models.pg_trainer.AutoregressivePGTrainer.from_pretrained', 'AutoregressivePGTrainer.from_pretrained', (['args.experiment_dir'], {}), '(args.experiment_dir)\n', (6264, 6285), False, 'from src.models.pg_trainer import AutoregressivePGTrainer\n'), ((1388, 1407), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (1400, 1407), False, 'import torch\n'), ((1429, 1449), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (1441, 1449), False, 'import torch\n'), ((1458, 1493), 'os.path.exists', 'os.path.exists', (['args.experiment_dir'], {}), '(args.experiment_dir)\n', (1472, 1493), False, 'import os\n'), ((1497, 1529), 'os.makedirs', 'os.makedirs', (['args.experiment_dir'], {}), '(args.experiment_dir)\n', (1508, 1529), False, 'import os\n'), ((1567, 1599), 'numpy.random.seed', 'np.random.seed', (['args.random_seed'], {}), '(args.random_seed)\n', (1581, 1599), True, 'import numpy as np\n'), ((1602, 1637), 'torch.manual_seed', 'torch.manual_seed', (['args.random_seed'], {}), '(args.random_seed)\n', (1619, 1637), False, 'import torch\n'), ((1883, 1916), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (1904, 1916), False, 'import logging\n'), ((2248, 2286), 'logging.info', 'logging.info', (['f"""|{k:30s}|{v_str:50s}|"""'], {}), "(f'|{k:30s}|{v_str:50s}|')\n", (2260, 2286), False, 'import logging\n'), ((2949, 2994), 'src.data.cleaning.mask_not_na', 'mask_not_na', (["df['sequence1']", "df['sequence2']"], {}), "(df['sequence1'], df['sequence2'])\n", (2960, 2994), False, 'from src.data.cleaning import mask_not_na, inds_unique, mask_long_enough\n'), ((3010, 3055), 'src.data.cleaning.inds_unique', 'inds_unique', (["df['sequence1']", "df['sequence2']"], {}), "(df['sequence1'], df['sequence2'])\n", (3021, 3055), False, 'from src.data.cleaning import mask_not_na, inds_unique, mask_long_enough\n'), ((3070, 3120), 
'src.data.cleaning.mask_long_enough', 'mask_long_enough', (["df['sequence1']", "df['sequence2']"], {}), "(df['sequence1'], df['sequence2'])\n", (3086, 3120), False, 'from src.data.cleaning import mask_not_na, inds_unique, mask_long_enough\n'), ((3608, 3654), 'os.path.join', 'os.path.join', (['args.experiment_dir', '"""train.csv"""'], {}), "(args.experiment_dir, 'train.csv')\n", (3620, 3654), False, 'import os\n'), ((3719, 3763), 'os.path.join', 'os.path.join', (['args.experiment_dir', '"""dev.csv"""'], {}), "(args.experiment_dir, 'dev.csv')\n", (3731, 3763), False, 'import os\n'), ((3829, 3874), 'os.path.join', 'os.path.join', (['args.experiment_dir', '"""test.csv"""'], {}), "(args.experiment_dir, 'test.csv')\n", (3841, 3874), False, 'import os\n'), ((6450, 6498), 'os.path.join', 'os.path.join', (['args.experiment_dir', '"""dev_ref.txt"""'], {}), "(args.experiment_dir, 'dev_ref.txt')\n", (6462, 6498), False, 'import os\n'), ((6565, 6614), 'os.path.join', 'os.path.join', (['args.experiment_dir', '"""test_ref.txt"""'], {}), "(args.experiment_dir, 'test_ref.txt')\n", (6577, 6614), False, 'import os\n'), ((6681, 6736), 'os.path.join', 'os.path.join', (['args.experiment_dir', '"""dev_input_copy.txt"""'], {}), "(args.experiment_dir, 'dev_input_copy.txt')\n", (6693, 6736), False, 'import os\n'), ((6803, 6859), 'os.path.join', 'os.path.join', (['args.experiment_dir', '"""test_input_copy.txt"""'], {}), "(args.experiment_dir, 'test_input_copy.txt')\n", (6815, 6859), False, 'import os\n'), ((1650, 1709), 'os.path.join', 'os.path.join', (['args.experiment_dir', '"""experiment_config.json"""'], {}), "(args.experiment_dir, 'experiment_config.json')\n", (1662, 1709), False, 'import os\n'), ((1945, 1996), 'os.path.join', 'os.path.join', (['args.experiment_dir', '"""experiment.log"""'], {}), "(args.experiment_dir, 'experiment.log')\n", (1957, 1996), False, 'import os\n'), ((2028, 2093), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s [%(levelname)-5.5s] 
%(message)s"""'], {}), "('%(asctime)s [%(levelname)-5.5s] %(message)s')\n", (2045, 2093), False, 'import logging\n'), ((7266, 7328), 'os.path.join', 'os.path.join', (['args.experiment_dir', 'f"""dev_{curr_strat}_hyp.txt"""'], {}), "(args.experiment_dir, f'dev_{curr_strat}_hyp.txt')\n", (7278, 7328), False, 'import os\n'), ((7572, 7635), 'os.path.join', 'os.path.join', (['args.experiment_dir', 'f"""test_{curr_strat}_hyp.txt"""'], {}), "(args.experiment_dir, f'test_{curr_strat}_hyp.txt')\n", (7584, 7635), False, 'import os\n')] |
import numpy as np
from pyspark.sql import SparkSession
from pyspark import SparkContext, SparkConf
from TransEmodule import utils
def check_entities(x, map):
    """Return the value mapped to `x`, or None when `x` is not a key.

    This is exactly `dict.get` with its default of None; the parameter
    name `map` (which shadows the builtin) is kept for backward
    compatibility with existing callers.
    """
    return map.get(x)
def calculate_rankings(rank_list):
    """Aggregate an RDD of 1-element rank tuples into (mean rank, hits@10).

    `hits` is the fraction of ranks that are <= 10.
    """
    ranks = rank_list.map(lambda t: t[0]).persist()

    def _pairwise_sum(a, b):
        # Combine (value_sum, count) accumulators.
        return (a[0] + b[0], a[1] + b[1])

    total, count = ranks.map(lambda r: (r, 1)).reduce(_pairwise_sum)
    hit_total, hit_count = ranks.map(lambda r: (1 if r <= 10 else 0, 1)).reduce(_pairwise_sum)
    return total / count, hit_total / hit_count
def testing(partition, test_entities_to_id, test_labels_to_id,
            entities_to_id_map, label_to_id_map, entity_embedding,
            label_embedding):
    # Rank every (head, label, tail) triple of this partition against all
    # entities (link-prediction evaluation).  All *_to_id / embedding
    # arguments appear to be Spark broadcast variables (accessed via .value).
    rank_list = []
    i = 0
    for (h, l, t) in partition:
        # get train ids from testset ids
        h_train = check_entities(utils.get_id_by_value(test_entities_to_id.value, h)[0],
                                 entities_to_id_map.value)
        l_train = check_entities(utils.get_id_by_value(test_labels_to_id.value, l)[0],
                                 label_to_id_map.value)
        t_train = check_entities(utils.get_id_by_value(test_entities_to_id.value, t)[0],
                                 entities_to_id_map.value)
        # Skip triples whose head/label/tail were not seen during training.
        if h_train is None or l_train is None or t_train is None:
            continue
        # head
        # Score every candidate head h': squared norm of (h' + l - t), then
        # record the position of the true head in the distance-sorted order.
        corrupted_entities = entity_embedding.value.vector + label_embedding.value.vector[l_train] - entity_embedding.value.vector[t_train]
        distances = np.apply_along_axis(lambda x: np.sum(np.square(x)), 1, corrupted_entities)
        indices = np.argsort(distances)
        rank = np.where(indices == h_train)
        rank_list.append(rank[0])
        # tail
        # Score every candidate tail t': squared norm of (h + l - t').
        corrupted_entities = entity_embedding.value.vector[h_train] + label_embedding.value.vector[l_train]
        distances = np.apply_along_axis(lambda x: np.sum(np.square(corrupted_entities - x)), 1, entity_embedding.value.vector)
        indices = np.argsort(distances)
        rank = np.where(indices == t_train)
        rank_list.append(rank[0])
        # Progress report every 50 triples: running mean rank and hits@10.
        if i % 50 == 0:
            rank_list_baby = np.concatenate(rank_list, axis=0)
            print("Mean: " + str(np.mean(rank_list_baby)))
            print("Hit: " + str(np.mean(rank_list_baby <= 10)*100))
            print(i)
        i += 1
    return rank_list
def test(testset, test_entities_to_id, test_labels_to_id,
         entities_to_id_map, label_to_id_map, entity_embedding,
         label_embedding):
    """Evaluate the embeddings on `testset`, returning (mean rank, hits@10).

    Broadcasts every lookup table and embedding once so each Spark worker
    can rank its partition locally via `testing`.  Relies on the global
    SparkContext `sc`.
    """
    triples_rdd = sc.parallelize(testset).persist()

    # Ship the read-only data to the workers a single time.
    bc_test_entities = sc.broadcast(test_entities_to_id)
    bc_test_labels = sc.broadcast(test_labels_to_id)
    bc_entity_emb = sc.broadcast(entity_embedding)
    bc_label_emb = sc.broadcast(label_embedding)
    bc_entity_map = sc.broadcast(entities_to_id_map)
    bc_label_map = sc.broadcast(label_to_id_map)

    ranks = triples_rdd.mapPartitions(
        lambda part: testing(part,
                             bc_test_entities,
                             bc_test_labels,
                             bc_entity_map,
                             bc_label_map,
                             bc_entity_emb,
                             bc_label_emb))
    return calculate_rankings(ranks)
if __name__ == "__main__":
# change the paths if you are not using
# our terraform project!
# create the session
conf = SparkConf().setAll([("spark.worker.cleanup.enabled", True),
("spark.serializer",
"org.apache.spark.serializer.KryoSerializer"),
("spark.kryo.registrationRequired", "false"),
("spark.master", "spark://s01:7077")])
sc = SparkContext(conf=conf).getOrCreate()
sc.addPyFile('TransEmodule.zip')
entity_embedding, label_embedding = utils.restore('/home/ubuntu/entity_embedding_999.pkl',
'/home/ubuntu/label_embedding_999.pkl')
ds_to_id, entities_to_id_map, label_to_id_map = utils.load_dataset(sc, "hdfs://s01:9000/train2.tsv")
testset, test_entities_to_id, test_labels_to_id = utils.load_dataset(sc, "hdfs://s01:9000/test2.tsv")
mean, hits = test(testset, test_entities_to_id, test_labels_to_id,
entities_to_id_map, label_to_id_map,
entity_embedding, label_embedding)
print("Mean: " + str(mean) + "\nHits@10: " + str(hits))
| [
"numpy.mean",
"TransEmodule.utils.get_id_by_value",
"numpy.where",
"TransEmodule.utils.load_dataset",
"pyspark.SparkConf",
"numpy.square",
"numpy.argsort",
"TransEmodule.utils.restore",
"numpy.concatenate",
"pyspark.SparkContext"
] | [((4227, 4325), 'TransEmodule.utils.restore', 'utils.restore', (['"""/home/ubuntu/entity_embedding_999.pkl"""', '"""/home/ubuntu/label_embedding_999.pkl"""'], {}), "('/home/ubuntu/entity_embedding_999.pkl',\n '/home/ubuntu/label_embedding_999.pkl')\n", (4240, 4325), False, 'from TransEmodule import utils\n'), ((4429, 4481), 'TransEmodule.utils.load_dataset', 'utils.load_dataset', (['sc', '"""hdfs://s01:9000/train2.tsv"""'], {}), "(sc, 'hdfs://s01:9000/train2.tsv')\n", (4447, 4481), False, 'from TransEmodule import utils\n'), ((4537, 4588), 'TransEmodule.utils.load_dataset', 'utils.load_dataset', (['sc', '"""hdfs://s01:9000/test2.tsv"""'], {}), "(sc, 'hdfs://s01:9000/test2.tsv')\n", (4555, 4588), False, 'from TransEmodule import utils\n'), ((1696, 1717), 'numpy.argsort', 'np.argsort', (['distances'], {}), '(distances)\n', (1706, 1717), True, 'import numpy as np\n'), ((1733, 1761), 'numpy.where', 'np.where', (['(indices == h_train)'], {}), '(indices == h_train)\n', (1741, 1761), True, 'import numpy as np\n'), ((2066, 2087), 'numpy.argsort', 'np.argsort', (['distances'], {}), '(distances)\n', (2076, 2087), True, 'import numpy as np\n'), ((2103, 2131), 'numpy.where', 'np.where', (['(indices == t_train)'], {}), '(indices == t_train)\n', (2111, 2131), True, 'import numpy as np\n'), ((2221, 2254), 'numpy.concatenate', 'np.concatenate', (['rank_list'], {'axis': '(0)'}), '(rank_list, axis=0)\n', (2235, 2254), True, 'import numpy as np\n'), ((3762, 3773), 'pyspark.SparkConf', 'SparkConf', ([], {}), '()\n', (3771, 3773), False, 'from pyspark import SparkContext, SparkConf\n'), ((4110, 4133), 'pyspark.SparkContext', 'SparkContext', ([], {'conf': 'conf'}), '(conf=conf)\n', (4122, 4133), False, 'from pyspark import SparkContext, SparkConf\n'), ((931, 982), 'TransEmodule.utils.get_id_by_value', 'utils.get_id_by_value', (['test_entities_to_id.value', 'h'], {}), '(test_entities_to_id.value, h)\n', (952, 982), False, 'from TransEmodule import utils\n'), ((1080, 1129), 
'TransEmodule.utils.get_id_by_value', 'utils.get_id_by_value', (['test_labels_to_id.value', 'l'], {}), '(test_labels_to_id.value, l)\n', (1101, 1129), False, 'from TransEmodule import utils\n'), ((1224, 1275), 'TransEmodule.utils.get_id_by_value', 'utils.get_id_by_value', (['test_entities_to_id.value', 't'], {}), '(test_entities_to_id.value, t)\n', (1245, 1275), False, 'from TransEmodule import utils\n'), ((1640, 1652), 'numpy.square', 'np.square', (['x'], {}), '(x)\n', (1649, 1652), True, 'import numpy as np\n'), ((1978, 2011), 'numpy.square', 'np.square', (['(corrupted_entities - x)'], {}), '(corrupted_entities - x)\n', (1987, 2011), True, 'import numpy as np\n'), ((2288, 2311), 'numpy.mean', 'np.mean', (['rank_list_baby'], {}), '(rank_list_baby)\n', (2295, 2311), True, 'import numpy as np\n'), ((2346, 2375), 'numpy.mean', 'np.mean', (['(rank_list_baby <= 10)'], {}), '(rank_list_baby <= 10)\n', (2353, 2375), True, 'import numpy as np\n')] |
"""Submit jobs to Sun Grid Engine."""
# pylint: disable=invalid-name
import os
import subprocess
from . import tracker
def submit(args):
    """Job submission script for SGE.

    Derives default job name / log dir, writes the per-task wrapper
    script, and hands the qsub-based submit function to the tracker.
    """
    if args.jobname is None:
        args.jobname = ('dmlc%d.' % args.num_workers) + args.command[0].split('/')[-1]
    if args.sge_log_dir is None:
        args.sge_log_dir = args.jobname + '.log'

    if os.path.exists(args.sge_log_dir):
        if not os.path.isdir(args.sge_log_dir):
            raise RuntimeError('specified --sge-log-dir %s is not a dir' % args.sge_log_dir)
    else:
        os.mkdir(args.sge_log_dir)

    # Wrapper run by every SGE array task; maps SGE_TASK_ID into the
    # DMLC environment before exec'ing the user command.
    runscript = '%s/rundmlc.sh' % args.logdir
    with open(runscript, 'w') as fo:  # with-block guarantees the file is closed
        fo.write('source ~/.bashrc\n')
        fo.write('export DMLC_TASK_ID=${SGE_TASK_ID}\n')
        fo.write('export DMLC_JOB_CLUSTER=sge\n')
        fo.write('\"$@\"\n')

    def sge_submit(nworker, nserver, pass_envs):
        """Internal submission function."""
        env_arg = ','.join(['%s=\"%s\"' % (k, str(v)) for k, v in list(pass_envs.items())])
        cmd = 'qsub -cwd -t 1-%d -S /bin/bash' % (nworker + nserver)
        if args.queue != 'default':
            # BUGFIX: leading space required — previously the option was
            # glued onto '/bin/bash' ("...bash-q <queue>"), breaking qsub.
            cmd += ' -q %s' % args.queue
        cmd += ' -N %s ' % args.jobname
        cmd += ' -e %s -o %s' % (args.logdir, args.logdir)
        cmd += ' -pe orte %d' % (args.vcores)
        cmd += ' -v %s,PATH=${PATH}:.' % env_arg
        cmd += ' %s %s' % (runscript, ' '.join(args.command))
        print(cmd)
        subprocess.check_call(cmd, shell=True)
        print('Waiting for the jobs to get up...')

    # call submit, with nslave, the commands to run each job and submit function
    tracker.submit(args.num_workers, args.num_servers,
                   fun_submit=sge_submit,
                   pscmd=' '.join(args.command))
| [
"os.path.exists",
"os.path.isdir",
"os.mkdir",
"subprocess.check_call"
] | [((387, 419), 'os.path.exists', 'os.path.exists', (['args.sge_log_dir'], {}), '(args.sge_log_dir)\n', (401, 419), False, 'import os\n'), ((580, 606), 'os.mkdir', 'os.mkdir', (['args.sge_log_dir'], {}), '(args.sge_log_dir)\n', (588, 606), False, 'import os\n'), ((1472, 1510), 'subprocess.check_call', 'subprocess.check_call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (1493, 1510), False, 'import subprocess\n'), ((436, 467), 'os.path.isdir', 'os.path.isdir', (['args.sge_log_dir'], {}), '(args.sge_log_dir)\n', (449, 467), False, 'import os\n')] |
# -*- coding: utf-8 -*-
"""
Created on Mon 2018.02.28:00:00:00
@author: <NAME>
Abstrac:
Examples of experiments from different labs.
"""
######################################################################################################################################################################################
from pickle import load, dump
from pandas import read_excel
from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend
rc('font', size=18)
from CBEDataAnalysis import AnaTimeData, PlotIndPost, AnaBF
from DilExp import GetKDE
######################################################################################################################################################################################
"""
EXAMPLES: Intra Lab Experiment.
"""
##################### CBE data #######################
### Three repetitions, ten plated drops in two petri dishes, default values.
# Experiment: Times:
# Each entry: [experiment/sheet name, sampling times (minutes), plot color].
CBE =\
[['RoomTemp', [ 1, 10, 15], "blue"],\
[ '65CTemp', [15, 30, 45, 60, 90], "yellow"],\
[ '70CTemp', [10, 20, 30, 40, 60], "orange"],\
[ '75CTemp', [10, 20], "red"],\
[ '80CTemp', [ 1, 2], "firebrick"]]
def ExpPlots( CBEData, experiment, time, plot_ind=False, K=None, betabinom=False):
    """Run the time-series analysis for one experiment/time and save figures.

    Parameters:
        CBEData    : spreadsheet data, one entry per experiment sheet.
        experiment : experiment/sheet name, e.g. '70CTemp'.
        time       : sampling time in minutes.
        plot_ind   : also plot and save the individual posteriors (figure 2).
        K          : repetition indices forwarded to PlotIndPost; defaults
                     to [0, 1].  (Previously a mutable default argument,
                     which is a Python anti-pattern.)
        betabinom  : use the beta-binomial (non-hierarchical) model.

    Returns the object produced by AnaTimeData (`md`).
    """
    if K is None:
        K = [0, 1]
    md = AnaTimeData( CBEData=CBEData, experiment=experiment, time=time, T=500000, fig=[0,1], betabinom=betabinom)
    figure(0)
    tight_layout()
    savefig("../Images/%s_%dmin_TS.png" % (experiment, time))
    if plot_ind:
        if betabinom:
            PlotIndPost(md, fig=[1,2], K=K, color="green") #without hierachycal model
        else:
            PlotIndPost(md, fig=[1,2], K=K)
        figure(2)
        tight_layout()
        savefig("../Images/%s_%dmin_IndPosts.jpg" % (experiment, time))
    figure(1)
    tight_layout()
    if betabinom:
        savefig("../Images/%s_%dmin_bbinom_Results.jpg" % (experiment, time))
    else:
        savefig("../Images/%s_%dmin_Results.jpg" % (experiment, time))
    return md
if __name__ == '__main__':
    # Load all five experiment sheets from the spreadsheet.
    CBEData = read_excel( './Data/CBE_BiofilmHotWaterStudies.xls',\
        ['RoomTemp', '65CTemp', '70CTemp', '75CTemp', '80CTemp'])
    ### Data taken from spreadsheet: "Biofilm Hot Water Studies.xlsx"
    ### NOTE: The RoomTemp experiment corresponds to the control of the 80C experiment.
    ### md = ExpPlots(...) is a MultiDilExp object, see DilExp
    ### md.d[0].y is the data for repetition 0 etc.
    ### The simulated values of E are available in md.TwalkE()
    ### Run all data BF's with beta-binomial
    rt_CBE =[]
    print("%16s, %2s, %16s, %16s, %16s, %16s" %\
        ( "experiment", "k" , "bbinom" , "binom" , "Prob binom", "BF"))
    for item in CBE:
        experiment= item[0]
        for time in item[1]:
            rt_CBE += AnaBF( CBEData=CBEData, experiment=experiment, time=time)
    dump( rt_CBE, open("CBE_rt_CBE.pkl", "wb")) #To be used by InterLabExamples in the BF plot
    All = {} ### Dictiionary to hold MCMC iterations of E for all experiments
    # 70C / 10 min analysis.
    experiment='70CTemp'
    time=10
    close(1)
    close(2)
    md = ExpPlots( CBEData=CBEData, experiment=experiment, time=time,\
        betabinom=False)#, plot_ind=True, K=[0,1,2])
    #md_bb = ExpPlots( CBEData=CBEData, experiment=experiment, time=time, betabinom=True)#, plot_ind=True, K=[0,1,2])
    All[experiment] = {}
    All[experiment][time.__str__() + 'min'] = md.TwalkE()
    ###### Figs 3 and 4 take some 5 min to run
    ### Plots for Fig. 3
    experiment='65CTemp'
    time=15
    close(1)
    close(2)
    md = ExpPlots( CBEData=CBEData, experiment=experiment, time=time)
    All[experiment] = {}
    All[experiment][time.__str__() + 'min'] = md.TwalkE()
    experiment='75CTemp'
    time=10
    close(1)
    close(2)
    md = ExpPlots( CBEData=CBEData, experiment=experiment, time=time)
    All[experiment] = {}
    All[experiment][time.__str__() + 'min'] = md.TwalkE()
    ### This last one is also neede for fig. 4
    experiment='RoomTemp'
    time=15
    close(1)
    close(2)
    mdControl = ExpPlots( CBEData=CBEData, experiment=experiment, time=time)
    All[experiment] = {}
    All[experiment][time.__str__() + 'min'] = mdControl.TwalkE()
    ### Plots for Fig. 4
    experiment='80CTemp'
    time=2
    close(1)
    close(2)
    md = ExpPlots( CBEData=CBEData, experiment=experiment, time=time, plot_ind=True)
    All[experiment] = {}
    All[experiment][time.__str__() + 'min'] = md.TwalkE()
    figure(1)
    xlim((-0.5,3.5))
    tight_layout()
    savefig("../Images/%s_%dmin_Results.jpg" % (experiment, time))
    figure(2)
    xlim((0,900))
    tight_layout()
    savefig("../Images/%s_%dmin_IndPosts.jpg" % (experiment, time))
    ### Log reduction wrt RoomTemp experiment
    # LR is the posterior of log10 reduction: control minus treatment.
    close(1)
    figure(1)
    LR = mdControl.TwalkE() - md.TwalkE()
    e, kde = GetKDE( LR, alpha=0.0000001)
    plot( e, kde, 'k-')
    ylabel("Density")
    xlabel(r"$log_{10}\left(\frac{CFU_0 + 1}{CFU + 1}\right)$")
    xlim(( 4, 9))
    tight_layout()
    savefig("../Images/%s_%dmin_LR.jpg" % (experiment, time))
    print("%s_%s, $P[ LR > 3 ] = %6.4f" % (experiment, time, sum(LR > 3)/len(LR)))
    ### Activation threshold figure: Takes longer, some 15 min
    ### We load the data from "CBEallE.pkl" below ########
    ### Remaining experiments:
    CBE_R=[\
    ['RoomTemp', [ 10, 15]],\
    [ '65CTemp', [ 30, 45, 60, 90]],\
    [ '70CTemp', [ 20, 30, 40, 60]],\
    [ '75CTemp', [ 20]],\
    [ '80CTemp', [ 1]]]
    for ex in CBE_R:
        experiment = ex[0]
        for time in ex[1]:
            md = ExpPlots( CBEData=CBEData, experiment=experiment, time=time)
            All[experiment][time.__str__() + 'min'] = md.TwalkE()
            #print("All[%s][%s]" % ( experiment, time.__str__() + 'min'))
    dump( All, open("CBEallE.pkl", "wb"))
    CBE =\
    [['RoomTemp', [ 1, 10, 15], "blue"],\
    [ '65CTemp', [15, 30, 45, 60, 90], "yellow"],\
    [ '70CTemp', [10, 20, 30, 40, 60], "orange"],\
    [ '75CTemp', [10, 20], "red"],\
    [ '80CTemp', [ 1, 2], "firebrick"]]
    #All = load(open("CBEallE.pkl", "rb"))
    close(2)
    figure(2)
    eh = 2 ### Threshold for E
    # For each experiment, the fraction of MCMC samples of E below the
    # threshold eh, per sampling time.
    for ex in CBE:
        experiment = ex[0]
        act_prob = []
        for time in ex[1]:
            act_prob += [sum(All[experiment][time.__str__() + 'min'] < eh)/len(All[experiment][time.__str__() + 'min'])]
        ex += [act_prob]
    for ex in CBE[1:]:
        experiment, time, color, act_prob = ex
        plot( time, act_prob, '-o', color=color, label=experiment[:2] + r" $^o$C")
    legend(fontsize=14) #(loc=( 61, 0.3))
    xlabel("min")
    ylabel("Act. Probability")
    tight_layout()
    savefig("../Images/ActivationProbability.jpg")
| [
"CBEDataAnalysis.AnaTimeData",
"pylab.rc",
"pylab.tight_layout",
"pylab.plot",
"pylab.savefig",
"pylab.xlabel",
"pylab.legend",
"CBEDataAnalysis.PlotIndPost",
"pylab.close",
"pylab.figure",
"CBEDataAnalysis.AnaBF",
"DilExp.GetKDE",
"pylab.xlim",
"pandas.read_excel",
"pylab.ylabel"
] | [((498, 517), 'pylab.rc', 'rc', (['"""font"""'], {'size': '(18)'}), "('font', size=18)\n", (500, 517), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((1326, 1435), 'CBEDataAnalysis.AnaTimeData', 'AnaTimeData', ([], {'CBEData': 'CBEData', 'experiment': 'experiment', 'time': 'time', 'T': '(500000)', 'fig': '[0, 1]', 'betabinom': 'betabinom'}), '(CBEData=CBEData, experiment=experiment, time=time, T=500000,\n fig=[0, 1], betabinom=betabinom)\n', (1337, 1435), False, 'from CBEDataAnalysis import AnaTimeData, PlotIndPost, AnaBF\n'), ((1437, 1446), 'pylab.figure', 'figure', (['(0)'], {}), '(0)\n', (1443, 1446), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((1452, 1466), 'pylab.tight_layout', 'tight_layout', ([], {}), '()\n', (1464, 1466), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((1472, 1529), 'pylab.savefig', 'savefig', (["('../Images/%s_%dmin_TS.png' % (experiment, time))"], {}), "('../Images/%s_%dmin_TS.png' % (experiment, time))\n", (1479, 1529), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((1839, 1848), 'pylab.figure', 'figure', (['(1)'], {}), '(1)\n', (1845, 1848), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((1854, 1868), 'pylab.tight_layout', 'tight_layout', ([], {}), '()\n', (1866, 1868), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((2112, 2225), 'pandas.read_excel', 'read_excel', (['"""./Data/CBE_BiofilmHotWaterStudies.xls"""', "['RoomTemp', '65CTemp', '70CTemp', '75CTemp', '80CTemp']"], {}), "('./Data/CBE_BiofilmHotWaterStudies.xls', ['RoomTemp', '65CTemp',\n '70CTemp', '75CTemp', '80CTemp'])\n", (2122, 2225), False, 'from pandas import read_excel\n'), ((3163, 3171), 
'pylab.close', 'close', (['(1)'], {}), '(1)\n', (3168, 3171), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((3177, 3185), 'pylab.close', 'close', (['(2)'], {}), '(2)\n', (3182, 3185), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((3649, 3657), 'pylab.close', 'close', (['(1)'], {}), '(1)\n', (3654, 3657), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((3663, 3671), 'pylab.close', 'close', (['(2)'], {}), '(2)\n', (3668, 3671), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((3874, 3882), 'pylab.close', 'close', (['(1)'], {}), '(1)\n', (3879, 3882), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((3888, 3896), 'pylab.close', 'close', (['(2)'], {}), '(2)\n', (3893, 3896), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4148, 4156), 'pylab.close', 'close', (['(1)'], {}), '(1)\n', (4153, 4156), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4162, 4170), 'pylab.close', 'close', (['(2)'], {}), '(2)\n', (4167, 4170), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4414, 4422), 'pylab.close', 'close', (['(1)'], {}), '(1)\n', (4419, 4422), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4428, 4436), 'pylab.close', 'close', (['(2)'], {}), '(2)\n', (4433, 4436), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4615, 4624), 'pylab.figure', 'figure', (['(1)'], {}), '(1)\n', (4621, 4624), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, 
tight_layout, legend\n'), ((4630, 4647), 'pylab.xlim', 'xlim', (['(-0.5, 3.5)'], {}), '((-0.5, 3.5))\n', (4634, 4647), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4652, 4666), 'pylab.tight_layout', 'tight_layout', ([], {}), '()\n', (4664, 4666), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4672, 4734), 'pylab.savefig', 'savefig', (["('../Images/%s_%dmin_Results.jpg' % (experiment, time))"], {}), "('../Images/%s_%dmin_Results.jpg' % (experiment, time))\n", (4679, 4734), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4740, 4749), 'pylab.figure', 'figure', (['(2)'], {}), '(2)\n', (4746, 4749), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4755, 4769), 'pylab.xlim', 'xlim', (['(0, 900)'], {}), '((0, 900))\n', (4759, 4769), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4774, 4788), 'pylab.tight_layout', 'tight_layout', ([], {}), '()\n', (4786, 4788), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4794, 4857), 'pylab.savefig', 'savefig', (["('../Images/%s_%dmin_IndPosts.jpg' % (experiment, time))"], {}), "('../Images/%s_%dmin_IndPosts.jpg' % (experiment, time))\n", (4801, 4857), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4910, 4918), 'pylab.close', 'close', (['(1)'], {}), '(1)\n', (4915, 4918), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4924, 4933), 'pylab.figure', 'figure', (['(1)'], {}), '(1)\n', (4930, 4933), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((4991, 5014), 'DilExp.GetKDE', 'GetKDE', 
(['LR'], {'alpha': '(1e-07)'}), '(LR, alpha=1e-07)\n', (4997, 5014), False, 'from DilExp import GetKDE\n'), ((5025, 5043), 'pylab.plot', 'plot', (['e', 'kde', '"""k-"""'], {}), "(e, kde, 'k-')\n", (5029, 5043), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((5050, 5067), 'pylab.ylabel', 'ylabel', (['"""Density"""'], {}), "('Density')\n", (5056, 5067), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((5073, 5134), 'pylab.xlabel', 'xlabel', (['"""$log_{10}\\\\left(\\\\frac{CFU_0 + 1}{CFU + 1}\\\\right)$"""'], {}), "('$log_{10}\\\\left(\\\\frac{CFU_0 + 1}{CFU + 1}\\\\right)$')\n", (5079, 5134), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((5138, 5150), 'pylab.xlim', 'xlim', (['(4, 9)'], {}), '((4, 9))\n', (5142, 5150), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((5157, 5171), 'pylab.tight_layout', 'tight_layout', ([], {}), '()\n', (5169, 5171), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((5177, 5234), 'pylab.savefig', 'savefig', (["('../Images/%s_%dmin_LR.jpg' % (experiment, time))"], {}), "('../Images/%s_%dmin_LR.jpg' % (experiment, time))\n", (5184, 5234), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((6317, 6325), 'pylab.close', 'close', (['(2)'], {}), '(2)\n', (6322, 6325), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((6331, 6340), 'pylab.figure', 'figure', (['(2)'], {}), '(2)\n', (6337, 6340), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((6781, 6800), 'pylab.legend', 'legend', ([], {'fontsize': '(14)'}), '(fontsize=14)\n', (6787, 6800), False, 'from pylab import 
plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((6824, 6837), 'pylab.xlabel', 'xlabel', (['"""min"""'], {}), "('min')\n", (6830, 6837), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((6843, 6869), 'pylab.ylabel', 'ylabel', (['"""Act. Probability"""'], {}), "('Act. Probability')\n", (6849, 6869), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((6875, 6889), 'pylab.tight_layout', 'tight_layout', ([], {}), '()\n', (6887, 6889), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((6895, 6941), 'pylab.savefig', 'savefig', (['"""../Images/ActivationProbability.jpg"""'], {}), "('../Images/ActivationProbability.jpg')\n", (6902, 6941), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((1727, 1736), 'pylab.figure', 'figure', (['(2)'], {}), '(2)\n', (1733, 1736), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((1746, 1760), 'pylab.tight_layout', 'tight_layout', ([], {}), '()\n', (1758, 1760), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((1770, 1833), 'pylab.savefig', 'savefig', (["('../Images/%s_%dmin_IndPosts.jpg' % (experiment, time))"], {}), "('../Images/%s_%dmin_IndPosts.jpg' % (experiment, time))\n", (1777, 1833), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((1897, 1966), 'pylab.savefig', 'savefig', (["('../Images/%s_%dmin_bbinom_Results.jpg' % (experiment, time))"], {}), "('../Images/%s_%dmin_bbinom_Results.jpg' % (experiment, time))\n", (1904, 1966), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((1987, 2049), 'pylab.savefig', 'savefig', 
(["('../Images/%s_%dmin_Results.jpg' % (experiment, time))"], {}), "('../Images/%s_%dmin_Results.jpg' % (experiment, time))\n", (1994, 2049), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((6701, 6773), 'pylab.plot', 'plot', (['time', 'act_prob', '"""-o"""'], {'color': 'color', 'label': "(experiment[:2] + ' $^o$C')"}), "(time, act_prob, '-o', color=color, label=experiment[:2] + ' $^o$C')\n", (6705, 6773), False, 'from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend\n'), ((1584, 1631), 'CBEDataAnalysis.PlotIndPost', 'PlotIndPost', (['md'], {'fig': '[1, 2]', 'K': 'K', 'color': '"""green"""'}), "(md, fig=[1, 2], K=K, color='green')\n", (1595, 1631), False, 'from CBEDataAnalysis import AnaTimeData, PlotIndPost, AnaBF\n'), ((1686, 1718), 'CBEDataAnalysis.PlotIndPost', 'PlotIndPost', (['md'], {'fig': '[1, 2]', 'K': 'K'}), '(md, fig=[1, 2], K=K)\n', (1697, 1718), False, 'from CBEDataAnalysis import AnaTimeData, PlotIndPost, AnaBF\n'), ((2882, 2938), 'CBEDataAnalysis.AnaBF', 'AnaBF', ([], {'CBEData': 'CBEData', 'experiment': 'experiment', 'time': 'time'}), '(CBEData=CBEData, experiment=experiment, time=time)\n', (2887, 2938), False, 'from CBEDataAnalysis import AnaTimeData, PlotIndPost, AnaBF\n')] |
import time
import json
from wptserve.utils import isomorphic_decode, isomorphic_encode
def main(request, response):
    """Serve a max-aged service-worker script that embeds its generation time."""
    modified = time.strftime(u"%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
    headers = [(b'Content-Type', b'application/javascript'),
               (b'Cache-Control', b'max-age=86400'),
               (b'Last-Modified', isomorphic_encode(modified))]

    # The requested test name is echoed back to the page as a JSON string.
    test_name = json.dumps(isomorphic_decode(request.GET[b'test']))

    template = u'''
const mainTime = {time:8f};
const testName = {test};
importScripts('update-max-aged-worker-imported-script.py');
addEventListener('message', event => {{
    event.source.postMessage({{
        mainTime,
        importTime,
        test: {test}
    }});
}});
'''
    body = template.format(time=time.time(), test=test_name)

    return headers, body
| [
"wptserve.utils.isomorphic_decode",
"time.time",
"time.gmtime"
] | [((765, 776), 'time.time', 'time.time', ([], {}), '()\n', (774, 776), False, 'import time\n'), ((802, 825), 'wptserve.utils.isomorphic_decode', 'isomorphic_decode', (['test'], {}), '(test)\n', (819, 825), False, 'from wptserve.utils import isomorphic_decode, isomorphic_encode\n'), ((329, 342), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (340, 342), False, 'import time\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) Copyright 2015 Nuxeo SA (http://nuxeo.com/) and contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the GNU Lesser General Public License
# (LGPL) version 2.1 which accompanies this distribution, and is available at
# http://www.gnu.org/licenses/lgpl-2.1.html
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# Contributors:
# <NAME>
#
import os
import sys
import logging
import argparse
from abc import ABCMeta, abstractmethod
from redis import RedisWriter
from nuxeo import NuxeoWriter
from utils import download
__version__ = "0.1.0"
# Script filename; used as the logger name in Injector.run().
module = sys.modules['__main__'].__file__
# Description text shown in the argparse --help output (see parse_command_line).
DESC = """By default read input from stdin and output Redis pipe command on stdout.
To download the data file see the -d option.
"""
class Injector(object):
    """Abstract base class for injectors.

    Handles command-line parsing and optional data download, then drives the
    subclass :meth:`parse` with a Nuxeo writer that emits Redis commands.
    """
    # Python 2 style ABC declaration (has no effect on Python 3).
    __metaclass__ = ABCMeta

    @abstractmethod
    def parse(self, input, writer):
        """Parse an input file, use Nuxeo writer to output document in redis format."""
        pass

    @abstractmethod
    def downloadInfo(self):
        """Return a 2-tuple describing the data file to download.

        NOTE(review): the original docstring said ``(download_url,
        archive_name)``, but both call sites treat the value as
        ``(archive_name, download_url)``: get_input unpacks
        ``archive_name, url = self.downloadInfo()`` and parse_command_line
        reads index 1 as the URL default.  Implementations must return
        ``(archive_name, download_url)``.
        """
        pass

    def run(self):
        """Run the injector; return 0 on success, -1 when interrupted."""
        self.log = logging.getLogger(module)
        logging.basicConfig(stream=sys.stderr, level=logging.DEBUG,
                            format='%(name)s (%(levelname)s): %(message)s')
        try:
            args = self.parse_command_line()
            self.set_log_level()
            output = args.output
            # Documents flow through NuxeoWriter into RedisWriter, which
            # writes Redis commands (pipe protocol unless --no-pipe).
            writer = NuxeoWriter(RedisWriter(out=output,
                                              prefix=args.redis_ns,
                                              usePipeProtocol=not args.no_pipe))
            self.parse(self.get_input(), writer)
            output.flush()
            output.close()
            return 0
        except KeyboardInterrupt:
            self.log.error('Program interrupted!')
            return -1
        finally:
            logging.shutdown()

    def parse_command_line(self):
        """Build the argparse parser, parse sys.argv and cache the result."""
        argv = sys.argv
        formatter_class = argparse.ArgumentDefaultsHelpFormatter
        parser = argparse.ArgumentParser(description=DESC,
                                         formatter_class=formatter_class)
        parser.add_argument('--version', action='version',
                            version='%(prog)s {}'.format(__version__))
        parser.add_argument('-v', '--verbose', dest='verbose_count',
                            action='count', default=0,
                            help='Increases log verbosity for each occurence.')
        parser.add_argument('-o', '--output', metavar='output',
                            type=argparse.FileType('w'), default=sys.stdout,
                            help='Redirect output to a file')
        parser.add_argument('-i', '--input', metavar='input',
                            type=argparse.FileType('r'),
                            default=sys.stdin,
                            help='Input file')
        parser.add_argument('--no-pipe', action='store_true',
                            help='Output Redis command in clear not using pipe mode protocol.')
        parser.add_argument('-d', '--download', action='store_true', dest='download',
                            help='Download input if not already done.')
        parser.add_argument('-u', '--download-url', dest='url', default=self.downloadInfo()[1],
                            help='URL used to download the data file.')
        parser.add_argument('-O', '--data-directory', dest='data_dir', default=os.path.join('~', 'data'),
                            help='Data directory to store downloaded file.')
        parser.add_argument('-p', '--redis-namespace', dest='redis_ns', default="imp",
                            help='Redis key prefix.')
        arguments = parser.parse_args(argv[1:])
        self.arguments = arguments
        return arguments

    def set_log_level(self):
        # Sets log level to WARN going more verbose for each new -v.
        self.log.setLevel(max(3 - self.arguments.verbose_count, 0) * 10)

    def get_input(self):
        """Return the input file object, downloading the data first if -d was given."""
        args = self.arguments
        if args.download:
            self.log.info("downloading")
            archive_name, url = self.downloadInfo()
            return open(download(args.data_dir, archive_name, url), 'r')
        return args.input
| [
"logging.getLogger",
"logging.basicConfig",
"argparse.FileType",
"argparse.ArgumentParser",
"os.path.join",
"redis.RedisWriter",
"logging.shutdown",
"utils.download"
] | [((1542, 1567), 'logging.getLogger', 'logging.getLogger', (['module'], {}), '(module)\n', (1559, 1567), False, 'import logging\n'), ((1576, 1688), 'logging.basicConfig', 'logging.basicConfig', ([], {'stream': 'sys.stderr', 'level': 'logging.DEBUG', 'format': '"""%(name)s (%(levelname)s): %(message)s"""'}), "(stream=sys.stderr, level=logging.DEBUG, format=\n '%(name)s (%(levelname)s): %(message)s')\n", (1595, 1688), False, 'import logging\n'), ((2460, 2534), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'DESC', 'formatter_class': 'formatter_class'}), '(description=DESC, formatter_class=formatter_class)\n', (2483, 2534), False, 'import argparse\n'), ((2300, 2318), 'logging.shutdown', 'logging.shutdown', ([], {}), '()\n', (2316, 2318), False, 'import logging\n'), ((1869, 1948), 'redis.RedisWriter', 'RedisWriter', ([], {'out': 'output', 'prefix': 'args.redis_ns', 'usePipeProtocol': '(not args.no_pipe)'}), '(out=output, prefix=args.redis_ns, usePipeProtocol=not args.no_pipe)\n', (1880, 1948), False, 'from redis import RedisWriter\n'), ((3007, 3029), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {}), "('w')\n", (3024, 3029), False, 'import argparse\n'), ((3208, 3230), 'argparse.FileType', 'argparse.FileType', (['"""r"""'], {}), "('r')\n", (3225, 3230), False, 'import argparse\n'), ((3889, 3914), 'os.path.join', 'os.path.join', (['"""~"""', '"""data"""'], {}), "('~', 'data')\n", (3901, 3914), False, 'import os\n'), ((4613, 4655), 'utils.download', 'download', (['args.data_dir', 'archive_name', 'url'], {}), '(args.data_dir, archive_name, url)\n', (4621, 4655), False, 'from utils import download\n')] |
from random import randint
class Column(object):
    """Columnar transposition cipher with random lowercase padding.

    The payload (spaces removed) is written row by row into a matrix of
    ``columns`` columns and read out column by column; columns in the
    ciphertext are separated by single spaces.
    """

    def __init__(self, columns=5):
        super(Column, self).__init__()
        self.columns = columns

    def encrypt(self, payload):
        """Return the space-separated column transposition of *payload*.

        Spaces in *payload* are stripped first.  A partial final row is
        padded with random lowercase letters, so the decrypted text may
        carry a short random suffix.
        """
        payload = payload.replace(' ', '')
        matrix = []
        # Fill the matrix row by row.  (The original used Python 2's
        # ``xrange`` below and also appended a whole extra row of random
        # characters whenever the payload length was an exact multiple of
        # the column count.)
        for start in range(0, len(payload), self.columns):
            row = list(payload[start:start + self.columns])
            while len(row) < self.columns:
                # randint is inclusive at both ends: 97..122 is 'a'..'z'.
                # (The original upper bound 26+97 == 123 could emit '{'.)
                row.append(chr(randint(97, 122)))
            matrix.append(row)
        # Read out column by column; terminate each column with a space.
        cypher = ''
        for j in range(self.columns):
            for row in matrix:
                cypher = cypher + row[j]
            cypher = cypher + ' '
        return cypher

    def decrypt(self, payload):
        """Invert :meth:`encrypt`.

        Rebuilds the matrix from the space-separated columns and reads it
        back row by row.  Random padding (if any) is not removed.
        """
        columns = payload.split(' ')
        matrix = [list(column) for column in columns if column]
        if not matrix:
            # Empty ciphertext (e.g. encrypt('')) decrypts to ''.
            return ''
        cipher_columns = len(matrix)
        cipher_rows = len(matrix[0])
        decrypt = ''
        for j in range(cipher_rows):
            for i in range(cipher_columns):
                decrypt = decrypt + matrix[i][j]
        return decrypt
def main():
    """Interactive demo: read a payload and a column count, then print the
    encrypted and decrypted text."""
    # ``raw_input`` exists only on Python 2 and raises NameError on Python 3;
    # ``input`` matches the Python 3 compatibility of the rest of the module.
    payload = input("Enter Payload : ")
    c = input("Enter Columns : ")
    encrypt = Column(int(c)).encrypt(payload)
    print(encrypt)
    decrypt = Column(int(c)).decrypt(encrypt)
    print(decrypt)
# Run the interactive demo only when executed as a script (not on import).
if __name__ == '__main__':
    main()
"random.randint"
] | [((702, 722), 'random.randint', 'randint', (['(97)', '(26 + 97)'], {}), '(97, 26 + 97)\n', (709, 722), False, 'from random import randint\n')] |
""""""
# Standard library modules.
import csv
import io
import functools
# Third party modules.
from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets
import xlsxwriter
# Local modules.
from pymontecarlo_gui.settings import SettingsBasedField
from pymontecarlo_gui.widgets.dialog import ExecutionProgressDialog
from pymontecarlo.formats.document import publish_html, DocumentBuilder
# Globals and constants variables.
class ResultWidgetBase(QtWidgets.QWidget):
    """Base widget displaying a single simulation result.

    Stores the result object and the application settings for subclasses
    (subclasses read e.g. ``settings().savedir`` and connect to the
    settings' ``settings_changed`` signal).
    """

    def __init__(self, result, settings, parent=None):
        super().__init__(parent)

        # Variables
        self._result = result
        self._settings = settings

    def result(self):
        """Return the result object shown by this widget."""
        return self._result

    def settings(self):
        """Return the application settings."""
        return self._settings
class ResultTableWidgetBase(ResultWidgetBase):
    """Widget showing a result as a sortable table plus an HTML analysis tab.

    Provides copy-to-clipboard and save-to-file (CSV / XLSX) actions.
    Subclasses must implement :meth:`_create_model`.
    """

    def __init__(self, result, settings, parent=None):
        super().__init__(result, settings, parent)

        # Actions
        self.action_copy = QtWidgets.QAction("Copy to clipboard")
        self.action_copy.setIcon(QtGui.QIcon.fromTheme("edit-copy"))
        self.action_copy.setShortcut(QtGui.QKeySequence.Copy)
        self.action_copy.triggered.connect(self._on_copy)

        self.action_save = QtWidgets.QAction("Save")
        self.action_save.setIcon(QtGui.QIcon.fromTheme("document-save"))
        self.action_save.triggered.connect(self._on_save)

        # Widgets
        self.table_view = QtWidgets.QTableView()
        self.table_view.setModel(self._create_model(result, settings))
        self.table_view.horizontalHeader().setSectionResizeMode(
            QtWidgets.QHeaderView.Stretch
        )
        self.table_view.setSortingEnabled(True)

        self.web_widget = QtWebEngineWidgets.QWebEngineView()
        self.web_widget.setHtml(self._render_html(result, settings))

        self.toolbar = QtWidgets.QToolBar()
        self.toolbar.addAction(self.action_copy)
        self.toolbar.addAction(self.action_save)

        # Layouts
        widget = QtWidgets.QTabWidget()
        widget.addTab(self.table_view, "Results")
        widget.addTab(self.web_widget, "Analysis")

        layout = QtWidgets.QVBoxLayout()
        layout.addWidget(widget)
        layout.addWidget(self.toolbar)
        self.setLayout(layout)

        # Signals
        settings.settings_changed.connect(self._on_settings_changed)

    def _create_model(self, result, settings):
        """Return the Qt item model for the table view (abstract)."""
        raise NotImplementedError

    def _render_html(self, result, settings):
        """Render the analysis description of *result* as an HTML string."""
        builder = DocumentBuilder(settings)
        result.analysis.convert_document(builder)
        return publish_html(builder).decode("utf8")

    def _on_settings_changed(self):
        # Force the view to re-query the model when settings change.
        model = self.table_view.model()
        model.modelReset.emit()

    def _get_data(self):
        """Return the table contents as a list of rows (header row first)."""
        model = self.table_view.model()
        rows = []

        # Header
        header = []
        for icol in range(model.columnCount()):
            header.append(
                model.headerData(icol, QtCore.Qt.Horizontal, QtCore.Qt.UserRole)
            )
        rows.append(header)

        # Data
        for irow in range(model.rowCount()):
            row = []
            for icol in range(model.columnCount()):
                index = model.createIndex(irow, icol)
                row.append(model.data(index, QtCore.Qt.UserRole))
            rows.append(row)

        return rows

    def _on_copy(self):
        """Copy the table as tab-separated text to the clipboard."""
        rows = self._get_data()

        buffer = io.StringIO()
        writer = csv.writer(buffer, lineterminator="\n", delimiter="\t")
        writer.writerows(rows)

        mime_data = QtCore.QMimeData()
        mime_data.setText(buffer.getvalue())
        QtGui.QGuiApplication.instance().clipboard().setMimeData(mime_data)

    def _save_csv(self, filepath):
        """Write the table to *filepath* as CSV."""
        data = self._get_data()
        with open(filepath, "w", encoding="utf8") as fp:
            writer = csv.writer(fp, lineterminator="\n")
            writer.writerows(data)

    def _save_xlsx(self, filepath):
        """Write the table to *filepath* as an Excel workbook."""
        data = self._get_data()

        workbook = xlsxwriter.Workbook(filepath)

        try:
            format_header = workbook.add_format({"bold": True})

            worksheet = workbook.add_worksheet(self.result().getname())
            worksheet.write_row(0, 0, data[0], format_header)

            for irow, row in enumerate(data[1:], 1):
                for icol, value in enumerate(row):
                    worksheet.write(irow, icol, value)
        finally:
            workbook.close()

    def _on_save(self):
        """Ask for a destination file and save the table as CSV or XLSX.

        Returns False when the dialog is cancelled.
        """
        caption = "Save result"
        dirpath = self.settings().savedir
        namefilters = "Excel spreadsheet (*.xlsx);;CSV text file (*.csv)"
        filepath, namefilter = QtWidgets.QFileDialog.getSaveFileName(
            self, caption, dirpath, namefilters
        )

        if not namefilter:
            return False

        if not filepath:
            return False

        if namefilter == "CSV text file (*.csv)":
            ext = ".csv"
            save_method = self._save_csv
        elif namefilter == "Excel spreadsheet (*.xlsx)":
            # BUGFIX: was "xlsx" (missing dot), producing names like
            # "resultxlsx".
            ext = ".xlsx"
            save_method = self._save_xlsx

        # BUGFIX: the extension must be appended *before* binding the path
        # into the partial; previously functools.partial captured the
        # extension-less path, so the appended extension was never used.
        if not filepath.endswith(ext):
            filepath += ext
        function = functools.partial(save_method, filepath)

        dialog = ExecutionProgressDialog(
            "Save result", "Saving result...", "Result saved", function
        )
        dialog.exec_()
class ResultSummaryWidgetBase(QtWidgets.QWidget):
    """Base class for widgets summarizing the results of a whole project."""

    def setProject(self, project):
        """Set the project whose results should be summarized.

        Abstract: subclasses must override.
        """
        raise NotImplementedError
class ResultFieldBase(SettingsBasedField):
    """Field wrapping a single result for display in the GUI."""

    def __init__(self, result, settings):
        # _result is assigned before the base-class constructor runs.
        # NOTE(review): presumably SettingsBasedField.__init__ triggers code
        # that calls result()/title() — confirm before reordering.
        self._result = result
        super().__init__(settings)

    def title(self):
        """Return the field title (the result's name)."""
        return self.result().getname()

    def icon(self):
        """Return the icon shown next to the field."""
        return QtGui.QIcon.fromTheme("format-justify-fill")

    def result(self):
        """Return the wrapped result object."""
        return self._result
| [
"qtpy.QtCore.QMimeData",
"pymontecarlo.formats.document.DocumentBuilder",
"qtpy.QtWidgets.QTabWidget",
"qtpy.QtGui.QIcon.fromTheme",
"qtpy.QtWidgets.QToolBar",
"qtpy.QtWidgets.QVBoxLayout",
"csv.writer",
"qtpy.QtWebEngineWidgets.QWebEngineView",
"qtpy.QtWidgets.QTableView",
"qtpy.QtWidgets.QAction... | [((955, 993), 'qtpy.QtWidgets.QAction', 'QtWidgets.QAction', (['"""Copy to clipboard"""'], {}), "('Copy to clipboard')\n", (972, 993), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n'), ((1211, 1236), 'qtpy.QtWidgets.QAction', 'QtWidgets.QAction', (['"""Save"""'], {}), "('Save')\n", (1228, 1236), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n'), ((1413, 1435), 'qtpy.QtWidgets.QTableView', 'QtWidgets.QTableView', ([], {}), '()\n', (1433, 1435), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n'), ((1699, 1734), 'qtpy.QtWebEngineWidgets.QWebEngineView', 'QtWebEngineWidgets.QWebEngineView', ([], {}), '()\n', (1732, 1734), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n'), ((1828, 1848), 'qtpy.QtWidgets.QToolBar', 'QtWidgets.QToolBar', ([], {}), '()\n', (1846, 1848), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n'), ((1983, 2005), 'qtpy.QtWidgets.QTabWidget', 'QtWidgets.QTabWidget', ([], {}), '()\n', (2003, 2005), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n'), ((2125, 2148), 'qtpy.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (2146, 2148), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n'), ((2487, 2512), 'pymontecarlo.formats.document.DocumentBuilder', 'DocumentBuilder', (['settings'], {}), '(settings)\n', (2502, 2512), False, 'from pymontecarlo.formats.document import publish_html, DocumentBuilder\n'), ((3428, 3441), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (3439, 3441), False, 'import io\n'), ((3459, 3514), 'csv.writer', 'csv.writer', (['buffer'], {'lineterminator': '"""\n"""', 'delimiter': '"""\t"""'}), "(buffer, lineterminator='\\n', delimiter='\\t')\n", (3469, 3514), False, 'import csv\n'), ((3562, 3580), 'qtpy.QtCore.QMimeData', 'QtCore.QMimeData', ([], {}), '()\n', (3578, 3580), False, 'from qtpy import QtCore, QtGui, QtWidgets, 
QtWebEngineWidgets\n'), ((3999, 4028), 'xlsxwriter.Workbook', 'xlsxwriter.Workbook', (['filepath'], {}), '(filepath)\n', (4018, 4028), False, 'import xlsxwriter\n'), ((4653, 4727), 'qtpy.QtWidgets.QFileDialog.getSaveFileName', 'QtWidgets.QFileDialog.getSaveFileName', (['self', 'caption', 'dirpath', 'namefilters'], {}), '(self, caption, dirpath, namefilters)\n', (4690, 4727), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n'), ((5233, 5321), 'pymontecarlo_gui.widgets.dialog.ExecutionProgressDialog', 'ExecutionProgressDialog', (['"""Save result"""', '"""Saving result..."""', '"""Result saved"""', 'function'], {}), "('Save result', 'Saving result...', 'Result saved',\n function)\n", (5256, 5321), False, 'from pymontecarlo_gui.widgets.dialog import ExecutionProgressDialog\n'), ((5733, 5777), 'qtpy.QtGui.QIcon.fromTheme', 'QtGui.QIcon.fromTheme', (['"""format-justify-fill"""'], {}), "('format-justify-fill')\n", (5754, 5777), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n'), ((1027, 1061), 'qtpy.QtGui.QIcon.fromTheme', 'QtGui.QIcon.fromTheme', (['"""edit-copy"""'], {}), "('edit-copy')\n", (1048, 1061), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n'), ((1270, 1308), 'qtpy.QtGui.QIcon.fromTheme', 'QtGui.QIcon.fromTheme', (['"""document-save"""'], {}), "('document-save')\n", (1291, 1308), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n'), ((3840, 3875), 'csv.writer', 'csv.writer', (['fp'], {'lineterminator': '"""\n"""'}), "(fp, lineterminator='\\n')\n", (3850, 3875), False, 'import csv\n'), ((4953, 4996), 'functools.partial', 'functools.partial', (['self._save_csv', 'filepath'], {}), '(self._save_csv, filepath)\n', (4970, 4996), False, 'import functools\n'), ((2578, 2599), 'pymontecarlo.formats.document.publish_html', 'publish_html', (['builder'], {}), '(builder)\n', (2590, 2599), False, 'from pymontecarlo.formats.document import publish_html, DocumentBuilder\n'), ((5102, 
5146), 'functools.partial', 'functools.partial', (['self._save_xlsx', 'filepath'], {}), '(self._save_xlsx, filepath)\n', (5119, 5146), False, 'import functools\n'), ((3630, 3662), 'qtpy.QtGui.QGuiApplication.instance', 'QtGui.QGuiApplication.instance', ([], {}), '()\n', (3660, 3662), False, 'from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets\n')] |
# -*- coding: UTF-8 -*-
import os
import sys
import subprocess
import argparse
import re
import configparser
import logging
import copy
import psutil
if sys.version_info[0] < 3:
import struct
import tensorflow as tf
from tensorflow.core.framework import graph_pb2
# With level=logging.INFO, logging.debug() output is suppressed; switch to
# level=logging.DEBUG to see both debug and info messages.
logging.basicConfig(level=logging.INFO)
# Global configuration mainly got from test.cfg
TRANSFORMER_PATH = ''  # Root holding checkpoints/ and saved_model/ (see download_ckpt).
TF_SRC_PATH = ''  # TensorFlow source tree, used for bazel graph_transforms tools.
TF_SLIM_PATH= ''  # Directory containing export_inference_graph.py (tf slim).
NUM_THREADS = 1  # Presumably read from test.cfg; not used in this chunk.
EPSILON = 0.0  # Presumably a comparison tolerance from test.cfg; not used here.
LOOPS = 1  # Presumably a repeat count from test.cfg; not used here.
testcases = []  # Presumably filled with TestCase instances elsewhere — not shown here.
class TestCase(object):
    """Description of one model to download, freeze and transform."""

    def __init__(self):
        # Identity of the model under test.
        self.model_name = ''
        self.model_type = ''
        self.url = ''
        self.output_node = ''
        # True when the checkpoint needs graph surgery before freezing.
        self.fix_graph = False
        # Checkpoint file path (.ckpt), filled in once downloaded.
        self.ckpt_file = None
        # Inference graph path, produced by export_inference_graph.py.
        self.graph_file = None
        # Frozen .pb path, produced by freeze_graph.
        self.frozen_file = None
        # Directory where the transformed model is written.
        self.save_model_dir = None

    def __repr__(self):
        fields = (self.model_name, self.model_type, self.url,
                  self.output_node, self.fix_graph)
        return '[%s]\ntype=%s\nurl=%s\noutput_node=%s\nfix_graph=%r' % fields
def exec_cmd(cmd, title, check_output=True):
    """Log *title*, run shell command *cmd*, and return its output.

    With check_output=True the command runs via subprocess.check_output
    (stderr folded into stdout, raises CalledProcessError on failure) and
    bytes are returned; otherwise the output is read as text from an
    os.popen pipe (stderr not captured, failures not raised).
    """
    logging.info(title)
    logging.debug(cmd)
    # NOTE: cmd is passed to a shell (shell=True) — only feed it trusted,
    # internally-built command strings.
    if not check_output:
        return os.popen(cmd).read()
    return subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
def get_extract_command(filename, target_path):
    """Return the tar command line extracting *filename* into *target_path*.

    The decompression flag is picked from the archive suffix
    (.tar.gz -> z, .tar.bz2 -> j, anything else -> plain tar).
    """
    if filename.endswith('.tar.gz'):
        flags = '-xzf'
    elif filename.endswith('.tar.bz2'):
        flags = '-xjf'
    else:
        flags = '-xf'
    return 'tar %s %s -C %s' % (flags, filename, target_path)
def decode_string(str):
    """Decode a UTF-8 encoded byte string into text.

    NOTE(review): the parameter shadows the builtin ``str`` inside this
    function; renaming it would be cleaner but could break keyword callers.
    """
    decoded = str.decode('UTF-8')
    return decoded
# Find a ckpt file in a directory, and return the path of ckpt file
def find_ckpt(ckpt_dir):
    """Return the path of the first *.ckpt file found in *ckpt_dir*, or None."""
    for entry in os.listdir(ckpt_dir):
        if entry.endswith('.ckpt'):
            return os.path.join(ckpt_dir, entry)
    return None
def download_ckpt(tc):
    """Download and unpack the checkpoint archive for test case *tc*.

    Fills in tc.graph_file, tc.frozen_file, tc.save_model_dir and
    tc.ckpt_file.  The download is skipped when a .ckpt file already exists
    under ``$TRANSFORMER_PATH/checkpoints/<model_name>``.
    """
    ckpt_dir = '%s/checkpoints/%s' % (TRANSFORMER_PATH, tc.model_name)
    tc.graph_file = '%s/%s_inf_graph.pb' % (ckpt_dir, tc.model_name)
    tc.frozen_file = '%s/frozen_%s.pb' % (ckpt_dir, tc.model_name)
    tc.save_model_dir = '%s/saved_model/%s' % (TRANSFORMER_PATH, tc.model_name)
    # Look for existing ckpt file
    if os.path.exists(ckpt_dir):
        tc.ckpt_file = find_ckpt(ckpt_dir)
    else:
        os.makedirs(ckpt_dir)
    # Already exist
    if tc.ckpt_file:
        logging.debug("ckpt file (%s) already exist!" % tc.ckpt_file)
        return
    # Fetch the archive named by the URL, unpack it into ckpt_dir,
    # then delete the archive itself.
    tar_name = tc.url.split('/')[-1]
    download_cmd = 'wget -c %s' % tc.url
    extract_cmd = get_extract_command(tar_name, ckpt_dir)
    rm_cmd = 'rm -f %s' % tar_name
    exec_cmd(download_cmd, "download ckpt file ...")
    exec_cmd(extract_cmd, "untar ckpt file ...")
    exec_cmd(rm_cmd, "delete ckpt archieve ...")
    logging.debug("ckpt file has been download!")
    tc.ckpt_file = find_ckpt(ckpt_dir)
def export_inference_graph(tc):
    """Export the slim inference graph for *tc* unless tc.graph_file exists."""
    if os.path.isfile(tc.graph_file):
        logging.debug("graph file of %s already exist!" % tc.model_name)
    else:
        # Runs export_inference_graph.py from the slim models checkout.
        export_cmd = 'cd %s && python export_inference_graph.py --alsologtostderr --model_name=%s --output_file=%s' % (TF_SLIM_PATH, tc.model_name, tc.graph_file)
        exec_cmd(export_cmd, "export graph.pb")
    logging.debug("%s has been exported!" % tc.graph_file)
def summarize_graph(tc):
    """Bazel-build and run TensorFlow's summarize_graph tool on tc.graph_file."""
    bazel_build = 'cd %s && bazel build tensorflow/tools/graph_transforms:summarize_graph' % TF_SRC_PATH
    # NOTE: local variable shadows this function's name (harmless here).
    summarize_graph = 'cd %s && bazel-bin/tensorflow/tools/graph_transforms/summarize_graph --in_graph=%s' % (TF_SRC_PATH, tc.graph_file)
    exec_cmd(bazel_build, "bazel build ...")
    exec_cmd(summarize_graph, "summarize graph ...")
    logging.debug("summarize graph has been done!")
def load_graph(filename):
    """Read a serialized GraphDef protobuf from *filename* (TF1 API)."""
    with tf.gfile.FastGFile(filename, 'rb') as fh:
        serialized = fh.read()
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(serialized)
    return graph_def
def change_tensor_shape(tensor_shape):
    """Mutate a TensorShapeProto-like object in place.

    Rewrites a trailing class dimension of 1001 to 1000, either as the last
    axis of a rank-4 shape or as the only axis of a rank-1 shape; every
    other shape is left untouched.
    """
    rank = len(tensor_shape.dim)
    if rank == 4 and tensor_shape.dim[3].size == 1001:
        tensor_shape.dim[3].size = 1000
    if rank == 1 and tensor_shape.dim[0].size == 1001:
        tensor_shape.dim[0].size = 1000
def int_to_bytes(val):
    """Encode *val* as 4 little-endian bytes (portable across py2/py3)."""
    if sys.version_info[0] < 3:
        # Python 2: int.to_bytes does not exist.
        return struct.pack("<L", val)
    return val.to_bytes(4, 'little')
def int_from_bytes(barray):
    """Decode a little-endian byte sequence into an int (py2/py3 portable)."""
    if sys.version_info[0] < 3:
        # Python 2: int.from_bytes does not exist.
        return struct.unpack("<L", barray)[0]
    return int.from_bytes(barray, byteorder='little')
# Designed to fix the error like:
# Assign requires shapes of both tensors to match. lhs shape= [1,1,4096,1001] rhs shape= [1,1,4096,1000]
def fix_graph_1001_to_1000(tc):
    """Rewrite 1001-class tensors in tc.graph_file to 1000 classes, in place.

    Fixes checkpoint-restore errors like:
    "Assign requires shapes of both tensors to match.
     lhs shape= [1,1,4096,1001] rhs shape= [1,1,4096,1000]"
    by patching every Const node and shape attribute that encodes a 1001-sized
    dimension, then overwriting the original .pb file.
    """
    pb_file = tc.graph_file
    graph_def = load_graph(pb_file)
    new_graph_def = graph_pb2.GraphDef()
    for node in graph_def.node:
        # Check the value of const node
        if node.op == 'Const':
            tensor = node.attr.get('value').tensor
            # DataType value got from https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/core/framework/types.proto
            if tensor and tensor.dtype >= 3 and tensor.dtype <= 6: # data type is int
                tensor_shape = tensor.tensor_shape
                # Case 1: a 4-element int vector stored as raw little-endian
                # bytes, e.g. a shape constant like [1, 1, 2048, 1001].
                # Change tensor like: {"tensor":{"dtype":"DT_INT32","tensor_shape":{"dim":[{"size":4}]},"tensor_content":"\\001\\000\\000\\000\\001\\000\\000\\000\\000\\020\\000\\000\\350\\003\\000\\000"}}
                if len(tensor_shape.dim) == 1 and tensor_shape.dim[0].size == 4:
                    element_size = (int)(len(tensor.tensor_content) / 4)
                    shape = [0, 0, 0, 0]
                    for i in range(4):
                        shape[i] = int_from_bytes(tensor.tensor_content[i*element_size: (i+1)*element_size])
                    # 1x1x2048x1001 -> 1x1x2048x1000, etc.
                    if shape[3] == 1001:
                        shape[3] = 1000
                        content = int_to_bytes(shape[0]) + int_to_bytes(shape[1]) + int_to_bytes(shape[2]) + int_to_bytes(shape[3]);
                        tensor.tensor_content = content
                # Case 2: a 2-element int vector, e.g. a reshape target
                # like [-1, 1001].
                # Change tensor like: {"tensor":{"dtype":"DT_INT32","tensor_shape":{"dim":[{"size":2}]},"tensor_content":"\\377\\377\\377\\377\\351\\003\\000\\000"}}
                if len(tensor_shape.dim) == 1 and tensor_shape.dim[0].size == 2:
                    element_size = (int)(len(tensor.tensor_content) / 2)
                    shape = [0, 0]
                    for i in range(2):
                        shape[i] = int_from_bytes(tensor.tensor_content[i*element_size: (i+1)*element_size])
                    # -1x1001 -> -1x1000, etc.
                    if shape[1] == 1001:
                        shape[1] = 1000
                        content = int_to_bytes(shape[0]) + int_to_bytes(shape[1]);
                        tensor.tensor_content = content
                # Case 3: a scalar stored in int_val, e.g. a class count.
                # Change tensor like: {"tensor":{"dtype":"DT_INT32","tensor_shape":{"dim":[{"size":1}]},"int_val":1000}}
                if len(tensor_shape.dim) == 1 and tensor_shape.dim[0].size == 1 and len(tensor.int_val) == 1:
                    if tensor.int_val[0] == 1001:
                        tensor.int_val[0] = 1000
        # Also patch explicit shape attributes (e.g. on Placeholder nodes).
        shape_value = node.attr.get('shape')
        if shape_value:
            change_tensor_shape(shape_value.shape)
        # Copy the (possibly patched) node into the new graph.
        new_graph_def.node.extend([copy.deepcopy(node)])
    # Overwrite the original file with the patched graph.
    with tf.gfile.GFile(pb_file, "wb") as f:
        f.write(new_graph_def.SerializeToString())
def frozen_pb(tc):
    """Freeze *tc*'s graph with its checkpoint, unless the frozen pb exists."""
    if os.path.exists(tc.frozen_file):
        logging.debug("frozen pb file exist")
        return
    bazel_build = 'cd %s && bazel build tensorflow/python/tools:freeze_graph' % TF_SRC_PATH
    frozen_cmd = ('cd %s && bazel-bin/tensorflow/python/tools/freeze_graph'
                  ' --input_graph=%s --input_checkpoint=%s --input_binary=true'
                  ' --output_graph=%s --output_node_names=%s'
                  % (TF_SRC_PATH, tc.graph_file, tc.ckpt_file, tc.frozen_file, tc.output_node))
    exec_cmd(bazel_build, "bazel build ...")
    exec_cmd(frozen_cmd, "frozen pb ...")
    logging.debug("frozen has been done!")
def tf_inference(tc, inference_input):
    """Run the frozen pb through tests/pb_inference.py and parse its output.

    Parameters:
        tc: test case holding frozen_file and output_node.
        inference_input: 'data' for emulated input, otherwise a picture path.

    Returns:
        (tf_result, tf_time_used): the raw output values and the per-loop
        time in ms, both as lists of strings from re.findall.
    """
    if inference_input == 'data':
        do_inference = 'cd %s/tests && OMP_NUM_THREADS=%d python pb_inference.py --pb_file=%s --output_node=%s --batch_size=1 --loop=%d' % \
            (TRANSFORMER_PATH, NUM_THREADS, tc.frozen_file, tc.output_node, LOOPS)
    else:
        # Bug fix: '--output_node=%' was missing the 's' conversion type, so
        # this branch raised ValueError ("unsupported format character") for
        # any real picture input.
        do_inference = 'cd %s/tests && OMP_NUM_THREADS=%d python pb_inference.py --pb_file=%s --output_node=%s --batch_size=1 --loop=%d --picture=%s' % \
            (TRANSFORMER_PATH, NUM_THREADS, tc.frozen_file, tc.output_node, LOOPS, inference_input)
    output = exec_cmd(do_inference, "tensorflow do inference!")
    output = decode_string(output)
    # Parse the result tensor and the timing line out of the tool's stdout.
    tf_info = "tensorflow output:\s\[([\-?\d+\.?\d*e?-?\d*?\s]+)"
    tf_result = re.findall(tf_info, output)
    logging.debug("tensorflow output: %s" % tf_result)
    tf_time_info = "TF time used per loop is: (\d+\.?\d*) ms"
    tf_time_used = re.findall(tf_time_info, output)
    return tf_result, tf_time_used
def mkldnn_inference(tc, inference_input):
    """Convert the frozen pb to MKL-DNN inference code, then build and run it.

    Returns (inference_result, mkldnn_time_used) parsed from the generated
    program's stdout.  NOTE(review): *inference_input* is currently unused
    here, unlike in tf_inference -- confirm whether picture input should be
    supported on this path too.
    """
    if not os.path.exists(tc.save_model_dir):
        os.makedirs(tc.save_model_dir)
    # Step 1: frozen pb -> topology description + weight files.
    tf2topo = 'cd %s/ && python tf2topo.py --input_model_filename=%s --weights_file=%s/weights.bin --pkl_file=%s/weights.pkl --topo_file=%s/topo.txt' % \
        (TRANSFORMER_PATH, tc.frozen_file, tc.save_model_dir, tc.save_model_dir, tc.save_model_dir)
    exec_cmd(tf2topo, "convert tf pb file to topo")
    # Step 2: topology -> generated inference source code.
    topo2mkldnn = 'cd %s/ && python topo2code.py --topo=%s/topo.txt' % (TRANSFORMER_PATH, tc.save_model_dir)
    exec_cmd(topo2mkldnn, "convert topo to inference code")
    # Step 3: build and execute the generated code.
    run_mkldnn = 'cd %s/inference_code/ && sh build.sh && OMP_NUM_THREADS=%d ./test -W %s/weights.bin -b 1 -l %d' % \
        (TRANSFORMER_PATH, NUM_THREADS, tc.save_model_dir, LOOPS)
    output = decode_string(exec_cmd(run_mkldnn, "build and run inference code"))
    # Pull the result tensor and the average time out of the program's stdout.
    out_info = "Last_output >> \[([\-?\d+\.?\d*e?-?\d*?\s]+)"
    inference_result = re.findall(out_info, output)
    logging.debug("mkldnn output:%s" % inference_result)
    mkldnn_time_info = "AVG Time: (\d+\.?\d*) ms"
    mkldnn_time_used = re.findall(mkldnn_time_info, output)
    return inference_result, mkldnn_time_used
def str2list(input_str):
    """Split a whitespace-separated string into a list of tokens."""
    return input_str.split()
def compare(list1, list2):
    """Element-wise compare two lists of numeric strings within EPSILON.

    Only the first min(len(list1), len(list2)) entries are checked.
    Returns True (and logs success) when every checked pair differs by at
    most the module-level EPSILON, False otherwise.
    """
    length = min(len(list1), len(list2))
    # zip() already stops at the shorter list, so it checks exactly
    # `length` pairs.
    matches = sum(
        1 for a, b in zip(list1, list2)
        if abs(float(a) - float(b)) <= float(EPSILON)
    )
    if matches == length:
        logging.debug("mkldnn inference outputs equal tensorflow outputs, num=%d" % matches)
        return True
    logging.debug("mkldnn inference outputs are different from tensorflow outputs!")
    return False
def init_config():
    """Load test.cfg and populate module-level settings plus the testcases list.

    Side effects: sets the TRANSFORMER_PATH, TF_SRC_PATH, TF_SLIM_PATH,
    NUM_THREADS, EPSILON and LOOPS globals, and appends one TestCase per
    configured model to the global *testcases* list.
    """
    global TRANSFORMER_PATH, TF_SRC_PATH, TF_SLIM_PATH, NUM_THREADS, EPSILON, LOOPS
    TRANSFORMER_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    # Physical cores only (logical=False): hyper-threads rarely help MKL-DNN.
    NUM_THREADS = psutil.cpu_count(logical=False)
    config = configparser.ConfigParser()
    config.read('test.cfg')
    TF_SRC_PATH = config.get('path', 'tensorflow')
    TF_SLIM_PATH = config.get('path', 'tensorflow_slim')
    EPSILON = config.get('control', 'epsilon')
    LOOPS = int(config.get('control', 'loops'))
    str_models = config.get('models', 'names')
    for model in str_models.split(','):
        tc = TestCase()
        tc.model_name = model
        tc.model_type = config.get(model, 'type')
        tc.url = config.get(model, 'url')
        tc.output_node = config.get(model, 'output_node')
        try:
            tc.fix_graph = config.get(model, 'fix_graph')
        except (configparser.NoSectionError, configparser.NoOptionError):
            # fix_graph is optional per model.  The previous bare `except:`
            # silently swallowed *all* exceptions (even KeyboardInterrupt);
            # only the "option/section missing" cases are expected here.
            pass
        testcases.append(tc)
def model_test(tc):
    """Run one end-to-end case: prepare the model, run both backends, compare.

    For 'ckpt' models: downloads the checkpoint, exports and (optionally)
    fixes the graph, then freezes it.  Runs TensorFlow and MKL-DNN inference
    and prints whether their outputs agree within EPSILON.
    """
    print(" Test model: %s start ............" % tc.model_name)
    if tc.model_type == "ckpt":
        download_ckpt(tc)
        export_inference_graph(tc)
        if tc.fix_graph:
            fix_graph_1001_to_1000(tc)
        # if need to get the output name, could call this func
        summarize_graph(tc)
        frozen_pb(tc)
    else:
        # Bug fix: this line used the undefined name 'logger'; the module
        # logs through the root 'logging' API everywhere else.
        logging.debug("model type error!")
        exit()
    tf_output, tf_time = tf_inference(tc, args.inference_input)
    mkldnn_output, mkldnn_time = mkldnn_inference(tc, args.inference_input)
    tf_output_list = str2list(tf_output[0])
    mkldnn_output_list = str2list(mkldnn_output[0])
    logging.debug("tensorflow ouput: %s" % tf_output_list)
    logging.debug("mkldnn output: %s" % mkldnn_output_list)
    result = compare(mkldnn_output_list, tf_output_list)
    if result:
        print(" %s passed! tensorflow used time: %s ms, mkldnn used time: %s ms." % (tc.model_name, tf_time, mkldnn_time))
    else:
        print(" %s failed! tensorflow used time: %s ms, mkldnn used time: %s ms." % (tc.model_name, tf_time, mkldnn_time))
if __name__ == '__main__':
    # Command-line entry point: pick models from test.cfg and run them.
    parser = argparse.ArgumentParser()
    parser.add_argument("--model_name", "-n", default="all", type=str, help="which model to test, default means to test all configured models in test.cfg.")
    parser.add_argument("--inference_input", "-i", default="data", type=str, help="input: 'data' or an image file path. Currently only support 'data', which means to use emulated data.")
    args = parser.parse_args()
    init_config()
    # Run every configured case, or only the one whose name was requested.
    for tc in testcases:
        if args.model_name == "all" or tc.model_name == args.model_name:
            model_test(tc)
    print(" All tests done!")
| [
"logging.debug",
"configparser.ConfigParser",
"tensorflow.gfile.FastGFile",
"copy.deepcopy",
"tensorflow.gfile.GFile",
"logging.info",
"tensorflow.core.framework.graph_pb2.GraphDef",
"os.path.exists",
"os.listdir",
"argparse.ArgumentParser",
"tensorflow.GraphDef",
"os.popen",
"subprocess.che... | [((369, 408), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (388, 408), False, 'import logging\n'), ((1430, 1449), 'logging.info', 'logging.info', (['title'], {}), '(title)\n', (1442, 1449), False, 'import logging\n'), ((1454, 1472), 'logging.debug', 'logging.debug', (['cmd'], {}), '(cmd)\n', (1467, 1472), False, 'import logging\n'), ((2256, 2276), 'os.listdir', 'os.listdir', (['ckpt_dir'], {}), '(ckpt_dir)\n', (2266, 2276), False, 'import os\n'), ((2800, 2824), 'os.path.exists', 'os.path.exists', (['ckpt_dir'], {}), '(ckpt_dir)\n', (2814, 2824), False, 'import os\n'), ((3367, 3412), 'logging.debug', 'logging.debug', (['"""ckpt file has been download!"""'], {}), "('ckpt file has been download!')\n", (3380, 3412), False, 'import logging\n'), ((3495, 3524), 'os.path.isfile', 'os.path.isfile', (['tc.graph_file'], {}), '(tc.graph_file)\n', (3509, 3524), False, 'import os\n'), ((4252, 4299), 'logging.debug', 'logging.debug', (['"""summarize graph has been done!"""'], {}), "('summarize graph has been done!')\n", (4265, 4299), False, 'import logging\n'), ((4344, 4357), 'tensorflow.GraphDef', 'tf.GraphDef', ([], {}), '()\n', (4355, 4357), True, 'import tensorflow as tf\n'), ((5451, 5471), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (5469, 5471), False, 'from tensorflow.core.framework import graph_pb2\n'), ((8243, 8273), 'os.path.exists', 'os.path.exists', (['tc.frozen_file'], {}), '(tc.frozen_file)\n', (8257, 8273), False, 'import os\n'), ((9658, 9685), 're.findall', 're.findall', (['tf_info', 'output'], {}), '(tf_info, output)\n', (9668, 9685), False, 'import re\n'), ((9690, 9740), 'logging.debug', 'logging.debug', (["('tensorflow output: %s' % tf_result)"], {}), "('tensorflow output: %s' % tf_result)\n", (9703, 9740), False, 'import logging\n'), ((9824, 9856), 're.findall', 're.findall', (['tf_time_info', 'output'], {}), '(tf_time_info, output)\n', 
(9834, 9856), False, 'import re\n'), ((10937, 10965), 're.findall', 're.findall', (['out_info', 'output'], {}), '(out_info, output)\n', (10947, 10965), False, 'import re\n'), ((10970, 11022), 'logging.debug', 'logging.debug', (["('mkldnn output:%s' % inference_result)"], {}), "('mkldnn output:%s' % inference_result)\n", (10983, 11022), False, 'import logging\n'), ((11097, 11133), 're.findall', 're.findall', (['mkldnn_time_info', 'output'], {}), '(mkldnn_time_info, output)\n', (11107, 11133), False, 'import re\n'), ((11947, 11978), 'psutil.cpu_count', 'psutil.cpu_count', ([], {'logical': '(False)'}), '(logical=False)\n', (11963, 11978), False, 'import psutil\n'), ((11995, 12022), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (12020, 12022), False, 'import configparser\n'), ((13396, 13450), 'logging.debug', 'logging.debug', (["('tensorflow ouput: %s' % tf_output_list)"], {}), "('tensorflow ouput: %s' % tf_output_list)\n", (13409, 13450), False, 'import logging\n'), ((13455, 13510), 'logging.debug', 'logging.debug', (["('mkldnn output: %s' % mkldnn_output_list)"], {}), "('mkldnn output: %s' % mkldnn_output_list)\n", (13468, 13510), False, 'import logging\n'), ((13914, 13939), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (13937, 13939), False, 'import argparse\n'), ((1508, 1574), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {'stderr': 'subprocess.STDOUT', 'shell': '(True)'}), '(cmd, stderr=subprocess.STDOUT, shell=True)\n', (1531, 1574), False, 'import subprocess\n'), ((1626, 1639), 'os.popen', 'os.popen', (['cmd'], {}), '(cmd)\n', (1634, 1639), False, 'import os\n'), ((2385, 2418), 'os.path.join', 'os.path.join', (['ckpt_dir', 'ckpt_file'], {}), '(ckpt_dir, ckpt_file)\n', (2397, 2418), False, 'import os\n'), ((2887, 2908), 'os.makedirs', 'os.makedirs', (['ckpt_dir'], {}), '(ckpt_dir)\n', (2898, 2908), False, 'import os\n'), ((2959, 3020), 'logging.debug', 'logging.debug', (["('ckpt file (%s) 
already exist!' % tc.ckpt_file)"], {}), "('ckpt file (%s) already exist!' % tc.ckpt_file)\n", (2972, 3020), False, 'import logging\n'), ((3534, 3598), 'logging.debug', 'logging.debug', (["('graph file of %s already exist!' % tc.model_name)"], {}), "('graph file of %s already exist!' % tc.model_name)\n", (3547, 3598), False, 'import logging\n'), ((3825, 3879), 'logging.debug', 'logging.debug', (["('%s has been exported!' % tc.graph_file)"], {}), "('%s has been exported!' % tc.graph_file)\n", (3838, 3879), False, 'import logging\n'), ((4367, 4401), 'tensorflow.gfile.FastGFile', 'tf.gfile.FastGFile', (['filename', '"""rb"""'], {}), "(filename, 'rb')\n", (4385, 4401), True, 'import tensorflow as tf\n'), ((4989, 5011), 'struct.pack', 'struct.pack', (['"""<L"""', 'val'], {}), "('<L', val)\n", (5000, 5011), False, 'import struct\n'), ((8128, 8157), 'tensorflow.gfile.GFile', 'tf.gfile.GFile', (['pb_file', '"""wb"""'], {}), "(pb_file, 'wb')\n", (8142, 8157), True, 'import tensorflow as tf\n'), ((8284, 8321), 'logging.debug', 'logging.debug', (['"""frozen pb file exist"""'], {}), "('frozen pb file exist')\n", (8297, 8321), False, 'import logging\n'), ((8810, 8848), 'logging.debug', 'logging.debug', (['"""frozen has been done!"""'], {}), "('frozen has been done!')\n", (8823, 8848), False, 'import logging\n'), ((9954, 9987), 'os.path.exists', 'os.path.exists', (['tc.save_model_dir'], {}), '(tc.save_model_dir)\n', (9968, 9987), False, 'import os\n'), ((10001, 10031), 'os.makedirs', 'os.makedirs', (['tc.save_model_dir'], {}), '(tc.save_model_dir)\n', (10012, 10031), False, 'import os\n'), ((11515, 11600), 'logging.debug', 'logging.debug', (["('mkldnn inference outputs equal tensorflow outputs, num=%d' % num)"], {}), "('mkldnn inference outputs equal tensorflow outputs, num=%d' % num\n )\n", (11528, 11600), False, 'import logging\n'), ((11634, 11719), 'logging.debug', 'logging.debug', (['"""mkldnn inference outputs are different from tensorflow outputs!"""'], {}), "('mkldnn 
inference outputs are different from tensorflow outputs!'\n )\n", (11647, 11719), False, 'import logging\n'), ((5158, 5185), 'struct.unpack', 'struct.unpack', (['"""<L"""', 'barray'], {}), "('<L', barray)\n", (5171, 5185), False, 'import struct\n'), ((11895, 11920), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (11910, 11920), False, 'import os\n'), ((8071, 8090), 'copy.deepcopy', 'copy.deepcopy', (['node'], {}), '(node)\n', (8084, 8090), False, 'import copy\n')] |
import asyncio
import time
from collections import namedtuple
from http import HTTPStatus
import pytest
from aiojenkins.exceptions import JenkinsError
from aiojenkins.jenkins import Jenkins
from tests import CreateJob, get_host, get_login, get_password, is_ci_server
@pytest.mark.asyncio
async def test_invalid_host(jenkins):
    """A bogus host string must surface as JenkinsError on the first API call."""
    with pytest.raises(JenkinsError):
        # Use a fresh local client instead of shadowing the fixture argument.
        bad_client = Jenkins('@#$')
        await bad_client.get_version()
@pytest.mark.asyncio
async def test_get_status(jenkins):
    """Smoke test: the status endpoint must respond without raising."""
    await jenkins.get_status()
@pytest.mark.asyncio
async def test_quiet_down(jenkins):
    """quiet_down must set the server flag; cancel_quiet_down must clear it."""
    await jenkins.quiet_down()
    assert (await jenkins.get_status())['quietingDown'] is True
    await jenkins.cancel_quiet_down()
    assert (await jenkins.get_status())['quietingDown'] is False
@pytest.mark.asyncio
async def test_restart(jenkins):
    """Both safe_restart and restart must leave the server ready again."""
    if not is_ci_server():
        pytest.skip('takes too much time +40 seconds')
    # Exercise both restart variants; only safe_restart needs a settle delay.
    for do_restart, delay in ((jenkins.safe_restart, 5), (jenkins.restart, 0)):
        await do_restart()
        if delay:
            await asyncio.sleep(delay)
        await jenkins.wait_until_ready()
        assert (await jenkins.is_ready()) is True
@pytest.mark.asyncio
async def test_tokens(jenkins):
    """Generated API tokens must authenticate; revoked ones must not."""
    version = await jenkins.get_version()
    if not (version.major >= 2 and version.minor >= 129):
        pytest.skip('Version isn`t support API tokens')

    async with CreateJob(jenkins) as job_name:
        token_value, token_uuid = await jenkins.generate_token('')
        # Generate a second token under a unique, timestamp-based name; this
        # is the one actually used below.
        token_value, token_uuid = await jenkins.generate_token(str(time.time()))
        await jenkins.nodes.enable('master')

        # A client authenticated with the token (no password) must work ...
        tokened_client = Jenkins(get_host(), get_login(), token_value)
        await tokened_client.builds.start(job_name)

        # ... and must stop working once the token is revoked.
        await jenkins.revoke_token(token_uuid)
        with pytest.raises(JenkinsError):
            await tokened_client.builds.start(job_name)
@pytest.mark.asyncio
async def test_run_groovy_script(jenkins):
    """Groovy console echoes printed text; invalid scripts report an error."""
    # TC: printed output comes back verbatim
    expected = 'test'
    assert (await jenkins.run_groovy_script('print("{}")'.format(expected))) == expected
    # TC: an undefined identifier yields a Groovy error message
    assert 'No such property' in (await jenkins.run_groovy_script('xxx'))
@pytest.mark.asyncio
async def test_retry_client(monkeypatch):
    """The retry layer must absorb a timeout and 5xx responses, then succeed.

    The patched request() fails with asyncio.TimeoutError on attempt 1,
    returns HTTP 500 on attempt 2, and HTTP 200 from attempt 3 on; with
    retry total=5 the final get_status() must succeed.
    """
    attempts = 0

    async def text():
        # Stand-in coroutine for both response.text() and response.json().
        return 'error'

    async def request(*args, **kwargs):
        nonlocal attempts
        attempts += 1
        # NOTE: this binds attributes on the namedtuple *class* itself, not
        # on an instance -- it works because each call creates a fresh class.
        response = namedtuple(
            'response', ['status', 'cookies', 'text', 'json']
        )
        if attempts == 1:
            raise asyncio.TimeoutError
        elif attempts < 3:
            response.status = HTTPStatus.INTERNAL_SERVER_ERROR
        else:
            response.status = HTTPStatus.OK
        response.text = text
        response.json = text
        return response

    retry = dict(total=5, statuses=[HTTPStatus.INTERNAL_SERVER_ERROR])
    try:
        jenkins = Jenkins(get_host(), get_login(), get_password(), retry=retry)
        # First call goes over the real transport (session gets created here);
        # only then is aiohttp patched so the retry logic sees our fakes.
        await jenkins.get_status()
        monkeypatch.setattr('aiohttp.client.ClientSession.request', request)
        await jenkins.get_status()
    finally:
        await jenkins.close()
@pytest.mark.asyncio
async def test_retry_validation():
    """An unknown retry option ('attempts') must be rejected as JenkinsError."""
    bad_retry = dict(attempts=5, statuses=[HTTPStatus.INTERNAL_SERVER_ERROR])
    with pytest.raises(JenkinsError):
        client = Jenkins(get_host(), get_login(), get_password(), retry=bad_retry)
        await client.get_status()
def test_session_close():
    """Constructing a retry-enabled client and dropping it must not raise."""
    Jenkins(
        get_host(),
        get_login(),
        get_password(),
        retry=dict(enabled=True)
    )
    # Force collection of the abandoned client; success == no exceptions.
    import gc
    gc.collect()
| [
"tests.get_password",
"aiojenkins.jenkins.Jenkins",
"collections.namedtuple",
"asyncio.sleep",
"tests.get_login",
"tests.CreateJob",
"tests.get_host",
"pytest.raises",
"tests.is_ci_server",
"gc.collect",
"pytest.skip",
"time.time"
] | [((3899, 3911), 'gc.collect', 'gc.collect', ([], {}), '()\n', (3909, 3911), False, 'import gc\n'), ((340, 367), 'pytest.raises', 'pytest.raises', (['JenkinsError'], {}), '(JenkinsError)\n', (353, 367), False, 'import pytest\n'), ((387, 401), 'aiojenkins.jenkins.Jenkins', 'Jenkins', (['"""@#$"""'], {}), "('@#$')\n", (394, 401), False, 'from aiojenkins.jenkins import Jenkins\n'), ((917, 931), 'tests.is_ci_server', 'is_ci_server', ([], {}), '()\n', (929, 931), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n'), ((941, 987), 'pytest.skip', 'pytest.skip', (['"""takes too much time +40 seconds"""'], {}), "('takes too much time +40 seconds')\n", (952, 987), False, 'import pytest\n'), ((1032, 1048), 'asyncio.sleep', 'asyncio.sleep', (['(5)'], {}), '(5)\n', (1045, 1048), False, 'import asyncio\n'), ((1409, 1456), 'pytest.skip', 'pytest.skip', (['"""Version isn`t support API tokens"""'], {}), "('Version isn`t support API tokens')\n", (1420, 1456), False, 'import pytest\n'), ((1473, 1491), 'tests.CreateJob', 'CreateJob', (['jenkins'], {}), '(jenkins)\n', (1482, 1491), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n'), ((2630, 2691), 'collections.namedtuple', 'namedtuple', (['"""response"""', "['status', 'cookies', 'text', 'json']"], {}), "('response', ['status', 'cookies', 'text', 'json'])\n", (2640, 2691), False, 'from collections import namedtuple\n'), ((3507, 3534), 'pytest.raises', 'pytest.raises', (['JenkinsError'], {}), '(JenkinsError)\n', (3520, 3534), False, 'import pytest\n'), ((1598, 1609), 'time.time', 'time.time', ([], {}), '()\n', (1607, 1609), False, 'import time\n'), ((1806, 1816), 'tests.get_host', 'get_host', ([], {}), '()\n', (1814, 1816), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n'), ((1818, 1829), 'tests.get_login', 'get_login', ([], {}), '()\n', (1827, 1829), False, 'from tests import CreateJob, get_host, get_login, get_password, 
is_ci_server\n'), ((1959, 1986), 'pytest.raises', 'pytest.raises', (['JenkinsError'], {}), '(JenkinsError)\n', (1972, 1986), False, 'import pytest\n'), ((3120, 3130), 'tests.get_host', 'get_host', ([], {}), '()\n', (3128, 3130), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n'), ((3132, 3143), 'tests.get_login', 'get_login', ([], {}), '()\n', (3141, 3143), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n'), ((3145, 3159), 'tests.get_password', 'get_password', ([], {}), '()\n', (3157, 3159), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n'), ((3562, 3572), 'tests.get_host', 'get_host', ([], {}), '()\n', (3570, 3572), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n'), ((3574, 3585), 'tests.get_login', 'get_login', ([], {}), '()\n', (3583, 3585), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n'), ((3587, 3601), 'tests.get_password', 'get_password', ([], {}), '()\n', (3599, 3601), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n'), ((3723, 3733), 'tests.get_host', 'get_host', ([], {}), '()\n', (3731, 3733), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n'), ((3747, 3758), 'tests.get_login', 'get_login', ([], {}), '()\n', (3756, 3758), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n'), ((3772, 3786), 'tests.get_password', 'get_password', ([], {}), '()\n', (3784, 3786), False, 'from tests import CreateJob, get_host, get_login, get_password, is_ci_server\n')] |
import requests
import json
# The API key lives in an untracked file ("api.key") so it is never
# committed or otherwise publicized.  The `with` block closes the file on
# exit; the previous explicit f.close() afterwards was redundant, and the
# bare string literal used as a comment was a no-op statement.
with open("api.key") as f:
    API_KEY = f.read().strip()

# Base endpoint for all WebDrink API requests.
url = 'https://webdrink.csh.rit.edu/api/index.php'
def test():
    """Hit the API test endpoint and print the raw and JSON responses.

    Only used for manual testing of connectivity and the API key; not part
    of the normal request flow.
    """
    params = {"request": "test/api/{}".format(API_KEY), "api_key": API_KEY}
    resp = requests.get(url, params=params)
    print(resp.text)
    print(resp.json())
    print(resp.json()['message'])
def get_credits(uid):
    """Return the drink-credit balance for user *uid*.

    Raises ValueError when the API responds with a non-200 status.
    """
    params = {"request": "users/credits/{}".format(uid), "uid": uid, "api_key": API_KEY}
    resp = requests.get(url, params=params)
    if resp.status_code != 200:
        raise ValueError
    return resp.json()['data']
def get_user_info():
    """Return the current user's info (name, credits, etc.).

    Raises ValueError when the API responds with a non-200 status.
    """
    params = {"request": "users/info/", "api_key": API_KEY}
    resp = requests.get(url, params=params)
    if resp.status_code != 200:
        raise ValueError
    return resp.json()['data']
def get_machine_info():
    """Return the stock of each drink machine.

    NOTE(review): unlike the other endpoints, this request sends no api_key;
    confirm whether machines/stock is intentionally unauthenticated.
    Raises ValueError when the API responds with a non-200 status.
    """
    params = {"request": "machines/stock/"}
    resp = requests.get(url, params=params)
    if resp.status_code != 200:
        raise ValueError
    return resp.json()['data']
def drop_drink(ib, mach, slot, delay):
    """POST a drop request for machine *mach*, slot *slot*, after *delay*.

    *ib* is the requesting user's iButton identifier.  Raises ValueError
    when the API responds with a non-200 status.
    """
    payload = {"request": "drops/drop/{}/{}/{}/{}".format(ib, mach, slot, delay), "ibutton": ib,
               "machine_id": mach, "slot_num": slot, "delay": delay, "api_key": API_KEY}
    resp = requests.post(url, data=payload)
    if resp.status_code != 200:
        raise ValueError
    return resp.json()['message']
| [
"requests.post",
"requests.get"
] | [((409, 439), 'requests.get', 'requests.get', (['url'], {'params': 'head'}), '(url, params=head)\n', (421, 439), False, 'import requests\n'), ((735, 765), 'requests.get', 'requests.get', (['url'], {'params': 'head'}), '(url, params=head)\n', (747, 765), False, 'import requests\n'), ((1000, 1030), 'requests.get', 'requests.get', (['url'], {'params': 'head'}), '(url, params=head)\n', (1012, 1030), False, 'import requests\n'), ((1255, 1285), 'requests.get', 'requests.get', (['url'], {'params': 'head'}), '(url, params=head)\n', (1267, 1285), False, 'import requests\n'), ((1658, 1687), 'requests.post', 'requests.post', (['url'], {'data': 'head'}), '(url, data=head)\n', (1671, 1687), False, 'import requests\n')] |
from unittest import TestCase
import numpy as np
import dianna
import dianna.visualization
from dianna.methods import LIME
from tests.test_onnx_runner import generate_data
from tests.utils import ModelRunner
from tests.utils import run_model
class LimeOnImages(TestCase):
    """Regression tests for the LIME image explainer against stored heatmaps."""

    def test_lime_function(self):
        """Calling the LIME class directly reproduces the stored heatmap."""
        np.random.seed(42)
        image_batch = np.random.random((1, 224, 224, 3))
        axis_labels = ('batch', 'y', 'x', 'channels')
        explainer = LIME(random_state=42, axes_labels=axis_labels)
        heatmap = explainer.explain_image(run_model, image_batch, num_samples=100)
        expected = np.load('tests/test_data/heatmap_lime_function.npy')
        assert heatmap.shape == image_batch[0].shape[:2]
        assert np.allclose(heatmap, expected, atol=.01)

    def test_lime_filename(self):
        """The dianna.explain_image entry point reproduces the stored heatmap."""
        np.random.seed(42)
        model_filename = 'tests/test_data/mnist_model.onnx'
        black_and_white = generate_data(batch_size=1)
        # Make data 3-channel instead of 1-channel
        image_batch = (np.zeros([1, 3] + list(black_and_white.shape[2:]))
                       + black_and_white).astype(np.float32)
        axis_labels = ('batch', 'channels', 'y', 'x')

        def preprocess(data):
            # select single channel out of 3, but keep the channel axis
            return data[:, [0], ...]

        heatmap = dianna.explain_image(
            model_filename, image_batch, method="LIME",
            preprocess_function=preprocess, random_state=42,
            axes_labels=axis_labels)
        expected = np.load('tests/test_data/heatmap_lime_filename.npy')
        assert heatmap.shape == image_batch[0, 0].shape
        assert np.allclose(heatmap, expected, atol=.01)
def test_lime_text():
    """LIME on a short movie review reproduces known words, indices, scores."""
    model_path = 'tests/test_data/movie_review_model.onnx'
    word_vector_file = 'tests/test_data/word_vectors.txt'
    runner = ModelRunner(model_path, word_vector_file, max_filter_size=5)

    review = 'such a bad movie'
    explanation = dianna.explain_text(runner, review, labels=[0], method='LIME', random_state=42)[0]

    # Each explanation entry is (word, character index, relevance score).
    words = [item[0] for item in explanation]
    word_indices = [item[1] for item in explanation]
    scores = [item[2] for item in explanation]

    assert words == ['bad', 'such', 'movie', 'a']
    assert word_indices == [7, 0, 11, 5]
    assert np.allclose(scores, [.492, -.046, .036, -.008], atol=.01)
| [
"tests.test_onnx_runner.generate_data",
"numpy.allclose",
"dianna.explain_text",
"numpy.random.random",
"dianna.methods.LIME",
"tests.utils.ModelRunner",
"numpy.random.seed",
"dianna.explain_image",
"numpy.load"
] | [((1889, 1949), 'tests.utils.ModelRunner', 'ModelRunner', (['model_path', 'word_vector_file'], {'max_filter_size': '(5)'}), '(model_path, word_vector_file, max_filter_size=5)\n', (1900, 1949), False, 'from tests.utils import ModelRunner\n'), ((2487, 2534), 'numpy.allclose', 'np.allclose', (['scores', 'expected_scores'], {'atol': '(0.01)'}), '(scores, expected_scores, atol=0.01)\n', (2498, 2534), True, 'import numpy as np\n'), ((317, 335), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (331, 335), True, 'import numpy as np\n'), ((357, 391), 'numpy.random.random', 'np.random.random', (['(1, 224, 224, 3)'], {}), '((1, 224, 224, 3))\n', (373, 391), True, 'import numpy as np\n'), ((462, 503), 'dianna.methods.LIME', 'LIME', ([], {'random_state': '(42)', 'axes_labels': 'labels'}), '(random_state=42, axes_labels=labels)\n', (466, 503), False, 'from dianna.methods import LIME\n'), ((613, 665), 'numpy.load', 'np.load', (['"""tests/test_data/heatmap_lime_function.npy"""'], {}), "('tests/test_data/heatmap_lime_function.npy')\n", (620, 665), True, 'import numpy as np\n'), ((737, 786), 'numpy.allclose', 'np.allclose', (['heatmap', 'heatmap_expected'], {'atol': '(0.01)'}), '(heatmap, heatmap_expected, atol=0.01)\n', (748, 786), True, 'import numpy as np\n'), ((829, 847), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (843, 847), True, 'import numpy as np\n'), ((935, 962), 'tests.test_onnx_runner.generate_data', 'generate_data', ([], {'batch_size': '(1)'}), '(batch_size=1)\n', (948, 962), False, 'from tests.test_onnx_runner import generate_data\n'), ((1363, 1499), 'dianna.explain_image', 'dianna.explain_image', (['model_filename', 'input_data'], {'method': '"""LIME"""', 'preprocess_function': 'preprocess', 'random_state': '(42)', 'axes_labels': 'labels'}), "(model_filename, input_data, method='LIME',\n preprocess_function=preprocess, random_state=42, axes_labels=labels)\n", (1383, 1499), False, 'import dianna\n'), ((1563, 1615), 
'numpy.load', 'np.load', (['"""tests/test_data/heatmap_lime_filename.npy"""'], {}), "('tests/test_data/heatmap_lime_filename.npy')\n", (1570, 1615), True, 'import numpy as np\n'), ((1686, 1735), 'numpy.allclose', 'np.allclose', (['heatmap', 'heatmap_expected'], {'atol': '(0.01)'}), '(heatmap, heatmap_expected, atol=0.01)\n', (1697, 1735), True, 'import numpy as np\n'), ((2002, 2081), 'dianna.explain_text', 'dianna.explain_text', (['runner', 'review'], {'labels': '[0]', 'method': '"""LIME"""', 'random_state': '(42)'}), "(runner, review, labels=[0], method='LIME', random_state=42)\n", (2021, 2081), False, 'import dianna\n')] |
import re
import pyUnicodeSteganography.zerowidth as zerowidth
import pyUnicodeSteganography.lookalikes as lookalikes
import pyUnicodeSteganography.snow as snow
import pyUnicodeSteganography.emoji as emoji
from pyUnicodeSteganography.zerowidth import zwc_4
def encode(unencoded_string, msg, method="zw", binary=False, replacements=None, delimiter=None):
    '''
    Main encoding method

    Dispatches to the encoder for *method* ("zw", "snow", "lookalike" or
    "emoji") and handles insertion/appending of the hidden message into the
    cover string.  Raises Exception for an unsupported method and ValueError
    when the cover string is too short for zero-width encoding.
    '''
    if method == "zw":
        code = zerowidth.encode(msg, character_set=replacements, binary=binary)
        # Interleave 4-character zero-width chunks after successive cover chars.
        chunks = [code[i:i + 4] for i in range(0, len(code), 4)]
        cover = list(unencoded_string)
        if len(chunks) >= len(cover):
            raise ValueError("String too short to encode message")
        return ''.join(
            ch + (chunks[i] if i < len(chunks) else '')
            for i, ch in enumerate(cover)
        )
    if method == "snow":
        code = snow.encode(msg, character_set=replacements, binary=binary)
        # Default delimiter between cover text and trailing whitespace payload.
        return unencoded_string + (delimiter or '\t\t\t') + code
    if method == "lookalike":
        return lookalikes.encode(unencoded_string, msg, substitution_table=replacements, binary=binary)
    if method == "emoji":
        return emoji.encode(msg, binary=binary)
    raise Exception("Method: {}, is not supported".format(method))
def decode(encoded_string, method="zw", binary=False, replacements=None, delimiter=None):
    '''
    Main decoding method

    Dispatches to the decoder for *method* ("zw", "snow", "lookalike" or
    "emoji") and extracts the hidden message from the string.  Raises
    Exception for an unsupported method (previously an unknown method
    silently returned None, inconsistent with encode()).
    '''
    if method == "zw":
        if not replacements:
            replacements = zwc_4
        # Collect only the zero-width carrier characters, in order.
        code = ''
        for c in encoded_string:
            if c in replacements:
                code = code + c
        return zerowidth.decode(code, character_set=replacements, binary=binary)
    elif method == "snow":
        if not delimiter:
            delimiter = '\t\t\t'
        regex = "{}(.+)$".format(delimiter)
        # NOTE(review): raises AttributeError when the delimiter/payload is
        # absent (m is None) -- preserved from the original behaviour.
        m = re.search(regex, encoded_string)
        code = m.groups()[0]
        return snow.decode(code, character_set=replacements, binary=binary)
    elif method == "lookalike":
        return lookalikes.decode(encoded_string, substitution_table=replacements, binary=binary)
    elif method == "emoji":
        return emoji.decode(encoded_string, binary=binary)
    else:
        # Consistency fix: fail loudly like encode() instead of returning None.
        raise Exception("Method: {}, is not supported".format(method))
| [
"pyUnicodeSteganography.zerowidth.encode",
"pyUnicodeSteganography.zerowidth.decode",
"pyUnicodeSteganography.lookalikes.encode",
"pyUnicodeSteganography.lookalikes.decode",
"pyUnicodeSteganography.emoji.encode",
"pyUnicodeSteganography.emoji.decode",
"pyUnicodeSteganography.snow.encode",
"pyUnicodeSt... | [((579, 643), 'pyUnicodeSteganography.zerowidth.encode', 'zerowidth.encode', (['msg'], {'character_set': 'replacements', 'binary': 'binary'}), '(msg, character_set=replacements, binary=binary)\n', (595, 643), True, 'import pyUnicodeSteganography.zerowidth as zerowidth\n'), ((2034, 2099), 'pyUnicodeSteganography.zerowidth.decode', 'zerowidth.decode', (['code'], {'character_set': 'replacements', 'binary': 'binary'}), '(code, character_set=replacements, binary=binary)\n', (2050, 2099), True, 'import pyUnicodeSteganography.zerowidth as zerowidth\n'), ((1147, 1206), 'pyUnicodeSteganography.snow.encode', 'snow.encode', (['msg'], {'character_set': 'replacements', 'binary': 'binary'}), '(msg, character_set=replacements, binary=binary)\n', (1158, 1206), True, 'import pyUnicodeSteganography.snow as snow\n'), ((2243, 2275), 're.search', 're.search', (['regex', 'encoded_string'], {}), '(regex, encoded_string)\n', (2252, 2275), False, 'import re\n'), ((2321, 2381), 'pyUnicodeSteganography.snow.decode', 'snow.decode', (['code'], {'character_set': 'replacements', 'binary': 'binary'}), '(code, character_set=replacements, binary=binary)\n', (2332, 2381), True, 'import pyUnicodeSteganography.snow as snow\n'), ((1306, 1398), 'pyUnicodeSteganography.lookalikes.encode', 'lookalikes.encode', (['unencoded_string', 'msg'], {'substitution_table': 'replacements', 'binary': 'binary'}), '(unencoded_string, msg, substitution_table=replacements,\n binary=binary)\n', (1323, 1398), True, 'import pyUnicodeSteganography.lookalikes as lookalikes\n'), ((2430, 2516), 'pyUnicodeSteganography.lookalikes.decode', 'lookalikes.decode', (['encoded_string'], {'substitution_table': 'replacements', 'binary': 'binary'}), '(encoded_string, substitution_table=replacements, binary=\n binary)\n', (2447, 2516), True, 'import pyUnicodeSteganography.lookalikes as lookalikes\n'), ((1439, 1471), 'pyUnicodeSteganography.emoji.encode', 'emoji.encode', (['msg'], {'binary': 'binary'}), '(msg, 
binary=binary)\n', (1451, 1471), True, 'import pyUnicodeSteganography.emoji as emoji\n'), ((2556, 2599), 'pyUnicodeSteganography.emoji.decode', 'emoji.decode', (['encoded_string'], {'binary': 'binary'}), '(encoded_string, binary=binary)\n', (2568, 2599), True, 'import pyUnicodeSteganography.emoji as emoji\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 21 16:52:10 2017
@author: margauxmouchene
"""
import numpy as np
import pytest
from numpy.testing import assert_almost_equal
from landlab import RasterModelGrid
from landlab.components import (
FlowAccumulator,
FlowDirectorSteepest,
TransportLengthHillslopeDiffuser,
)
def test_route_to_multiple_error_raised():
    """The diffuser must reject grids routed with a multiple-flow director."""
    grid = RasterModelGrid((10, 10))
    elev = grid.add_zeros("node", "topographic__elevation")
    elev += grid.x_of_node + grid.y_of_node
    accumulator = FlowAccumulator(grid, flow_director="MFD")
    accumulator.run_one_step()
    with pytest.raises(NotImplementedError):
        TransportLengthHillslopeDiffuser(grid, erodibility=1.0, slope_crit=0.5)
def test_tl_hill_diff():
    """Exercise transport-length diffusion for S > Sc, S == Sc and S < Sc."""
    # 3x12 grid, closed on all four edges.  The middle row steps down from
    # z=5 over two plateaus (1.9 then 1.3) to the open node at z=1, so the
    # three downhill steps have slopes 3.1, 0.6 and 0.3: above, at and
    # below the critical slope Sc = 0.6.
    mg = RasterModelGrid((3, 12))
    z = np.array(
        [0.0] * 13 + [5.0] + [1.9] * 4 + [1.3] * 4 + [1.0] + [0.0] * 13
    )
    mg.add_field("node", "topographic__elevation", z)
    mg.set_closed_boundaries_at_grid_edges(True, True, True, True)

    # Parameter values for the test.
    k = 0.001  # erodibility
    Sc = 0.6  # critical slope

    # Instantiate flow director and transport-length hillslope diffuser.
    fdir = FlowDirectorSteepest(mg)
    tl_diff = TransportLengthHillslopeDiffuser(mg, erodibility=k, slope_crit=Sc)

    # Route flow, then check the steepest slopes at the three steps.
    fdir.run_one_step()
    s_test = np.array(
        [0.0] * 13 + [3.1] + [0.0] * 3 + [0.6] + [0.0] * 3 + [0.3] + [0.0] * 14
    )
    assert_almost_equal(
        mg.at_node["topographic__steepest_slope"], s_test, decimal=10
    )

    # One step of diffusion: check outgoing fluxes and updated elevations.
    tl_diff.run_one_step(1.0)
    fo_test = np.array(
        [0.0] * 13
        + [0.025]
        + [0.0] * 3
        + [0.0006]
        + [0.0] * 3
        + [0.0003]
        + [0.0] * 14
    )
    assert_almost_equal(mg.at_node["sediment__flux_out"], fo_test, decimal=10)

    elev_test = np.array(
        [0.0] * 13
        + [4.975]
        + [1.9] * 3
        + [1.8994]
        + [1.3] * 3
        + [1.2997]
        + [1.0]
        + [0.0] * 13
    )
    assert_almost_equal(
        mg.at_node["topographic__elevation"], elev_test, decimal=10
    )

    # Run another time step because deposition and transfer were null
    # the first time.
    fdir.run_one_step()
    tl_diff.run_one_step(1.0)
    fo_test = np.array(
        [0.0] * 13
        + [2.475e-02]
        + [0.0] * 2
        + [6.0e-07, 5.994e-04]
        + [0.0] * 2
        + [3.0e-07, 2.997e-04]
        + [0.0] * 14
    )
    assert_almost_equal(mg.at_node["sediment__flux_out"], fo_test, decimal=10)

    elev_test = np.array(
        [0.0] * 13
        + [4.95025, 1.925, 1.9, 1.8999994, 1.8988006]
        + [1.3006, 1.3, 1.2999997, 1.2994003, 1.0003]
        + [0.0] * 13
    )
    assert_almost_equal(
        mg.at_node["topographic__elevation"], elev_test, decimal=10
    )
| [
"landlab.components.FlowAccumulator",
"landlab.RasterModelGrid",
"landlab.components.TransportLengthHillslopeDiffuser",
"numpy.array",
"numpy.testing.assert_almost_equal",
"pytest.raises",
"landlab.components.FlowDirectorSteepest"
] | [((408, 433), 'landlab.RasterModelGrid', 'RasterModelGrid', (['(10, 10)'], {}), '((10, 10))\n', (423, 433), False, 'from landlab import RasterModelGrid\n'), ((535, 575), 'landlab.components.FlowAccumulator', 'FlowAccumulator', (['mg'], {'flow_director': '"""MFD"""'}), "(mg, flow_director='MFD')\n", (550, 575), False, 'from landlab.components import FlowAccumulator, FlowDirectorSteepest, TransportLengthHillslopeDiffuser\n'), ((921, 945), 'landlab.RasterModelGrid', 'RasterModelGrid', (['(3, 12)'], {}), '((3, 12))\n', (936, 945), False, 'from landlab import RasterModelGrid\n'), ((954, 1154), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.0, 1.9,\n 1.9, 1.9, 1.9, 1.3, 1.3, 1.3, 1.3, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 5.0, 1.9, 1.9, 1.9, 1.9, 1.3, 1.3, 1.3, 1.3, 1.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])\n', (962, 1154), True, 'import numpy as np\n'), ((1825, 1849), 'landlab.components.FlowDirectorSteepest', 'FlowDirectorSteepest', (['mg'], {}), '(mg)\n', (1845, 1849), False, 'from landlab.components import FlowAccumulator, FlowDirectorSteepest, TransportLengthHillslopeDiffuser\n'), ((1864, 1930), 'landlab.components.TransportLengthHillslopeDiffuser', 'TransportLengthHillslopeDiffuser', (['mg'], {'erodibility': 'k', 'slope_crit': 'Sc'}), '(mg, erodibility=k, slope_crit=Sc)\n', (1896, 1930), False, 'from landlab.components import FlowAccumulator, FlowDirectorSteepest, TransportLengthHillslopeDiffuser\n'), ((2066, 2266), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.1, 0.0,\n 0.0, 0.0, 0.6, 0.0, 0.0, 0.0, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 3.1, 0.0, 0.0, 0.0, 0.6, 0.0, 0.0, 0.0, 0.3, 0.0, 0.0, 0.0, 
0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])\n', (2074, 2266), True, 'import numpy as np\n'), ((2685, 2731), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['s_out', 's_test'], {'decimal': '(10)'}), '(s_out, s_test, decimal=10)\n', (2704, 2731), False, 'from numpy.testing import assert_almost_equal\n'), ((2854, 3061), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.025, \n 0.0, 0.0, 0.0, 0.0006, 0.0, 0.0, 0.0, 0.0003, 0.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 0.025, 0.0, 0.0, 0.0, 0.0006, 0.0, 0.0, 0.0, 0.0003, 0.0, 0.0, 0.0, 0.0,\n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])\n', (2862, 3061), True, 'import numpy as np\n'), ((3527, 3575), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['fo_out', 'fo_test'], {'decimal': '(10)'}), '(fo_out, fo_test, decimal=10)\n', (3546, 3575), False, 'from numpy.testing import assert_almost_equal\n'), ((3617, 3824), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.975, \n 1.9, 1.9, 1.9, 1.8994, 1.3, 1.3, 1.3, 1.2997, 1.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 4.975, 1.9, 1.9, 1.9, 1.8994, 1.3, 1.3, 1.3, 1.2997, 1.0, 0.0, 0.0, 0.0,\n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])\n', (3625, 3824), True, 'import numpy as np\n'), ((4302, 4354), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['elev_out', 'elev_test'], {'decimal': '(10)'}), '(elev_out, elev_test, decimal=10)\n', (4321, 4354), False, 'from numpy.testing import assert_almost_equal\n'), ((4549, 4769), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.02475, \n 0.0, 0.0, 6e-07, 0.0005994, 0.0, 0.0, 3e-07, 0.0002997, 0.0, 0.0, 0.0, \n 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 0.02475, 0.0, 0.0, 6e-07, 0.0005994, 0.0, 0.0, 3e-07, 0.0002997, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])\n', (4557, 4769), True, 'import numpy as np\n'), ((5643, 5691), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['fo_out', 'fo_test'], {'decimal': '(10)'}), '(fo_out, fo_test, decimal=10)\n', (5662, 5691), False, 'from numpy.testing import assert_almost_equal\n'), ((5733, 5974), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.95025, \n 1.925, 1.9, 1.8999994, 1.8988006, 1.3006, 1.3, 1.2999997, 1.2994003, \n 1.0003, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 4.95025, 1.925, 1.9, 1.8999994, 1.8988006, 1.3006, 1.3, 1.2999997, \n 1.2994003, 1.0003, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 0.0])\n', (5741, 5974), True, 'import numpy as np\n'), ((6447, 6499), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['elev_out', 'elev_test'], {'decimal': '(10)'}), '(elev_out, elev_test, decimal=10)\n', (6466, 6499), False, 'from numpy.testing import assert_almost_equal\n'), ((608, 642), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (621, 642), False, 'import pytest\n'), ((652, 721), 'landlab.components.TransportLengthHillslopeDiffuser', 'TransportLengthHillslopeDiffuser', (['mg'], {'erodibility': '(1.0)', 'slope_crit': '(0.5)'}), '(mg, erodibility=1.0, slope_crit=0.5)\n', (684, 721), False, 'from landlab.components import FlowAccumulator, FlowDirectorSteepest, TransportLengthHillslopeDiffuser\n')] |
# import app
from flask import Flask, render_template, make_response, send_file
from flask_cors import CORS
# import custom helpers
from maplib import generate_embed
import loyaltylib as ll
# Flask application object; CORS is enabled app-wide so browser clients
# served from a different origin can call these endpoints.
app = Flask(__name__)
CORS(app)
# import declared routes
import frontenddata
@app.route('/ll')
def llfn():
    """Smoke-test endpoint for the loyalty-account helpers.

    Creates a loyalty account, retrieves it, and returns the retrieved
    value rendered as a string.  A Flask view function must return a
    response; the original bare ``return`` handed Flask ``None``, which
    raises a TypeError on every request to this route.
    """
    ll.create_loyalty_account()
    account = ll.retrieve_loyalty_account()
    # str() of whatever the helper returns; the exact shape of the account
    # object is defined in loyaltylib.
    return str(account)
@app.route('/map')
def map():
    """Render the map page with a single hard-coded demo address."""
    # The function name shadows the `map` builtin, but it also provides the
    # Flask endpoint name, so it is kept unchanged for compatibility.
    address = '850 FOLSOM ST, San Francisco, CA 94107'
    return render_template('map.html', addresslist={'a': generate_embed(address)})
@app.route('/cal')
def cal():
    """Render the calendar page pre-filled with a sample appointment."""
    sample_appointment = dict(
        stylist='<NAME>',
        salon='<NAME>',
        event="Men's Haircut",
        location='850 FOLSOM ST, San Francisco, CA 94107',
        starttime='2020-06-23 08:00:00',
        endtime='2020-06-23 08:45:00',
    )
    return render_template('cal.html', appoint=sample_appointment)
# def loop_matcher(delay):
# while(True):
# print('Matcher Automatically Run')
# handle_matcher()
# #do expired status update here
# time.sleep(delay)
# Run Server
if __name__ == "__main__":
    #matcher_delay = 3600 # 1 hour in seconds
    #p = Process(target=loop_matcher, args=(matcher_delay,))
    #p.start()
    # Listen on all interfaces in debug mode; the auto-reloader is disabled
    # so the server stays in a single process.
    app.run(host = '0.0.0.0', debug=True, use_reloader=False)
#p.join() | [
"flask.render_template",
"flask_cors.CORS",
"flask.Flask",
"loyaltylib.create_loyalty_account",
"maplib.generate_embed",
"loyaltylib.retrieve_loyalty_account"
] | [((198, 213), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (203, 213), False, 'from flask import Flask, render_template, make_response, send_file\n'), ((214, 223), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (218, 223), False, 'from flask_cors import CORS\n'), ((306, 333), 'loyaltylib.create_loyalty_account', 'll.create_loyalty_account', ([], {}), '()\n', (331, 333), True, 'import loyaltylib as ll\n'), ((338, 367), 'loyaltylib.retrieve_loyalty_account', 'll.retrieve_loyalty_account', ([], {}), '()\n', (365, 367), True, 'import loyaltylib as ll\n'), ((528, 580), 'flask.render_template', 'render_template', (['"""map.html"""'], {'addresslist': 'addresslist'}), "('map.html', addresslist=addresslist)\n", (543, 580), False, 'from flask import Flask, render_template, make_response, send_file\n'), ((881, 925), 'flask.render_template', 'render_template', (['"""cal.html"""'], {'appoint': 'appoint'}), "('cal.html', appoint=appoint)\n", (896, 925), False, 'from flask import Flask, render_template, make_response, send_file\n'), ((491, 515), 'maplib.generate_embed', 'generate_embed', (['location'], {}), '(location)\n', (505, 515), False, 'from maplib import generate_embed\n')] |
import torch
import torch.nn.functional as F
try:
import matplotlib.pyplot as plt
except ImportError:
pass
def factor_getter(n, base):
    """Return an additive [r, g, b] tint for overlay index ``n``.

    Indices cycle through six colors: the three pure primaries at full
    strength, then three half-strength complements; the strength decays by
    a factor of 0.8 after every full cycle of six.
    """
    strength = base * 0.8 ** (n // 6)
    channel = n % 6
    if channel < 3:
        # Pure primary: only one channel lit.
        rgb = [0, 0, 0]
        rgb[channel] = strength
    else:
        # Complement: all channels at half strength except one zeroed out.
        strength /= 2
        rgb = [strength, strength, strength]
        rgb[channel - 3] = 0
    return rgb
def resize(image, target, scale_factor):
    """Bilinearly rescale a CHW image and rescale its target to match.

    Boxes are scaled by the actual new/old pixel ratio per axis; masks are
    resized with nearest interpolation and converted back to byte tensors.
    The target dict is updated in place and returned.
    """
    old_h, old_w = image.shape[-2:]
    image = F.interpolate(
        image[None], scale_factor=scale_factor, mode='bilinear', align_corners=False
    )[0]

    if target is None:
        return image, target

    new_h, new_w = image.shape[-2:]
    if 'boxes' in target:
        boxes = target['boxes']
        boxes[:, [0, 2]] = boxes[:, [0, 2]] * new_w / old_w
        boxes[:, [1, 3]] = boxes[:, [1, 3]] * new_h / old_h
        target['boxes'] = boxes

    if 'masks' in target:
        masks = target['masks']
        masks = F.interpolate(masks[None].float(), scale_factor=scale_factor)[0].byte()
        target['masks'] = masks

    return image, target
def show(image, target=None, classes=None, scale_factor=None, base=0.4):
    """Display a CHW image tensor with optional detection overlays.

    Args:
        image: CHW tensor; it is cloned first, so the caller's tensor is
            never modified.
        target (dict, optional): may contain 'masks', 'boxes', 'labels'
            and 'scores' entries.
        classes (sequence, optional): maps label indices to display names;
            required whenever ``target`` contains 'labels'.
        scale_factor (float, optional): if given, image and target are
            resized together before drawing.
        base (float): base tint strength forwarded to ``factor_getter``.
    """
    image = image.clone()
    if scale_factor is not None:
        image, target = resize(image, target, scale_factor)
    if target is not None and 'masks' in target:
        # Tint the image by adding a per-instance RGB factor under each
        # mask, then clamp the result back into [0, 1].
        mask = target['masks']
        mask = mask.reshape(-1, 1, mask.shape[1], mask.shape[2])
        mask = mask.repeat(1, 3, 1, 1).to(image)
        for i, m in enumerate(mask):
            # factor_getter cycles through distinct colors per instance index.
            factor = torch.tensor(factor_getter(i, base)).reshape(3, 1, 1).to(image)
            value = factor * m
            image += value
        image = image.clamp(0, 1)
    # matplotlib expects an HWC numpy array on the CPU.
    im = image.cpu().numpy()
    plt.imshow(im.transpose(1, 2, 0))
    if target is not None:
        if 'boxes' in target:
            box = target['boxes']
            box = box.cpu()
            for i, b in enumerate(box):
                # Draw the box outline as a closed 5-point polyline.
                plt.plot(b[[0, 2, 2, 0, 0]], b[[1, 1, 3, 3, 1]])
                if 'labels' in target:
                    l = target['labels'][i].item()
                    if classes is None:
                        raise ValueError("'classes' should not be None when 'target' has 'labels'!")
                    txt = classes[l]
                    if 'scores' in target:
                        # Append the confidence as a rounded percentage.
                        s = target['scores'][i]
                        s = round(s.item() * 100)
                        txt = '{} {}%'.format(txt, s)
                    plt.text(
                        b[0], b[1], txt, fontsize=14,
                        bbox=dict(boxstyle='round', fc='white', lw=1, alpha=0.7))
    plt.title(im.shape)
    plt.axis('off')
plt.show() | [
"matplotlib.pyplot.plot",
"matplotlib.pyplot.axis",
"torch.nn.functional.interpolate",
"matplotlib.pyplot.title",
"matplotlib.pyplot.show"
] | [((2598, 2617), 'matplotlib.pyplot.title', 'plt.title', (['im.shape'], {}), '(im.shape)\n', (2607, 2617), True, 'import matplotlib.pyplot as plt\n'), ((2622, 2637), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (2630, 2637), True, 'import matplotlib.pyplot as plt\n'), ((2642, 2652), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2650, 2652), True, 'import matplotlib.pyplot as plt\n'), ((436, 531), 'torch.nn.functional.interpolate', 'F.interpolate', (['image[None]'], {'scale_factor': 'scale_factor', 'mode': '"""bilinear"""', 'align_corners': '(False)'}), "(image[None], scale_factor=scale_factor, mode='bilinear',\n align_corners=False)\n", (449, 531), True, 'import torch.nn.functional as F\n'), ((1902, 1950), 'matplotlib.pyplot.plot', 'plt.plot', (['b[[0, 2, 2, 0, 0]]', 'b[[1, 1, 3, 3, 1]]'], {}), '(b[[0, 2, 2, 0, 0]], b[[1, 1, 3, 3, 1]])\n', (1910, 1950), True, 'import matplotlib.pyplot as plt\n')] |
from flax.geometry import Blob, Point, Rectangle, Size, Span
def test_blob_create():
    """A blob built from a rectangle matches that rectangle's measurements."""
    rectangle = Rectangle(origin=Point(0, 0), size=Size(5, 5))
    blob = Blob.from_rectangle(rectangle)
    assert blob.area == rectangle.area
    assert blob.height == rectangle.height
def test_blob_math_disjoint():
    """Union and difference of two blobs that do not overlap at all."""
    # These rectangles look like this:
    # xxx
    # xxx
    # xxx   xxx
    #       xxx
    #       xxx
    rect1 = Rectangle(origin=Point(0, 0), size=Size(3, 3))
    rect2 = Rectangle(origin=Point(6, 2), size=Size(3, 3))
    blob1 = Blob.from_rectangle(rect1)
    blob2 = Blob.from_rectangle(rect2)

    # The union covers both rectangles exactly and spans rows 0-4.
    union_blob = blob1 + blob2
    assert union_blob.area == blob1.area + blob2.area
    assert union_blob.area == rect1.area + rect2.area
    assert union_blob.height == 5

    # Subtracting a disjoint blob is a no-op in either direction.
    # (Leftover pprint debugging output was removed from this test.)
    left_blob = blob1 - blob2
    assert left_blob.area == blob1.area
    assert left_blob == blob1

    right_blob = blob2 - blob1
    assert right_blob.area == blob2.area
    assert right_blob == blob2
def test_blob_math_overlap():
    """Union and difference of two partially overlapping blobs."""
    # These rectangles share a 2x2 square:
    # xxx
    # x##x
    # x##x
    #  xxx
    upper_left = Blob.from_rectangle(Rectangle(origin=Point(0, 0), size=Size(3, 3)))
    lower_right = Blob.from_rectangle(Rectangle(origin=Point(1, 1), size=Size(3, 3)))

    # 9 + 9 cells minus the 4 shared ones.
    assert (upper_left + lower_right).area == 14

    # Each difference keeps only the L-shaped non-overlapping part (5 cells).
    remainder = upper_left - lower_right
    assert remainder.area == 5
    assert remainder.height == 3
    assert remainder.spans == {
        0: (Span(0, 2),),
        1: (Span(0, 0),),
        2: (Span(0, 0),),
    }

    remainder = lower_right - upper_left
    assert remainder.area == 5
    assert remainder.height == 3
    assert remainder.spans == {
        1: (Span(3, 3),),
        2: (Span(3, 3),),
        3: (Span(1, 3),),
    }
def test_blob_math_contain():
    """Union and difference when one blob fully contains the other."""
    # The inner 3x3 rectangle sits inside the 5x5 one:
    # xxxxx
    # x###x
    # x###x
    # x###x
    # xxxxx
    outer = Blob.from_rectangle(Rectangle(origin=Point(0, 0), size=Size(5, 5)))
    inner = Blob.from_rectangle(Rectangle(origin=Point(1, 1), size=Size(3, 3)))

    # Union adds nothing: the outer blob already covers the inner one.
    union_blob = outer + inner
    assert union_blob.area == outer.area
    assert union_blob.height == outer.height

    # Removing the inner blob leaves a one-cell-thick ring (16 cells).
    ring = outer - inner
    assert ring.area == 16
    assert ring.height == 5
    assert ring.spans == {
        0: (Span(0, 4),),
        1: (Span(0, 0), Span(4, 4)),
        2: (Span(0, 0), Span(4, 4)),
        3: (Span(0, 0), Span(4, 4)),
        4: (Span(0, 4),),
    }

    # Removing the outer blob from the inner one leaves nothing at all.
    empty = inner - outer
    assert empty.area == 0
    assert empty.height == 0
    assert empty.spans == {}
def test_blob_math_fuzzer():
    # TODO: placeholder -- randomized blob-arithmetic test not implemented yet.
    pass
| [
"flax.geometry.Size",
"flax.geometry.Span",
"flax.geometry.Point",
"flax.geometry.Blob.from_rectangle",
"pprint.pprint"
] | [((156, 181), 'flax.geometry.Blob.from_rectangle', 'Blob.from_rectangle', (['rect'], {}), '(rect)\n', (175, 181), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((525, 551), 'flax.geometry.Blob.from_rectangle', 'Blob.from_rectangle', (['rect1'], {}), '(rect1)\n', (544, 551), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((564, 590), 'flax.geometry.Blob.from_rectangle', 'Blob.from_rectangle', (['rect2'], {}), '(rect2)\n', (583, 590), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((830, 849), 'pprint.pprint', 'pprint', (['blob1.spans'], {}), '(blob1.spans)\n', (836, 849), False, 'from pprint import pprint\n'), ((854, 873), 'pprint.pprint', 'pprint', (['blob2.spans'], {}), '(blob2.spans)\n', (860, 873), False, 'from pprint import pprint\n'), ((878, 901), 'pprint.pprint', 'pprint', (['left_blob.spans'], {}), '(left_blob.spans)\n', (884, 901), False, 'from pprint import pprint\n'), ((1038, 1057), 'pprint.pprint', 'pprint', (['blob1.spans'], {}), '(blob1.spans)\n', (1044, 1057), False, 'from pprint import pprint\n'), ((1062, 1081), 'pprint.pprint', 'pprint', (['blob2.spans'], {}), '(blob2.spans)\n', (1068, 1081), False, 'from pprint import pprint\n'), ((1086, 1110), 'pprint.pprint', 'pprint', (['right_blob.spans'], {}), '(right_blob.spans)\n', (1092, 1110), False, 'from pprint import pprint\n'), ((1427, 1453), 'flax.geometry.Blob.from_rectangle', 'Blob.from_rectangle', (['rect1'], {}), '(rect1)\n', (1446, 1453), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((1466, 1492), 'flax.geometry.Blob.from_rectangle', 'Blob.from_rectangle', (['rect2'], {}), '(rect2)\n', (1485, 1492), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2245, 2271), 'flax.geometry.Blob.from_rectangle', 'Blob.from_rectangle', (['rect1'], {}), '(rect1)\n', (2264, 2271), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2284, 2310), 
'flax.geometry.Blob.from_rectangle', 'Blob.from_rectangle', (['rect2'], {}), '(rect2)\n', (2303, 2310), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((115, 126), 'flax.geometry.Point', 'Point', (['(0)', '(0)'], {}), '(0, 0)\n', (120, 126), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((133, 143), 'flax.geometry.Size', 'Size', (['(5)', '(5)'], {}), '(5, 5)\n', (137, 143), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((424, 435), 'flax.geometry.Point', 'Point', (['(0)', '(0)'], {}), '(0, 0)\n', (429, 435), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((442, 452), 'flax.geometry.Size', 'Size', (['(3)', '(3)'], {}), '(3, 3)\n', (446, 452), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((483, 494), 'flax.geometry.Point', 'Point', (['(6)', '(2)'], {}), '(6, 2)\n', (488, 494), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((501, 511), 'flax.geometry.Size', 'Size', (['(3)', '(3)'], {}), '(3, 3)\n', (505, 511), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((1326, 1337), 'flax.geometry.Point', 'Point', (['(0)', '(0)'], {}), '(0, 0)\n', (1331, 1337), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((1344, 1354), 'flax.geometry.Size', 'Size', (['(3)', '(3)'], {}), '(3, 3)\n', (1348, 1354), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((1385, 1396), 'flax.geometry.Point', 'Point', (['(1)', '(1)'], {}), '(1, 1)\n', (1390, 1396), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((1403, 1413), 'flax.geometry.Size', 'Size', (['(3)', '(3)'], {}), '(3, 3)\n', (1407, 1413), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2144, 2155), 'flax.geometry.Point', 'Point', (['(0)', '(0)'], {}), '(0, 0)\n', (2149, 2155), False, 'from flax.geometry import Blob, Point, Rectangle, Size, 
Span\n'), ((2162, 2172), 'flax.geometry.Size', 'Size', (['(5)', '(5)'], {}), '(5, 5)\n', (2166, 2172), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2203, 2214), 'flax.geometry.Point', 'Point', (['(1)', '(1)'], {}), '(1, 1)\n', (2208, 2214), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2221, 2231), 'flax.geometry.Size', 'Size', (['(3)', '(3)'], {}), '(3, 3)\n', (2225, 2231), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((1697, 1707), 'flax.geometry.Span', 'Span', (['(0)', '(2)'], {}), '(0, 2)\n', (1701, 1707), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((1723, 1733), 'flax.geometry.Span', 'Span', (['(0)', '(0)'], {}), '(0, 0)\n', (1727, 1733), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((1749, 1759), 'flax.geometry.Span', 'Span', (['(0)', '(0)'], {}), '(0, 0)\n', (1753, 1759), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((1912, 1922), 'flax.geometry.Span', 'Span', (['(3)', '(3)'], {}), '(3, 3)\n', (1916, 1922), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((1938, 1948), 'flax.geometry.Span', 'Span', (['(3)', '(3)'], {}), '(3, 3)\n', (1942, 1948), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((1964, 1974), 'flax.geometry.Span', 'Span', (['(1)', '(3)'], {}), '(1, 3)\n', (1968, 1974), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2569, 2579), 'flax.geometry.Span', 'Span', (['(0)', '(4)'], {}), '(0, 4)\n', (2573, 2579), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2595, 2605), 'flax.geometry.Span', 'Span', (['(0)', '(0)'], {}), '(0, 0)\n', (2599, 2605), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2607, 2617), 'flax.geometry.Span', 'Span', (['(4)', '(4)'], {}), '(4, 4)\n', (2611, 2617), False, 'from flax.geometry import Blob, Point, Rectangle, 
Size, Span\n'), ((2632, 2642), 'flax.geometry.Span', 'Span', (['(0)', '(0)'], {}), '(0, 0)\n', (2636, 2642), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2644, 2654), 'flax.geometry.Span', 'Span', (['(4)', '(4)'], {}), '(4, 4)\n', (2648, 2654), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2669, 2679), 'flax.geometry.Span', 'Span', (['(0)', '(0)'], {}), '(0, 0)\n', (2673, 2679), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2681, 2691), 'flax.geometry.Span', 'Span', (['(4)', '(4)'], {}), '(4, 4)\n', (2685, 2691), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n'), ((2706, 2716), 'flax.geometry.Span', 'Span', (['(0)', '(4)'], {}), '(0, 4)\n', (2710, 2716), False, 'from flax.geometry import Blob, Point, Rectangle, Size, Span\n')] |
import os

# Write one listing file per dataset split; each line is "./<image name>".
# (Renamed `dir` -> `subdirs`: the original shadowed the `dir` builtin.)
subdirs = ['trainA', 'trainB', 'testA', 'testB']
for d in subdirs:
    img_names = os.listdir(d)
    # Use a context manager so every listing file is flushed and closed;
    # the original opened one handle per directory and never closed any.
    with open('list_' + d + '.txt', "w") as f:
        for img in img_names:
            f.write('./' + img + '\n')
# print(img_names) | [
"os.listdir"
] | [((87, 100), 'os.listdir', 'os.listdir', (['d'], {}), '(d)\n', (97, 100), False, 'import os\n')] |
#!/usr/bin/env python3
import logging
import numpy as np
import time
import torch
import cv2
logger = logging.getLogger(__name__)
def retry_load_images(image_paths, retry=10, backend="pytorch"):
    """
    This function is to load images with support of retrying for failed load.

    Args:
        image_paths (list): paths of images needed to be loaded.
        retry (int, optional): maximum time of loading retrying. Defaults to 10.
        backend (str): `pytorch` or `cv2`.

    Returns:
        imgs (list): list of loaded images.

    Raises:
        Exception: if any image still fails to load after all attempts.
    """
    for i in range(retry):
        imgs = [cv2.imread(image_path) for image_path in image_paths]

        if all(img is not None for img in imgs):
            if backend == "pytorch":
                imgs = torch.as_tensor(np.stack(imgs))
            return imgs

        # Logger.warn is a deprecated alias; Logger.warning is the real API.
        logger.warning("Reading failed. Will retry.")
        if i < retry - 1:
            # Back off before the next attempt, but do not sleep after the
            # final failure (the original slept once more before raising).
            time.sleep(1.0)

    raise Exception("Failed to load images {}".format(image_paths))
def get_sequence(center_idx, half_len, sample_rate, num_frames):
    """
    Sample frames among the corresponding clip.

    Args:
        center_idx (int): center frame idx for current clip
        half_len (int): half of the clip length
        sample_rate (int): sampling rate for sampling frames inside of the clip
        num_frames (int): number of expected sampled frames

    Returns:
        seq (list): list of indexes of sampled frames in this clip.
    """
    # Take every `sample_rate`-th index in the symmetric window around the
    # center, clamping out-of-range indexes to the first/last valid frame.
    last_frame = num_frames - 1
    window = range(center_idx - half_len, center_idx + half_len, sample_rate)
    return [min(max(idx, 0), last_frame) for idx in window]
def pack_pathway_output(cfg, frames):
    """
    Prepare output as a list of tensors, one tensor per pathway.

    Args:
        frames (tensor): frames of images sampled from the video. The
            dimension is `channel` x `num frames` x `height` x `width`.

    Returns:
        frame_list (list): list of tensors with the dimension of
            `channel` x `num frames` x `height` x `width`.
    """
    if cfg.MODEL.ARCH in cfg.MODEL.MULTI_PATHWAY_ARCH:
        # SlowFast-style models: the fast pathway keeps every frame and the
        # slow pathway subsamples the temporal axis by a factor of ALPHA.
        num_frames = frames.shape[1]
        slow_indices = torch.linspace(
            0, num_frames - 1, num_frames // cfg.SLOWFAST.ALPHA
        ).long()
        slow_pathway = torch.index_select(frames, 1, slow_indices)
        return [slow_pathway, frames]
    # Single-pathway models take the frames as-is.
    return [frames]
| [
"logging.getLogger",
"time.sleep",
"numpy.stack",
"cv2.imread",
"torch.linspace"
] | [((105, 132), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (122, 132), False, 'import logging\n'), ((595, 617), 'cv2.imread', 'cv2.imread', (['image_path'], {}), '(image_path)\n', (605, 617), False, 'import cv2\n'), ((896, 911), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (906, 911), False, 'import time\n'), ((775, 789), 'numpy.stack', 'np.stack', (['imgs'], {}), '(imgs)\n', (783, 789), True, 'import numpy as np\n'), ((2522, 2599), 'torch.linspace', 'torch.linspace', (['(0)', '(frames.shape[1] - 1)', '(frames.shape[1] // cfg.SLOWFAST.ALPHA)'], {}), '(0, frames.shape[1] - 1, frames.shape[1] // cfg.SLOWFAST.ALPHA)\n', (2536, 2599), False, 'import torch\n')] |
import config
from malleus.api.service.protos.bench_service_pb2 import BenchRequest
#from malleus.api.service.protos.bench_service_pb2 import BenchRequest.Datasource
from malleus.api.domain.timer import Timer
import grpc
from malleus.api.service.protos.bench_service_pb2 import BenchRequest
import malleus.api.service.protos.bench_service_pb2_grpc as bench_service_pb2_grpc
class CallService:
    """Thin gRPC client for the bench service of a given region.

    The original ``read`` accepted a ``datasource`` argument but silently
    ignored it (the loop variable shadowed it); it is now honored while the
    ``None`` default preserves the old benchmark-both behavior.
    """

    def __init__(self, region):
        # One insecure channel per client; host is resolved from config by region.
        channel = grpc.insecure_channel(config.host[region])
        self.stub = bench_service_pb2_grpc.BenchServiceStub(channel)

    def write(self, num, datasource = None):
        """Issue a write benchmark of ``num`` operations; returns the raw reply."""
        bench_request = BenchRequest()
        bench_request.num = num
        if datasource is not None:
            # Optional: target a specific datasource (previously ignored).
            bench_request.datasource = datasource
        return self.stub.write(bench_request)

    def read(self, num, datasource = None):
        """Run read benchmarks and print latency stats per datasource.

        If ``datasource`` is given, only that one is benchmarked; otherwise
        both Google Datastore and MongoDB are exercised (previous behavior).
        """
        if datasource is not None:
            datasources = [datasource]
        else:
            datasources = [BenchRequest.GDATASTORE, BenchRequest.MONGODB]
        for source in datasources:
            bench_request = BenchRequest()
            bench_request.datasource = source
            bench_request.num = num
            timings = self.stub.read(bench_request)
            timer = Timer(timings)
            self.print_stats(source, timer)

    def print_stats(self, datasource, timer):
        """Print total duration and latency percentiles for one datasource run."""
        print(datasource)
        print("Duration: " + str(timer.get_duration()))
        print("Average: " + str(timer.get_avg()))
        print("95pct:" + str(timer.get_95p()))
        print("99pct:" + str(timer.get_99p()))
| [
"malleus.api.service.protos.bench_service_pb2_grpc.BenchServiceStub",
"grpc.insecure_channel",
"malleus.api.domain.timer.Timer",
"malleus.api.service.protos.bench_service_pb2.BenchRequest"
] | [((446, 488), 'grpc.insecure_channel', 'grpc.insecure_channel', (['config.host[region]'], {}), '(config.host[region])\n', (467, 488), False, 'import grpc\n'), ((509, 557), 'malleus.api.service.protos.bench_service_pb2_grpc.BenchServiceStub', 'bench_service_pb2_grpc.BenchServiceStub', (['channel'], {}), '(channel)\n', (548, 557), True, 'import malleus.api.service.protos.bench_service_pb2_grpc as bench_service_pb2_grpc\n'), ((628, 642), 'malleus.api.service.protos.bench_service_pb2.BenchRequest', 'BenchRequest', ([], {}), '()\n', (640, 642), False, 'from malleus.api.service.protos.bench_service_pb2 import BenchRequest\n'), ((904, 918), 'malleus.api.service.protos.bench_service_pb2.BenchRequest', 'BenchRequest', ([], {}), '()\n', (916, 918), False, 'from malleus.api.service.protos.bench_service_pb2 import BenchRequest\n'), ((1077, 1091), 'malleus.api.domain.timer.Timer', 'Timer', (['timings'], {}), '(timings)\n', (1082, 1091), False, 'from malleus.api.domain.timer import Timer\n')] |
from resolwe.flow.models import Data
from resolwe.test import tag_process
from resolwe_bio.utils.filter import filter_vcf_variable
from resolwe_bio.utils.test import BioProcessTestCase
class CheMutWorkflowTestCase(BioProcessTestCase):
    @tag_process("workflow-chemut")
    def test_chemut_workflow(self):
        """Run the CheMut SNV workflow end to end and check its VCF output."""
        with self.preparation_stage():
            # Reference genome and its BWA index.
            ref_seq = self.run_process(
                "upload-fasta-nucl",
                {
                    "src": "chemut_genome.fasta.gz",
                    "species": "Dictyostelium discoideum",
                    "build": "dd-05-2009",
                },
            )
            bwa_index = self.run_process("bwa-index", {"ref_seq": ref_seq.id})

            # Paired-end reads for the parental (AX4) and mutant (CM) strains.
            parental_reads = self.run_process(
                "upload-fastq-paired",
                {"src1": ["AX4_mate1.fq.gz"], "src2": ["AX4_mate2.fq.gz"]},
            )
            mut_reads = self.run_process(
                "upload-fastq-paired",
                {"src1": ["CM_mate1.fq.gz"], "src2": ["CM_mate2.fq.gz"]},
            )

            # Align both read sets against the indexed genome.
            align_parental = self.run_process(
                "alignment-bwa-mem",
                {"genome": bwa_index.id, "reads": parental_reads.id},
            )
            align_mut = self.run_process(
                "alignment-bwa-mem",
                {"genome": bwa_index.id, "reads": mut_reads.id},
            )

        self.run_process(
            "workflow-chemut",
            {
                "analysis_type": "snv",
                "parental_strains": [align_parental.id],
                "mutant_strains": [align_mut.id],
                "genome": ref_seq.id,
                "Vc": {"stand_emit_conf": 15, "stand_call_conf": 35, "rf": True},
                "Vf": {"read_depth": 7},
            },
        )

        # Every data object produced along the way must finish successfully.
        for data in Data.objects.all():
            self.assertStatus(data, Data.STATUS_DONE)

        # The most recently created object is compared to the reference VCF.
        variants = Data.objects.last()
        self.assertFile(
            variants,
            "vcf",
            "chemut.vcf.gz",
            file_filter=filter_vcf_variable,
            compression="gzip",
        )
| [
"resolwe.flow.models.Data.objects.last",
"resolwe.flow.models.Data.objects.all",
"resolwe.test.tag_process"
] | [((243, 273), 'resolwe.test.tag_process', 'tag_process', (['"""workflow-chemut"""'], {}), "('workflow-chemut')\n", (254, 273), False, 'from resolwe.test import tag_process\n'), ((1703, 1721), 'resolwe.flow.models.Data.objects.all', 'Data.objects.all', ([], {}), '()\n', (1719, 1721), False, 'from resolwe.flow.models import Data\n'), ((1797, 1816), 'resolwe.flow.models.Data.objects.last', 'Data.objects.last', ([], {}), '()\n', (1814, 1816), False, 'from resolwe.flow.models import Data\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import docker
import socket

# Connect to the local Docker daemon via the standard environment settings.
docker_client = docker.from_env()
# Look this script's own container up by hostname -- assumes it runs inside
# a container whose hostname is the container id (the default); verify in
# deployments that override the hostname.
myself = docker_client.containers.get(socket.gethostname())
myself.pause() | [
"docker.from_env",
"socket.gethostname"
] | [((92, 109), 'docker.from_env', 'docker.from_env', ([], {}), '()\n', (107, 109), False, 'import docker\n'), ((149, 169), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (167, 169), False, 'import socket\n')] |
# An example for how to lock specific folders using the lockRegExp param
import studiolibrary
if __name__ == "__main__":
# Use the studiolibrary.app context for creating a QApplication instance
with studiolibrary.app():
# Lock all folders that contain the words "icon" & "Pixar" in the path
lockRegExp = "icon|Pixar"
studiolibrary.main(name="Example3", path="data", lockRegExp=lockRegExp)
| [
"studiolibrary.app",
"studiolibrary.main"
] | [((211, 230), 'studiolibrary.app', 'studiolibrary.app', ([], {}), '()\n', (228, 230), False, 'import studiolibrary\n'), ((355, 426), 'studiolibrary.main', 'studiolibrary.main', ([], {'name': '"""Example3"""', 'path': '"""data"""', 'lockRegExp': 'lockRegExp'}), "(name='Example3', path='data', lockRegExp=lockRegExp)\n", (373, 426), False, 'import studiolibrary\n')] |
# GUI Development using Tkinter
import tkinter as tk
app = tk.Tk()
app.geometry('340x310')
app.title("Calculator")
entry = tk.Entry(app,text='0',font=('arial',20,'normal'))
entry.place(x=20,y=15)
def number(n):
if n=="C":
entry.delete(0,'end')
elif n=="ans":
k2 = entry.get()
entry.delete(0,'end')
entry.insert('end',eval(k2))
elif n=='del':
entry.delete(len(entry.get())-1)
else:
entry.insert('end',n)
one_button = tk.Button(app,text = "7",font=('arial',10,'bold'),command = lambda:number(7),width = 6).place(x=30,y=70)
one_button = tk.Button(app,text = "8",font=('arial',10,'bold'),command = lambda:number(8),width = 6).place(x=100,y=70)
one_button = tk.Button(app,text = "9",font=('arial',10,'bold'),command = lambda:number(9),width = 6).place(x=170,y=70)
one_button = tk.Button(app,text = "+",font=('arial',10,'bold'),command = lambda:number('+'),width = 6).place(x=250,y=70)
one_button = tk.Button(app,text = "4",font=('arial',10,'bold'),command = lambda:number(4),width = 6).place(x=30,y=120)
one_button = tk.Button(app,text = "5",font=('arial',10,'bold'),command = lambda:number(5),width = 6).place(x=100,y=120)
one_button = tk.Button(app,text = "6",font=('arial',10,'bold'),command = lambda:number(6),width = 6).place(x=170,y=120)
one_button = tk.Button(app,text = "-",font=('arial',10,'bold'),command = lambda:number('-'),width = 6).place(x=250,y=120)
one_button = tk.Button(app,text = "1",font=('arial',10,'bold'),command = lambda:number(1),width = 6).place(x=30,y=170)
one_button = tk.Button(app,text = "2",font=('arial',10,'bold'),command = lambda:number(2),width = 6).place(x=100,y=170)
one_button = tk.Button(app,text = "3",font=('arial',10,'bold'),command = lambda:number(3),width = 6).place(x=170,y=170)
one_button = tk.Button(app,text = "*",font=('arial',10,'bold'),command = lambda:number('*'),width = 6).place(x=250,y=170)
one_button = tk.Button(app,text = "C",font=('arial',10,'bold'),command = lambda:number("C"),width = 6).place(x=30,y=220)
one_button = tk.Button(app,text = "0",font=('arial',10,'bold'),command = lambda:number(0),width = 6).place(x=100,y=220)
one_button = tk.Button(app,text = "=",font=('arial',10,'bold'),command = lambda:number('ans'),width = 6).place(x=170,y=220)
one_button = tk.Button(app,text = "/",font=('arial',10,'bold'),command = lambda:number('/'),width = 6).place(x=250,y=220)
one_button = tk.Button(app,text = ".",font=('arial',10,'bold'),command = lambda:number('.'),width = 6).place(x=30,y=270)
one_button = tk.Button(app,text = "00",font=('arial',10,'bold'),command = lambda:number("00"),width = 6).place(x=100,y=270)
one_button = tk.Button(app,text = "del",font=('arial',10,'bold'),command = lambda:number('del'),width = 6).place(x=170,y=270)
app.mainloop()
| [
"tkinter.Tk",
"tkinter.Entry"
] | [((63, 70), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (68, 70), True, 'import tkinter as tk\n'), ((134, 187), 'tkinter.Entry', 'tk.Entry', (['app'], {'text': '"""0"""', 'font': "('arial', 20, 'normal')"}), "(app, text='0', font=('arial', 20, 'normal'))\n", (142, 187), True, 'import tkinter as tk\n')] |
# Generated by Django 2.2.4 on 2019-08-21 20:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0055_pycron'),
]
operations = [
migrations.AlterField(
model_name='pycron',
name='model_name',
field=models.CharField(max_length=40, verbose_name='Modelo'),
),
migrations.AlterField(
model_name='pycron',
name='number_call',
field=models.IntegerField(default=-1, verbose_name='Número de llamadas'),
),
]
| [
"django.db.models.CharField",
"django.db.models.IntegerField"
] | [((325, 379), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'verbose_name': '"""Modelo"""'}), "(max_length=40, verbose_name='Modelo')\n", (341, 379), False, 'from django.db import migrations, models\n'), ((506, 572), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(-1)', 'verbose_name': '"""Número de llamadas"""'}), "(default=-1, verbose_name='Número de llamadas')\n", (525, 572), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/env python
import argparse
import logging
from tornado import gen, ioloop
from kiel.clients import GroupedConsumer
log = logging.getLogger()
parser = argparse.ArgumentParser(
description="Example grouped consumer that prints out messages it gets."
)
parser.add_argument(
"brokers", type=lambda v: v.split(","),
help="Comma-separated list of bootstrap broker servers"
)
parser.add_argument(
"zk_hosts", type=lambda v: v.split(","),
help="Comma-separated list of zookeeper servers."
)
parser.add_argument(
"topic", type=str,
help="Topic to publish to"
)
parser.add_argument(
"--debug", type=bool, default=False,
help="Sets the logging level to DEBUG"
)
def process_message(msg):
print(msg)
@gen.coroutine
def run(c, args):
yield c.connect()
while True:
msgs = yield c.consume(args.topic)
for msg in msgs:
process_message(msg)
if msgs:
c.commit_offsets()
if __name__ == "__main__":
args = parser.parse_args()
loop = ioloop.IOLoop.instance()
if args.debug:
log.setLevel(logging.DEBUG)
c = GroupedConsumer(
brokers=args.brokers,
group="worker-group",
zk_hosts=args.zk_hosts,
autocommit=False
)
loop.add_callback(run, c, args)
try:
loop.start()
except KeyboardInterrupt:
c.close().add_done_callback(lambda f: loop.stop())
| [
"logging.getLogger",
"tornado.ioloop.IOLoop.instance",
"argparse.ArgumentParser",
"kiel.clients.GroupedConsumer"
] | [((137, 156), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (154, 156), False, 'import logging\n'), ((168, 270), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Example grouped consumer that prints out messages it gets."""'}), "(description=\n 'Example grouped consumer that prints out messages it gets.')\n", (191, 270), False, 'import argparse\n'), ((1044, 1068), 'tornado.ioloop.IOLoop.instance', 'ioloop.IOLoop.instance', ([], {}), '()\n', (1066, 1068), False, 'from tornado import gen, ioloop\n'), ((1134, 1240), 'kiel.clients.GroupedConsumer', 'GroupedConsumer', ([], {'brokers': 'args.brokers', 'group': '"""worker-group"""', 'zk_hosts': 'args.zk_hosts', 'autocommit': '(False)'}), "(brokers=args.brokers, group='worker-group', zk_hosts=args.\n zk_hosts, autocommit=False)\n", (1149, 1240), False, 'from kiel.clients import GroupedConsumer\n')] |
import pandas as pd
from caf import source_config
from utils import write_csv_file
def _get_year(p):
"""Get year from string having the DD/MM/YEAR format."""
date_tokens = p['dtref'].split('/')
if len(date_tokens) == 3:
date_year = date_tokens[-1]
else:
date_year = None
return date_year
def _format_column_name(raw_column_name, year):
"""Formats a raw column name to remove: spaces, year and character with
accents.
Args:
raw_column_name (str): a column name
year: the year tu remove
Returns:
(str) The formatted column name.
"""
raw_column_name = ('_'.join(raw_column_name.lower().split()))
raw_column_name = raw_column_name.replace('_en_{}_'.format(year), '_')
formatted_column_name = raw_column_name.replace('é', 'e')
return formatted_column_name
def make_caf_foyers_bas_revenus(decoupage_geo=None):
"""
Collects and formats 'allocations foyers bas revenus' data for France by
commune (source INSEE). Reads the information location and outputs in the
`source_config.py` file.
"""
if decoupage_geo is None:
decoupage_geo = 'commune'
for year, file_paths in source_config.foyers_alloc_bas_revenus_files[
decoupage_geo].items():
raw_cols = []
raw_file = file_paths['raw']
for item in pd.read_csv(raw_file, sep=';',
encoding='ISO-8859-1').columns:
raw_cols.append(_format_column_name(item, year))
raw_data = pd.read_csv(raw_file, sep=';',
encoding='ISO-8859-1')
raw_data.columns = raw_cols
output_file = source_config.foyers_alloc_bas_revenus_files[
decoupage_geo][year]['processed']
if int(year) <= 2015:
raw_data['year'] = year
else:
raw_data['year'] = raw_data.apply(_get_year, axis=1)
raw_data = raw_data[raw_data.year == year]
raw_data.drop(['dtref'], axis=1, inplace=True)
write_csv_file(raw_data, output_file=output_file)
| [
"utils.write_csv_file",
"pandas.read_csv"
] | [((1537, 1590), 'pandas.read_csv', 'pd.read_csv', (['raw_file'], {'sep': '""";"""', 'encoding': '"""ISO-8859-1"""'}), "(raw_file, sep=';', encoding='ISO-8859-1')\n", (1548, 1590), True, 'import pandas as pd\n'), ((2039, 2088), 'utils.write_csv_file', 'write_csv_file', (['raw_data'], {'output_file': 'output_file'}), '(raw_data, output_file=output_file)\n', (2053, 2088), False, 'from utils import write_csv_file\n'), ((1362, 1415), 'pandas.read_csv', 'pd.read_csv', (['raw_file'], {'sep': '""";"""', 'encoding': '"""ISO-8859-1"""'}), "(raw_file, sep=';', encoding='ISO-8859-1')\n", (1373, 1415), True, 'import pandas as pd\n')] |
'''
Run constraint solver to complete spec.
'''
import json
import logging
import os
import subprocess
import tempfile
from typing import Dict, List, Tuple, Optional
import clyngor
from draco.spec import Query, Task
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
DRACO_LP = ['define.lp', 'generate.lp', 'hard.lp', 'soft.lp', 'weights.lp', 'assign_weights.lp', 'optimize.lp', 'output.lp']
DRACO_LP_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../asp'))
file_cache: Dict = {}
def load_file(path):
content = file_cache.get(path)
if content is not None:
return content
with open(path) as f:
content = f.read().encode('utf8')
file_cache[path] = content
return content
def run_draco(task: Task, constants: Dict[str, str] = None, files: List[str] = None, silence_warnings=False, debug=False) -> Tuple[str, str]:
'''
Run draco and return stderr and stdout
'''
# default args
files = files or DRACO_LP
constants = constants or {}
options = ['--outf=2', '--quiet=1,2,2']
if silence_warnings:
options.append('--warn=no-atom-undefined')
for name, value in constants.items():
options.append(f'-c {name}={value}')
cmd = ['clingo'] + options
logger.debug('Command: %s', ' '.join(cmd))
proc = subprocess.Popen(
args=cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
task_program = task.to_asp()
file_names = [os.path.join(DRACO_LP_DIR, f) for f in files]
asp_program = b'\n'.join(map(load_file, file_names)) + task_program.encode('utf8')
if debug:
with tempfile.NamedTemporaryFile(mode='w', delete=False) as fd:
fd.write(task_program)
logger.info('Debug ASP with "clingo %s %s"', ' '.join(file_names), fd.name)
stdout, stderr = proc.communicate(asp_program)
return (stderr, stdout)
def run(task: Task, constants: Dict[str, str] = None, files: List[str] = None, silence_warnings=False, debug=False, clear_cache=False) -> Optional[Task]:
''' Run clingo to compute a completion of a partial spec or violations. '''
# Clear file cache. useful during development in notebooks.
if clear_cache and file_cache:
logger.warning('Cleared file cache')
file_cache.clear()
stderr, stdout = run_draco(task, constants, files, silence_warnings, debug)
try:
json_result = json.loads(stdout)
except json.JSONDecodeError:
logger.error('stdout: %s', stdout)
logger.error('stderr: %s', stderr)
raise
if stderr:
logger.error(stderr)
result = json_result['Result']
if result == 'UNSATISFIABLE':
logger.info('Constraints are unsatisfiable.')
return None
elif result == 'OPTIMUM FOUND':
# get the last witness, which is the best result
answers = json_result['Call'][0]['Witnesses'][-1]
logger.debug(answers['Value'])
return Task.parse_from_answer(
clyngor.Answers(answers['Value']).sorted,
data=task.data,
cost=json_result['Models']['Costs'][0])
elif result == 'SATISFIABLE':
answers = json_result['Call'][0]['Witnesses'][-1]
assert json_result['Models']['Number'] == 1, 'Should not have more than one model if we don\'t optimize'
logger.debug(answers['Value'])
return Task.parse_from_answer(
clyngor.Answers(answers['Value']).sorted,
data=task.data)
else:
logger.error('Unsupported result: %s', result)
return None
| [
"logging.basicConfig",
"logging.getLogger",
"json.loads",
"subprocess.Popen",
"os.path.join",
"os.path.dirname",
"clyngor.Answers",
"tempfile.NamedTemporaryFile"
] | [((220, 259), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (239, 259), False, 'import logging\n'), ((269, 296), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (286, 296), False, 'import logging\n'), ((1347, 1448), 'subprocess.Popen', 'subprocess.Popen', ([], {'args': 'cmd', 'stdin': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(args=cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n', (1363, 1448), False, 'import subprocess\n'), ((467, 492), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (482, 492), False, 'import os\n'), ((1530, 1559), 'os.path.join', 'os.path.join', (['DRACO_LP_DIR', 'f'], {}), '(DRACO_LP_DIR, f)\n', (1542, 1559), False, 'import os\n'), ((2475, 2493), 'json.loads', 'json.loads', (['stdout'], {}), '(stdout)\n', (2485, 2493), False, 'import json\n'), ((1691, 1742), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'mode': '"""w"""', 'delete': '(False)'}), "(mode='w', delete=False)\n", (1718, 1742), False, 'import tempfile\n'), ((3060, 3093), 'clyngor.Answers', 'clyngor.Answers', (["answers['Value']"], {}), "(answers['Value'])\n", (3075, 3093), False, 'import clyngor\n'), ((3480, 3513), 'clyngor.Answers', 'clyngor.Answers', (["answers['Value']"], {}), "(answers['Value'])\n", (3495, 3513), False, 'import clyngor\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'diptera_track.ui'
#
# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(1140, 683)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setMinimumSize(QtCore.QSize(1124, 674))
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout_5 = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.tabs = QtWidgets.QTabWidget(self.centralwidget)
self.tabs.setObjectName("tabs")
self.ses_par_tab = QtWidgets.QWidget()
self.ses_par_tab.setObjectName("ses_par_tab")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.ses_par_tab)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.widget = QtWidgets.QWidget(self.ses_par_tab)
self.widget.setMinimumSize(QtCore.QSize(0, 551))
self.widget.setObjectName("widget")
self.folder_select_tree = QtWidgets.QTreeView(self.widget)
self.folder_select_tree.setGeometry(QtCore.QRect(9, 30, 571, 321))
self.folder_select_tree.setMinimumSize(QtCore.QSize(451, 0))
self.folder_select_tree.setObjectName("folder_select_tree")
self.label = QtWidgets.QLabel(self.widget)
self.label.setGeometry(QtCore.QRect(9, 9, 128, 16))
self.label.setObjectName("label")
self.label_3 = QtWidgets.QLabel(self.widget)
self.label_3.setGeometry(QtCore.QRect(600, 0, 124, 16))
self.label_3.setObjectName("label_3")
self.line = QtWidgets.QFrame(self.widget)
self.line.setGeometry(QtCore.QRect(590, 20, 511, 20))
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.label_2 = QtWidgets.QLabel(self.widget)
self.label_2.setGeometry(QtCore.QRect(600, 30, 91, 16))
self.label_2.setObjectName("label_2")
self.ses_folder_label = QtWidgets.QLabel(self.widget)
self.ses_folder_label.setGeometry(QtCore.QRect(620, 50, 471, 20))
self.ses_folder_label.setObjectName("ses_folder_label")
self.label_5 = QtWidgets.QLabel(self.widget)
self.label_5.setGeometry(QtCore.QRect(600, 90, 141, 16))
self.label_5.setObjectName("label_5")
self.bckg_folder_label = QtWidgets.QLabel(self.widget)
self.bckg_folder_label.setGeometry(QtCore.QRect(620, 110, 281, 20))
self.bckg_folder_label.setObjectName("bckg_folder_label")
self.line_2 = QtWidgets.QFrame(self.widget)
self.line_2.setGeometry(QtCore.QRect(580, 30, 20, 561))
self.line_2.setFrameShape(QtWidgets.QFrame.VLine)
self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.label_7 = QtWidgets.QLabel(self.widget)
self.label_7.setGeometry(QtCore.QRect(600, 130, 291, 16))
self.label_7.setObjectName("label_7")
self.cal_folder_label = QtWidgets.QLabel(self.widget)
self.cal_folder_label.setGeometry(QtCore.QRect(620, 150, 381, 20))
self.cal_folder_label.setObjectName("cal_folder_label")
self.label_9 = QtWidgets.QLabel(self.widget)
self.label_9.setGeometry(QtCore.QRect(600, 200, 291, 16))
self.label_9.setObjectName("label_9")
self.mov_folder1_label = QtWidgets.QLabel(self.widget)
self.mov_folder1_label.setGeometry(QtCore.QRect(620, 220, 371, 20))
self.mov_folder1_label.setObjectName("mov_folder1_label")
self.mov_folder2_label = QtWidgets.QLabel(self.widget)
self.mov_folder2_label.setGeometry(QtCore.QRect(620, 240, 371, 20))
self.mov_folder2_label.setObjectName("mov_folder2_label")
self.mov_folder3_label = QtWidgets.QLabel(self.widget)
self.mov_folder3_label.setGeometry(QtCore.QRect(620, 260, 371, 20))
self.mov_folder3_label.setObjectName("mov_folder3_label")
self.mov_folder4_label = QtWidgets.QLabel(self.widget)
self.mov_folder4_label.setGeometry(QtCore.QRect(620, 280, 371, 20))
self.mov_folder4_label.setObjectName("mov_folder4_label")
self.mov_folder5_label = QtWidgets.QLabel(self.widget)
self.mov_folder5_label.setGeometry(QtCore.QRect(620, 300, 371, 20))
self.mov_folder5_label.setObjectName("mov_folder5_label")
self.mov_folder6_label = QtWidgets.QLabel(self.widget)
self.mov_folder6_label.setGeometry(QtCore.QRect(620, 320, 371, 20))
self.mov_folder6_label.setObjectName("mov_folder6_label")
self.mov_folder7_label = QtWidgets.QLabel(self.widget)
self.mov_folder7_label.setGeometry(QtCore.QRect(620, 340, 371, 20))
self.mov_folder7_label.setObjectName("mov_folder7_label")
self.mov_folder8_label = QtWidgets.QLabel(self.widget)
self.mov_folder8_label.setGeometry(QtCore.QRect(620, 360, 371, 20))
self.mov_folder8_label.setObjectName("mov_folder8_label")
self.label_18 = QtWidgets.QLabel(self.widget)
self.label_18.setGeometry(QtCore.QRect(600, 390, 301, 20))
self.label_18.setObjectName("label_18")
self.cam_folder1_label = QtWidgets.QLabel(self.widget)
self.cam_folder1_label.setGeometry(QtCore.QRect(620, 410, 371, 20))
self.cam_folder1_label.setObjectName("cam_folder1_label")
self.cam_folder2_label = QtWidgets.QLabel(self.widget)
self.cam_folder2_label.setGeometry(QtCore.QRect(620, 430, 371, 20))
self.cam_folder2_label.setObjectName("cam_folder2_label")
self.cam_folder3_label = QtWidgets.QLabel(self.widget)
self.cam_folder3_label.setGeometry(QtCore.QRect(620, 450, 371, 20))
self.cam_folder3_label.setObjectName("cam_folder3_label")
self.cam_folder4_label = QtWidgets.QLabel(self.widget)
self.cam_folder4_label.setGeometry(QtCore.QRect(620, 470, 371, 20))
self.cam_folder4_label.setObjectName("cam_folder4_label")
self.ses_folder_rbtn = QtWidgets.QRadioButton(self.widget)
self.ses_folder_rbtn.setGeometry(QtCore.QRect(600, 50, 21, 21))
self.ses_folder_rbtn.setObjectName("ses_folder_rbtn")
self.bckg_folder_rbtn = QtWidgets.QRadioButton(self.widget)
self.bckg_folder_rbtn.setGeometry(QtCore.QRect(600, 110, 21, 21))
self.bckg_folder_rbtn.setObjectName("bckg_folder_rbtn")
self.cal_folder_rbtn = QtWidgets.QRadioButton(self.widget)
self.cal_folder_rbtn.setGeometry(QtCore.QRect(600, 150, 21, 21))
self.cal_folder_rbtn.setObjectName("cal_folder_rbtn")
self.mov_folder1_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder1_rbtn.setGeometry(QtCore.QRect(600, 220, 21, 21))
self.mov_folder1_rbtn.setObjectName("mov_folder1_rbtn")
self.mov_folder2_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder2_rbtn.setGeometry(QtCore.QRect(600, 240, 21, 21))
self.mov_folder2_rbtn.setObjectName("mov_folder2_rbtn")
self.mov_folder3_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder3_rbtn.setGeometry(QtCore.QRect(600, 260, 21, 21))
self.mov_folder3_rbtn.setObjectName("mov_folder3_rbtn")
self.mov_folder4_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder4_rbtn.setGeometry(QtCore.QRect(600, 280, 21, 21))
self.mov_folder4_rbtn.setObjectName("mov_folder4_rbtn")
self.mov_folder5_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder5_rbtn.setGeometry(QtCore.QRect(600, 300, 21, 21))
self.mov_folder5_rbtn.setObjectName("mov_folder5_rbtn")
self.mov_folder6_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder6_rbtn.setGeometry(QtCore.QRect(600, 320, 21, 21))
self.mov_folder6_rbtn.setObjectName("mov_folder6_rbtn")
self.mov_folder7_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder7_rbtn.setGeometry(QtCore.QRect(600, 340, 21, 21))
self.mov_folder7_rbtn.setObjectName("mov_folder7_rbtn")
self.mov_folder8_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder8_rbtn.setGeometry(QtCore.QRect(600, 360, 21, 21))
self.mov_folder8_rbtn.setObjectName("mov_folder8_rbtn")
self.cam_folder1_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder1_rbtn.setGeometry(QtCore.QRect(600, 410, 21, 21))
self.cam_folder1_rbtn.setObjectName("cam_folder1_rbtn")
self.cam_folder2_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder2_rbtn.setGeometry(QtCore.QRect(600, 430, 21, 21))
self.cam_folder2_rbtn.setObjectName("cam_folder2_rbtn")
self.cam_folder3_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder3_rbtn.setGeometry(QtCore.QRect(600, 450, 21, 21))
self.cam_folder3_rbtn.setObjectName("cam_folder3_rbtn")
self.cam_folder4_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder4_rbtn.setGeometry(QtCore.QRect(600, 470, 21, 21))
self.cam_folder4_rbtn.setObjectName("cam_folder4_rbtn")
self.cam_folder5_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder5_rbtn.setGeometry(QtCore.QRect(600, 490, 21, 21))
self.cam_folder5_rbtn.setObjectName("cam_folder5_rbtn")
self.cam_folder6_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder6_rbtn.setGeometry(QtCore.QRect(600, 510, 21, 21))
self.cam_folder6_rbtn.setObjectName("cam_folder6_rbtn")
self.cam_folder5_label = QtWidgets.QLabel(self.widget)
self.cam_folder5_label.setGeometry(QtCore.QRect(620, 490, 371, 20))
self.cam_folder5_label.setObjectName("cam_folder5_label")
self.cam_folder6_label = QtWidgets.QLabel(self.widget)
self.cam_folder6_label.setGeometry(QtCore.QRect(620, 510, 371, 20))
self.cam_folder6_label.setObjectName("cam_folder6_label")
self.label_25 = QtWidgets.QLabel(self.widget)
self.label_25.setGeometry(QtCore.QRect(600, 540, 201, 16))
self.label_25.setObjectName("label_25")
self.frame_name_rbtn = QtWidgets.QRadioButton(self.widget)
self.frame_name_rbtn.setGeometry(QtCore.QRect(600, 560, 21, 21))
self.frame_name_rbtn.setObjectName("frame_name_rbtn")
self.frame_name_label = QtWidgets.QLabel(self.widget)
self.frame_name_label.setGeometry(QtCore.QRect(620, 560, 391, 20))
self.frame_name_label.setObjectName("frame_name_label")
self.label_27 = QtWidgets.QLabel(self.widget)
self.label_27.setGeometry(QtCore.QRect(930, 80, 161, 20))
self.label_27.setObjectName("label_27")
self.bck_img_fmt_box = QtWidgets.QComboBox(self.widget)
self.bck_img_fmt_box.setGeometry(QtCore.QRect(1020, 100, 79, 23))
self.bck_img_fmt_box.setObjectName("bck_img_fmt_box")
self.label_28 = QtWidgets.QLabel(self.widget)
self.label_28.setGeometry(QtCore.QRect(930, 130, 161, 20))
self.label_28.setObjectName("label_28")
self.cal_img_fmt_box = QtWidgets.QComboBox(self.widget)
self.cal_img_fmt_box.setGeometry(QtCore.QRect(1020, 150, 79, 23))
self.cal_img_fmt_box.setObjectName("cal_img_fmt_box")
self.label_29 = QtWidgets.QLabel(self.widget)
self.label_29.setGeometry(QtCore.QRect(970, 540, 131, 20))
self.label_29.setObjectName("label_29")
self.frame_img_fmt_box = QtWidgets.QComboBox(self.widget)
self.frame_img_fmt_box.setGeometry(QtCore.QRect(1020, 560, 79, 23))
self.frame_img_fmt_box.setObjectName("frame_img_fmt_box")
self.line_4 = QtWidgets.QFrame(self.widget)
self.line_4.setGeometry(QtCore.QRect(10, 460, 571, 16))
self.line_4.setFrameShape(QtWidgets.QFrame.HLine)
self.line_4.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_4.setObjectName("line_4")
self.line_5 = QtWidgets.QFrame(self.widget)
self.line_5.setGeometry(QtCore.QRect(10, 580, 1091, 20))
self.line_5.setFrameShape(QtWidgets.QFrame.HLine)
self.line_5.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_5.setObjectName("line_5")
self.label_30 = QtWidgets.QLabel(self.widget)
self.label_30.setGeometry(QtCore.QRect(250, 470, 151, 16))
self.label_30.setObjectName("label_30")
self.start_frame_spin = QtWidgets.QSpinBox(self.widget)
self.start_frame_spin.setGeometry(QtCore.QRect(120, 490, 91, 24))
self.start_frame_spin.setObjectName("start_frame_spin")
self.label_31 = QtWidgets.QLabel(self.widget)
self.label_31.setGeometry(QtCore.QRect(10, 490, 101, 16))
self.label_31.setObjectName("label_31")
self.label_32 = QtWidgets.QLabel(self.widget)
self.label_32.setGeometry(QtCore.QRect(10, 520, 101, 16))
self.label_32.setObjectName("label_32")
self.trig_frame_spin = QtWidgets.QSpinBox(self.widget)
self.trig_frame_spin.setGeometry(QtCore.QRect(120, 520, 91, 24))
self.trig_frame_spin.setObjectName("trig_frame_spin")
self.label_33 = QtWidgets.QLabel(self.widget)
self.label_33.setGeometry(QtCore.QRect(10, 550, 101, 16))
self.label_33.setObjectName("label_33")
self.end_frame_spin = QtWidgets.QSpinBox(self.widget)
self.end_frame_spin.setGeometry(QtCore.QRect(120, 550, 91, 24))
self.end_frame_spin.setObjectName("end_frame_spin")
self.label_34 = QtWidgets.QLabel(self.widget)
self.label_34.setGeometry(QtCore.QRect(250, 490, 91, 16))
self.label_34.setObjectName("label_34")
self.trig_mode_box = QtWidgets.QComboBox(self.widget)
self.trig_mode_box.setGeometry(QtCore.QRect(350, 490, 111, 23))
self.trig_mode_box.setObjectName("trig_mode_box")
self.line_3 = QtWidgets.QFrame(self.widget)
self.line_3.setGeometry(QtCore.QRect(10, 350, 571, 16))
self.line_3.setFrameShape(QtWidgets.QFrame.HLine)
self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_3.setObjectName("line_3")
self.label_4 = QtWidgets.QLabel(self.widget)
self.label_4.setGeometry(QtCore.QRect(250, 360, 101, 16))
self.label_4.setObjectName("label_4")
self.mdl_loc_rbtn = QtWidgets.QRadioButton(self.widget)
self.mdl_loc_rbtn.setGeometry(QtCore.QRect(10, 400, 21, 21))
self.mdl_loc_rbtn.setObjectName("mdl_loc_rbtn")
self.mdl_loc_label = QtWidgets.QLabel(self.widget)
self.mdl_loc_label.setGeometry(QtCore.QRect(40, 400, 541, 21))
self.mdl_loc_label.setObjectName("mdl_loc_label")
self.label_10 = QtWidgets.QLabel(self.widget)
self.label_10.setGeometry(QtCore.QRect(10, 380, 171, 16))
self.label_10.setObjectName("label_10")
self.label_11 = QtWidgets.QLabel(self.widget)
self.label_11.setGeometry(QtCore.QRect(10, 420, 141, 16))
self.label_11.setObjectName("label_11")
self.mdl_name_rbtn = QtWidgets.QRadioButton(self.widget)
self.mdl_name_rbtn.setGeometry(QtCore.QRect(10, 440, 21, 21))
self.mdl_name_rbtn.setObjectName("mdl_name_rbtn")
self.mdl_name_label = QtWidgets.QLabel(self.widget)
self.mdl_name_label.setGeometry(QtCore.QRect(40, 440, 541, 21))
self.mdl_name_label.setObjectName("mdl_name_label")
self.label_6 = QtWidgets.QLabel(self.widget)
self.label_6.setGeometry(QtCore.QRect(600, 170, 101, 16))
self.label_6.setObjectName("label_6")
self.cal_file_label = QtWidgets.QLabel(self.widget)
self.cal_file_label.setGeometry(QtCore.QRect(710, 170, 291, 20))
self.cal_file_label.setObjectName("cal_file_label")
self.label_8 = QtWidgets.QLabel(self.widget)
self.label_8.setGeometry(QtCore.QRect(600, 70, 101, 16))
self.label_8.setObjectName("label_8")
self.ses_name_label = QtWidgets.QLabel(self.widget)
self.ses_name_label.setGeometry(QtCore.QRect(700, 70, 381, 20))
self.ses_name_label.setObjectName("ses_name_label")
self.reset_selection_push_btn = QtWidgets.QPushButton(self.widget)
self.reset_selection_push_btn.setGeometry(QtCore.QRect(470, 0, 101, 23))
self.reset_selection_push_btn.setObjectName("reset_selection_push_btn")
self.start_session_push_btn = QtWidgets.QPushButton(self.widget)
self.start_session_push_btn.setGeometry(QtCore.QRect(1010, 600, 85, 23))
self.start_session_push_btn.setObjectName("start_session_push_btn")
self.save_settings_push_btn = QtWidgets.QPushButton(self.widget)
self.save_settings_push_btn.setGeometry(QtCore.QRect(870, 600, 131, 23))
self.save_settings_push_btn.setObjectName("save_settings_push_btn")
self.load_settings_file_label = QtWidgets.QLabel(self.widget)
self.load_settings_file_label.setGeometry(QtCore.QRect(40, 600, 671, 21))
self.load_settings_file_label.setObjectName("load_settings_file_label")
self.load_settings_push_btn = QtWidgets.QPushButton(self.widget)
self.load_settings_push_btn.setGeometry(QtCore.QRect(720, 600, 141, 23))
self.load_settings_push_btn.setObjectName("load_settings_push_btn")
self.load_settings_rbtn = QtWidgets.QRadioButton(self.widget)
self.load_settings_rbtn.setGeometry(QtCore.QRect(10, 600, 21, 21))
self.load_settings_rbtn.setObjectName("load_settings_rbtn")
self.verticalLayout_4.addWidget(self.widget)
self.tabs.addTab(self.ses_par_tab, "")
self.focal_grid_tab = QtWidgets.QWidget()
self.focal_grid_tab.setObjectName("focal_grid_tab")
self.gridLayout_2 = QtWidgets.QGridLayout(self.focal_grid_tab)
self.gridLayout_2.setObjectName("gridLayout_2")
self.widget_2 = QtWidgets.QWidget(self.focal_grid_tab)
self.widget_2.setObjectName("widget_2")
self.gridLayout_7 = QtWidgets.QGridLayout(self.widget_2)
self.gridLayout_7.setObjectName("gridLayout_7")
self.line_9 = QtWidgets.QFrame(self.widget_2)
self.line_9.setFrameShape(QtWidgets.QFrame.HLine)
self.line_9.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_9.setObjectName("line_9")
self.gridLayout_7.addWidget(self.line_9, 0, 0, 2, 8)
self.label_16 = QtWidgets.QLabel(self.widget_2)
self.label_16.setObjectName("label_16")
self.gridLayout_7.addWidget(self.label_16, 1, 2, 2, 3)
self.line_6 = QtWidgets.QFrame(self.widget_2)
self.line_6.setFrameShape(QtWidgets.QFrame.HLine)
self.line_6.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_6.setObjectName("line_6")
self.gridLayout_7.addWidget(self.line_6, 2, 0, 1, 2)
self.label_12 = QtWidgets.QLabel(self.widget_2)
self.label_12.setObjectName("label_12")
self.gridLayout_7.addWidget(self.label_12, 3, 2, 1, 1)
self.nx_spin = QtWidgets.QSpinBox(self.widget_2)
self.nx_spin.setObjectName("nx_spin")
self.gridLayout_7.addWidget(self.nx_spin, 3, 3, 1, 1)
spacerItem = QtWidgets.QSpacerItem(928, 213, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_7.addItem(spacerItem, 3, 4, 7, 4)
self.label_13 = QtWidgets.QLabel(self.widget_2)
self.label_13.setObjectName("label_13")
self.gridLayout_7.addWidget(self.label_13, 4, 2, 1, 1)
self.ny_spin = QtWidgets.QSpinBox(self.widget_2)
self.ny_spin.setObjectName("ny_spin")
self.gridLayout_7.addWidget(self.ny_spin, 4, 3, 1, 1)
self.label_14 = QtWidgets.QLabel(self.widget_2)
self.label_14.setObjectName("label_14")
self.gridLayout_7.addWidget(self.label_14, 5, 2, 1, 1)
self.nz_spin = QtWidgets.QSpinBox(self.widget_2)
self.nz_spin.setObjectName("nz_spin")
self.gridLayout_7.addWidget(self.nz_spin, 5, 3, 1, 1)
self.label_15 = QtWidgets.QLabel(self.widget_2)
self.label_15.setObjectName("label_15")
self.gridLayout_7.addWidget(self.label_15, 6, 2, 1, 1)
self.ds_spin = QtWidgets.QDoubleSpinBox(self.widget_2)
self.ds_spin.setObjectName("ds_spin")
self.gridLayout_7.addWidget(self.ds_spin, 6, 3, 1, 1)
self.label_17 = QtWidgets.QLabel(self.widget_2)
self.label_17.setObjectName("label_17")
self.gridLayout_7.addWidget(self.label_17, 7, 2, 1, 1)
self.x0_spin = QtWidgets.QDoubleSpinBox(self.widget_2)
self.x0_spin.setObjectName("x0_spin")
self.gridLayout_7.addWidget(self.x0_spin, 7, 3, 1, 1)
self.label_19 = QtWidgets.QLabel(self.widget_2)
self.label_19.setObjectName("label_19")
self.gridLayout_7.addWidget(self.label_19, 8, 2, 1, 1)
self.y0_spin = QtWidgets.QDoubleSpinBox(self.widget_2)
self.y0_spin.setObjectName("y0_spin")
self.gridLayout_7.addWidget(self.y0_spin, 8, 3, 1, 1)
self.label_20 = QtWidgets.QLabel(self.widget_2)
self.label_20.setObjectName("label_20")
self.gridLayout_7.addWidget(self.label_20, 9, 2, 1, 1)
self.z0_spin = QtWidgets.QDoubleSpinBox(self.widget_2)
self.z0_spin.setObjectName("z0_spin")
self.gridLayout_7.addWidget(self.z0_spin, 9, 3, 1, 1)
self.line_7 = QtWidgets.QFrame(self.widget_2)
self.line_7.setFrameShape(QtWidgets.QFrame.HLine)
self.line_7.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_7.setObjectName("line_7")
self.gridLayout_7.addWidget(self.line_7, 10, 0, 1, 7)
spacerItem1 = QtWidgets.QSpacerItem(696, 48, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_7.addItem(spacerItem1, 10, 7, 2, 1)
self.calc_vox_btn = QtWidgets.QPushButton(self.widget_2)
self.calc_vox_btn.setObjectName("calc_vox_btn")
self.gridLayout_7.addWidget(self.calc_vox_btn, 11, 0, 1, 4)
self.vox_progress_bar = QtWidgets.QProgressBar(self.widget_2)
self.vox_progress_bar.setMinimumSize(QtCore.QSize(211, 0))
self.vox_progress_bar.setProperty("value", 24)
self.vox_progress_bar.setObjectName("vox_progress_bar")
self.gridLayout_7.addWidget(self.vox_progress_bar, 11, 5, 1, 2)
self.line_8 = QtWidgets.QFrame(self.widget_2)
self.line_8.setFrameShape(QtWidgets.QFrame.HLine)
self.line_8.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_8.setObjectName("line_8")
self.gridLayout_7.addWidget(self.line_8, 12, 0, 2, 8)
self.label_49 = QtWidgets.QLabel(self.widget_2)
self.label_49.setObjectName("label_49")
self.gridLayout_7.addWidget(self.label_49, 13, 1, 2, 6)
spacerItem2 = QtWidgets.QSpacerItem(804, 48, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_7.addItem(spacerItem2, 14, 6, 2, 2)
self.label_50 = QtWidgets.QLabel(self.widget_2)
self.label_50.setObjectName("label_50")
self.gridLayout_7.addWidget(self.label_50, 15, 1, 1, 3)
self.pixel_size_spin = QtWidgets.QDoubleSpinBox(self.widget_2)
self.pixel_size_spin.setObjectName("pixel_size_spin")
self.gridLayout_7.addWidget(self.pixel_size_spin, 15, 4, 1, 2)
spacerItem3 = QtWidgets.QSpacerItem(1079, 267, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_7.addItem(spacerItem3, 16, 0, 1, 8)
self.gridLayout_2.addWidget(self.widget_2, 0, 0, 1, 1)
self.tabs.addTab(self.focal_grid_tab, "")
self.model_scale_tab = QtWidgets.QWidget()
self.model_scale_tab.setObjectName("model_scale_tab")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.model_scale_tab)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.widget_3 = QtWidgets.QWidget(self.model_scale_tab)
self.widget_3.setObjectName("widget_3")
self.gridLayout_3 = QtWidgets.QGridLayout(self.widget_3)
self.gridLayout_3.setObjectName("gridLayout_3")
self.rawFrameView = ScaleModelWidget(self.widget_3)
self.rawFrameView.setMinimumSize(QtCore.QSize(1091, 511))
self.rawFrameView.setObjectName("rawFrameView")
self.gridLayout_3.addWidget(self.rawFrameView, 0, 0, 1, 1)
self.widget_4 = QtWidgets.QWidget(self.widget_3)
self.widget_4.setMinimumSize(QtCore.QSize(1091, 0))
self.widget_4.setMaximumSize(QtCore.QSize(16777215, 101))
self.widget_4.setObjectName("widget_4")
self.gridLayout = QtWidgets.QGridLayout(self.widget_4)
self.gridLayout.setObjectName("gridLayout")
self.label_22 = QtWidgets.QLabel(self.widget_4)
self.label_22.setObjectName("label_22")
self.gridLayout.addWidget(self.label_22, 0, 0, 1, 1)
self.scaleTable = QtWidgets.QTableWidget(self.widget_4)
self.scaleTable.setMinimumSize(QtCore.QSize(411, 81))
self.scaleTable.setObjectName("scaleTable")
self.scaleTable.setColumnCount(0)
self.scaleTable.setRowCount(0)
self.gridLayout.addWidget(self.scaleTable, 0, 1, 4, 1)
spacerItem4 = QtWidgets.QSpacerItem(248, 78, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem4, 0, 2, 4, 1)
self.raw_mov_spin = QtWidgets.QSpinBox(self.widget_4)
self.raw_mov_spin.setObjectName("raw_mov_spin")
self.gridLayout.addWidget(self.raw_mov_spin, 1, 0, 1, 1)
self.load_scale_btn = QtWidgets.QPushButton(self.widget_4)
self.load_scale_btn.setObjectName("load_scale_btn")
self.gridLayout.addWidget(self.load_scale_btn, 1, 3, 2, 1)
self.save_scale_btn = QtWidgets.QPushButton(self.widget_4)
self.save_scale_btn.setObjectName("save_scale_btn")
self.gridLayout.addWidget(self.save_scale_btn, 1, 4, 2, 1)
self.raw_frame_spin = QtWidgets.QSpinBox(self.widget_4)
self.raw_frame_spin.setObjectName("raw_frame_spin")
self.gridLayout.addWidget(self.raw_frame_spin, 3, 0, 1, 1)
self.set_model_btn = QtWidgets.QPushButton(self.widget_4)
self.set_model_btn.setObjectName("set_model_btn")
self.gridLayout.addWidget(self.set_model_btn, 1, 5, 2, 1)
self.label_21 = QtWidgets.QLabel(self.widget_4)
self.label_21.setObjectName("label_21")
self.gridLayout.addWidget(self.label_21, 2, 0, 1, 1)
self.gridLayout_3.addWidget(self.widget_4, 1, 0, 1, 1)
self.verticalLayout_2.addWidget(self.widget_3)
self.tabs.addTab(self.model_scale_tab, "")
self.model_view_tab = QtWidgets.QWidget()
self.model_view_tab.setObjectName("model_view_tab")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.model_view_tab)
self.horizontalLayout.setObjectName("horizontalLayout")
self.model_param_disp = QtWidgets.QWidget(self.model_view_tab)
self.model_param_disp.setObjectName("model_param_disp")
self.gridLayout_4 = QtWidgets.QGridLayout(self.model_param_disp)
self.gridLayout_4.setObjectName("gridLayout_4")
self.label_23 = QtWidgets.QLabel(self.model_param_disp)
self.label_23.setMinimumSize(QtCore.QSize(114, 621))
self.label_23.setObjectName("label_23")
self.gridLayout_4.addWidget(self.label_23, 0, 0, 1, 1)
self.horizontalLayout.addWidget(self.model_param_disp)
self.model_view_window = ModelViewWidget(self.model_view_tab)
self.model_view_window.setMinimumSize(QtCore.QSize(971, 631))
self.model_view_window.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.model_view_window.setFrameShadow(QtWidgets.QFrame.Raised)
self.model_view_window.setObjectName("model_view_window")
self.horizontalLayout.addWidget(self.model_view_window)
self.tabs.addTab(self.model_view_tab, "")
self.segment_tab = QtWidgets.QWidget()
self.segment_tab.setObjectName("segment_tab")
self.verticalLayout = QtWidgets.QVBoxLayout(self.segment_tab)
self.verticalLayout.setObjectName("verticalLayout")
self.seg_view = ImageSegmentWidget(self.segment_tab)
self.seg_view.setMinimumSize(QtCore.QSize(1101, 481))
self.seg_view.setObjectName("seg_view")
self.verticalLayout.addWidget(self.seg_view)
self.seg_widget = QtWidgets.QWidget(self.segment_tab)
self.seg_widget.setMinimumSize(QtCore.QSize(1122, 90))
self.seg_widget.setMaximumSize(QtCore.QSize(16777215, 141))
self.seg_widget.setObjectName("seg_widget")
self.gridLayout_5 = QtWidgets.QGridLayout(self.seg_widget)
self.gridLayout_5.setObjectName("gridLayout_5")
self.label_40 = QtWidgets.QLabel(self.seg_widget)
self.label_40.setObjectName("label_40")
self.gridLayout_5.addWidget(self.label_40, 0, 0, 1, 1)
self.label_24 = QtWidgets.QLabel(self.seg_widget)
self.label_24.setObjectName("label_24")
self.gridLayout_5.addWidget(self.label_24, 0, 1, 1, 1)
self.label_26 = QtWidgets.QLabel(self.seg_widget)
self.label_26.setObjectName("label_26")
self.gridLayout_5.addWidget(self.label_26, 0, 2, 1, 1)
self.label_35 = QtWidgets.QLabel(self.seg_widget)
self.label_35.setObjectName("label_35")
self.gridLayout_5.addWidget(self.label_35, 0, 3, 1, 1)
self.label_36 = QtWidgets.QLabel(self.seg_widget)
self.label_36.setObjectName("label_36")
self.gridLayout_5.addWidget(self.label_36, 0, 4, 1, 1)
self.label_37 = QtWidgets.QLabel(self.seg_widget)
self.label_37.setObjectName("label_37")
self.gridLayout_5.addWidget(self.label_37, 0, 5, 1, 1)
self.label_38 = QtWidgets.QLabel(self.seg_widget)
self.label_38.setObjectName("label_38")
self.gridLayout_5.addWidget(self.label_38, 0, 6, 1, 1)
self.label_39 = QtWidgets.QLabel(self.seg_widget)
self.label_39.setObjectName("label_39")
self.gridLayout_5.addWidget(self.label_39, 0, 7, 1, 1)
self.line_10 = QtWidgets.QFrame(self.seg_widget)
self.line_10.setFrameShape(QtWidgets.QFrame.VLine)
self.line_10.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_10.setObjectName("line_10")
self.gridLayout_5.addWidget(self.line_10, 0, 8, 4, 1)
self.label_43 = QtWidgets.QLabel(self.seg_widget)
self.label_43.setObjectName("label_43")
self.gridLayout_5.addWidget(self.label_43, 0, 9, 1, 2)
self.line_11 = QtWidgets.QFrame(self.seg_widget)
self.line_11.setFrameShape(QtWidgets.QFrame.VLine)
self.line_11.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_11.setObjectName("line_11")
self.gridLayout_5.addWidget(self.line_11, 0, 12, 4, 1)
spacerItem5 = QtWidgets.QSpacerItem(176, 110, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_5.addItem(spacerItem5, 0, 13, 4, 1)
spacerItem6 = QtWidgets.QSpacerItem(88, 81, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_5.addItem(spacerItem6, 0, 14, 3, 1)
self.seg_mov_spin = QtWidgets.QSpinBox(self.seg_widget)
self.seg_mov_spin.setObjectName("seg_mov_spin")
self.gridLayout_5.addWidget(self.seg_mov_spin, 1, 0, 1, 1)
self.seg_frame_spin = QtWidgets.QSpinBox(self.seg_widget)
self.seg_frame_spin.setObjectName("seg_frame_spin")
self.gridLayout_5.addWidget(self.seg_frame_spin, 1, 1, 1, 1)
self.body_thresh_spin = QtWidgets.QSpinBox(self.seg_widget)
self.body_thresh_spin.setObjectName("body_thresh_spin")
self.gridLayout_5.addWidget(self.body_thresh_spin, 1, 2, 1, 1)
self.wing_thresh_spin = QtWidgets.QSpinBox(self.seg_widget)
self.wing_thresh_spin.setObjectName("wing_thresh_spin")
self.gridLayout_5.addWidget(self.wing_thresh_spin, 1, 3, 1, 1)
self.sigma_spin = QtWidgets.QDoubleSpinBox(self.seg_widget)
self.sigma_spin.setObjectName("sigma_spin")
self.gridLayout_5.addWidget(self.sigma_spin, 1, 4, 1, 1)
self.K_spin = QtWidgets.QSpinBox(self.seg_widget)
self.K_spin.setObjectName("K_spin")
self.gridLayout_5.addWidget(self.K_spin, 1, 5, 1, 1)
self.min_body_spin = QtWidgets.QSpinBox(self.seg_widget)
self.min_body_spin.setObjectName("min_body_spin")
self.gridLayout_5.addWidget(self.min_body_spin, 1, 6, 1, 1)
self.min_wing_spin = QtWidgets.QSpinBox(self.seg_widget)
self.min_wing_spin.setObjectName("min_wing_spin")
self.gridLayout_5.addWidget(self.min_wing_spin, 1, 7, 1, 1)
self.label_44 = QtWidgets.QLabel(self.seg_widget)
self.label_44.setObjectName("label_44")
self.gridLayout_5.addWidget(self.label_44, 1, 9, 1, 1)
self.mask_cam_nr_spin = QtWidgets.QSpinBox(self.seg_widget)
self.mask_cam_nr_spin.setObjectName("mask_cam_nr_spin")
self.gridLayout_5.addWidget(self.mask_cam_nr_spin, 1, 10, 1, 2)
self.label_45 = QtWidgets.QLabel(self.seg_widget)
self.label_45.setObjectName("label_45")
self.gridLayout_5.addWidget(self.label_45, 2, 9, 1, 1)
self.mask_seg_nr_spin = QtWidgets.QSpinBox(self.seg_widget)
self.mask_seg_nr_spin.setObjectName("mask_seg_nr_spin")
self.gridLayout_5.addWidget(self.mask_seg_nr_spin, 2, 10, 1, 2)
self.seg_update_btn = QtWidgets.QPushButton(self.seg_widget)
self.seg_update_btn.setObjectName("seg_update_btn")
self.gridLayout_5.addWidget(self.seg_update_btn, 3, 0, 1, 2)
self.add_mask_btn = QtWidgets.QPushButton(self.seg_widget)
self.add_mask_btn.setObjectName("add_mask_btn")
self.gridLayout_5.addWidget(self.add_mask_btn, 3, 9, 1, 1)
self.reset_mask_btn = QtWidgets.QPushButton(self.seg_widget)
self.reset_mask_btn.setObjectName("reset_mask_btn")
self.gridLayout_5.addWidget(self.reset_mask_btn, 3, 11, 1, 1)
self.continue_btn = QtWidgets.QPushButton(self.seg_widget)
self.continue_btn.setObjectName("continue_btn")
self.gridLayout_5.addWidget(self.continue_btn, 3, 14, 1, 1)
self.verticalLayout.addWidget(self.seg_widget)
self.tabs.addTab(self.segment_tab, "")
self.pcl_view_tab = QtWidgets.QWidget()
self.pcl_view_tab.setObjectName("pcl_view_tab")
self.verticalLayout_6 = QtWidgets.QVBoxLayout(self.pcl_view_tab)
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.pcl_view = BBoxWidget(self.pcl_view_tab)
self.pcl_view.setMinimumSize(QtCore.QSize(1121, 521))
self.pcl_view.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.pcl_view.setFrameShadow(QtWidgets.QFrame.Raised)
self.pcl_view.setObjectName("pcl_view")
self.verticalLayout_6.addWidget(self.pcl_view)
self.widget_5 = QtWidgets.QWidget(self.pcl_view_tab)
self.widget_5.setMinimumSize(QtCore.QSize(1101, 111))
self.widget_5.setObjectName("widget_5")
self.gridLayout_8 = QtWidgets.QGridLayout(self.widget_5)
self.gridLayout_8.setObjectName("gridLayout_8")
self.label_41 = QtWidgets.QLabel(self.widget_5)
self.label_41.setObjectName("label_41")
self.gridLayout_8.addWidget(self.label_41, 0, 0, 1, 1)
self.flight_select_btn_group = QtWidgets.QGroupBox(self.widget_5)
self.flight_select_btn_group.setObjectName("flight_select_btn_group")
self.gridLayout_6 = QtWidgets.QGridLayout(self.flight_select_btn_group)
self.gridLayout_6.setObjectName("gridLayout_6")
self.tethered_radio_btn = QtWidgets.QRadioButton(self.flight_select_btn_group)
self.tethered_radio_btn.setObjectName("tethered_radio_btn")
self.gridLayout_6.addWidget(self.tethered_radio_btn, 0, 0, 1, 1)
self.free_radio_btn = QtWidgets.QRadioButton(self.flight_select_btn_group)
self.free_radio_btn.setObjectName("free_radio_btn")
self.gridLayout_6.addWidget(self.free_radio_btn, 1, 0, 1, 1)
self.gridLayout_8.addWidget(self.flight_select_btn_group, 0, 1, 4, 1)
self.label_47 = QtWidgets.QLabel(self.widget_5)
self.label_47.setObjectName("label_47")
self.gridLayout_8.addWidget(self.label_47, 0, 2, 1, 1)
self.label_51 = QtWidgets.QLabel(self.widget_5)
self.label_51.setObjectName("label_51")
self.gridLayout_8.addWidget(self.label_51, 0, 3, 1, 1)
spacerItem7 = QtWidgets.QSpacerItem(456, 90, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_8.addItem(spacerItem7, 0, 4, 4, 1)
self.view_select_group = QtWidgets.QGroupBox(self.widget_5)
self.view_select_group.setObjectName("view_select_group")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.view_select_group)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.pcl_view_btn = QtWidgets.QRadioButton(self.view_select_group)
self.pcl_view_btn.setObjectName("pcl_view_btn")
self.verticalLayout_3.addWidget(self.pcl_view_btn)
self.bbox_view_btn = QtWidgets.QRadioButton(self.view_select_group)
self.bbox_view_btn.setObjectName("bbox_view_btn")
self.verticalLayout_3.addWidget(self.bbox_view_btn)
self.model_view_btn = QtWidgets.QRadioButton(self.view_select_group)
self.model_view_btn.setObjectName("model_view_btn")
self.verticalLayout_3.addWidget(self.model_view_btn)
self.gridLayout_8.addWidget(self.view_select_group, 0, 5, 4, 1)
self.pcl_mov_spin = QtWidgets.QSpinBox(self.widget_5)
self.pcl_mov_spin.setObjectName("pcl_mov_spin")
self.gridLayout_8.addWidget(self.pcl_mov_spin, 1, 0, 1, 1)
self.stroke_bound_spin = QtWidgets.QSpinBox(self.widget_5)
self.stroke_bound_spin.setObjectName("stroke_bound_spin")
self.gridLayout_8.addWidget(self.stroke_bound_spin, 1, 2, 1, 1)
self.wing_pitch_bound_spin = QtWidgets.QSpinBox(self.widget_5)
self.wing_pitch_bound_spin.setObjectName("wing_pitch_bound_spin")
self.gridLayout_8.addWidget(self.wing_pitch_bound_spin, 1, 3, 1, 1)
self.label_42 = QtWidgets.QLabel(self.widget_5)
self.label_42.setObjectName("label_42")
self.gridLayout_8.addWidget(self.label_42, 2, 0, 1, 1)
self.label_48 = QtWidgets.QLabel(self.widget_5)
self.label_48.setObjectName("label_48")
self.gridLayout_8.addWidget(self.label_48, 2, 2, 1, 1)
self.label_46 = QtWidgets.QLabel(self.widget_5)
self.label_46.setObjectName("label_46")
self.gridLayout_8.addWidget(self.label_46, 2, 3, 1, 1)
self.pcl_frame_spin = QtWidgets.QSpinBox(self.widget_5)
self.pcl_frame_spin.setObjectName("pcl_frame_spin")
self.gridLayout_8.addWidget(self.pcl_frame_spin, 3, 0, 1, 1)
self.dev_bound_spin = QtWidgets.QSpinBox(self.widget_5)
self.dev_bound_spin.setObjectName("dev_bound_spin")
self.gridLayout_8.addWidget(self.dev_bound_spin, 3, 2, 1, 1)
self.sphere_radius_spin = QtWidgets.QDoubleSpinBox(self.widget_5)
self.sphere_radius_spin.setObjectName("sphere_radius_spin")
self.gridLayout_8.addWidget(self.sphere_radius_spin, 3, 3, 1, 1)
self.verticalLayout_6.addWidget(self.widget_5)
self.tabs.addTab(self.pcl_view_tab, "")
self.opt_tab = QtWidgets.QWidget()
self.opt_tab.setObjectName("opt_tab")
self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.opt_tab)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.opt_widget = QtWidgets.QWidget(self.opt_tab)
self.opt_widget.setObjectName("opt_widget")
self.verticalLayout_8 = QtWidgets.QVBoxLayout(self.opt_widget)
self.verticalLayout_8.setObjectName("verticalLayout_8")
self.contour_view = ContourViewWidget(self.opt_widget)
self.contour_view.setObjectName("contour_view")
self.verticalLayout_8.addWidget(self.contour_view)
self.opt_settings_widget = QtWidgets.QWidget(self.opt_widget)
self.opt_settings_widget.setMinimumSize(QtCore.QSize(0, 120))
self.opt_settings_widget.setObjectName("opt_settings_widget")
self.gridLayout_9 = QtWidgets.QGridLayout(self.opt_settings_widget)
self.gridLayout_9.setObjectName("gridLayout_9")
self.label_52 = QtWidgets.QLabel(self.opt_settings_widget)
self.label_52.setObjectName("label_52")
self.gridLayout_9.addWidget(self.label_52, 0, 0, 1, 1)
self.label_54 = QtWidgets.QLabel(self.opt_settings_widget)
self.label_54.setObjectName("label_54")
self.gridLayout_9.addWidget(self.label_54, 0, 1, 1, 1)
spacerItem8 = QtWidgets.QSpacerItem(849, 78, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_9.addItem(spacerItem8, 0, 2, 3, 1)
self.init_view_check = QtWidgets.QCheckBox(self.opt_settings_widget)
self.init_view_check.setObjectName("init_view_check")
self.gridLayout_9.addWidget(self.init_view_check, 0, 3, 1, 1)
self.opt_mov_spin = QtWidgets.QSpinBox(self.opt_settings_widget)
self.opt_mov_spin.setObjectName("opt_mov_spin")
self.gridLayout_9.addWidget(self.opt_mov_spin, 1, 0, 1, 1)
self.alpha_spin = QtWidgets.QDoubleSpinBox(self.opt_settings_widget)
self.alpha_spin.setObjectName("alpha_spin")
self.gridLayout_9.addWidget(self.alpha_spin, 1, 1, 1, 1)
self.dest_view_check = QtWidgets.QCheckBox(self.opt_settings_widget)
self.dest_view_check.setObjectName("dest_view_check")
self.gridLayout_9.addWidget(self.dest_view_check, 1, 3, 2, 1)
self.label_53 = QtWidgets.QLabel(self.opt_settings_widget)
self.label_53.setObjectName("label_53")
self.gridLayout_9.addWidget(self.label_53, 2, 0, 1, 1)
self.opt_frame_spin = QtWidgets.QSpinBox(self.opt_settings_widget)
self.opt_frame_spin.setObjectName("opt_frame_spin")
self.gridLayout_9.addWidget(self.opt_frame_spin, 3, 0, 1, 1)
self.src_view_check = QtWidgets.QCheckBox(self.opt_settings_widget)
self.src_view_check.setObjectName("src_view_check")
self.gridLayout_9.addWidget(self.src_view_check, 3, 3, 1, 1)
self.verticalLayout_8.addWidget(self.opt_settings_widget)
self.verticalLayout_7.addWidget(self.opt_widget)
self.tabs.addTab(self.opt_tab, "")
self.verticalLayout_5.addWidget(self.tabs)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
self.tabs.setCurrentIndex(6)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Assign all user-visible strings of the UI.

    Generated translation hook: every literal is routed through
    ``QtCore.QCoreApplication.translate`` with the "MainWindow" context so
    Qt's translation machinery can substitute localized text. Invoked from
    ``setupUi`` (see the ``self.retranslateUi(MainWindow)`` call there).

    :param MainWindow: the top-level ``QMainWindow`` whose title is set here.
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "DipteraTrack"))
    # --- "Movie selection" tab: session folder/file selection widgets ---
    self.label.setText(_translate("MainWindow", "Select session folder:"))
    self.label_3.setText(_translate("MainWindow", "Session parameters"))
    self.label_2.setText(_translate("MainWindow", "Session folder:"))
    # "..." labels are placeholders, presumably filled with paths at runtime
    # once the user makes a selection — TODO confirm against the controller code.
    self.ses_folder_label.setText(_translate("MainWindow", "..."))
    self.label_5.setText(_translate("MainWindow", "Background folder:"))
    self.bckg_folder_label.setText(_translate("MainWindow", "..."))
    self.label_7.setText(_translate("MainWindow", "Calibration folder:"))
    self.cal_folder_label.setText(_translate("MainWindow", "..."))
    self.label_9.setText(_translate("MainWindow", "Movie folders:"))
    self.mov_folder1_label.setText(_translate("MainWindow", "..."))
    self.mov_folder2_label.setText(_translate("MainWindow", "..."))
    self.mov_folder3_label.setText(_translate("MainWindow", "..."))
    self.mov_folder4_label.setText(_translate("MainWindow", "..."))
    self.mov_folder5_label.setText(_translate("MainWindow", "..."))
    self.mov_folder6_label.setText(_translate("MainWindow", "..."))
    self.mov_folder7_label.setText(_translate("MainWindow", "..."))
    self.mov_folder8_label.setText(_translate("MainWindow", "..."))
    self.label_18.setText(_translate("MainWindow", "Camera folders:"))
    self.cam_folder1_label.setText(_translate("MainWindow", "..."))
    self.cam_folder2_label.setText(_translate("MainWindow", "..."))
    self.cam_folder3_label.setText(_translate("MainWindow", "..."))
    self.cam_folder4_label.setText(_translate("MainWindow", "..."))
    # Radio buttons carry the default Designer text "RadioButton".
    self.ses_folder_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.bckg_folder_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cal_folder_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder1_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder2_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder3_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder4_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder5_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder6_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder7_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder8_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder1_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder2_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder3_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder4_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder5_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder6_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder5_label.setText(_translate("MainWindow", "..."))
    self.cam_folder6_label.setText(_translate("MainWindow", "..."))
    self.label_25.setText(_translate("MainWindow", "Frame name:"))
    self.frame_name_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.frame_name_label.setText(_translate("MainWindow", "..."))
    self.label_27.setText(_translate("MainWindow", "Background image format:"))
    self.label_28.setText(_translate("MainWindow", "Calibration image format:"))
    self.label_29.setText(_translate("MainWindow", "Frame image format:"))
    self.label_30.setText(_translate("MainWindow", "Trigger settings"))
    self.label_31.setText(_translate("MainWindow", "start frame nr:"))
    self.label_32.setText(_translate("MainWindow", "trigger frame nr:"))
    self.label_33.setText(_translate("MainWindow", "end frame nr:"))
    self.label_34.setText(_translate("MainWindow", "Trigger mode:"))
    self.label_4.setText(_translate("MainWindow", "Model settings"))
    self.mdl_loc_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mdl_loc_label.setText(_translate("MainWindow", "..."))
    self.label_10.setText(_translate("MainWindow", "Model location:"))
    self.label_11.setText(_translate("MainWindow", "Model name:"))
    self.mdl_name_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mdl_name_label.setText(_translate("MainWindow", "..."))
    self.label_6.setText(_translate("MainWindow", "Calibration file:"))
    self.cal_file_label.setText(_translate("MainWindow", "..."))
    self.label_8.setText(_translate("MainWindow", "Session name:"))
    self.ses_name_label.setText(_translate("MainWindow", "..."))
    self.reset_selection_push_btn.setText(_translate("MainWindow", "reset selection"))
    self.start_session_push_btn.setText(_translate("MainWindow", "start session"))
    self.save_settings_push_btn.setText(_translate("MainWindow", "save parameter file"))
    self.load_settings_file_label.setText(_translate("MainWindow", "..."))
    self.load_settings_push_btn.setText(_translate("MainWindow", "load parameter file"))
    self.load_settings_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.tabs.setTabText(self.tabs.indexOf(self.ses_par_tab), _translate("MainWindow", "Movie selection"))
    # --- "Voxel grid" tab: focal grid dimensions and camera parameters ---
    self.label_16.setText(_translate("MainWindow", "Voxel grid parameters:"))
    self.label_12.setText(_translate("MainWindow", "Nx:"))
    self.label_13.setText(_translate("MainWindow", "Ny:"))
    self.label_14.setText(_translate("MainWindow", "Nz:"))
    self.label_15.setText(_translate("MainWindow", "ds:"))
    self.label_17.setText(_translate("MainWindow", "x0:"))
    self.label_19.setText(_translate("MainWindow", "y0:"))
    self.label_20.setText(_translate("MainWindow", "z0:"))
    self.calc_vox_btn.setText(_translate("MainWindow", "calculate voxel grid"))
    self.label_49.setText(_translate("MainWindow", "Camera parameters:"))
    self.label_50.setText(_translate("MainWindow", "pixel size (mm):"))
    self.tabs.setTabText(self.tabs.indexOf(self.focal_grid_tab), _translate("MainWindow", "Voxel grid"))
    # --- "Scale model" tab ---
    self.label_22.setText(_translate("MainWindow", "Movie nr:"))
    self.load_scale_btn.setText(_translate("MainWindow", "load model scale"))
    self.save_scale_btn.setText(_translate("MainWindow", "save model scale"))
    self.set_model_btn.setText(_translate("MainWindow", "set model scale"))
    self.label_21.setText(_translate("MainWindow", "Frame:"))
    self.tabs.setTabText(self.tabs.indexOf(self.model_scale_tab), _translate("MainWindow", "Scale model"))
    # --- "Model view" tab ---
    self.label_23.setText(_translate("MainWindow", "Model parameters:"))
    self.tabs.setTabText(self.tabs.indexOf(self.model_view_tab), _translate("MainWindow", "Model view"))
    # --- "Segmentation" tab: thresholds, filtering and image-mask controls ---
    self.label_40.setText(_translate("MainWindow", "movie nr:"))
    self.label_24.setText(_translate("MainWindow", "frame:"))
    self.label_26.setText(_translate("MainWindow", "body threshold"))
    self.label_35.setText(_translate("MainWindow", "wing threshold"))
    self.label_36.setText(_translate("MainWindow", "sigma"))
    self.label_37.setText(_translate("MainWindow", "K"))
    self.label_38.setText(_translate("MainWindow", "min body area"))
    self.label_39.setText(_translate("MainWindow", "min wing area"))
    self.label_43.setText(_translate("MainWindow", "Set image mask:"))
    self.label_44.setText(_translate("MainWindow", "cam nr:"))
    self.label_45.setText(_translate("MainWindow", "segment nr:"))
    self.seg_update_btn.setText(_translate("MainWindow", "update"))
    self.add_mask_btn.setText(_translate("MainWindow", "add to mask"))
    self.reset_mask_btn.setText(_translate("MainWindow", "reset"))
    self.continue_btn.setText(_translate("MainWindow", "continue"))
    self.tabs.setTabText(self.tabs.indexOf(self.segment_tab), _translate("MainWindow", "Segmentation"))
    # --- "Pointcloud view" tab: flight mode, bounds and view selection ---
    self.label_41.setText(_translate("MainWindow", "movie nr:"))
    self.tethered_radio_btn.setText(_translate("MainWindow", "tethered flight"))
    self.free_radio_btn.setText(_translate("MainWindow", "free flight"))
    self.label_47.setText(_translate("MainWindow", "stroke angle bound:"))
    self.label_51.setText(_translate("MainWindow", "wing pitch angle bound:"))
    self.pcl_view_btn.setText(_translate("MainWindow", "pcl view"))
    self.bbox_view_btn.setText(_translate("MainWindow", "bbox view"))
    self.model_view_btn.setText(_translate("MainWindow", "model view"))
    self.label_42.setText(_translate("MainWindow", "frame nr:"))
    self.label_48.setText(_translate("MainWindow", "deviation angle bound:"))
    self.label_46.setText(_translate("MainWindow", "sphere radius:"))
    self.tabs.setTabText(self.tabs.indexOf(self.pcl_view_tab), _translate("MainWindow", "Pointcloud view"))
    # --- "Contour optimization" tab ---
    self.label_52.setText(_translate("MainWindow", "movie nr:"))
    self.label_54.setText(_translate("MainWindow", "alpha:"))
    self.init_view_check.setText(_translate("MainWindow", "initial state"))
    self.dest_view_check.setText(_translate("MainWindow", "destination contour"))
    self.label_53.setText(_translate("MainWindow", "frame nr:"))
    self.src_view_check.setText(_translate("MainWindow", "source contour"))
    self.tabs.setTabText(self.tabs.indexOf(self.opt_tab), _translate("MainWindow", "Contour optimization"))
from BoundingBoxWidget import BBoxWidget
from ContourViewWidget import ContourViewWidget
from ImageSegmentWidget import ImageSegmentWidget
from ModelViewWidget import ModelViewWidget
from ScaleModelWidget import ScaleModelWidget
if __name__ == "__main__":
    # Standalone preview entry point: build the generated UI inside a bare
    # main window and run the Qt event loop until the window is closed.
    import sys

    qt_app = QtWidgets.QApplication(sys.argv)
    main_window = QtWidgets.QMainWindow()
    window_ui = Ui_MainWindow()
    window_ui.setupUi(main_window)
    main_window.show()
    # exec_() blocks until quit; its return code is forwarded to the shell.
    sys.exit(qt_app.exec_())
| [
"PyQt5.QtWidgets.QSpinBox",
"BoundingBoxWidget.BBoxWidget",
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QVBoxLayout",
"ScaleModelWidget.ScaleModelWidget",
"PyQt5.QtWidgets.QTableWidget",
"PyQt5.QtWidgets.QComboBox",
"ImageSegmentWidget.ImageSegmentWidget",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtW... | [((52911, 52943), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (52933, 52943), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((52961, 52984), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (52982, 52984), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((422, 451), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (439, 451), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((609, 650), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.centralwidget'], {}), '(self.centralwidget)\n', (630, 650), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((735, 775), 'PyQt5.QtWidgets.QTabWidget', 'QtWidgets.QTabWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (755, 775), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((843, 862), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', ([], {}), '()\n', (860, 862), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((949, 988), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.ses_par_tab'], {}), '(self.ses_par_tab)\n', (970, 988), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1075, 1110), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.ses_par_tab'], {}), '(self.ses_par_tab)\n', (1092, 1110), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1246, 1278), 'PyQt5.QtWidgets.QTreeView', 'QtWidgets.QTreeView', (['self.widget'], {}), '(self.widget)\n', (1265, 1278), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1512, 1541), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (1528, 1541), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1667, 1696), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (1683, 1696), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1827, 1856), 
'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.widget'], {}), '(self.widget)\n', (1843, 1856), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2096, 2125), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (2112, 2125), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2268, 2297), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (2284, 2297), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2459, 2488), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (2475, 2488), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2633, 2662), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (2649, 2662), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2827, 2856), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.widget'], {}), '(self.widget)\n', (2843, 2856), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3106, 3135), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (3122, 3135), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3280, 3309), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (3296, 3309), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3472, 3501), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (3488, 3501), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3647, 3676), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (3663, 3676), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3852, 3881), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (3868, 3881), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4057, 4086), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (4073, 
4086), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4262, 4291), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (4278, 4291), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4467, 4496), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (4483, 4496), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4672, 4701), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (4688, 4701), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4877, 4906), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (4893, 4906), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5082, 5111), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (5098, 5111), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5278, 5307), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (5294, 5307), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5456, 5485), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (5472, 5485), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5661, 5690), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (5677, 5690), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5866, 5895), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (5882, 5895), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6071, 6100), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (6087, 6100), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6274, 6309), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (6296, 6309), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6476, 6511), 
'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (6498, 6511), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6681, 6716), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (6703, 6716), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6884, 6919), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (6906, 6919), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7090, 7125), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (7112, 7125), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7296, 7331), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (7318, 7331), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7502, 7537), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (7524, 7537), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7708, 7743), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (7730, 7743), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7914, 7949), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (7936, 7949), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8120, 8155), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (8142, 8155), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8326, 8361), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (8348, 8361), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8532, 8567), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (8554, 8567), False, 'from PyQt5 import QtCore, 
QtGui, QtWidgets\n'), ((8738, 8773), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (8760, 8773), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8944, 8979), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (8966, 8979), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9150, 9185), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (9172, 9185), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9356, 9391), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (9378, 9391), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9562, 9597), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (9584, 9597), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9769, 9798), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (9785, 9798), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9974, 10003), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (9990, 10003), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10170, 10199), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (10186, 10199), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10346, 10381), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (10368, 10381), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10549, 10578), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (10565, 10578), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10742, 10771), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (10758, 10771), False, 'from PyQt5 import QtCore, QtGui, 
QtWidgets\n'), ((10917, 10949), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.widget'], {}), '(self.widget)\n', (10936, 10949), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11110, 11139), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (11126, 11139), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11286, 11318), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.widget'], {}), '(self.widget)\n', (11305, 11318), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11479, 11508), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (11495, 11508), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11657, 11689), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.widget'], {}), '(self.widget)\n', (11676, 11689), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11854, 11883), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.widget'], {}), '(self.widget)\n', (11870, 11883), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12132, 12161), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.widget'], {}), '(self.widget)\n', (12148, 12161), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12413, 12442), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (12429, 12442), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12590, 12621), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget'], {}), '(self.widget)\n', (12608, 12621), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12784, 12813), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (12800, 12813), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12952, 12981), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (12968, 12981), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13127, 
13158), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget'], {}), '(self.widget)\n', (13145, 13158), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13318, 13347), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (13334, 13347), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13492, 13523), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget'], {}), '(self.widget)\n', (13510, 13523), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13680, 13709), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (13696, 13709), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13853, 13885), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.widget'], {}), '(self.widget)\n', (13872, 13885), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14038, 14067), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.widget'], {}), '(self.widget)\n', (14054, 14067), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14317, 14346), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (14333, 14346), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14487, 14522), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (14509, 14522), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14677, 14706), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (14693, 14706), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14860, 14889), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (14876, 14889), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15028, 15057), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (15044, 15057), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15201, 15236), 
'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (15223, 15236), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15395, 15424), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (15411, 15424), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15580, 15609), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (15596, 15609), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15752, 15781), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (15768, 15781), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15938, 15967), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (15954, 15967), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16109, 16138), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (16125, 16138), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16311, 16345), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.widget'], {}), '(self.widget)\n', (16332, 16345), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16545, 16579), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.widget'], {}), '(self.widget)\n', (16566, 16579), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16775, 16809), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.widget'], {}), '(self.widget)\n', (16796, 16809), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17007, 17036), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (17023, 17036), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17237, 17271), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.widget'], {}), '(self.widget)\n', (17258, 17271), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17463, 17498), 
'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.widget'], {}), '(self.widget)\n', (17485, 17498), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17772, 17791), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', ([], {}), '()\n', (17789, 17791), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17880, 17922), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.focal_grid_tab'], {}), '(self.focal_grid_tab)\n', (17901, 17922), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18003, 18041), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.focal_grid_tab'], {}), '(self.focal_grid_tab)\n', (18020, 18041), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18118, 18154), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.widget_2'], {}), '(self.widget_2)\n', (18139, 18154), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18233, 18264), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.widget_2'], {}), '(self.widget_2)\n', (18249, 18264), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18512, 18543), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_2'], {}), '(self.widget_2)\n', (18528, 18543), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18677, 18708), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.widget_2'], {}), '(self.widget_2)\n', (18693, 18708), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18956, 18987), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_2'], {}), '(self.widget_2)\n', (18972, 18987), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((19122, 19155), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget_2'], {}), '(self.widget_2)\n', (19140, 19155), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((19285, 19385), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(928)', '(213)', 'QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Expanding'], {}), '(928, 
213, QtWidgets.QSizePolicy.Minimum, QtWidgets.\n QSizePolicy.Expanding)\n', (19306, 19385), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((19463, 19494), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_2'], {}), '(self.widget_2)\n', (19479, 19494), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((19629, 19662), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget_2'], {}), '(self.widget_2)\n', (19647, 19662), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((19795, 19826), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_2'], {}), '(self.widget_2)\n', (19811, 19826), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((19961, 19994), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget_2'], {}), '(self.widget_2)\n', (19979, 19994), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20127, 20158), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_2'], {}), '(self.widget_2)\n', (20143, 20158), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20293, 20332), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['self.widget_2'], {}), '(self.widget_2)\n', (20317, 20332), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20465, 20496), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_2'], {}), '(self.widget_2)\n', (20481, 20496), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20631, 20670), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['self.widget_2'], {}), '(self.widget_2)\n', (20655, 20670), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20803, 20834), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_2'], {}), '(self.widget_2)\n', (20819, 20834), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20969, 21008), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['self.widget_2'], {}), '(self.widget_2)\n', (20993, 21008), False, 'from PyQt5 import QtCore, QtGui, 
QtWidgets\n'), ((21141, 21172), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_2'], {}), '(self.widget_2)\n', (21157, 21172), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((21307, 21346), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['self.widget_2'], {}), '(self.widget_2)\n', (21331, 21346), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((21477, 21508), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.widget_2'], {}), '(self.widget_2)\n', (21493, 21508), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((21755, 21854), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(696)', '(48)', 'QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Minimum'], {}), '(696, 48, QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Minimum)\n', (21776, 21854), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((21938, 21974), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.widget_2'], {}), '(self.widget_2)\n', (21959, 21974), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22131, 22168), 'PyQt5.QtWidgets.QProgressBar', 'QtWidgets.QProgressBar', (['self.widget_2'], {}), '(self.widget_2)\n', (22153, 22168), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22449, 22480), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.widget_2'], {}), '(self.widget_2)\n', (22465, 22480), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22729, 22760), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_2'], {}), '(self.widget_2)\n', (22745, 22760), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22895, 22994), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(804)', '(48)', 'QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Minimum'], {}), '(804, 48, QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Minimum)\n', (22916, 22994), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23074, 23105), 
'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_2'], {}), '(self.widget_2)\n', (23090, 23105), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23249, 23288), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['self.widget_2'], {}), '(self.widget_2)\n', (23273, 23288), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23444, 23545), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(1079)', '(267)', 'QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Expanding'], {}), '(1079, 267, QtWidgets.QSizePolicy.Minimum, QtWidgets.\n QSizePolicy.Expanding)\n', (23465, 23545), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23745, 23764), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', ([], {}), '()\n', (23762, 23764), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23859, 23902), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.model_scale_tab'], {}), '(self.model_scale_tab)\n', (23880, 23902), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23991, 24030), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.model_scale_tab'], {}), '(self.model_scale_tab)\n', (24008, 24030), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24107, 24143), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.widget_3'], {}), '(self.widget_3)\n', (24128, 24143), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24228, 24259), 'ScaleModelWidget.ScaleModelWidget', 'ScaleModelWidget', (['self.widget_3'], {}), '(self.widget_3)\n', (24244, 24259), False, 'from ScaleModelWidget import ScaleModelWidget\n'), ((24473, 24505), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.widget_3'], {}), '(self.widget_3)\n', (24490, 24505), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24706, 24742), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.widget_4'], {}), '(self.widget_4)\n', (24727, 24742), False, 'from PyQt5 import QtCore, QtGui, 
QtWidgets\n'), ((24819, 24850), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_4'], {}), '(self.widget_4)\n', (24835, 24850), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24986, 25023), 'PyQt5.QtWidgets.QTableWidget', 'QtWidgets.QTableWidget', (['self.widget_4'], {}), '(self.widget_4)\n', (25008, 25023), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25304, 25403), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(248)', '(78)', 'QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Minimum'], {}), '(248, 78, QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Minimum)\n', (25325, 25403), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25484, 25517), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget_4'], {}), '(self.widget_4)\n', (25502, 25517), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25669, 25705), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.widget_4'], {}), '(self.widget_4)\n', (25690, 25705), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25863, 25899), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.widget_4'], {}), '(self.widget_4)\n', (25884, 25899), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26057, 26090), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget_4'], {}), '(self.widget_4)\n', (26075, 26090), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26247, 26283), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.widget_4'], {}), '(self.widget_4)\n', (26268, 26283), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26432, 26463), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_4'], {}), '(self.widget_4)\n', (26448, 26463), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26772, 26791), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', ([], {}), '()\n', (26789, 26791), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26884, 
26926), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.model_view_tab'], {}), '(self.model_view_tab)\n', (26905, 26926), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27023, 27061), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.model_view_tab'], {}), '(self.model_view_tab)\n', (27040, 27061), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27154, 27198), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.model_param_disp'], {}), '(self.model_param_disp)\n', (27175, 27198), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27279, 27318), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.model_param_disp'], {}), '(self.model_param_disp)\n', (27295, 27318), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27587, 27623), 'ModelViewWidget.ModelViewWidget', 'ModelViewWidget', (['self.model_view_tab'], {}), '(self.model_view_tab)\n', (27602, 27623), False, 'from ModelViewWidget import ModelViewWidget\n'), ((28047, 28066), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', ([], {}), '()\n', (28064, 28066), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28151, 28190), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.segment_tab'], {}), '(self.segment_tab)\n', (28172, 28190), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28275, 28311), 'ImageSegmentWidget.ImageSegmentWidget', 'ImageSegmentWidget', (['self.segment_tab'], {}), '(self.segment_tab)\n', (28293, 28311), False, 'from ImageSegmentWidget import ImageSegmentWidget\n'), ((28501, 28536), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.segment_tab'], {}), '(self.segment_tab)\n', (28518, 28536), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28748, 28786), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.seg_widget'], {}), '(self.seg_widget)\n', (28769, 28786), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28867, 28900), 'PyQt5.QtWidgets.QLabel', 
'QtWidgets.QLabel', (['self.seg_widget'], {}), '(self.seg_widget)\n', (28883, 28900), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29036, 29069), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.seg_widget'], {}), '(self.seg_widget)\n', (29052, 29069), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29205, 29238), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.seg_widget'], {}), '(self.seg_widget)\n', (29221, 29238), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29374, 29407), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.seg_widget'], {}), '(self.seg_widget)\n', (29390, 29407), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29543, 29576), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.seg_widget'], {}), '(self.seg_widget)\n', (29559, 29576), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29712, 29745), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.seg_widget'], {}), '(self.seg_widget)\n', (29728, 29745), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29881, 29914), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.seg_widget'], {}), '(self.seg_widget)\n', (29897, 29914), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30050, 30083), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.seg_widget'], {}), '(self.seg_widget)\n', (30066, 30083), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30218, 30251), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.seg_widget'], {}), '(self.seg_widget)\n', (30234, 30251), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30504, 30537), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.seg_widget'], {}), '(self.seg_widget)\n', (30520, 30537), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30672, 30705), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.seg_widget'], {}), '(self.seg_widget)\n', (30688, 30705), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), 
((30957, 31057), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(176)', '(110)', 'QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Minimum'], {}), '(176, 110, QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Minimum)\n', (30978, 31057), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31135, 31233), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(88)', '(81)', 'QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Expanding'], {}), '(88, 81, QtWidgets.QSizePolicy.Minimum, QtWidgets.\n QSizePolicy.Expanding)\n', (31156, 31233), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31317, 31352), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.seg_widget'], {}), '(self.seg_widget)\n', (31335, 31352), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31506, 31541), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.seg_widget'], {}), '(self.seg_widget)\n', (31524, 31541), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31703, 31738), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.seg_widget'], {}), '(self.seg_widget)\n', (31721, 31738), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31906, 31941), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.seg_widget'], {}), '(self.seg_widget)\n', (31924, 31941), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32103, 32144), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['self.seg_widget'], {}), '(self.seg_widget)\n', (32127, 32144), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32284, 32319), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.seg_widget'], {}), '(self.seg_widget)\n', (32302, 32319), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32454, 32489), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.seg_widget'], {}), '(self.seg_widget)\n', (32472, 32489), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32645, 32680), 
'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.seg_widget'], {}), '(self.seg_widget)\n', (32663, 32680), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32831, 32864), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.seg_widget'], {}), '(self.seg_widget)\n', (32847, 32864), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33008, 33043), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.seg_widget'], {}), '(self.seg_widget)\n', (33026, 33043), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33204, 33237), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.seg_widget'], {}), '(self.seg_widget)\n', (33220, 33237), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33381, 33416), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.seg_widget'], {}), '(self.seg_widget)\n', (33399, 33416), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33583, 33621), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.seg_widget'], {}), '(self.seg_widget)\n', (33604, 33621), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33779, 33817), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.seg_widget'], {}), '(self.seg_widget)\n', (33800, 33817), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33971, 34009), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.seg_widget'], {}), '(self.seg_widget)\n', (33992, 34009), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34168, 34206), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.seg_widget'], {}), '(self.seg_widget)\n', (34189, 34206), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34461, 34480), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', ([], {}), '()\n', (34478, 34480), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34569, 34609), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.pcl_view_tab'], {}), '(self.pcl_view_tab)\n', (34590, 34609), 
False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34698, 34727), 'BoundingBoxWidget.BBoxWidget', 'BBoxWidget', (['self.pcl_view_tab'], {}), '(self.pcl_view_tab)\n', (34708, 34727), False, 'from BoundingBoxWidget import BBoxWidget\n'), ((35045, 35081), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.pcl_view_tab'], {}), '(self.pcl_view_tab)\n', (35062, 35081), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35220, 35256), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.widget_5'], {}), '(self.widget_5)\n', (35241, 35256), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35337, 35368), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_5'], {}), '(self.widget_5)\n', (35353, 35368), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35519, 35553), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.widget_5'], {}), '(self.widget_5)\n', (35538, 35553), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35660, 35711), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.flight_select_btn_group'], {}), '(self.flight_select_btn_group)\n', (35681, 35711), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35802, 35854), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.flight_select_btn_group'], {}), '(self.flight_select_btn_group)\n', (35824, 35854), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36026, 36078), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.flight_select_btn_group'], {}), '(self.flight_select_btn_group)\n', (36048, 36078), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36310, 36341), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_5'], {}), '(self.widget_5)\n', (36326, 36341), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36477, 36508), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_5'], {}), '(self.widget_5)\n', (36493, 36508), False, 'from PyQt5 import QtCore, 
QtGui, QtWidgets\n'), ((36642, 36741), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(456)', '(90)', 'QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Minimum'], {}), '(456, 90, QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Minimum)\n', (36663, 36741), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36829, 36863), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.widget_5'], {}), '(self.widget_5)\n', (36848, 36863), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36962, 37007), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.view_select_group'], {}), '(self.view_select_group)\n', (36983, 37007), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37100, 37146), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.view_select_group'], {}), '(self.view_select_group)\n', (37122, 37146), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37291, 37337), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.view_select_group'], {}), '(self.view_select_group)\n', (37313, 37337), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37486, 37532), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.view_select_group'], {}), '(self.view_select_group)\n', (37508, 37532), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37754, 37787), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget_5'], {}), '(self.widget_5)\n', (37772, 37787), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37944, 37977), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget_5'], {}), '(self.widget_5)\n', (37962, 37977), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38153, 38186), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget_5'], {}), '(self.widget_5)\n', (38171, 38186), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38361, 38392), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', 
(['self.widget_5'], {}), '(self.widget_5)\n', (38377, 38392), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38528, 38559), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_5'], {}), '(self.widget_5)\n', (38544, 38559), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38695, 38726), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget_5'], {}), '(self.widget_5)\n', (38711, 38726), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38868, 38901), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget_5'], {}), '(self.widget_5)\n', (38886, 38901), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39061, 39094), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.widget_5'], {}), '(self.widget_5)\n', (39079, 39094), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39258, 39297), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['self.widget_5'], {}), '(self.widget_5)\n', (39282, 39297), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39565, 39584), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', ([], {}), '()\n', (39582, 39584), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39663, 39698), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.opt_tab'], {}), '(self.opt_tab)\n', (39684, 39698), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39789, 39820), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.opt_tab'], {}), '(self.opt_tab)\n', (39806, 39820), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39905, 39943), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.opt_widget'], {}), '(self.opt_widget)\n', (39926, 39943), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40036, 40070), 'ContourViewWidget.ContourViewWidget', 'ContourViewWidget', (['self.opt_widget'], {}), '(self.opt_widget)\n', (40053, 40070), False, 'from ContourViewWidget import ContourViewWidget\n'), ((40221, 40255), 
'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.opt_widget'], {}), '(self.opt_widget)\n', (40238, 40255), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40424, 40471), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.opt_settings_widget'], {}), '(self.opt_settings_widget)\n', (40445, 40471), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40552, 40594), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.opt_settings_widget'], {}), '(self.opt_settings_widget)\n', (40568, 40594), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40730, 40772), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.opt_settings_widget'], {}), '(self.opt_settings_widget)\n', (40746, 40772), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40906, 41005), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(849)', '(78)', 'QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Minimum'], {}), '(849, 78, QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Minimum)\n', (40927, 41005), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((41091, 41136), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.opt_settings_widget'], {}), '(self.opt_settings_widget)\n', (41110, 41136), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((41297, 41341), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.opt_settings_widget'], {}), '(self.opt_settings_widget)\n', (41315, 41341), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((41491, 41541), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['self.opt_settings_widget'], {}), '(self.opt_settings_widget)\n', (41515, 41541), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((41690, 41735), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.opt_settings_widget'], {}), '(self.opt_settings_widget)\n', (41709, 41735), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((41892, 41934), 'PyQt5.QtWidgets.QLabel', 
'QtWidgets.QLabel', (['self.opt_settings_widget'], {}), '(self.opt_settings_widget)\n', (41908, 41934), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((42076, 42120), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.opt_settings_widget'], {}), '(self.opt_settings_widget)\n', (42094, 42120), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((42280, 42325), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.opt_settings_widget'], {}), '(self.opt_settings_widget)\n', (42299, 42325), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((42813, 42862), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (42850, 42862), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((494, 517), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(1124)', '(674)'], {}), '(1124, 674)\n', (506, 517), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1146, 1166), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(551)'], {}), '(0, 551)\n', (1158, 1166), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1323, 1352), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(9)', '(30)', '(571)', '(321)'], {}), '(9, 30, 571, 321)\n', (1335, 1352), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1401, 1421), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(451)', '(0)'], {}), '(451, 0)\n', (1413, 1421), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1573, 1600), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(9)', '(9)', '(128)', '(16)'], {}), '(9, 9, 128, 16)\n', (1585, 1600), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1730, 1759), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(0)', '(124)', '(16)'], {}), '(600, 0, 124, 16)\n', (1742, 1759), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1887, 1917), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(590)', '(20)', '(511)', '(20)'], {}), '(590, 20, 511, 20)\n', (1899, 1917), False, 'from PyQt5 import 
QtCore, QtGui, QtWidgets\n'), ((2159, 2188), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(30)', '(91)', '(16)'], {}), '(600, 30, 91, 16)\n', (2171, 2188), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2340, 2370), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(50)', '(471)', '(20)'], {}), '(620, 50, 471, 20)\n', (2352, 2370), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2522, 2552), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(90)', '(141)', '(16)'], {}), '(600, 90, 141, 16)\n', (2534, 2552), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2706, 2737), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(110)', '(281)', '(20)'], {}), '(620, 110, 281, 20)\n', (2718, 2737), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2889, 2919), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(580)', '(30)', '(20)', '(561)'], {}), '(580, 30, 20, 561)\n', (2901, 2919), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3169, 3200), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(130)', '(291)', '(16)'], {}), '(600, 130, 291, 16)\n', (3181, 3200), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3352, 3383), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(150)', '(381)', '(20)'], {}), '(620, 150, 381, 20)\n', (3364, 3383), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3535, 3566), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(200)', '(291)', '(16)'], {}), '(600, 200, 291, 16)\n', (3547, 3566), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3720, 3751), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(220)', '(371)', '(20)'], {}), '(620, 220, 371, 20)\n', (3732, 3751), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3925, 3956), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(240)', '(371)', '(20)'], {}), '(620, 240, 371, 20)\n', (3937, 3956), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4130, 4161), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', 
'(260)', '(371)', '(20)'], {}), '(620, 260, 371, 20)\n', (4142, 4161), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4335, 4366), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(280)', '(371)', '(20)'], {}), '(620, 280, 371, 20)\n', (4347, 4366), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4540, 4571), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(300)', '(371)', '(20)'], {}), '(620, 300, 371, 20)\n', (4552, 4571), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4745, 4776), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(320)', '(371)', '(20)'], {}), '(620, 320, 371, 20)\n', (4757, 4776), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4950, 4981), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(340)', '(371)', '(20)'], {}), '(620, 340, 371, 20)\n', (4962, 4981), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5155, 5186), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(360)', '(371)', '(20)'], {}), '(620, 360, 371, 20)\n', (5167, 5186), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5342, 5373), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(390)', '(301)', '(20)'], {}), '(600, 390, 301, 20)\n', (5354, 5373), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5529, 5560), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(410)', '(371)', '(20)'], {}), '(620, 410, 371, 20)\n', (5541, 5560), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5734, 5765), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(430)', '(371)', '(20)'], {}), '(620, 430, 371, 20)\n', (5746, 5765), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5939, 5970), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(450)', '(371)', '(20)'], {}), '(620, 450, 371, 20)\n', (5951, 5970), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6144, 6175), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(470)', '(371)', '(20)'], {}), '(620, 470, 371, 20)\n', (6156, 6175), False, 'from PyQt5 
import QtCore, QtGui, QtWidgets\n'), ((6351, 6380), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(50)', '(21)', '(21)'], {}), '(600, 50, 21, 21)\n', (6363, 6380), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6554, 6584), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(110)', '(21)', '(21)'], {}), '(600, 110, 21, 21)\n', (6566, 6584), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6758, 6788), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(150)', '(21)', '(21)'], {}), '(600, 150, 21, 21)\n', (6770, 6788), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6962, 6992), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(220)', '(21)', '(21)'], {}), '(600, 220, 21, 21)\n', (6974, 6992), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7168, 7198), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(240)', '(21)', '(21)'], {}), '(600, 240, 21, 21)\n', (7180, 7198), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7374, 7404), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(260)', '(21)', '(21)'], {}), '(600, 260, 21, 21)\n', (7386, 7404), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7580, 7610), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(280)', '(21)', '(21)'], {}), '(600, 280, 21, 21)\n', (7592, 7610), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7786, 7816), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(300)', '(21)', '(21)'], {}), '(600, 300, 21, 21)\n', (7798, 7816), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7992, 8022), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(320)', '(21)', '(21)'], {}), '(600, 320, 21, 21)\n', (8004, 8022), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8198, 8228), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(340)', '(21)', '(21)'], {}), '(600, 340, 21, 21)\n', (8210, 8228), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8404, 8434), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(360)', 
'(21)', '(21)'], {}), '(600, 360, 21, 21)\n', (8416, 8434), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8610, 8640), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(410)', '(21)', '(21)'], {}), '(600, 410, 21, 21)\n', (8622, 8640), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8816, 8846), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(430)', '(21)', '(21)'], {}), '(600, 430, 21, 21)\n', (8828, 8846), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9022, 9052), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(450)', '(21)', '(21)'], {}), '(600, 450, 21, 21)\n', (9034, 9052), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9228, 9258), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(470)', '(21)', '(21)'], {}), '(600, 470, 21, 21)\n', (9240, 9258), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9434, 9464), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(490)', '(21)', '(21)'], {}), '(600, 490, 21, 21)\n', (9446, 9464), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9640, 9670), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(510)', '(21)', '(21)'], {}), '(600, 510, 21, 21)\n', (9652, 9670), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9842, 9873), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(490)', '(371)', '(20)'], {}), '(620, 490, 371, 20)\n', (9854, 9873), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10047, 10078), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(510)', '(371)', '(20)'], {}), '(620, 510, 371, 20)\n', (10059, 10078), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10234, 10265), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(540)', '(201)', '(16)'], {}), '(600, 540, 201, 16)\n', (10246, 10265), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10423, 10453), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(560)', '(21)', '(21)'], {}), '(600, 560, 21, 21)\n', (10435, 10453), False, 'from PyQt5 import 
QtCore, QtGui, QtWidgets\n'), ((10621, 10652), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(620)', '(560)', '(391)', '(20)'], {}), '(620, 560, 391, 20)\n', (10633, 10652), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10806, 10836), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(930)', '(80)', '(161)', '(20)'], {}), '(930, 80, 161, 20)\n', (10818, 10836), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10991, 11022), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(1020)', '(100)', '(79)', '(23)'], {}), '(1020, 100, 79, 23)\n', (11003, 11022), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11174, 11205), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(930)', '(130)', '(161)', '(20)'], {}), '(930, 130, 161, 20)\n', (11186, 11205), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11360, 11391), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(1020)', '(150)', '(79)', '(23)'], {}), '(1020, 150, 79, 23)\n', (11372, 11391), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11543, 11574), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(970)', '(540)', '(131)', '(20)'], {}), '(970, 540, 131, 20)\n', (11555, 11574), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11733, 11764), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(1020)', '(560)', '(79)', '(23)'], {}), '(1020, 560, 79, 23)\n', (11745, 11764), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11916, 11946), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(460)', '(571)', '(16)'], {}), '(10, 460, 571, 16)\n', (11928, 11946), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12194, 12225), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(580)', '(1091)', '(20)'], {}), '(10, 580, 1091, 20)\n', (12206, 12225), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12477, 12508), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(250)', '(470)', '(151)', '(16)'], {}), '(250, 470, 151, 16)\n', (12489, 12508), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12664, 12694), 
'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(120)', '(490)', '(91)', '(24)'], {}), '(120, 490, 91, 24)\n', (12676, 12694), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12848, 12878), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(490)', '(101)', '(16)'], {}), '(10, 490, 101, 16)\n', (12860, 12878), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13016, 13046), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(520)', '(101)', '(16)'], {}), '(10, 520, 101, 16)\n', (13028, 13046), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13200, 13230), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(120)', '(520)', '(91)', '(24)'], {}), '(120, 520, 91, 24)\n', (13212, 13230), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13382, 13412), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(550)', '(101)', '(16)'], {}), '(10, 550, 101, 16)\n', (13394, 13412), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13564, 13594), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(120)', '(550)', '(91)', '(24)'], {}), '(120, 550, 91, 24)\n', (13576, 13594), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13744, 13774), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(250)', '(490)', '(91)', '(16)'], {}), '(250, 490, 91, 16)\n', (13756, 13774), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13925, 13956), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(350)', '(490)', '(111)', '(23)'], {}), '(350, 490, 111, 23)\n', (13937, 13956), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14100, 14130), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(350)', '(571)', '(16)'], {}), '(10, 350, 571, 16)\n', (14112, 14130), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14380, 14411), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(250)', '(360)', '(101)', '(16)'], {}), '(250, 360, 101, 16)\n', (14392, 14411), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14561, 14590), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(400)', '(21)', 
'(21)'], {}), '(10, 400, 21, 21)\n', (14573, 14590), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14746, 14776), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(40)', '(400)', '(541)', '(21)'], {}), '(40, 400, 541, 21)\n', (14758, 14776), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14924, 14954), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(380)', '(171)', '(16)'], {}), '(10, 380, 171, 16)\n', (14936, 14954), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15092, 15122), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(420)', '(141)', '(16)'], {}), '(10, 420, 141, 16)\n', (15104, 15122), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15276, 15305), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(440)', '(21)', '(21)'], {}), '(10, 440, 21, 21)\n', (15288, 15305), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15465, 15495), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(40)', '(440)', '(541)', '(21)'], {}), '(40, 440, 541, 21)\n', (15477, 15495), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15643, 15674), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(170)', '(101)', '(16)'], {}), '(600, 170, 101, 16)\n', (15655, 15674), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15822, 15853), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(710)', '(170)', '(291)', '(20)'], {}), '(710, 170, 291, 20)\n', (15834, 15853), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16001, 16031), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(600)', '(70)', '(101)', '(16)'], {}), '(600, 70, 101, 16)\n', (16013, 16031), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16179, 16209), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(700)', '(70)', '(381)', '(20)'], {}), '(700, 70, 381, 20)\n', (16191, 16209), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16396, 16425), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(470)', '(0)', '(101)', '(23)'], {}), '(470, 0, 101, 23)\n', (16408, 16425), False, 'from 
PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16628, 16659), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(1010)', '(600)', '(85)', '(23)'], {}), '(1010, 600, 85, 23)\n', (16640, 16659), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16858, 16889), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(870)', '(600)', '(131)', '(23)'], {}), '(870, 600, 131, 23)\n', (16870, 16889), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17087, 17117), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(40)', '(600)', '(671)', '(21)'], {}), '(40, 600, 671, 21)\n', (17099, 17117), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17320, 17351), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(720)', '(600)', '(141)', '(23)'], {}), '(720, 600, 141, 23)\n', (17332, 17351), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17543, 17572), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(600)', '(21)', '(21)'], {}), '(10, 600, 21, 21)\n', (17555, 17572), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22214, 22234), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(211)', '(0)'], {}), '(211, 0)\n', (22226, 22234), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24301, 24324), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(1091)', '(511)'], {}), '(1091, 511)\n', (24313, 24324), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24543, 24564), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(1091)', '(0)'], {}), '(1091, 0)\n', (24555, 24564), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24603, 24630), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(101)'], {}), '(16777215, 101)\n', (24615, 24630), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25063, 25084), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(411)', '(81)'], {}), '(411, 81)\n', (25075, 25084), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27356, 27378), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(114)', '(621)'], {}), '(114, 621)\n', (27368, 27378), False, 'from PyQt5 import 
QtCore, QtGui, QtWidgets\n'), ((27670, 27692), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(971)', '(631)'], {}), '(971, 631)\n', (27682, 27692), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28349, 28372), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(1101)', '(481)'], {}), '(1101, 481)\n', (28361, 28372), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28576, 28598), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(1122)', '(90)'], {}), '(1122, 90)\n', (28588, 28598), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28639, 28666), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(141)'], {}), '(16777215, 141)\n', (28651, 28666), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34765, 34788), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(1121)', '(521)'], {}), '(1121, 521)\n', (34777, 34788), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35119, 35142), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(1101)', '(111)'], {}), '(1101, 111)\n', (35131, 35142), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40304, 40324), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(120)'], {}), '(0, 120)\n', (40316, 40324), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
"""Shelving Filter Cascade with Adjustable Transition Slope and Bandwidth
<NAME>, <NAME>, <NAME>
In: Proc. of 148th AES Convention, Virtual Vienna, May 2020, Paper 10339
http://www.aes.org/e-lib/browse.cfm?elib=20756
"""
import numpy as np
from scipy.signal import tf2sos, freqs
from matplotlib import rcParams
def halfpadloss_shelving_filter_num_den_coeff(G):
    """Return half-pad-loss numerator/denominator gains (n1, n2, d1, d2).

    For a shelving level of G dB the "type III" (half-pad-loss) design
    distributes the gain symmetrically between numerator and denominator,
    so the magnitude response passes through G/2 dB at the normalized
    mid-level frequency.

    - see type III in
    long-url: https://github.com/spatialaudio/digital-signal-processing-lecture/blob/master/filter_desig/audiofilter.ipynb  # noqa
    - see Sec. 3.2 in https://doi.org/10.3390/app6050129
    """
    direction = np.sign(G)  # +1 boost, -1 cut, 0 for flat response
    lin_gain = 10 ** (np.abs(G) / 20)  # magnitude of the linear gain
    # quarter- and half-gain factors for the numerator ...
    num1 = lin_gain ** (direction / 4)
    num2 = lin_gain ** (direction / 2)
    # ... and their reciprocals for the denominator
    return num1, num2, 1 / num1, 1 / num2
def normalized_low_shelving_1st_coeff(G=-10*np.log10(2)):
    """Coefficients of low_shelving_1st_coeff() for unit frequency (omega=1)."""
    coeff = halfpadloss_shelving_filter_num_den_coeff(G)
    # first-order filter: the s^2 coefficient is zero
    num = np.array([0.0, 1.0, coeff[1]])
    den = np.array([0.0, 1.0, coeff[3]])
    return num, den
def low_shelving_1st_coeff(omega=1, G=-10*np.log10(2)):
    """First-order low-shelving filter with half-pad-loss normalization.

    The magnitude response passes through G/2 dB (mid-level) at the
    angular frequency *omega*.

    Parameters
    ----------
    omega : angular frequency in rad/s at the half-pad-loss/mid-level point
    G : shelving level in dB (response is G/2 dB at omega)

    Returns
    -------
                                                 b[0] s^2 + b[1] s^1 + b[2] s^0
    b, a : coefficients for Laplace H(s) = ------------------------------
                                                 a[0] s^2 + a[1] s^1 + a[2] s^0
        with s = j omega; note b[0] = a[0] = 0 for this 1st order filter.

    See halfpadloss_shelving_filter_num_den_coeff() for references.
    """
    num, den = normalized_low_shelving_1st_coeff(G=G)
    # de-normalize by [omega^-2, omega^-1, omega^0] (powers in the Laplace domain)
    powers = omega ** np.arange(-2.0, 1.0)
    return num * powers, den * powers
def normalized_high_shelving_1st_coeff(G=-10*np.log10(2)):
    """Coefficients of high_shelving_1st_coeff() for unit frequency (omega=1)."""
    _, n2, _, d2 = halfpadloss_shelving_filter_num_den_coeff(G)
    # first-order filter: the s^2 coefficient is zero
    return np.array([0.0, n2, 1.0]), np.array([0.0, d2, 1.0])
def high_shelving_1st_coeff(omega=1, G=-10*np.log10(2)):
    """First-order high-shelving filter with half-pad-loss normalization.

    The magnitude response passes through G/2 dB (mid-level) at the
    angular frequency *omega*.

    Parameters
    ----------
    omega : angular frequency in rad/s at the half-pad-loss/mid-level point
    G : shelving level in dB (response is G/2 dB at omega)

    Returns
    -------
                                                 b[0] s^2 + b[1] s^1 + b[2] s^0
    b, a : coefficients for Laplace H(s) = ------------------------------
                                                 a[0] s^2 + a[1] s^1 + a[2] s^0
        with s = j omega; note b[0] = a[0] = 0 for this 1st order filter.

    See halfpadloss_shelving_filter_num_den_coeff() for references.
    """
    num, den = normalized_high_shelving_1st_coeff(G=G)
    # de-normalize by [omega^-2, omega^-1, omega^0] (powers in the Laplace domain)
    powers = omega ** np.arange(-2.0, 1.0)
    return num * powers, den * powers
def normalized_low_shelving_2nd_coeff(G=-10*np.log10(2), Q=1/np.sqrt(2)):
    """Coefficients of low_shelving_2nd_coeff() for unit frequency (omega=1)."""
    n1, n2, d1, d2 = halfpadloss_shelving_filter_num_den_coeff(G)
    # quality factor Q shapes the middle (s^1) coefficient
    num = np.array([1.0, n1 / Q, n2])
    den = np.array([1.0, d1 / Q, d2])
    return num, den
def low_shelving_2nd_coeff(omega=1, G=-10*np.log10(2), Q=1/np.sqrt(2)):
"""Half-pad-loss/mid-level low shelving filter 2nd order.
Parameters
----------
omega : angular frequency in rad/s at half-pad-loss/mid-level
G : level in dB (G/2 at omega)
Q : pole/zero quality, Q>0.5
Returns
-------
b[0] s^2 + b[1] s^1 + b[2] s^0
b,a : coefficients for Laplace H(s) = ------------------------------
a[0] s^2 + a[1] s^1 + a[2] s^0
with s = j omega
see halfpadloss_shelving_filter_num_den_coeff() for references
"""
b, a = normalized_low_shelving_2nd_coeff(G=G, Q=Q)
scale = omega**np.arange(-2., 1.) # powers in the Laplace domain
return b * scale, a * scale
def normalized_high_shelving_2nd_coeff(G=-10*np.log10(2), Q=1/np.sqrt(2)):
"""See high_shelving_2nd_coeff() for omega=1."""
n1, n2, d1, d2 = halfpadloss_shelving_filter_num_den_coeff(G)
b, a = np.array([n2, n1 / Q, 1]), np.array([d2, d1 / Q, 1])
return b, a
def high_shelving_2nd_coeff(omega=1, G=-10*np.log10(2), Q=1/np.sqrt(2)):
"""Half-pad-loss/mid-level high shelving filter 2nd order.
Parameters
----------
omega : angular frequency in rad/s at half-pad-loss/mid-level
G : level in dB (G/2 at omega)
Q : pole/zero quality, Q>0.5
Returns
-------
b[0] s^2 + b[1] s^1 + b[2] s^0
b,a : coefficients for Laplace H(s) = ------------------------------
a[0] s^2 + a[1] s^1 + a[2] s^0
with s = j omega
see halfpadloss_shelving_filter_num_den_coeff() for references
"""
b, a = normalized_high_shelving_2nd_coeff(G=G, Q=Q)
scale = omega**np.arange(-2., 1.) # powers in the Laplace domain
return b * scale, a * scale
def db(x, *, power=False):
"""Convert *x* to decibel.
Parameters
----------
x : array_like
Input data. Values of 0 lead to negative infinity.
power : bool, optional
If ``power=False`` (the default), *x* is squared before
conversion.
"""
with np.errstate(divide='ignore'):
return (10 if power else 20) * np.log10(np.abs(x))
def db2lin(x):
return 10**(x / 20)
def shelving_slope_parameters(slope=None, BWd=None, Gd=None):
"""Compute the third parameter from the given two.
Parameters
----------
slope : float, optional
Desired shelving slope in decibel per octave.
BW : float, optional
Desired bandwidth of the slope in octave.
G : float, optional
Desired gain of the stop band in decibel.
"""
if slope == 0:
raise ValueError("`slope` should be nonzero.")
if slope and BWd is not None:
Gd = -BWd * slope
elif BWd and Gd is not None:
slope = -Gd / BWd
elif Gd and slope is not None:
if Gd * slope > 1:
raise ValueError("`Gd` and `slope` cannot have the same sign.")
else:
BWd = np.abs(Gd / slope)
else:
print('At lest two parameters need to be specified.')
return slope, BWd, Gd
def shelving_filter_parameters(biquad_per_octave, **kwargs):
"""Parameters for shelving filter design.
Parameters
----------
biquad_per_octave : float
Number of biquad filters per octave.
Returns
-------
num_biquad : int
Number of biquad filters.
Gb : float
Gain of each biquad filter in decibel.
G : float
Gain of overall (concatenated) filters in decibel. This might differ
from what is returned by `shelving_parameters`.
"""
slope, BWd, Gd = shelving_slope_parameters(**kwargs)
num_biquad = int(np.ceil(BWd * biquad_per_octave))
Gb = -slope / biquad_per_octave
G = Gb * num_biquad
return num_biquad, Gb, G
def check_shelving_filter_validity(biquad_per_octave, **kwargs):
"""Level, slope, bandwidth validity for shelving filter cascade.
Parameters
----------
biquad_per_octave : float
Number of biquad filters per octave.
see shelving_slope_parameters(), shelving_filter_parameters()
Returns
-------
flag = [Boolean, Boolean, Boolean]
if all True then intended parameter triplet holds, if not all True
deviations from desired response occur
"""
flag = [True, True, True]
slope, BWd, Gd = shelving_slope_parameters(**kwargs)
num_biquad, Gb, G = shelving_filter_parameters(biquad_per_octave, **kwargs)
# BWd < 1 octave generally fails
if BWd <= 1:
flag[0] = False
# BWd * biquad_per_octave needs to be integer
flag[1] = float(BWd * biquad_per_octave).is_integer()
# biquad_per_octave must be large enough
# for slope < 12.04 dB at least one biquad per ocatve is required
tmp = slope / (20*np.log10(4))
if tmp > 1.:
if biquad_per_octave < tmp:
flag[2] = False
else:
if biquad_per_octave < 1:
flag[2] = False
return flag
def low_shelving_1st_cascade(w0, Gb, num_biquad, biquad_per_octave):
"""Low shelving filter design using cascaded biquad filters.
- see low_shelving_2nd_cascade()
- under construction for code improvement
"""
sos = np.zeros((num_biquad, 6))
for m in range(num_biquad):
wm = w0 * 2**(-(m + 0.5) / biquad_per_octave)
b, a = low_shelving_1st_coeff(omega=wm, G=Gb)
sos[m] = tf2sos(b, a)
return sos
def high_shelving_1st_cascade(w0, Gb, num_biquad, biquad_per_octave):
"""High shelving filter design using cascaded biquad filters.
- see low_shelving_2nd_cascade()
- under construction for code improvement
"""
sos = np.zeros((num_biquad, 6))
for m in range(num_biquad):
wm = w0 * 2**(-(m + 0.5) / biquad_per_octave)
b, a = high_shelving_1st_coeff(omega=wm, G=Gb)
sos[m] = tf2sos(b, a)
return sos
def low_shelving_2nd_cascade(w0, Gb, num_biquad, biquad_per_octave,
Q=1/np.sqrt(2)):
"""Low shelving filter design using cascaded biquad filters.
Parameters
----------
w0 : float
Cut-off frequency in radian per second.
Gb : float
Gain of each biquad filter in decibel.
num_biquad : int
Number of biquad filters.
Q : float, optional
Quality factor of each biquad filter.
Returns
-------
sos : array_like
Array of second-order filter coefficients, must have shape
``(n_sections, 6)``. Each row corresponds to a second-order
section, with the first three columns providing the numerator
coefficients and the last three providing the denominator
coefficients.
"""
sos = np.zeros((num_biquad, 6))
for m in range(num_biquad):
wm = w0 * 2**(-(m + 0.5) / biquad_per_octave)
b, a = low_shelving_2nd_coeff(omega=wm, G=Gb, Q=Q)
sos[m] = tf2sos(b, a)
return sos
def high_shelving_2nd_cascade(w0, Gb, num_biquad, biquad_per_octave,
Q=1/np.sqrt(2)):
"""High shelving filter design using cascaded biquad filters.
- see low_shelving_2nd_cascade()
- under construction for code improvement
"""
sos = np.zeros((num_biquad, 6))
for m in range(num_biquad):
wm = w0 * 2**(-(m + 0.5) / biquad_per_octave)
b, a = high_shelving_2nd_coeff(omega=wm, G=Gb, Q=Q)
sos[m] = tf2sos(b, a)
return sos
def sosfreqs(sos, worN=200, plot=None):
"""Compute the frequency response of an analog filter in SOS format.
Parameters
----------
sos : array_like
Array of second-order filter coefficients, must have shape
``(n_sections, 6)``. Each row corresponds to a second-order
section, with the first three columns providing the numerator
coefficients and the last three providing the denominator
coefficients.
worN : {None, int, array_like}, optional
If None, then compute at 200 frequencies around the interesting parts
of the response curve (determined by pole-zero locations). If a single
integer, then compute at that many frequencies. Otherwise, compute the
response at the angular frequencies (e.g. rad/s) given in `worN`.
plot : callable, optional
A callable that takes two arguments. If given, the return parameters
`w` and `h` are passed to plot. Useful for plotting the frequency
response inside `freqs`.
Returns
-------
w : ndarray
The angular frequencies at which `h` was computed.
h : ndarray
The frequency response.
"""
h = 1.
for row in sos:
w, rowh = freqs(row[:3], row[3:], worN=worN, plot=plot)
h *= rowh
return w, h
def matchedz_zpk(s_zeros, s_poles, s_gain, fs):
"""Matched-z transform of poles and zeros.
Parameters
----------
s_zeros : array_like
Zeros in the Laplace domain.
s_poles : array_like
Poles in the Laplace domain.
s_gain : float
System gain in the Laplace domain.
fs : int
Sampling frequency in Hertz.
Returns
-------
z_zeros : numpy.ndarray
Zeros in the z-domain.
z_poles : numpy.ndarray
Poles in the z-domain.
z_gain : float
System gain in the z-domain.
See Also
--------
:func:`scipy.signal.bilinear_zpk`
"""
z_zeros = np.exp(s_zeros / fs)
z_poles = np.exp(s_poles / fs)
omega = 1j * np.pi * fs
s_gain *= np.prod((omega - s_zeros) / (omega - s_poles)
* (-1 - z_poles) / (-1 - z_zeros))
return z_zeros, z_poles, np.abs(s_gain)
def nearest_value(x0, x, f):
"""Plot helping."""
return f[np.abs(x - x0).argmin()]
def set_rcparams():
"""Plot helping."""
rcParams['axes.linewidth'] = 0.5
rcParams['axes.edgecolor'] = 'black'
rcParams['axes.facecolor'] = 'None'
rcParams['axes.labelcolor'] = 'black'
rcParams['xtick.color'] = 'black'
rcParams['ytick.color'] = 'black'
rcParams['font.family'] = 'serif'
rcParams['font.size'] = 13
rcParams['text.usetex'] = True
rcParams['text.latex.preamble'] = r'\usepackage{amsmath}'
rcParams['text.latex.preamble'] = r'\usepackage{gensymb}'
rcParams['legend.title_fontsize'] = 10
def set_outdir():
"""Plot helping."""
return '../graphics/'
def interaction_matrix_sge(G_proto, gain_factor, w_command, w_control,
bandwidth):
"""
Parameters
----------
G_proto: array_like
Prototype gain in decibel.
gain_factor: float
Gain factor.
w_command: array_like
Normalized command frequencies.
w_control: array_like
Normalized control frequencies.
bandwidth: array_like
Bandwidth.
"""
num_command = len(w_command)
num_control = len(w_control)
leak = np.zeros((num_command, num_control))
G_bandwidth = gain_factor * G_proto
g_proto = db2lin(G_proto)
g_bandwidth = db2lin(G_bandwidth)
z1 = np.exp(-1j * w_control)
z2 = z1**2
poly = np.zeros((num_command, 3))
poly[6] = 0.000321, 0.00474, 0.00544
poly[7] = 0.00108, 0.0221, 0.0169
poly[8] = 0.00184, 0.125, 0.0212
poly[9] = -0.00751, 0.730, -0.0672
for m, (Gp, gp, p, gb, wc, bw) in enumerate(
zip(G_proto, g_proto, poly, g_bandwidth, w_command, bandwidth)):
G_nyquist = np.sign(Gp) * np.polyval(p, np.abs(Gp))
gn = db2lin(G_nyquist)
gp2 = gp**2
gb2 = gb**2
gn2 = gn**2
F = np.abs(gp2 - gb2)
G00 = np.abs(gp2 - 1)
F00 = np.abs(gb2 - 1)
G01 = np.abs(gp2 - gn)
G11 = np.abs(gp2 - gn2)
F01 = np.abs(gb2 - gn)
F11 = np.abs(gb2 - gn2)
W2 = np.sqrt(G11 / G00) * np.tan(wc / 2)**2
DW = (1 + np.sqrt(F00 / F11) * W2) * np.tan(bw / 2)
C = F11 * DW**2 - 2 * W2 * (F01 - np.sqrt(F00 * F11))
D = 2 * W2 * (G01 - np.sqrt(G00 * G11))
A = np.sqrt((C + D) / F)
B = np.sqrt((gp2 * C + gb2 * D) / F)
num = np.array([gn+W2+B, -2*(gn-W2), (gn-B+W2)]) / (1+W2+A)
den = np.array([1, -2*(1-W2)/(1+W2+A), (1+W2-A)/(1+W2+A)])
H = (num[0] + num[1]*z1 + num[2]*z2)\
/ (den[0] + den[1]*z1 + den[2]*z2)
G = db(H) / Gp
leak[m] = np.abs(G)
return leak
def peq_seg(g_ref, g_nyquist, g, g_bandwidth, w_command, bandwidth):
"""
Parameters
----------
g_ref: float
Reference linear gain.
g_nyquist: float
Nyquist linear gain.
g_bandwidth: float
(Optimized) linear gain.
w_command: float
Normalized command frequencies.
bandwidth: float
Bandwidth.
"""
g2 = g**2
gb2 = g_bandwidth**2
gr2 = g_ref**2
gn2 = g_nyquist**2
grn = g_ref * g_nyquist
F = np.abs(g2 - gb2)
G00 = np.abs(g2 - gr2)
F00 = np.abs(gb2 - gr2)
G01 = np.abs(g2 - grn)
G11 = np.abs(g2 - gn2)
F01 = np.abs(gb2 - grn)
F11 = np.abs(gb2 - gn2)
W2 = np.sqrt(G11 / G00) * np.tan(w_command / 2)**2
DW = (1 + np.sqrt(F00 / F11) * W2) * np.tan(bandwidth / 2)
C = F11 * DW**2 - 2 * W2 * (F01 - np.sqrt(F00 * F11))
D = 2 * W2 * (G01 - np.sqrt(G00 * G11))
A = np.sqrt((C + D) / F)
B = np.sqrt((g**2 * C + g_bandwidth**2 * D) / F)
b = np.array([(g_nyquist + g_ref * W2 + B),
-2*(g_nyquist - g_ref * W2),
(g_nyquist - B + g_ref * W2)]) / (1 + W2 + A)
a = np.array([1, -2*(1 - W2) / (1 + W2 + A), (1 + W2 - A) / (1 + W2 + A)])
return b, a
def optimized_peq_seg(gain_command, gain_proto, gain_factor, w_command,
w_control, bandwidth):
"""
Parameters
----------
gain_command: array_like
Command gain in decibel.
gain_proto: array_like
Prototype gain in decibel.
gain_factor: float
Gain factor.
w_command: array_like
Normalized command frequencies.
w_control: array_like
Normalized control frequencies.
bandwidth: array_like
Bandwidths.
Returns
-------
b_opt: array_like (N, 3)
Moving average coefficients.
a_opt: array_like (N, 3)
Autoregressive (recursive) coefficients.
"""
num_command = len(gain_command)
# symmetric GEG design
gain_control = np.zeros(2 * num_command - 1)
gain_control[::2] = gain_command
gain_control[1::2] = 0.5 * (gain_command[:-1] + gain_command[1:])
# interaction matrix "B"
B = interaction_matrix_sge(gain_proto, gain_factor,
w_command, w_control, bandwidth)
gain2 = np.zeros((2 * num_command - 1, 1))
gain2[::2, 0] = gain_command
gain2[1::2, 0] = 0.5 * (gain_command[:-1] + gain_command[1:])
# band weights
weights = np.ones(2 * num_command - 1)
weights[1::2] *= 0.5
W = np.diag(weights)
gain_opt =\
np.matmul(np.linalg.inv(np.linalg.multi_dot([B, W, np.transpose(B)])),
np.linalg.multi_dot([B, W, gain2]))
gain_opt_bandwidth = gain_factor * gain_opt
gain_opt = np.squeeze(gain_opt)
gain_opt_bandwidth = np.squeeze(gain_opt_bandwidth)
g_opt = db2lin(gain_opt)
g_opt_bandwidth = db2lin(gain_opt_bandwidth)
poly = np.zeros((num_command, 3))
poly[6] = 0.000321, 0.00474, 0.00544
poly[7] = 0.00108, 0.0221, 0.0169
poly[8] = 0.00184, 0.125, 0.0212
poly[9] = -0.00751, 0.730, -0.0672
b_opt = np.zeros((3, num_command))
a_opt = np.zeros((3, num_command))
for m, (Go, go, gob, wc, bw, p) in enumerate(
zip(gain_opt, g_opt, g_opt_bandwidth, w_command, bandwidth, poly)):
gain_nyquist = np.sign(Go) * np.polyval(p, np.abs(Go))
b, a = peq_seg(1, db2lin(gain_nyquist), go, gob, wc, bw)
b_opt[:, m] = b
a_opt[:, m] = a
return b_opt, a_opt
def fracorder_lowshelving_eastty(w1, w2, G1, G2, rB=None):
"""
Parameters
----------
w1: float
Lower corner frequency.
w2: float
Upper corner frequency.
G1: float
Target level at lower corner frequency in dB.
G2: float
Target level at upper corner frequency in dB.
rB: float
Gain per octave.
Returns
-------
z: array_like
Complex zeros in the Laplace domain.
p: array_like
Complex poles in the Laplace domain.
k: float
Gain.
"""
Gd = G1 - G2
n_eff = effective_order(w1, w2, Gd, rB)
n_int, n_frac = np.divmod(n_eff, 1)
n_int = int(n_int)
z = np.array([])
p = np.array([])
# Second-order sections (complex conjugate pole/zero pairs)
if n_int > 0:
alpha = complex_zp_angles(n_int, n_frac)
alpha = np.concatenate((alpha, -alpha))
z = w1 * np.exp(1j * alpha)
p = w2 * np.exp(1j * alpha)
# First-order section (real pole/zero)
if n_eff % 2 != 0:
s_lower, s_upper = real_zp(n_int, n_frac, w1, w2)
if n_int % 2 == 0:
z_real = s_lower
p_real = s_upper
elif n_int % 2 == 1:
z_real = s_upper
p_real = s_lower
z = np.append(z, z_real)
p = np.append(p, p_real)
return z, p, 1
def effective_order(w1, w2, Gd, rB=None):
"""Effective order of shelving filter.
Parameters
----------
w1: float
Lower corner frequency.
w2: float
Upper corner frequency.
Gd: float
Target level difference in dB.
rB: float
Gain per octave.
"""
if rB is None:
rB = db(2) * np.sign(Gd) # Butterworth
return Gd / rB / np.log2(w2/w1)
def complex_zp_angles(n_int, n_frac):
"""Polar angles of the complex conjugate zeros/poles.
These correspond to the second-order section filters.
Parameters
----------
n_int: int
Interger order.
n_frac: float
Fractional order [0, 1).
"""
# linear interpolation of angles
num_zp_pair = int(n_int+1) // 2
return np.pi/2 * np.stack([
(1-n_frac) * (1 + (2*m+1)/n_int)
+ n_frac * (1 + (2*m+1)/(n_int+1))
for m in range(num_zp_pair)])
def real_zp(n_int, n_frac, w_lower, w_upper):
"""Real-valued zero and pole.
These correspond to the first-order section filters.
Parameters
----------
n_int: int
Integer order
n_frac: float
Fractional order [0, 1).
w_lower: float
Lower corner frequency.
w_upper: float
Upper corner frequency.
Returns
-------
s_lower: float
Smaller real-valued zero or pole.
s_upper: float
Larger real-valued zero or pole.
"""
w_mean = np.sqrt(w_lower * w_upper)
ratio = (w_upper / w_lower)
# logarithmic interpolation of zero/pole radius
if n_int % 2 == 0: # even
s_lower = -w_mean * ratio**(-n_frac/2)
s_upper = -w_mean * ratio**(n_frac/2)
elif n_int % 2 == 1: # odd
s_lower = -w_lower * ratio**(n_frac/2)
s_upper = -w_upper * ratio**(-n_frac/2)
return s_lower, s_upper
| [
"numpy.prod",
"numpy.log10",
"numpy.sqrt",
"numpy.linalg.multi_dot",
"numpy.array",
"scipy.signal.freqs",
"numpy.arange",
"numpy.divmod",
"numpy.exp",
"numpy.concatenate",
"numpy.abs",
"numpy.ceil",
"scipy.signal.tf2sos",
"numpy.ones",
"numpy.squeeze",
"numpy.sign",
"numpy.log2",
"... | [((676, 686), 'numpy.sign', 'np.sign', (['G'], {}), '(G)\n', (683, 686), True, 'import numpy as np\n'), ((8566, 8591), 'numpy.zeros', 'np.zeros', (['(num_biquad, 6)'], {}), '((num_biquad, 6))\n', (8574, 8591), True, 'import numpy as np\n'), ((9018, 9043), 'numpy.zeros', 'np.zeros', (['(num_biquad, 6)'], {}), '((num_biquad, 6))\n', (9026, 9043), True, 'import numpy as np\n'), ((10050, 10075), 'numpy.zeros', 'np.zeros', (['(num_biquad, 6)'], {}), '((num_biquad, 6))\n', (10058, 10075), True, 'import numpy as np\n'), ((10553, 10578), 'numpy.zeros', 'np.zeros', (['(num_biquad, 6)'], {}), '((num_biquad, 6))\n', (10561, 10578), True, 'import numpy as np\n'), ((12738, 12758), 'numpy.exp', 'np.exp', (['(s_zeros / fs)'], {}), '(s_zeros / fs)\n', (12744, 12758), True, 'import numpy as np\n'), ((12773, 12793), 'numpy.exp', 'np.exp', (['(s_poles / fs)'], {}), '(s_poles / fs)\n', (12779, 12793), True, 'import numpy as np\n'), ((12836, 12921), 'numpy.prod', 'np.prod', (['((omega - s_zeros) / (omega - s_poles) * (-1 - z_poles) / (-1 - z_zeros))'], {}), '((omega - s_zeros) / (omega - s_poles) * (-1 - z_poles) / (-1 - z_zeros)\n )\n', (12843, 12921), True, 'import numpy as np\n'), ((14214, 14250), 'numpy.zeros', 'np.zeros', (['(num_command, num_control)'], {}), '((num_command, num_control))\n', (14222, 14250), True, 'import numpy as np\n'), ((14369, 14394), 'numpy.exp', 'np.exp', (['(-1.0j * w_control)'], {}), '(-1.0j * w_control)\n', (14375, 14394), True, 'import numpy as np\n'), ((14420, 14446), 'numpy.zeros', 'np.zeros', (['(num_command, 3)'], {}), '((num_command, 3))\n', (14428, 14446), True, 'import numpy as np\n'), ((16185, 16201), 'numpy.abs', 'np.abs', (['(g2 - gb2)'], {}), '(g2 - gb2)\n', (16191, 16201), True, 'import numpy as np\n'), ((16212, 16228), 'numpy.abs', 'np.abs', (['(g2 - gr2)'], {}), '(g2 - gr2)\n', (16218, 16228), True, 'import numpy as np\n'), ((16239, 16256), 'numpy.abs', 'np.abs', (['(gb2 - gr2)'], {}), '(gb2 - gr2)\n', (16245, 16256), True, 'import 
numpy as np\n'), ((16268, 16284), 'numpy.abs', 'np.abs', (['(g2 - grn)'], {}), '(g2 - grn)\n', (16274, 16284), True, 'import numpy as np\n'), ((16295, 16311), 'numpy.abs', 'np.abs', (['(g2 - gn2)'], {}), '(g2 - gn2)\n', (16301, 16311), True, 'import numpy as np\n'), ((16322, 16339), 'numpy.abs', 'np.abs', (['(gb2 - grn)'], {}), '(gb2 - grn)\n', (16328, 16339), True, 'import numpy as np\n'), ((16350, 16367), 'numpy.abs', 'np.abs', (['(gb2 - gn2)'], {}), '(gb2 - gn2)\n', (16356, 16367), True, 'import numpy as np\n'), ((16599, 16619), 'numpy.sqrt', 'np.sqrt', (['((C + D) / F)'], {}), '((C + D) / F)\n', (16606, 16619), True, 'import numpy as np\n'), ((16628, 16676), 'numpy.sqrt', 'np.sqrt', (['((g ** 2 * C + g_bandwidth ** 2 * D) / F)'], {}), '((g ** 2 * C + g_bandwidth ** 2 * D) / F)\n', (16635, 16676), True, 'import numpy as np\n'), ((16841, 16913), 'numpy.array', 'np.array', (['[1, -2 * (1 - W2) / (1 + W2 + A), (1 + W2 - A) / (1 + W2 + A)]'], {}), '([1, -2 * (1 - W2) / (1 + W2 + A), (1 + W2 - A) / (1 + W2 + A)])\n', (16849, 16913), True, 'import numpy as np\n'), ((17691, 17720), 'numpy.zeros', 'np.zeros', (['(2 * num_command - 1)'], {}), '(2 * num_command - 1)\n', (17699, 17720), True, 'import numpy as np\n'), ((17991, 18025), 'numpy.zeros', 'np.zeros', (['(2 * num_command - 1, 1)'], {}), '((2 * num_command - 1, 1))\n', (17999, 18025), True, 'import numpy as np\n'), ((18159, 18187), 'numpy.ones', 'np.ones', (['(2 * num_command - 1)'], {}), '(2 * num_command - 1)\n', (18166, 18187), True, 'import numpy as np\n'), ((18221, 18237), 'numpy.diag', 'np.diag', (['weights'], {}), '(weights)\n', (18228, 18237), True, 'import numpy as np\n'), ((18452, 18472), 'numpy.squeeze', 'np.squeeze', (['gain_opt'], {}), '(gain_opt)\n', (18462, 18472), True, 'import numpy as np\n'), ((18498, 18528), 'numpy.squeeze', 'np.squeeze', (['gain_opt_bandwidth'], {}), '(gain_opt_bandwidth)\n', (18508, 18528), True, 'import numpy as np\n'), ((18620, 18646), 'numpy.zeros', 'np.zeros', 
(['(num_command, 3)'], {}), '((num_command, 3))\n', (18628, 18646), True, 'import numpy as np\n'), ((18815, 18841), 'numpy.zeros', 'np.zeros', (['(3, num_command)'], {}), '((3, num_command))\n', (18823, 18841), True, 'import numpy as np\n'), ((18854, 18880), 'numpy.zeros', 'np.zeros', (['(3, num_command)'], {}), '((3, num_command))\n', (18862, 18880), True, 'import numpy as np\n'), ((19844, 19863), 'numpy.divmod', 'np.divmod', (['n_eff', '(1)'], {}), '(n_eff, 1)\n', (19853, 19863), True, 'import numpy as np\n'), ((19895, 19907), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (19903, 19907), True, 'import numpy as np\n'), ((19916, 19928), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (19924, 19928), True, 'import numpy as np\n'), ((22027, 22053), 'numpy.sqrt', 'np.sqrt', (['(w_lower * w_upper)'], {}), '(w_lower * w_upper)\n', (22034, 22053), True, 'import numpy as np\n'), ((956, 967), 'numpy.log10', 'np.log10', (['(2)'], {}), '(2)\n', (964, 967), True, 'import numpy as np\n'), ((1099, 1119), 'numpy.array', 'np.array', (['[0, 1, n2]'], {}), '([0, 1, n2])\n', (1107, 1119), True, 'import numpy as np\n'), ((1121, 1141), 'numpy.array', 'np.array', (['[0, 1, d2]'], {}), '([0, 1, d2])\n', (1129, 1141), True, 'import numpy as np\n'), ((1202, 1213), 'numpy.log10', 'np.log10', (['(2)'], {}), '(2)\n', (1210, 1213), True, 'import numpy as np\n'), ((1865, 1885), 'numpy.arange', 'np.arange', (['(-2.0)', '(1.0)'], {}), '(-2.0, 1.0)\n', (1874, 1885), True, 'import numpy as np\n'), ((1995, 2006), 'numpy.log10', 'np.log10', (['(2)'], {}), '(2)\n', (2003, 2006), True, 'import numpy as np\n'), ((2139, 2159), 'numpy.array', 'np.array', (['[0, n2, 1]'], {}), '([0, n2, 1])\n', (2147, 2159), True, 'import numpy as np\n'), ((2161, 2181), 'numpy.array', 'np.array', (['[0, d2, 1]'], {}), '([0, d2, 1])\n', (2169, 2181), True, 'import numpy as np\n'), ((2243, 2254), 'numpy.log10', 'np.log10', (['(2)'], {}), '(2)\n', (2251, 2254), True, 'import numpy as np\n'), ((2908, 2928), 
'numpy.arange', 'np.arange', (['(-2.0)', '(1.0)'], {}), '(-2.0, 1.0)\n', (2917, 2928), True, 'import numpy as np\n'), ((3037, 3048), 'numpy.log10', 'np.log10', (['(2)'], {}), '(2)\n', (3045, 3048), True, 'import numpy as np\n'), ((3054, 3064), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (3061, 3064), True, 'import numpy as np\n'), ((3196, 3221), 'numpy.array', 'np.array', (['[1, n1 / Q, n2]'], {}), '([1, n1 / Q, n2])\n', (3204, 3221), True, 'import numpy as np\n'), ((3223, 3248), 'numpy.array', 'np.array', (['[1, d1 / Q, d2]'], {}), '([1, d1 / Q, d2])\n', (3231, 3248), True, 'import numpy as np\n'), ((3309, 3320), 'numpy.log10', 'np.log10', (['(2)'], {}), '(2)\n', (3317, 3320), True, 'import numpy as np\n'), ((3326, 3336), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (3333, 3336), True, 'import numpy as np\n'), ((3981, 4001), 'numpy.arange', 'np.arange', (['(-2.0)', '(1.0)'], {}), '(-2.0, 1.0)\n', (3990, 4001), True, 'import numpy as np\n'), ((4111, 4122), 'numpy.log10', 'np.log10', (['(2)'], {}), '(2)\n', (4119, 4122), True, 'import numpy as np\n'), ((4128, 4138), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (4135, 4138), True, 'import numpy as np\n'), ((4271, 4296), 'numpy.array', 'np.array', (['[n2, n1 / Q, 1]'], {}), '([n2, n1 / Q, 1])\n', (4279, 4296), True, 'import numpy as np\n'), ((4298, 4323), 'numpy.array', 'np.array', (['[d2, d1 / Q, 1]'], {}), '([d2, d1 / Q, 1])\n', (4306, 4323), True, 'import numpy as np\n'), ((4385, 4396), 'numpy.log10', 'np.log10', (['(2)'], {}), '(2)\n', (4393, 4396), True, 'import numpy as np\n'), ((4402, 4412), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (4409, 4412), True, 'import numpy as np\n'), ((5059, 5079), 'numpy.arange', 'np.arange', (['(-2.0)', '(1.0)'], {}), '(-2.0, 1.0)\n', (5068, 5079), True, 'import numpy as np\n'), ((5441, 5469), 'numpy.errstate', 'np.errstate', ([], {'divide': '"""ignore"""'}), "(divide='ignore')\n", (5452, 5469), True, 'import numpy as np\n'), ((7032, 7064), 'numpy.ceil', 
'np.ceil', (['(BWd * biquad_per_octave)'], {}), '(BWd * biquad_per_octave)\n', (7039, 7064), True, 'import numpy as np\n'), ((8749, 8761), 'scipy.signal.tf2sos', 'tf2sos', (['b', 'a'], {}), '(b, a)\n', (8755, 8761), False, 'from scipy.signal import tf2sos, freqs\n'), ((9202, 9214), 'scipy.signal.tf2sos', 'tf2sos', (['b', 'a'], {}), '(b, a)\n', (9208, 9214), False, 'from scipy.signal import tf2sos, freqs\n'), ((9333, 9343), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (9340, 9343), True, 'import numpy as np\n'), ((10238, 10250), 'scipy.signal.tf2sos', 'tf2sos', (['b', 'a'], {}), '(b, a)\n', (10244, 10250), False, 'from scipy.signal import tf2sos, freqs\n'), ((10371, 10381), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (10378, 10381), True, 'import numpy as np\n'), ((10742, 10754), 'scipy.signal.tf2sos', 'tf2sos', (['b', 'a'], {}), '(b, a)\n', (10748, 10754), False, 'from scipy.signal import tf2sos, freqs\n'), ((12007, 12052), 'scipy.signal.freqs', 'freqs', (['row[:3]', 'row[3:]'], {'worN': 'worN', 'plot': 'plot'}), '(row[:3], row[3:], worN=worN, plot=plot)\n', (12012, 12052), False, 'from scipy.signal import tf2sos, freqs\n'), ((12968, 12982), 'numpy.abs', 'np.abs', (['s_gain'], {}), '(s_gain)\n', (12974, 12982), True, 'import numpy as np\n'), ((14893, 14910), 'numpy.abs', 'np.abs', (['(gp2 - gb2)'], {}), '(gp2 - gb2)\n', (14899, 14910), True, 'import numpy as np\n'), ((14926, 14941), 'numpy.abs', 'np.abs', (['(gp2 - 1)'], {}), '(gp2 - 1)\n', (14932, 14941), True, 'import numpy as np\n'), ((14956, 14971), 'numpy.abs', 'np.abs', (['(gb2 - 1)'], {}), '(gb2 - 1)\n', (14962, 14971), True, 'import numpy as np\n'), ((14987, 15003), 'numpy.abs', 'np.abs', (['(gp2 - gn)'], {}), '(gp2 - gn)\n', (14993, 15003), True, 'import numpy as np\n'), ((15018, 15035), 'numpy.abs', 'np.abs', (['(gp2 - gn2)'], {}), '(gp2 - gn2)\n', (15024, 15035), True, 'import numpy as np\n'), ((15050, 15066), 'numpy.abs', 'np.abs', (['(gb2 - gn)'], {}), '(gb2 - gn)\n', (15056, 15066), True, 
'import numpy as np\n'), ((15081, 15098), 'numpy.abs', 'np.abs', (['(gb2 - gn2)'], {}), '(gb2 - gn2)\n', (15087, 15098), True, 'import numpy as np\n'), ((15334, 15354), 'numpy.sqrt', 'np.sqrt', (['((C + D) / F)'], {}), '((C + D) / F)\n', (15341, 15354), True, 'import numpy as np\n'), ((15367, 15399), 'numpy.sqrt', 'np.sqrt', (['((gp2 * C + gb2 * D) / F)'], {}), '((gp2 * C + gb2 * D) / F)\n', (15374, 15399), True, 'import numpy as np\n'), ((15482, 15554), 'numpy.array', 'np.array', (['[1, -2 * (1 - W2) / (1 + W2 + A), (1 + W2 - A) / (1 + W2 + A)]'], {}), '([1, -2 * (1 - W2) / (1 + W2 + A), (1 + W2 - A) / (1 + W2 + A)])\n', (15490, 15554), True, 'import numpy as np\n'), ((15669, 15678), 'numpy.abs', 'np.abs', (['G'], {}), '(G)\n', (15675, 15678), True, 'import numpy as np\n'), ((16378, 16396), 'numpy.sqrt', 'np.sqrt', (['(G11 / G00)'], {}), '(G11 / G00)\n', (16385, 16396), True, 'import numpy as np\n'), ((16465, 16486), 'numpy.tan', 'np.tan', (['(bandwidth / 2)'], {}), '(bandwidth / 2)\n', (16471, 16486), True, 'import numpy as np\n'), ((16682, 16784), 'numpy.array', 'np.array', (['[g_nyquist + g_ref * W2 + B, -2 * (g_nyquist - g_ref * W2), g_nyquist - B +\n g_ref * W2]'], {}), '([g_nyquist + g_ref * W2 + B, -2 * (g_nyquist - g_ref * W2), \n g_nyquist - B + g_ref * W2])\n', (16690, 16784), True, 'import numpy as np\n'), ((18352, 18386), 'numpy.linalg.multi_dot', 'np.linalg.multi_dot', (['[B, W, gain2]'], {}), '([B, W, gain2])\n', (18371, 18386), True, 'import numpy as np\n'), ((20077, 20108), 'numpy.concatenate', 'np.concatenate', (['(alpha, -alpha)'], {}), '((alpha, -alpha))\n', (20091, 20108), True, 'import numpy as np\n'), ((20490, 20510), 'numpy.append', 'np.append', (['z', 'z_real'], {}), '(z, z_real)\n', (20499, 20510), True, 'import numpy as np\n'), ((20523, 20543), 'numpy.append', 'np.append', (['p', 'p_real'], {}), '(p, p_real)\n', (20532, 20543), True, 'import numpy as np\n'), ((20961, 20977), 'numpy.log2', 'np.log2', (['(w2 / w1)'], {}), '(w2 / w1)\n', 
(20968, 20977), True, 'import numpy as np\n'), ((743, 752), 'numpy.abs', 'np.abs', (['G'], {}), '(G)\n', (749, 752), True, 'import numpy as np\n'), ((8145, 8156), 'numpy.log10', 'np.log10', (['(4)'], {}), '(4)\n', (8153, 8156), True, 'import numpy as np\n'), ((14749, 14760), 'numpy.sign', 'np.sign', (['Gp'], {}), '(Gp)\n', (14756, 14760), True, 'import numpy as np\n'), ((15113, 15131), 'numpy.sqrt', 'np.sqrt', (['(G11 / G00)'], {}), '(G11 / G00)\n', (15120, 15131), True, 'import numpy as np\n'), ((15197, 15211), 'numpy.tan', 'np.tan', (['(bw / 2)'], {}), '(bw / 2)\n', (15203, 15211), True, 'import numpy as np\n'), ((15414, 15466), 'numpy.array', 'np.array', (['[gn + W2 + B, -2 * (gn - W2), gn - B + W2]'], {}), '([gn + W2 + B, -2 * (gn - W2), gn - B + W2])\n', (15422, 15466), True, 'import numpy as np\n'), ((16399, 16420), 'numpy.tan', 'np.tan', (['(w_command / 2)'], {}), '(w_command / 2)\n', (16405, 16420), True, 'import numpy as np\n'), ((16570, 16588), 'numpy.sqrt', 'np.sqrt', (['(G00 * G11)'], {}), '(G00 * G11)\n', (16577, 16588), True, 'import numpy as np\n'), ((19034, 19045), 'numpy.sign', 'np.sign', (['Go'], {}), '(Go)\n', (19041, 19045), True, 'import numpy as np\n'), ((20126, 20146), 'numpy.exp', 'np.exp', (['(1.0j * alpha)'], {}), '(1.0j * alpha)\n', (20132, 20146), True, 'import numpy as np\n'), ((20162, 20182), 'numpy.exp', 'np.exp', (['(1.0j * alpha)'], {}), '(1.0j * alpha)\n', (20168, 20182), True, 'import numpy as np\n'), ((20913, 20924), 'numpy.sign', 'np.sign', (['Gd'], {}), '(Gd)\n', (20920, 20924), True, 'import numpy as np\n'), ((5519, 5528), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (5525, 5528), True, 'import numpy as np\n'), ((13051, 13065), 'numpy.abs', 'np.abs', (['(x - x0)'], {}), '(x - x0)\n', (13057, 13065), True, 'import numpy as np\n'), ((14777, 14787), 'numpy.abs', 'np.abs', (['Gp'], {}), '(Gp)\n', (14783, 14787), True, 'import numpy as np\n'), ((15134, 15148), 'numpy.tan', 'np.tan', (['(wc / 2)'], {}), '(wc / 2)\n', (15140, 15148), 
True, 'import numpy as np\n'), ((15302, 15320), 'numpy.sqrt', 'np.sqrt', (['(G00 * G11)'], {}), '(G00 * G11)\n', (15309, 15320), True, 'import numpy as np\n'), ((16438, 16456), 'numpy.sqrt', 'np.sqrt', (['(F00 / F11)'], {}), '(F00 / F11)\n', (16445, 16456), True, 'import numpy as np\n'), ((16526, 16544), 'numpy.sqrt', 'np.sqrt', (['(F00 * F11)'], {}), '(F00 * F11)\n', (16533, 16544), True, 'import numpy as np\n'), ((19062, 19072), 'numpy.abs', 'np.abs', (['Go'], {}), '(Go)\n', (19068, 19072), True, 'import numpy as np\n'), ((6324, 6342), 'numpy.abs', 'np.abs', (['(Gd / slope)'], {}), '(Gd / slope)\n', (6330, 6342), True, 'import numpy as np\n'), ((15170, 15188), 'numpy.sqrt', 'np.sqrt', (['(F00 / F11)'], {}), '(F00 / F11)\n', (15177, 15188), True, 'import numpy as np\n'), ((15254, 15272), 'numpy.sqrt', 'np.sqrt', (['(F00 * F11)'], {}), '(F00 * F11)\n', (15261, 15272), True, 'import numpy as np\n'), ((18314, 18329), 'numpy.transpose', 'np.transpose', (['B'], {}), '(B)\n', (18326, 18329), True, 'import numpy as np\n')] |
import os.path as osp
from .reader.video_reader import VideoReader
class ReaderFactory():
video_exts = [".mp4", ".avi", ".mov", ".MOV", ".mkv"]
def create(target_input, target_fps):
if osp.isfile(target_input):
ext = osp.splitext(target_input)[1]
if ext in ReaderFactory.video_exts:
return VideoReader(target_input, target_fps)
else:
msg = "{} is not supported. {} are supported.".format(
ext, ReaderFactory.video_exts)
raise TypeError(msg)
# elif osp.isdir(target_input):
# return ImageReader(target_input)
# # USB camera
# elif isinstance(target_input, int):
# return VideoReader(target_input)
# # network camera
# elif isinstance(target_input, str):
# return NetworkCameraReader(target_input)
else:
raise ValueError()
| [
"os.path.isfile",
"os.path.splitext"
] | [((204, 228), 'os.path.isfile', 'osp.isfile', (['target_input'], {}), '(target_input)\n', (214, 228), True, 'import os.path as osp\n'), ((248, 274), 'os.path.splitext', 'osp.splitext', (['target_input'], {}), '(target_input)\n', (260, 274), True, 'import os.path as osp\n')] |
# Code modified from original by @jvfe (BSD2)
# Copyright (c) 2020, jvfe
# https://github.com/jvfe/wdt_contribs/tree/master/complex_portal/src
import math
import re
from collections import defaultdict
from ftplib import FTP
from functools import lru_cache, reduce
from time import gmtime, strftime
import pandas as pd
from wikidata2df import wikidata2df
from wikidataintegrator import wdi_core
from wikidataintegrator.wdi_core import WDItemEngine
import json
with open("mappings.json", "r") as fp:
MAPPINGS = json.load(fp)
def get_list_of_complexes(
datasets, species_id, test_on_wikidata=True, max_complexes=999999
):
"""
Clean and process table of complexes
Parses table of complexes into Complex classes
Args:
datasets (DataFrame): one of the species datasets
species_id (str): The NCBI species ID
test_on_wikidata (bool): A boolean indicating whether to return only complexes that are or aren't on Wikidata. Defaults to True.
max_complexes (str): The maximum number of complexes to be modified on Wikidata
Returns:
list_of_complexes (list): Objects of the Complex class
"""
raw_table = pd.read_table(datasets[species_id], na_values=["-"])
if test_on_wikidata:
raw_table = remove_rows_on_wikidata(raw_table)
columns_to_keep = get_columns_to_keep()
raw_table = raw_table[columns_to_keep]
list_of_complexes = []
print("====== Parsing list to extract into class Complex ======")
# Counter for bot test
counter = 0
for complex_id in raw_table["#Complex ac"]:
counter = counter + 1
list_of_complexes.append(Complex(raw_table, complex_id))
if counter == max_complexes:
break
return list_of_complexes
def update_complex(login_instance, protein_complex, references):
"""
Updates the information for an existing complex on Wikidata.
Args:
login_instance: A Wikidata Integrator login instance
protein_complex: An object of the class Complex containing the information for a protein complex
references: The set of references for WDI
"""
instance_of = wdi_core.WDItemID(
value="Q22325163", prop_nr="P279", references=references
)
subclass_of = wdi_core.WDItemID(
value="Q107509287", prop_nr="P31", references=references
)
found_in_taxon = wdi_core.WDItemID(
value=protein_complex.taxon_qid, prop_nr="P703", references=references
)
complex_portal_id = wdi_core.WDString(
value=protein_complex.complex_id, prop_nr="P7718", references=references
)
data = [instance_of, subclass_of, found_in_taxon, complex_portal_id]
has_parts = []
for component in protein_complex.list_of_components:
quantity = component.quantity
component_qid = component.qid
print(f"Component QID: {component_qid}")
def is_nan(string):
return string != string
if is_nan(component_qid):
break
if quantity != "0" and not math.isnan(int(quantity)):
print(f"Quantity of this component: {str(quantity)}")
# Quantity is valid. 0 represents unknown in Complex Portal.
quantity_qualifier = wdi_core.WDQuantity(
value=int(quantity), prop_nr="P1114", is_qualifier=True
)
statement = wdi_core.WDItemID(
value=component_qid,
prop_nr="P527",
qualifiers=[quantity_qualifier],
references=references,
)
else:
statement = wdi_core.WDItemID(
value=component_qid, prop_nr="P527", references=references
)
has_parts.append(statement)
data.extend(has_parts)
# Reference table via https://w.wiki/3dTC
go_statements = []
go_reference = pd.read_csv("./reference_go_terms.csv")
for go_term in protein_complex.go_ids:
# Considers that each term has only one GO type
try:
row = go_reference[go_reference["id"] == go_term]
obj = row["go_term_qid"].values[0]
label = row["go_termLabel"].values[0]
prop = row["go_props_qid"].values[0]
# Heuristic: Cell components containing the word "complex" in the label
# are actually superclasses.
if "complex" in label and prop == "P681":
prop = "P279"
statement = wdi_core.WDItemID(
value=obj, prop_nr=prop, references=references
)
go_statements.append(statement)
except BaseException as e:
print(e)
print("Problem with " + go_term)
with open("errors/log.csv", "a") as f:
f.write(f"{go_term},'problem with GO term'\n")
data.extend(go_statements)
label = protein_complex.name
aliases = protein_complex.aliases
taxon_name = get_wikidata_label(protein_complex.taxon_qid)
descriptions = {
"en": "macromolecular complex found in " + taxon_name,
"pt": "complexo macromolecular encontrado em " + taxon_name,
"pt-br": "complexo macromolecular encontrado em " + taxon_name,
"nl": "macromoleculair complex gevonden in " + taxon_name,
"de": "makromolekularer Komplex auffindbar in " + taxon_name,
}
# For the list below, the bot will not remove values added on Wikidata
properties_to_append_value = ["P703", "P680", "P681", "P682", "P527"]
wd_item = wdi_core.WDItemEngine(
data=data,
append_value=properties_to_append_value,
debug=True,
)
wd_item.set_label(label=label, lang="en")
wd_item.set_aliases(aliases, lang="en")
# As fast-run is set, I will not update descriptions.
for lang, description in descriptions.items():
wd_item.set_description(description, lang=lang)
wd_item.write(login_instance)
class ComplexComponent:
def __init__(self, external_id, quantity):
self.external_id = external_id
self.quantity = quantity
self.get_qid_for_component()
def get_qid_for_component(self):
external_id = self.external_id
print(external_id)
if "CHEBI" in self.external_id:
external_id = external_id.replace("CHEBI:", "")
# ChEBI ID (P683)
self.qid = get_wikidata_item_by_propertyvalue("P683", external_id)
elif "CPX" in self.external_id:
# Complex Portal ID (P7718)
self.qid = get_wikidata_item_by_propertyvalue("P7718", self.external_id)
elif "URS" in self.external_id:
# RNACentral ID (P8697)
self.qid = get_wikidata_item_by_propertyvalue("P8697", self.external_id)
else:
# UniProt protein ID (P352)
self.qid = get_wikidata_item_by_propertyvalue("P352", self.external_id)
class Complex:
def __init__(self, dataset, complex_id):
self.complex_id = complex_id
# Info is a 1 row data frame with the following columns:
# #Complex ac
# Recommended name
# Aliases for complex
# Taxonomy identifier
# Identifiers (and stoichiometry) of molecules in complex
# Confidence
# Experimental evidence
# Go Annotations
# Cross references
# Description
# Complex properties
# Complex assembly
# Ligand
# Disease
# Agonist
# Antagonist
# Comment
# Source
# Expanded participant list
self.info = dataset[dataset["#Complex ac"] == complex_id]
self.list_of_components = []
self.go_ids = []
self.extract_fields()
print(f"Parsing {self.name}")
def extract_fields(self):
self.get_name()
self.get_aliases()
self.get_components()
self.get_go_ids()
self.get_wikidata_ids()
def get_name(self):
self.name = self.info["Recommended name"].values[0]
def get_aliases(self):
aliases_string = self.info["Aliases for complex"].values[0]
# "-" represents NA in this column
# Sometimes we get true NAs there
if aliases_string == "-" or not isinstance(aliases_string, str):
self.aliases = []
else:
self.aliases = aliases_string.split("|")
def get_components(self):
molecules_column = "Identifiers (and stoichiometry) of molecules in complex"
molecules_string = self.info[molecules_column].values[0]
molecules = molecules_string.split("|")
matches_quantities = [re.search(r"\((.*)\)", i) for i in molecules]
quantities = [m.group(1) for m in matches_quantities]
matches_uniprot_ids = [re.search(r"(.*)\(.*\)", i) for i in molecules]
uniprot_ids = [m.group(1) for m in matches_uniprot_ids]
component_and_quantities = dict(zip(uniprot_ids, quantities))
for external_id in component_and_quantities:
component = ComplexComponent(
external_id, component_and_quantities[external_id]
)
self.list_of_components.append(component)
def get_go_ids(self):
go_column = "Go Annotations"
try:
go_string = self.info[go_column].values[0]
go_list = re.findall(pattern="GO:[0-9]*", string=go_string)
self.go_ids = go_list
except Exception:
print(f"No GOs for {self.complex_id}")
def get_wikidata_ids(self):
# NCBI taxonomy ID (P685)
tax_id = self.info["Taxonomy identifier"].values[0]
self.taxon_qid = get_wikidata_item_by_propertyvalue("P685", int(tax_id))
def get_wikidata_complexes():
"""Gets all Wikidata items with a Complex Portal ID property"""
print("====== Getting complexes on Wikidata ======")
get_macromolecular = """
SELECT ?item ?ComplexPortalID
WHERE
{
?item wdt:P7718 ?ComplexPortalID .
}"""
wikidata_complexes = WDItemEngine.execute_sparql_query(
get_macromolecular, as_dataframe=True
).replace({"http://www.wikidata.org/entity/": ""}, regex=True)
return wikidata_complexes
def get_wikidata_label(qid, langcode="en"):
"""Gets a Wikidata item for a determined property-value pair
Args:
qid (str): The qid to get the label
langcode (str): The language code of the label
"""
query_result = WDItemEngine.execute_sparql_query(
f'SELECT ?label WHERE {{ wd:{qid} rdfs:label ?label. FILTER(LANG(?label)="{langcode}") }}'
)
try:
match = query_result["results"]["bindings"][0]
except IndexError:
print(f"Couldn't find label for {qid}")
raise ("label nof found for " + qid)
label = match["label"]["value"]
return label
@lru_cache(maxsize=None)
def get_wikidata_item_by_propertyvalue(property, value, mappings=MAPPINGS):
"""Gets a Wikidata item for a determined property-value pair
Args:
property (str): The property to search
value (str): The value of said property
"""
try:
qid = mappings[property][value]
return str(qid)
except:
pass
query_result = WDItemEngine.execute_sparql_query(
f'SELECT distinct ?item WHERE {{ ?item wdt:{property} "{value}" }}'
)
try:
match = query_result["results"]["bindings"][0]
except IndexError:
print(f"Couldn't find item for {value}")
if "URS" in value:
with open("errors/rna_central_log.csv", "a") as f:
f.write(f"{value},'not found'\n")
with open("errors/log.csv", "a") as f:
f.write(f"{value},'not found'\n")
return pd.np.NaN
qid = match["item"]["value"]
qid = qid.split("/")[4]
try:
mappings[property][str(value)] = str(qid)
except:
mappings[property] = {}
mappings[property][str(value)] = str(qid)
with open("mappings.json", "w") as fp:
json.dump(MAPPINGS, fp, sort_keys=True, indent=4)
return qid
def get_complex_portal_species_ids():
"""Gets a dictionary of Complex portal datasets
Returns a dictionary of species as keys and dataset url as values.
"""
domain = "ftp.ebi.ac.uk"
complex_data = "pub/databases/intact/complex/current/complextab/"
print("====== Getting Complex Portal Species IDs ======")
ftp = FTP(domain)
ftp.login()
ftp.cwd(complex_data)
files = ftp.nlst()
species_list = []
for species in files:
if "tsv" in species:
species_list.append(species.replace(".tsv", "").strip())
query = (
"""
SELECT ?itemLabel ?id WHERE {
VALUES ?id { """
+ '"'
+ '" "'.join(species_list)
+ '"'
+ """ }
?item wdt:P685 ?id.
SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE],en". }
}
"""
)
df = wikidata2df(query)
return df
def get_complex_portal_dataset_urls():
"""Gets a dictionary of Complex portal datasets
Returns a dictionary of species as keys and dataset url as values.
"""
domain = "ftp.ebi.ac.uk"
complex_data = "pub/databases/intact/complex/current/complextab/"
print("====== Getting Complex Portal datasets via FTP ======")
ftp = FTP(domain)
ftp.login()
ftp.cwd(complex_data)
files = ftp.nlst()
string_replacements = (".tsv", ""), ("_", " ")
cp_datasets = defaultdict()
for species in files:
if "README" not in species:
current_key = reduce(
lambda a, kv: a.replace(*kv), string_replacements, species
)
cp_datasets[current_key] = f"ftp://{domain}/{complex_data}{species}"
return cp_datasets
def remove_rows_on_wikidata(complex_dataframe):
"""
Return complex portal entities that don't have Wikidata links.
"""
print("====== Checking which complexes are not on Wikidata ======")
wikidata_complexes = get_wikidata_complexes()
merged_data = pd.merge(
wikidata_complexes,
complex_dataframe,
how="outer",
left_on=["ComplexPortalID"],
right_on=["#Complex ac"],
indicator=True,
)
missing_from_wikidata = merged_data[merged_data["_merge"] == "right_only"][
complex_dataframe.columns
]
keep = get_columns_to_keep()
missing_from_wikidata = missing_from_wikidata[keep]
return missing_from_wikidata
def split_complexes(species_dataframe):
complex_dfs = [
species_dataframe[
species_dataframe["#Complex ac"] == unique_complex
].reset_index()
for unique_complex in species_dataframe["#Complex ac"].unique()
]
return complex_dfs
def prepare_refs(species_id):
stated_in = wdi_core.WDItemID(value="Q47196990", prop_nr="P248", is_reference=True)
wikidata_time = strftime("+%Y-%m-%dT00:00:00Z", gmtime())
retrieved = wdi_core.WDTime(wikidata_time, prop_nr="P813", is_reference=True)
ftp_url = "https://ftp.ebi.ac.uk/pub/databases/intact/complex/current/complextab"
ref_url = wdi_core.WDString(ftp_url, prop_nr="P854", is_reference=True)
filename_in_archive = f"{species_id}.tsv"
# reference of filename in archive (P7793)
ref_filename = wdi_core.WDString(
filename_in_archive, prop_nr="P7793", is_reference=True
)
references = [[stated_in, retrieved, ref_url, ref_filename]]
return references
def get_columns_to_keep():
keep = [
"#Complex ac",
"Recommended name",
"Aliases for complex",
"Taxonomy identifier",
"Go Annotations",
"Identifiers (and stoichiometry) of molecules in complex",
"Description",
]
return keep
| [
"wikidataintegrator.wdi_core.WDItemEngine.execute_sparql_query",
"wikidataintegrator.wdi_core.WDTime",
"ftplib.FTP",
"wikidata2df.wikidata2df",
"pandas.read_csv",
"pandas.merge",
"wikidataintegrator.wdi_core.WDItemID",
"collections.defaultdict",
"pandas.read_table",
"wikidataintegrator.wdi_core.WD... | [((10803, 10826), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': 'None'}), '(maxsize=None)\n', (10812, 10826), False, 'from functools import lru_cache, reduce\n'), ((515, 528), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (524, 528), False, 'import json\n'), ((1174, 1226), 'pandas.read_table', 'pd.read_table', (['datasets[species_id]'], {'na_values': "['-']"}), "(datasets[species_id], na_values=['-'])\n", (1187, 1226), True, 'import pandas as pd\n'), ((2158, 2233), 'wikidataintegrator.wdi_core.WDItemID', 'wdi_core.WDItemID', ([], {'value': '"""Q22325163"""', 'prop_nr': '"""P279"""', 'references': 'references'}), "(value='Q22325163', prop_nr='P279', references=references)\n", (2175, 2233), False, 'from wikidataintegrator import wdi_core\n'), ((2267, 2342), 'wikidataintegrator.wdi_core.WDItemID', 'wdi_core.WDItemID', ([], {'value': '"""Q107509287"""', 'prop_nr': '"""P31"""', 'references': 'references'}), "(value='Q107509287', prop_nr='P31', references=references)\n", (2284, 2342), False, 'from wikidataintegrator import wdi_core\n'), ((2379, 2472), 'wikidataintegrator.wdi_core.WDItemID', 'wdi_core.WDItemID', ([], {'value': 'protein_complex.taxon_qid', 'prop_nr': '"""P703"""', 'references': 'references'}), "(value=protein_complex.taxon_qid, prop_nr='P703',\n references=references)\n", (2396, 2472), False, 'from wikidataintegrator import wdi_core\n'), ((2508, 2603), 'wikidataintegrator.wdi_core.WDString', 'wdi_core.WDString', ([], {'value': 'protein_complex.complex_id', 'prop_nr': '"""P7718"""', 'references': 'references'}), "(value=protein_complex.complex_id, prop_nr='P7718',\n references=references)\n", (2525, 2603), False, 'from wikidataintegrator import wdi_core\n'), ((3864, 3903), 'pandas.read_csv', 'pd.read_csv', (['"""./reference_go_terms.csv"""'], {}), "('./reference_go_terms.csv')\n", (3875, 3903), True, 'import pandas as pd\n'), ((5515, 5604), 'wikidataintegrator.wdi_core.WDItemEngine', 'wdi_core.WDItemEngine', 
([], {'data': 'data', 'append_value': 'properties_to_append_value', 'debug': '(True)'}), '(data=data, append_value=properties_to_append_value,\n debug=True)\n', (5536, 5604), False, 'from wikidataintegrator import wdi_core\n'), ((10427, 10562), 'wikidataintegrator.wdi_core.WDItemEngine.execute_sparql_query', 'WDItemEngine.execute_sparql_query', (['f"""SELECT ?label WHERE {{ wd:{qid} rdfs:label ?label. FILTER(LANG(?label)="{langcode}") }}"""'], {}), '(\n f\'SELECT ?label WHERE {{ wd:{qid} rdfs:label ?label. FILTER(LANG(?label)="{langcode}") }}\'\n )\n', (10460, 10562), False, 'from wikidataintegrator.wdi_core import WDItemEngine\n'), ((11200, 11307), 'wikidataintegrator.wdi_core.WDItemEngine.execute_sparql_query', 'WDItemEngine.execute_sparql_query', (['f"""SELECT distinct ?item WHERE {{ ?item wdt:{property} "{value}" }}"""'], {}), '(\n f\'SELECT distinct ?item WHERE {{ ?item wdt:{property} "{value}" }}\')\n', (11233, 11307), False, 'from wikidataintegrator.wdi_core import WDItemEngine\n'), ((12391, 12402), 'ftplib.FTP', 'FTP', (['domain'], {}), '(domain)\n', (12394, 12402), False, 'from ftplib import FTP\n'), ((12931, 12949), 'wikidata2df.wikidata2df', 'wikidata2df', (['query'], {}), '(query)\n', (12942, 12949), False, 'from wikidata2df import wikidata2df\n'), ((13314, 13325), 'ftplib.FTP', 'FTP', (['domain'], {}), '(domain)\n', (13317, 13325), False, 'from ftplib import FTP\n'), ((13460, 13473), 'collections.defaultdict', 'defaultdict', ([], {}), '()\n', (13471, 13473), False, 'from collections import defaultdict\n'), ((14042, 14178), 'pandas.merge', 'pd.merge', (['wikidata_complexes', 'complex_dataframe'], {'how': '"""outer"""', 'left_on': "['ComplexPortalID']", 'right_on': "['#Complex ac']", 'indicator': '(True)'}), "(wikidata_complexes, complex_dataframe, how='outer', left_on=[\n 'ComplexPortalID'], right_on=['#Complex ac'], indicator=True)\n", (14050, 14178), True, 'import pandas as pd\n'), ((14796, 14867), 'wikidataintegrator.wdi_core.WDItemID', 
'wdi_core.WDItemID', ([], {'value': '"""Q47196990"""', 'prop_nr': '"""P248"""', 'is_reference': '(True)'}), "(value='Q47196990', prop_nr='P248', is_reference=True)\n", (14813, 14867), False, 'from wikidataintegrator import wdi_core\n'), ((14946, 15011), 'wikidataintegrator.wdi_core.WDTime', 'wdi_core.WDTime', (['wikidata_time'], {'prop_nr': '"""P813"""', 'is_reference': '(True)'}), "(wikidata_time, prop_nr='P813', is_reference=True)\n", (14961, 15011), False, 'from wikidataintegrator import wdi_core\n'), ((15112, 15173), 'wikidataintegrator.wdi_core.WDString', 'wdi_core.WDString', (['ftp_url'], {'prop_nr': '"""P854"""', 'is_reference': '(True)'}), "(ftp_url, prop_nr='P854', is_reference=True)\n", (15129, 15173), False, 'from wikidataintegrator import wdi_core\n'), ((15286, 15360), 'wikidataintegrator.wdi_core.WDString', 'wdi_core.WDString', (['filename_in_archive'], {'prop_nr': '"""P7793"""', 'is_reference': '(True)'}), "(filename_in_archive, prop_nr='P7793', is_reference=True)\n", (15303, 15360), False, 'from wikidataintegrator import wdi_core\n'), ((11982, 12031), 'json.dump', 'json.dump', (['MAPPINGS', 'fp'], {'sort_keys': '(True)', 'indent': '(4)'}), '(MAPPINGS, fp, sort_keys=True, indent=4)\n', (11991, 12031), False, 'import json\n'), ((14920, 14928), 'time.gmtime', 'gmtime', ([], {}), '()\n', (14926, 14928), False, 'from time import gmtime, strftime\n'), ((3374, 3489), 'wikidataintegrator.wdi_core.WDItemID', 'wdi_core.WDItemID', ([], {'value': 'component_qid', 'prop_nr': '"""P527"""', 'qualifiers': '[quantity_qualifier]', 'references': 'references'}), "(value=component_qid, prop_nr='P527', qualifiers=[\n quantity_qualifier], references=references)\n", (3391, 3489), False, 'from wikidataintegrator import wdi_core\n'), ((3602, 3679), 'wikidataintegrator.wdi_core.WDItemID', 'wdi_core.WDItemID', ([], {'value': 'component_qid', 'prop_nr': '"""P527"""', 'references': 'references'}), "(value=component_qid, prop_nr='P527', references=references)\n", (3619, 3679), 
False, 'from wikidataintegrator import wdi_core\n'), ((4460, 4525), 'wikidataintegrator.wdi_core.WDItemID', 'wdi_core.WDItemID', ([], {'value': 'obj', 'prop_nr': 'prop', 'references': 'references'}), '(value=obj, prop_nr=prop, references=references)\n', (4477, 4525), False, 'from wikidataintegrator import wdi_core\n'), ((8614, 8640), 're.search', 're.search', (['"""\\\\((.*)\\\\)"""', 'i'], {}), "('\\\\((.*)\\\\)', i)\n", (8623, 8640), False, 'import re\n'), ((8754, 8782), 're.search', 're.search', (['"""(.*)\\\\(.*\\\\)"""', 'i'], {}), "('(.*)\\\\(.*\\\\)', i)\n", (8763, 8782), False, 'import re\n'), ((9321, 9370), 're.findall', 're.findall', ([], {'pattern': '"""GO:[0-9]*"""', 'string': 'go_string'}), "(pattern='GO:[0-9]*', string=go_string)\n", (9331, 9370), False, 'import re\n'), ((10001, 10073), 'wikidataintegrator.wdi_core.WDItemEngine.execute_sparql_query', 'WDItemEngine.execute_sparql_query', (['get_macromolecular'], {'as_dataframe': '(True)'}), '(get_macromolecular, as_dataframe=True)\n', (10034, 10073), False, 'from wikidataintegrator.wdi_core import WDItemEngine\n')] |
import scipy.special as sc
from tests.src.utils import split_list, __print
# .11 Serial Test
def serial_test(key, n, m=3, b_print=True):
def compute(s,m):
if m == 0:
return 0
if m == 1: head = ''
else : head = s[0:(m-1)]
s = s + head
v = [0]*2**m
for i in range(m):
ss=s[i:]
split_key_m=list(split_list(ss,m))
if len(split_key_m[-1]) != len(split_key_m[0]):
split_key_m=split_key_m[0:-1]
split_key_m=list(map(lambda x : int(x,2),split_key_m))
for i in range(2**m):
v[i] = v[i]+split_key_m.count(i)
psi2_m = 2**m/n*(sum(list(map(lambda x : x**2,v)))) - n
return psi2_m
key = ''.join(list(map(str, key)))
psi2_m0=compute(key,m)
psi2_m1=compute(key,(m-1))
psi2_m2=compute(key,(m-2))
d_psi2 = psi2_m0 - psi2_m1
d2_psi2 = psi2_m0 - 2*psi2_m1 + psi2_m2
p1=sc.gammaincc(2**(m-2),d_psi2 / 2)
p2=sc.gammaincc(2**(m-3),d2_psi2 / 2)
b1 = (p1 >= 0.01)
b2 = (p2 >= 0.01)
__print(b_print, '{:40} : {:.3f} -> {} '.format('serial test',p1,b1))
__print(b_print, '{:40} : {:.3f} -> {} '.format('',p2,b2))
return [p1, p2], all([b1, b2]) | [
"scipy.special.gammaincc",
"tests.src.utils.split_list"
] | [((968, 1006), 'scipy.special.gammaincc', 'sc.gammaincc', (['(2 ** (m - 2))', '(d_psi2 / 2)'], {}), '(2 ** (m - 2), d_psi2 / 2)\n', (980, 1006), True, 'import scipy.special as sc\n'), ((1010, 1049), 'scipy.special.gammaincc', 'sc.gammaincc', (['(2 ** (m - 3))', '(d2_psi2 / 2)'], {}), '(2 ** (m - 3), d2_psi2 / 2)\n', (1022, 1049), True, 'import scipy.special as sc\n'), ((390, 407), 'tests.src.utils.split_list', 'split_list', (['ss', 'm'], {}), '(ss, m)\n', (400, 407), False, 'from tests.src.utils import split_list, __print\n')] |
# Copyright 2018 Amazon.com, Inc. or its affiliates.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file.
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import boto3
import json
import time
import base64
import re
from datetime import datetime
def find_cluster_name(ec2_c, instance_id):
"""
Provided an instance that is currently, or should be part of an ECS cluster
determines the ECS cluster name. This is derived from the user-data
which contains a command to inject the cluster name into ECS agent config
files.
On failure we raise an exception which means this instance isn't a ECS
cluster member so we can proceed with termination.
"""
response = ec2_c.describe_instance_attribute(
InstanceId=instance_id,
Attribute='userData'
)
userdata = base64.b64decode(response['UserData']['Value'])
clustername = re.search("ECS_CLUSTER\s?=\s?(.*?)\s", str(userdata))
if clustername:
return(clustername.group(1))
raise(ValueError(
"Unable to determine the ECS cluster name from instance metadata"
))
def container_instance_healthy(ecs_c, cluster_name, instance_id, context):
"""
Lists all the instances in the cluster to see if we have one joined
that matches the instance ID of the one we've just started.
If we find a cluster member that matches our recently launched instance
ID, checks whether it's in a status of ACTIVE and shows it's ECS
agent is connected to the cluster.
There could be additional checks put in as desired to verify the
instance is healthy!
If we're getting short of time waiting for stability return false
so we can get a continuation.
"""
while True:
paginator = ecs_c.get_paginator('list_container_instances')
instances = paginator.paginate(
cluster=cluster_name,
PaginationConfig={
"PageSize": 10
}
)
for instance in instances:
response = ecs_c.describe_container_instances(
cluster=cluster_name,
containerInstances=instance["containerInstanceArns"]
)
for container_instance in response["containerInstances"]:
if container_instance["ec2InstanceId"] == instance_id:
if container_instance["status"] == "ACTIVE":
if container_instance["agentConnected"] is True:
return(True)
if context.get_remaining_time_in_millis() <= 40000:
return(False)
time.sleep(30)
def find_hook_duration(asg_c, asg_name, instance_id):
"""
Our Lambda function operates in five-minute time samples, however
we eventually give up our actions if they take more than 60 minutes.
This function finds out how long we've been working on our present
operation by listing current Autoscaling activities, and checking
for our instance ID to get a datestamp.
We can then compare that datestamp with present to determine our
overall duration.
"""
paginator = asg_c.get_paginator('describe_scaling_activities')
response_iterator = paginator.paginate(
AutoScalingGroupName=asg_name,
PaginationConfig={
'PageSize': 10,
}
)
hook_start_time = datetime.utcnow()
for response in response_iterator:
for activity in response["Activities"]:
if re.match(
"Terminating.*{}".format(instance_id),
activity["Description"]
):
hook_start_time = activity["StartTime"]
continue
hook_start_time = hook_start_time.replace(tzinfo=None)
hook_duration = (datetime.utcnow() - hook_start_time).total_seconds()
return(int(hook_duration))
def lambda_handler(event, context):
print("Received event {}".format(json.dumps(event)))
# Our hook message can look different depending on how we're called.
# The initial call from AutoScaling has one format, and the call when
# we send a HeartBeat message has another. We need to massage them into
# a consistent format. We'll follow the format used by AutoScaling
# versus the HeartBeat message.
hook_message = {}
# Identify if this is the AutoScaling call
if "LifecycleHookName" in event["detail"]:
hook_message = event["detail"]
# Otherwise this is a HeartBeat call
else:
hook_message = event["detail"]["requestParameters"]
# Heartbeat comes with instanceId instead of EC2InstanceId
hook_message["EC2InstanceId"] = hook_message["instanceId"]
# Our other three elements need to be capitlized
hook_message["LifecycleHookName"] = hook_message["lifecycleHookName"]
hook_message["AutoScalingGroupName"] = \
hook_message["autoScalingGroupName"]
hook_message["LifecycleActionToken"] = \
hook_message["lifecycleActionToken"]
print("Received Lifecycle Hook message {}".format(
json.dumps(hook_message)
))
try:
ec2_c = boto3.client('ec2')
ecs_c = boto3.client('ecs')
asg_c = boto3.client('autoscaling')
print("Determining our ECS Cluster name . . .")
cluster_name = find_cluster_name(
ec2_c,
hook_message["EC2InstanceId"]
)
print(". . . found ECS Cluster name '{}'".format(
cluster_name
))
print("Checking status of new instance in the ECS Cluster . . .")
if container_instance_healthy(
ecs_c, cluster_name, hook_message["EC2InstanceId"], context
):
print(". . . Instance {} connected and active".format(
hook_message["EC2InstanceId"]
))
print("Proceeding with instance {} Launch".format(
hook_message["EC2InstanceId"]
))
asg_c.complete_lifecycle_action(
LifecycleHookName=hook_message["LifecycleHookName"],
AutoScalingGroupName=hook_message["AutoScalingGroupName"],
LifecycleActionToken=hook_message["LifecycleActionToken"],
LifecycleActionResult="CONTINUE",
InstanceId=hook_message["EC2InstanceId"]
)
else:
# Figure out how long we've be at this.
hook_duration = find_hook_duration(
asg_c,
hook_message["AutoScalingGroupName"],
hook_message["EC2InstanceId"]
)
print("Determined we cannot proceed with launch.")
hook_duration = find_hook_duration(
asg_c,
hook_message["AutoScalingGroupName"],
hook_message["EC2InstanceId"]
)
print("We've been waiting {} seconds for instance join.".format(
hook_duration
))
if hook_duration > 3600:
print("Exceeded 3600 seconds waiting to stabilize. Aborting")
asg_c.complete_lifecycle_action(
LifecycleHookName=hook_message["LifecycleHookName"],
AutoScalingGroupName=hook_message["AutoScalingGroupName"],
LifecycleActionToken=hook_message["LifecycleActionToken"],
LifecycleActionResult="ABANDON",
InstanceId=hook_message["EC2InstanceId"]
)
else:
print("Sending a Heartbeat to continue waiting")
asg_c.record_lifecycle_action_heartbeat(
LifecycleHookName=hook_message["LifecycleHookName"],
AutoScalingGroupName=hook_message["AutoScalingGroupName"],
LifecycleActionToken=hook_message["LifecycleActionToken"],
InstanceId=hook_message["EC2InstanceId"]
)
except Exception as e:
print("Exception: {}".format(e))
# Exception handling simply involves a raise so we can be retried.
# CWE should re-try us at least 3 times. Hopefully the issue resolves
# next invocation.
raise
| [
"boto3.client",
"datetime.datetime.utcnow",
"json.dumps",
"base64.b64decode",
"time.sleep"
] | [((1227, 1274), 'base64.b64decode', 'base64.b64decode', (["response['UserData']['Value']"], {}), "(response['UserData']['Value'])\n", (1243, 1274), False, 'import base64\n'), ((3758, 3775), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3773, 3775), False, 'from datetime import datetime\n'), ((3003, 3017), 'time.sleep', 'time.sleep', (['(30)'], {}), '(30)\n', (3013, 3017), False, 'import time\n'), ((5544, 5563), 'boto3.client', 'boto3.client', (['"""ec2"""'], {}), "('ec2')\n", (5556, 5563), False, 'import boto3\n'), ((5580, 5599), 'boto3.client', 'boto3.client', (['"""ecs"""'], {}), "('ecs')\n", (5592, 5599), False, 'import boto3\n'), ((5616, 5643), 'boto3.client', 'boto3.client', (['"""autoscaling"""'], {}), "('autoscaling')\n", (5628, 5643), False, 'import boto3\n'), ((4338, 4355), 'json.dumps', 'json.dumps', (['event'], {}), '(event)\n', (4348, 4355), False, 'import json\n'), ((5486, 5510), 'json.dumps', 'json.dumps', (['hook_message'], {}), '(hook_message)\n', (5496, 5510), False, 'import json\n'), ((4177, 4194), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4192, 4194), False, 'from datetime import datetime\n')] |
from traceback import print_exc
from uuid import uuid4
from flask import Flask, request, send_from_directory
from function_plot import *
app = Flask(__name__)
max_simultaneous_requests = 8
simultaneous_requests = 0
@app.route('/generate/')
def generate():
global simultaneous_requests
simultaneous_requests += 1
try:
if simultaneous_requests >= max_simultaneous_requests:
raise Exception("Too many simultaneous requests")
fn = request.args.get("fn")
x_min = float(request.args.get("x_min", -8))
x_max = float(request.args.get("x_max", 8))
y_min = float(request.args.get("y_min", -5))
y_max = float(request.args.get("y_max", 5))
filename = str(uuid4()) + ".mp4"
return plot(fn, filename, x_min, x_max, y_min, y_max)
except:
print_exc()
return ""
finally:
simultaneous_requests -= 1
@app.route('/videos/<path:filename>')
def download_file(filename):
return send_from_directory(path, filename, mimetype="video/mp4", as_attachment=True)
| [
"flask.request.args.get",
"flask.send_from_directory",
"flask.Flask",
"uuid.uuid4",
"traceback.print_exc"
] | [((144, 159), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (149, 159), False, 'from flask import Flask, request, send_from_directory\n'), ((989, 1066), 'flask.send_from_directory', 'send_from_directory', (['path', 'filename'], {'mimetype': '"""video/mp4"""', 'as_attachment': '(True)'}), "(path, filename, mimetype='video/mp4', as_attachment=True)\n", (1008, 1066), False, 'from flask import Flask, request, send_from_directory\n'), ((472, 494), 'flask.request.args.get', 'request.args.get', (['"""fn"""'], {}), "('fn')\n", (488, 494), False, 'from flask import Flask, request, send_from_directory\n'), ((518, 547), 'flask.request.args.get', 'request.args.get', (['"""x_min"""', '(-8)'], {}), "('x_min', -8)\n", (534, 547), False, 'from flask import Flask, request, send_from_directory\n'), ((571, 599), 'flask.request.args.get', 'request.args.get', (['"""x_max"""', '(8)'], {}), "('x_max', 8)\n", (587, 599), False, 'from flask import Flask, request, send_from_directory\n'), ((623, 652), 'flask.request.args.get', 'request.args.get', (['"""y_min"""', '(-5)'], {}), "('y_min', -5)\n", (639, 652), False, 'from flask import Flask, request, send_from_directory\n'), ((676, 704), 'flask.request.args.get', 'request.args.get', (['"""y_max"""', '(5)'], {}), "('y_max', 5)\n", (692, 704), False, 'from flask import Flask, request, send_from_directory\n'), ((831, 842), 'traceback.print_exc', 'print_exc', ([], {}), '()\n', (840, 842), False, 'from traceback import print_exc\n'), ((730, 737), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (735, 737), False, 'from uuid import uuid4\n')] |
import inspect
import sys
import numpy as np
import attrdict
from mtwaffle import graphs
from mtwaffle import mt
class Site(attrdict.AttrDict):
index_map = {
'xx': [0, 0],
'xy': [0, 1],
'yx': [1, 0],
'yy': [1, 1]
}
EXCLUDED_CALLABLES = ('between_freqs', )
def __init__(self, freqs, zs, name='', phase_func=None, **kwargs):
super(attrdict.AttrDict, self).__init__()
self.freqs = np.asarray(freqs)
self.zs = np.asarray(zs)
self.name = name
if phase_func is None:
phase_func = mt.phase
self.phase_func = phase_func
for key, value in kwargs.items():
setattr(self, key, value)
@property
def periods(self):
return 1. / self.freqs
@property
def phases(self):
return self.phase_func(self.zs)
def inspect_mt_callable(self, name):
f = mt.callables[name]
argnames = [ # Find arguments of callable from mtwaffle.mt
p.name for p in inspect.signature(f).parameters.values()
if p.kind == p.POSITIONAL_OR_KEYWORD and p.default is p.empty
]
return f, argnames
def help(self, output=sys.stdout):
'''Print a list of the attributes which are available.'''
output.write('''
Attributes of mtwaffle.mtsite.Site are calculated using functions from the mtwaffle.mt module:
mtsite.Site mtwaffle.mt function
attribute (args are Site attributes) Function description
-------------- ------------------------------ ----------------------------------------------
''')
label = lambda f: f.__doc__.splitlines()[0] if f.__doc__ else 'MISSING DOC'
fnames = []
for fname, f in mt.callables.items():
try:
getattr(self, fname)
except:
pass
else:
fnames.append(fname)
for fname in fnames:
f, argnames = self.inspect_mt_callable(fname)
cname = self.__class__.__name__
argsig = ', '.join(['{}'.format(arg) for arg in argnames])
source = '{}({})'.format(fname, argsig)
label_attr = '{}'.format(fname.ljust(14))
label_source = source.ljust(30)
label_help = label(f)
output.write('{} {} {}\n'.format(label_attr, label_source, label_help))
# print('{fname}({sig})'.format(
# fname=fname, sig=', '.join([
# '{c}.{a}'.format(c=self.__class__.__name__, a=arg) for arg in f_arg_names])))
# output.write('{}.{} -- {}\n'.format(
# self.__class__.__name__,
# fname.ljust(max([len(fi) for fi in fnames])),
# doc(mt.callables[fname])
# )
# )
def get_property(self, key):
# Is the key ending with xx, xy, yx, or yy?
if key[-2:] in self.index_map:
indices = self.index_map[key[-2:]]
if key.startswith('res_'):
return self.appres[[Ellipsis] + indices]
elif key.startswith('phase_'):
return self.phases[[Ellipsis] + indices]
elif key.startswith('zr_'):
return self.zs.real[[Ellipsis] + indices]
elif key.startswith('zi_'):
return self.zs.imag[[Ellipsis] + indices]
# See if we can complete a function from mtwaffle.mt using the
# existing attributes in this Site:
elif key in mt.callables and not key in self.EXCLUDED_CALLABLES:
f, argnames = self.inspect_mt_callable(key)
return f(*[getattr(self, arg) for arg in argnames])
return False
def __getattr__(self, key):
value = self.get_property(key)
if value is False:
return super(attrdict.AttrDict, self).__getattr__(key)
else:
return value
def __getitem__(self, key):
value = self.get_property(key)
if value is False:
return super(attrdict.AttrDict, self).__getitem__(key)
else:
return value
def plot_res_phase(self, **kwargs):
args = (
(self.freqs, self.freqs),
(self.res_xy, self.res_yx),
(self.phase_xy, self.phase_yx),
)
if not 'res_indiv_kws' in kwargs:
kwargs['res_indiv_kws'] = (
{'label': 'xy', 'color': 'b'},
{'label': 'yx', 'color': 'g'},
)
return graphs.plot_res_phase(*args, **kwargs)
def plot_impedance_tensors(self, *args, **kwargs):
return graphs.plot_impedance_tensors(
self.zs, self.freqs, **kwargs)
def plot_ptensell(self, *args, **kwargs):
return graphs.plot_ptensell(
self.ptensors, self.freqs, *args, **kwargs
)
def plot_ptensell_filled(self, *args, **kwargs):
return graphs.plot_ptensell_filled(
self.ptensors, self.freqs, *args, **kwargs
)
def plot_mohr_imp(self, *args, **kwargs):
kwargs['title'] = kwargs.get('title', self.name)
return graphs.plot_mohr_imp(
self.zs, self.freqs, *args, **kwargs
)
def plot_mohr_ptensor(self, *args, **kwargs):
return graphs.plot_mohr_ptensor(
self.ptensors, self.freqs, *args, **kwargs
) | [
"mtwaffle.graphs.plot_impedance_tensors",
"mtwaffle.mt.callables.items",
"mtwaffle.graphs.plot_mohr_imp",
"numpy.asarray",
"mtwaffle.graphs.plot_ptensell",
"inspect.signature",
"mtwaffle.graphs.plot_mohr_ptensor",
"mtwaffle.graphs.plot_ptensell_filled",
"mtwaffle.graphs.plot_res_phase"
] | [((450, 467), 'numpy.asarray', 'np.asarray', (['freqs'], {}), '(freqs)\n', (460, 467), True, 'import numpy as np\n'), ((486, 500), 'numpy.asarray', 'np.asarray', (['zs'], {}), '(zs)\n', (496, 500), True, 'import numpy as np\n'), ((1758, 1778), 'mtwaffle.mt.callables.items', 'mt.callables.items', ([], {}), '()\n', (1776, 1778), False, 'from mtwaffle import mt\n'), ((4560, 4598), 'mtwaffle.graphs.plot_res_phase', 'graphs.plot_res_phase', (['*args'], {}), '(*args, **kwargs)\n', (4581, 4598), False, 'from mtwaffle import graphs\n'), ((4670, 4730), 'mtwaffle.graphs.plot_impedance_tensors', 'graphs.plot_impedance_tensors', (['self.zs', 'self.freqs'], {}), '(self.zs, self.freqs, **kwargs)\n', (4699, 4730), False, 'from mtwaffle import graphs\n'), ((4806, 4870), 'mtwaffle.graphs.plot_ptensell', 'graphs.plot_ptensell', (['self.ptensors', 'self.freqs', '*args'], {}), '(self.ptensors, self.freqs, *args, **kwargs)\n', (4826, 4870), False, 'from mtwaffle import graphs\n'), ((4962, 5033), 'mtwaffle.graphs.plot_ptensell_filled', 'graphs.plot_ptensell_filled', (['self.ptensors', 'self.freqs', '*args'], {}), '(self.ptensors, self.freqs, *args, **kwargs)\n', (4989, 5033), False, 'from mtwaffle import graphs\n'), ((5175, 5233), 'mtwaffle.graphs.plot_mohr_imp', 'graphs.plot_mohr_imp', (['self.zs', 'self.freqs', '*args'], {}), '(self.zs, self.freqs, *args, **kwargs)\n', (5195, 5233), False, 'from mtwaffle import graphs\n'), ((5322, 5390), 'mtwaffle.graphs.plot_mohr_ptensor', 'graphs.plot_mohr_ptensor', (['self.ptensors', 'self.freqs', '*args'], {}), '(self.ptensors, self.freqs, *args, **kwargs)\n', (5346, 5390), False, 'from mtwaffle import graphs\n'), ((1026, 1046), 'inspect.signature', 'inspect.signature', (['f'], {}), '(f)\n', (1043, 1046), False, 'import inspect\n')] |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""connect() module function unit tests."""
import unittest
from unittest import mock
import google.auth.credentials
INSTANCE = "test-instance"
DATABASE = "test-database"
PROJECT = "test-project"
USER_AGENT = "user-agent"
def _make_credentials():
class _CredentialsWithScopes(
google.auth.credentials.Credentials, google.auth.credentials.Scoped
):
pass
return mock.Mock(spec=_CredentialsWithScopes)
@mock.patch("google.cloud.spanner_v1.Client")
class Test_connect(unittest.TestCase):
def test_w_implicit(self, mock_client):
from google.cloud.spanner_dbapi import connect
from google.cloud.spanner_dbapi import Connection
client = mock_client.return_value
instance = client.instance.return_value
database = instance.database.return_value
connection = connect(INSTANCE, DATABASE)
self.assertIsInstance(connection, Connection)
self.assertIs(connection.instance, instance)
client.instance.assert_called_once_with(INSTANCE)
self.assertIs(connection.database, database)
instance.database.assert_called_once_with(DATABASE, pool=None)
# Datbase constructs its own pool
self.assertIsNotNone(connection.database._pool)
def test_w_explicit(self, mock_client):
from google.cloud.spanner_v1.pool import AbstractSessionPool
from google.cloud.spanner_dbapi import connect
from google.cloud.spanner_dbapi import Connection
from google.cloud.spanner_dbapi.version import PY_VERSION
credentials = _make_credentials()
pool = mock.create_autospec(AbstractSessionPool)
client = mock_client.return_value
instance = client.instance.return_value
database = instance.database.return_value
connection = connect(
INSTANCE, DATABASE, PROJECT, credentials, pool=pool, user_agent=USER_AGENT,
)
self.assertIsInstance(connection, Connection)
mock_client.assert_called_once_with(
project=PROJECT, credentials=credentials, client_info=mock.ANY
)
client_info = mock_client.call_args_list[0][1]["client_info"]
self.assertEqual(client_info.user_agent, USER_AGENT)
self.assertEqual(client_info.python_version, PY_VERSION)
self.assertIs(connection.instance, instance)
client.instance.assert_called_once_with(INSTANCE)
self.assertIs(connection.database, database)
instance.database.assert_called_once_with(DATABASE, pool=pool)
def test_w_instance_not_found(self, mock_client):
from google.cloud.spanner_dbapi import connect
client = mock_client.return_value
instance = client.instance.return_value
instance.exists.return_value = False
with self.assertRaises(ValueError):
connect(INSTANCE, DATABASE)
instance.exists.assert_called_once_with()
def test_w_database_not_found(self, mock_client):
from google.cloud.spanner_dbapi import connect
client = mock_client.return_value
instance = client.instance.return_value
database = instance.database.return_value
database.exists.return_value = False
with self.assertRaises(ValueError):
connect(INSTANCE, DATABASE)
database.exists.assert_called_once_with()
def test_w_credential_file_path(self, mock_client):
from google.cloud.spanner_dbapi import connect
from google.cloud.spanner_dbapi import Connection
from google.cloud.spanner_dbapi.version import PY_VERSION
credentials_path = "dummy/file/path.json"
connection = connect(
INSTANCE,
DATABASE,
PROJECT,
credentials=credentials_path,
user_agent=USER_AGENT,
)
self.assertIsInstance(connection, Connection)
factory = mock_client.from_service_account_json
factory.assert_called_once_with(
credentials_path, project=PROJECT, client_info=mock.ANY,
)
client_info = factory.call_args_list[0][1]["client_info"]
self.assertEqual(client_info.user_agent, USER_AGENT)
self.assertEqual(client_info.python_version, PY_VERSION)
| [
"google.cloud.spanner_dbapi.connect",
"unittest.mock.create_autospec",
"unittest.mock.patch",
"unittest.mock.Mock"
] | [((1012, 1056), 'unittest.mock.patch', 'mock.patch', (['"""google.cloud.spanner_v1.Client"""'], {}), "('google.cloud.spanner_v1.Client')\n", (1022, 1056), False, 'from unittest import mock\n'), ((970, 1008), 'unittest.mock.Mock', 'mock.Mock', ([], {'spec': '_CredentialsWithScopes'}), '(spec=_CredentialsWithScopes)\n', (979, 1008), False, 'from unittest import mock\n'), ((1416, 1443), 'google.cloud.spanner_dbapi.connect', 'connect', (['INSTANCE', 'DATABASE'], {}), '(INSTANCE, DATABASE)\n', (1423, 1443), False, 'from google.cloud.spanner_dbapi import connect\n'), ((2185, 2226), 'unittest.mock.create_autospec', 'mock.create_autospec', (['AbstractSessionPool'], {}), '(AbstractSessionPool)\n', (2205, 2226), False, 'from unittest import mock\n'), ((2389, 2477), 'google.cloud.spanner_dbapi.connect', 'connect', (['INSTANCE', 'DATABASE', 'PROJECT', 'credentials'], {'pool': 'pool', 'user_agent': 'USER_AGENT'}), '(INSTANCE, DATABASE, PROJECT, credentials, pool=pool, user_agent=\n USER_AGENT)\n', (2396, 2477), False, 'from google.cloud.spanner_dbapi import connect\n'), ((4238, 4331), 'google.cloud.spanner_dbapi.connect', 'connect', (['INSTANCE', 'DATABASE', 'PROJECT'], {'credentials': 'credentials_path', 'user_agent': 'USER_AGENT'}), '(INSTANCE, DATABASE, PROJECT, credentials=credentials_path,\n user_agent=USER_AGENT)\n', (4245, 4331), False, 'from google.cloud.spanner_dbapi import connect\n'), ((3418, 3445), 'google.cloud.spanner_dbapi.connect', 'connect', (['INSTANCE', 'DATABASE'], {}), '(INSTANCE, DATABASE)\n', (3425, 3445), False, 'from google.cloud.spanner_dbapi import connect\n'), ((3850, 3877), 'google.cloud.spanner_dbapi.connect', 'connect', (['INSTANCE', 'DATABASE'], {}), '(INSTANCE, DATABASE)\n', (3857, 3877), False, 'from google.cloud.spanner_dbapi import connect\n')] |
from hearthbreaker.cards.base import SpellCard
from hearthbreaker.constants import CHARACTER_CLASS, CARD_RARITY
from hearthbreaker.tags.base import BuffUntil, Buff
from hearthbreaker.tags.event import TurnStarted
from hearthbreaker.tags.status import Stealth, Taunt, Frozen
import hearthbreaker.targeting
class TheCoin(SpellCard):
def __init__(self):
super().__init__("The Coin", 0, CHARACTER_CLASS.ALL, CARD_RARITY.COMMON, False)
def use(self, player, game):
super().use(player, game)
if player.mana < 10:
player.mana += 1
class ArmorPlating(SpellCard):
def __init__(self):
super().__init__("Armor Plating", 1, CHARACTER_CLASS.ALL, CARD_RARITY.COMMON, False,
target_func=hearthbreaker.targeting.find_minion_spell_target)
def use(self, player, game):
super().use(player, game)
self.target.increase_health(1)
class EmergencyCoolant(SpellCard):
def __init__(self):
super().__init__("Emergency Coolant", 1, CHARACTER_CLASS.ALL, CARD_RARITY.COMMON, False,
target_func=hearthbreaker.targeting.find_minion_spell_target)
def use(self, player, game):
super().use(player, game)
self.target.add_buff(Buff(Frozen()))
class FinickyCloakfield(SpellCard):
def __init__(self):
super().__init__("Finicky Cloakfield", 1, CHARACTER_CLASS.ALL, CARD_RARITY.COMMON, False,
target_func=hearthbreaker.targeting.find_friendly_minion_spell_target)
def use(self, player, game):
super().use(player, game)
self.target.add_buff(BuffUntil(Stealth(), TurnStarted()))
class ReversingSwitch(SpellCard):
def __init__(self):
super().__init__("Reversing Switch", 1, CHARACTER_CLASS.ALL, CARD_RARITY.COMMON, False,
target_func=hearthbreaker.targeting.find_minion_spell_target)
def use(self, player, game):
super().use(player, game)
temp_attack = self.target.calculate_attack()
temp_health = self.target.health
if temp_attack == 0:
self.target.die(None)
else:
self.target.set_attack_to(temp_health)
self.target.set_health_to(temp_attack)
class RustyHorn(SpellCard):
def __init__(self):
super().__init__("Rusty Horn", 1, CHARACTER_CLASS.ALL, CARD_RARITY.COMMON, False,
target_func=hearthbreaker.targeting.find_minion_spell_target)
def use(self, player, game):
super().use(player, game)
self.target.add_buff(Buff(Taunt()))
class TimeRewinder(SpellCard):
def __init__(self):
super().__init__("Time Rewinder", 1, CHARACTER_CLASS.ALL, CARD_RARITY.COMMON, False,
target_func=hearthbreaker.targeting.find_friendly_minion_spell_target)
def use(self, player, game):
super().use(player, game)
self.target.bounce()
class WhirlingBlades(SpellCard):
def __init__(self):
super().__init__("Whirling Blades", 1, CHARACTER_CLASS.ALL, CARD_RARITY.COMMON, False,
target_func=hearthbreaker.targeting.find_minion_spell_target)
def use(self, player, game):
super().use(player, game)
self.target.change_attack(1)
spare_part_list = [ArmorPlating(), EmergencyCoolant(), FinickyCloakfield(), TimeRewinder(), ReversingSwitch(),
RustyHorn(), WhirlingBlades()]
class GallywixsCoin(SpellCard):
def __init__(self):
super().__init__("Gallywix's Coin", 0, CHARACTER_CLASS.ALL, CARD_RARITY.COMMON, False)
def use(self, player, game):
super().use(player, game)
if player.mana < 10:
player.mana += 1
| [
"hearthbreaker.tags.status.Taunt",
"hearthbreaker.tags.event.TurnStarted",
"hearthbreaker.tags.status.Stealth",
"hearthbreaker.tags.status.Frozen"
] | [((1262, 1270), 'hearthbreaker.tags.status.Frozen', 'Frozen', ([], {}), '()\n', (1268, 1270), False, 'from hearthbreaker.tags.status import Stealth, Taunt, Frozen\n'), ((1636, 1645), 'hearthbreaker.tags.status.Stealth', 'Stealth', ([], {}), '()\n', (1643, 1645), False, 'from hearthbreaker.tags.status import Stealth, Taunt, Frozen\n'), ((1647, 1660), 'hearthbreaker.tags.event.TurnStarted', 'TurnStarted', ([], {}), '()\n', (1658, 1660), False, 'from hearthbreaker.tags.event import TurnStarted\n'), ((2580, 2587), 'hearthbreaker.tags.status.Taunt', 'Taunt', ([], {}), '()\n', (2585, 2587), False, 'from hearthbreaker.tags.status import Stealth, Taunt, Frozen\n')] |
# Copyright 2021 Dakewe Biotech Corporation. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import PIL.BmpImagePlugin
import cv2
import numpy as np
import torch
import torchvision.transforms as transforms
from PIL import Image
__all__ = [
"opencv2pil", "opencv2tensor", "pil2opencv", "process_image"
]
def opencv2pil(image: np.ndarray) -> PIL.BmpImagePlugin.BmpImageFile:
""" OpenCV Convert to PIL.Image format.
Returns:
PIL.Image.
"""
image = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
return image
def opencv2tensor(image: np.ndarray, gpu: int) -> torch.Tensor:
""" OpenCV Convert to torch.Tensor format.
Returns:
torch.Tensor.
"""
rgb_image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
nhwc_image = torch.from_numpy(rgb_image).div(255.0).unsqueeze(0)
input_tensor = nhwc_image.permute(0, 3, 1, 2)
if gpu is not None:
input_tensor = input_tensor.cuda(gpu, non_blocking=True)
return input_tensor
def pil2opencv(image: PIL.BmpImagePlugin.BmpImageFile) -> np.ndarray:
""" PIL.Image Convert to OpenCV format.
Returns:
np.ndarray.
"""
image = cv2.cvtColor(np.asarray(image), cv2.COLOR_RGB2BGR)
return image
def process_image(image: PIL.BmpImagePlugin.BmpImageFile, gpu: int = None) -> torch.Tensor:
""" PIL.Image Convert to PyTorch format.
Args:
image (PIL.BmpImagePlugin.BmpImageFile): File read by PIL.Image.
gpu (int): Graphics card model.
Returns:
torch.Tensor.
"""
tensor = transforms.ToTensor()(image)
input_tensor = tensor.unsqueeze(0)
if gpu is not None:
input_tensor = input_tensor.cuda(gpu, non_blocking=True)
return input_tensor
| [
"torchvision.transforms.ToTensor",
"torch.from_numpy",
"numpy.asarray",
"cv2.cvtColor"
] | [((1328, 1366), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2RGB'], {}), '(image, cv2.COLOR_BGR2RGB)\n', (1340, 1366), False, 'import cv2\n'), ((1098, 1136), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2RGB'], {}), '(image, cv2.COLOR_BGR2RGB)\n', (1110, 1136), False, 'import cv2\n'), ((1783, 1800), 'numpy.asarray', 'np.asarray', (['image'], {}), '(image)\n', (1793, 1800), True, 'import numpy as np\n'), ((2158, 2179), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (2177, 2179), True, 'import torchvision.transforms as transforms\n'), ((1384, 1411), 'torch.from_numpy', 'torch.from_numpy', (['rgb_image'], {}), '(rgb_image)\n', (1400, 1411), False, 'import torch\n')] |
import core
import model
import settings
def get_communitylist():
res = []
for community in model.Community.select():
res.append(community.title)
return res
if __name__ == "__main__":
regionlist = settings.REGIONLIST # only pinyin support
model.database_init()
core.GetHouseByRegionlist(regionlist)
core.GetRentByRegionlist(regionlist)
core.GetCommunityByRegionlist(regionlist) # Init,scrapy celllist and insert database; could run only 1st time
communitylist = get_communitylist() # Read celllist from database
core.GetSellByCommunitylist(communitylist)
| [
"core.GetHouseByRegionlist",
"core.GetCommunityByRegionlist",
"model.database_init",
"core.GetSellByCommunitylist",
"model.Community.select",
"core.GetRentByRegionlist"
] | [((102, 126), 'model.Community.select', 'model.Community.select', ([], {}), '()\n', (124, 126), False, 'import model\n'), ((272, 293), 'model.database_init', 'model.database_init', ([], {}), '()\n', (291, 293), False, 'import model\n'), ((298, 335), 'core.GetHouseByRegionlist', 'core.GetHouseByRegionlist', (['regionlist'], {}), '(regionlist)\n', (323, 335), False, 'import core\n'), ((340, 376), 'core.GetRentByRegionlist', 'core.GetRentByRegionlist', (['regionlist'], {}), '(regionlist)\n', (364, 376), False, 'import core\n'), ((381, 422), 'core.GetCommunityByRegionlist', 'core.GetCommunityByRegionlist', (['regionlist'], {}), '(regionlist)\n', (410, 422), False, 'import core\n'), ((567, 609), 'core.GetSellByCommunitylist', 'core.GetSellByCommunitylist', (['communitylist'], {}), '(communitylist)\n', (594, 609), False, 'import core\n')] |
from linked_list.linked_listf import LinkedList
def ll_merge(list_A, list_B):
curr_B = list_B.head
curr_A = list_A.head
temp_C = None
while curr_A._next and curr_B:
curr_B = list_B.head
temp_A = curr_A._next
temp_B = curr_B._next
# if curr_B._next._next:
# temp_C = curr_B._next._next
curr_A._next = curr_B
curr_B._next = temp_A
list_B.head = temp_B
curr_A= curr_A._next._next
curr_B=list_B.head
if curr_B:
curr_A._next=curr_B
return list_A.head
ones = LinkedList()
ones.insert('1')
ones.insert('2')
ones.insert('3')
ones.insert('4')
ones.insert('5')
print(ones.print())
tens = LinkedList()
tens.insert('10')
tens.insert('20')
# tens.insert('30')
# tens.insert('40')
# tens.insert('50')
print(tens.print())
ll_merged(ones,tens)
print(ones.print())
| [
"linked_list.linked_listf.LinkedList"
] | [((573, 585), 'linked_list.linked_listf.LinkedList', 'LinkedList', ([], {}), '()\n', (583, 585), False, 'from linked_list.linked_listf import LinkedList\n'), ((699, 711), 'linked_list.linked_listf.LinkedList', 'LinkedList', ([], {}), '()\n', (709, 711), False, 'from linked_list.linked_listf import LinkedList\n')] |
import time
class Timer(object):
def __init__(self):
self._start = 0
self._end = 0
def __enter__(self):
self._start = time.time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._end = time.time()
@property
def duration(self):
if self._end == 0:
return time.time() - self._start
else:
return self._end - self._start
class VerboseTimer(Timer):
def __init__(self, name):
super(VerboseTimer, self).__init__()
self._name = name
def __enter__(self):
print('START: %s...' % self._name)
return super(VerboseTimer, self).__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
super(VerboseTimer, self).__exit__(exc_type, exc_val, exc_tb)
print('DONE: %s took %.3f seconds.' % (self._name, self.duration)) | [
"time.time"
] | [((153, 164), 'time.time', 'time.time', ([], {}), '()\n', (162, 164), False, 'import time\n'), ((257, 268), 'time.time', 'time.time', ([], {}), '()\n', (266, 268), False, 'import time\n'), ((354, 365), 'time.time', 'time.time', ([], {}), '()\n', (363, 365), False, 'import time\n')] |
#!/usr/bin/env python
# vim: expandtab:tabstop=4:shiftwidth=4
"""
This is a script that can be used to tag EBS volumes in OpenShift v3.
This script assume that your AWS credentials are setup in ~/.aws/credentials like this:
[default]
aws_access_key_id = xxxx
aws_secret_access_key = xxxx
Or that environment variables are setup:
AWS_ACCESS_KEY_ID=xxxx
AWS_SECRET_ACCESS_KEY=xxxx
"""
# Ignoring module name
# pylint: disable=invalid-name
import argparse
import os
import sys
import logging
from logging.handlers import RotatingFileHandler
from openshift_tools.cloud.aws.ebs_snapshotter import SUPPORTED_SCHEDULES, EbsSnapshotter
from openshift_tools.cloud.aws.ebs_util import EbsUtil
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logFormatter = logging.Formatter('%(asctime)s %(levelname)s %(name)s %(message)s')
logFile = '/var/log/ec2-add-snapshot-tag-to-ebs-volumes.log'
logRFH = RotatingFileHandler(logFile, mode='a', maxBytes=2*1024*1024, backupCount=5, delay=0)
logRFH.setFormatter(logFormatter)
logRFH.setLevel(logging.INFO)
logger.addHandler(logRFH)
logConsole = logging.StreamHandler()
logConsole.setFormatter(logFormatter)
logConsole.setLevel(logging.WARNING)
logger.addHandler(logConsole)
TAGGER_SUPPORTED_SCHEDULES = ['never'] + SUPPORTED_SCHEDULES
ROOT_VOLUME_PURPOSE = "root volume"
DOCKER_VOLUME_PURPOSE = "docker storage volume"
PV_PURPOSE = "customer persistent volume"
class TaggerCli(object):
""" Implements the cli interface to the EBS snapshot tagger. """
def __init__(self):
self.args = None
self.parse_args()
if self.args.verbose:
logConsole.setLevel(logging.INFO)
if self.args.debug:
logConsole.setLevel(logging.DEBUG)
if self.args.skip_boto_logs:
logging.getLogger('boto').setLevel(logging.WARNING)
def parse_args(self):
""" parse the args from the cli """
parser = argparse.ArgumentParser(description='EBS Volume Tagger')
parser.add_argument('--master-root-volumes', choices=TAGGER_SUPPORTED_SCHEDULES,
help='The snapshot schedule that master root volumes ' + \
'should be tagged with.')
parser.add_argument('--node-root-volumes', choices=TAGGER_SUPPORTED_SCHEDULES,
help='The snapshot schedule that node root volumes ' + \
'should be tagged with.')
parser.add_argument('--docker-storage-volumes', choices=TAGGER_SUPPORTED_SCHEDULES,
help='The snapshot schedule that docker storage ' + \
'volumes should be tagged with.')
parser.add_argument('--autoprovisioned-pv-volumes', choices=TAGGER_SUPPORTED_SCHEDULES,
help='The snapshot schedule that autoprovisioned pv ' + \
'volumes should be tagged with.')
parser.add_argument('--manually-provisioned-pv-volumes', choices=TAGGER_SUPPORTED_SCHEDULES,
help='The snapshot schedule that manually provisioned pv ' + \
'volumes should be tagged with.')
parser.add_argument('--unidentified-volumes', choices=TAGGER_SUPPORTED_SCHEDULES,
help='The snapshot schedule that unidentified ' + \
'volumes should be tagged with.')
parser.add_argument('--set-name-tag', action='store_true', default=False,
help='Add the Name tag to volumes of the host where this ' + \
'volume is attached.')
parser.add_argument('--set-purpose-tag', action='store_true', default=False,
help='Add the purpose tag to volumes')
parser.add_argument('--retag-volumes', action='store_true', default=False,
help='Retag volumes that already have a snapshot tag. ' + \
'DANGEROUS - Only do this if you know what you\'re doing!')
parser.add_argument('--aws-creds-profile', required=False,
help='The AWS credentials profile to use.')
parser.add_argument('--dry-run', action='store_true', default=False,
help='Say what would have been done, but don\'t actually do it.')
parser.add_argument('-v', '--verbose', action='store_true', default=None, help='Verbose?')
parser.add_argument('--debug', action='store_true', default=None, help='Debug?')
parser.add_argument('--skip-boto-logs', action='store_true', default=False, help='Skip boto logs')
parser.add_argument('--region', required=True,
help='The region that we want to process snapshots in')
self.args = parser.parse_args()
def set_master_root_volume_tags(self, master_root_vol_ids, ebs_snapshotter, ebs_util):
""" Sets tags on master root volumes """
logger.debug("Setting master root volume tags:")
ebs_snapshotter.set_volume_snapshot_tag(master_root_vol_ids, self.args.master_root_volumes,
prefix=" ", dry_run=self.args.dry_run)
if self.args.set_purpose_tag:
ebs_util.set_volume_purpose_tag(master_root_vol_ids, ROOT_VOLUME_PURPOSE,
prefix=" ", dry_run=self.args.dry_run)
if self.args.set_name_tag:
ebs_util.set_volume_name_tag(master_root_vol_ids, prefix=" ", dry_run=self.args.dry_run)
def set_node_root_volume_tags(self, node_root_vol_ids, ebs_snapshotter, ebs_util):
""" Sets tags on node root volumes """
logger.debug("Setting node root volume tags:")
ebs_snapshotter.set_volume_snapshot_tag(node_root_vol_ids, self.args.node_root_volumes,
prefix=" ", dry_run=self.args.dry_run)
if self.args.set_purpose_tag:
ebs_util.set_volume_purpose_tag(node_root_vol_ids, ROOT_VOLUME_PURPOSE,
prefix=" ", dry_run=self.args.dry_run)
if self.args.set_name_tag:
ebs_util.set_volume_name_tag(node_root_vol_ids, prefix=" ", dry_run=self.args.dry_run)
def set_docker_storage_volume_tags(self, docker_storage_vol_ids, ebs_snapshotter, ebs_util):
""" Sets tags on docker storage volumes """
logger.debug("Setting docker storage volume tags:")
ebs_snapshotter.set_volume_snapshot_tag(docker_storage_vol_ids, self.args.docker_storage_volumes,
prefix=" ", dry_run=self.args.dry_run)
if self.args.set_purpose_tag:
ebs_util.set_volume_purpose_tag(docker_storage_vol_ids, DOCKER_VOLUME_PURPOSE,
prefix=" ", dry_run=self.args.dry_run)
if self.args.set_name_tag:
ebs_util.set_volume_name_tag(docker_storage_vol_ids, prefix=" ", dry_run=self.args.dry_run)
def set_manually_provisioned_pv_volume_tags(self, manually_provisioned_pv_vol_ids, ebs_snapshotter, ebs_util):
""" Sets tags on manually provisioned pv volumes """
logger.debug("Setting manually provisioned pv volume tags:")
ebs_snapshotter.set_volume_snapshot_tag(manually_provisioned_pv_vol_ids,
self.args.manually_provisioned_pv_volumes,
prefix=" ", dry_run=self.args.dry_run)
# NOTE: not setting Name tag because PVs don't belong to a specific host.
if self.args.set_purpose_tag:
ebs_util.set_volume_purpose_tag(manually_provisioned_pv_vol_ids, PV_PURPOSE,
prefix=" ", dry_run=self.args.dry_run)
def set_autoprovisioned_pv_volume_tags(self, autoprovisioned_pv_vol_ids, ebs_snapshotter, ebs_util):
""" Sets tags on autoprovisioned pv volumes """
logger.debug("Setting autoprovisioned pv volume tags:")
ebs_snapshotter.set_volume_snapshot_tag(autoprovisioned_pv_vol_ids,
self.args.autoprovisioned_pv_volumes,
prefix=" ", dry_run=self.args.dry_run)
# NOTE: not setting Name tag because PVs don't belong to a specific host.
if self.args.set_purpose_tag:
ebs_util.set_volume_purpose_tag(autoprovisioned_pv_vol_ids, PV_PURPOSE,
prefix=" ", dry_run=self.args.dry_run)
def set_unidentified_volume_tags(self, unidentified_vol_ids, ebs_snapshotter):
""" Sets tags on unidentified pv volumes """
logger.debug("Setting unidentified volume tags:")
ebs_snapshotter.set_volume_snapshot_tag(unidentified_vol_ids, self.args.unidentified_volumes,
prefix=" ", dry_run=self.args.dry_run)
# NOTE: not setting purpose tag because volumes are unidentified, so we don't know.
# NOTE: not setting Name tag because we don't know if it makes sense in this context.
def main(self):
""" Serves as the entry point for the CLI """
logger.info('Starting snapshot tagging')
if self.args.aws_creds_profile:
os.environ['AWS_PROFILE'] = self.args.aws_creds_profile
ebs_snapshotter = EbsSnapshotter(self.args.region, verbose=True)
if not ebs_snapshotter.is_region_valid(self.args.region):
logger.info("Invalid region")
sys.exit(1)
else:
logger.info("Region: %s:", self.args.region)
ebs_util = EbsUtil(self.args.region, verbose=True)
ebs_snapshotter = EbsSnapshotter(self.args.region, verbose=True)
# filter out the already tagged volumes
skip_volume_ids = []
if not self.args.retag_volumes:
# They don't want us to retag volumes that are already tagged, so
# add the already tagged volumes to the list of volume IDs to skip.
skip_volume_ids += ebs_snapshotter.get_already_tagged_volume_ids()
logger.info('Skipping this many volume ids: %s', len(skip_volume_ids))
vol_ids = ebs_util.get_classified_volume_ids(skip_volume_ids)
for id_name, id_list in vol_ids._asdict().iteritems():
logger.info('name: %s amount: %s', id_name, len(id_list))
## Actually create the snapshot tags now
if self.args.master_root_volumes and vol_ids.master_root:
self.set_master_root_volume_tags(vol_ids.master_root, ebs_snapshotter, ebs_util)
if self.args.node_root_volumes and vol_ids.node_root:
self.set_node_root_volume_tags(vol_ids.node_root, ebs_snapshotter, ebs_util)
if self.args.docker_storage_volumes and vol_ids.docker_storage:
self.set_docker_storage_volume_tags(vol_ids.docker_storage, ebs_snapshotter, ebs_util)
if self.args.manually_provisioned_pv_volumes and vol_ids.manually_provisioned_pv:
self.set_manually_provisioned_pv_volume_tags(vol_ids.manually_provisioned_pv,
ebs_snapshotter, ebs_util)
if self.args.autoprovisioned_pv_volumes and vol_ids.autoprovisioned_pv:
self.set_autoprovisioned_pv_volume_tags(vol_ids.autoprovisioned_pv, ebs_snapshotter,
ebs_util)
if self.args.unidentified_volumes and vol_ids.unidentified:
self.set_unidentified_volume_tags(vol_ids.unidentified, ebs_snapshotter)
if __name__ == "__main__":
TaggerCli().main()
| [
"logging.getLogger",
"openshift_tools.cloud.aws.ebs_util.EbsUtil",
"logging.StreamHandler",
"argparse.ArgumentParser",
"logging.Formatter",
"logging.handlers.RotatingFileHandler",
"openshift_tools.cloud.aws.ebs_snapshotter.EbsSnapshotter",
"sys.exit"
] | [((715, 734), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (732, 734), False, 'import logging\n'), ((781, 848), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(levelname)s %(name)s %(message)s"""'], {}), "('%(asctime)s %(levelname)s %(name)s %(message)s')\n", (798, 848), False, 'import logging\n'), ((920, 1012), 'logging.handlers.RotatingFileHandler', 'RotatingFileHandler', (['logFile'], {'mode': '"""a"""', 'maxBytes': '(2 * 1024 * 1024)', 'backupCount': '(5)', 'delay': '(0)'}), "(logFile, mode='a', maxBytes=2 * 1024 * 1024,\n backupCount=5, delay=0)\n", (939, 1012), False, 'from logging.handlers import RotatingFileHandler\n'), ((1108, 1131), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1129, 1131), False, 'import logging\n'), ((1939, 1995), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""EBS Volume Tagger"""'}), "(description='EBS Volume Tagger')\n", (1962, 1995), False, 'import argparse\n'), ((9515, 9561), 'openshift_tools.cloud.aws.ebs_snapshotter.EbsSnapshotter', 'EbsSnapshotter', (['self.args.region'], {'verbose': '(True)'}), '(self.args.region, verbose=True)\n', (9529, 9561), False, 'from openshift_tools.cloud.aws.ebs_snapshotter import SUPPORTED_SCHEDULES, EbsSnapshotter\n'), ((9682, 9693), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (9690, 9693), False, 'import sys\n'), ((9788, 9827), 'openshift_tools.cloud.aws.ebs_util.EbsUtil', 'EbsUtil', (['self.args.region'], {'verbose': '(True)'}), '(self.args.region, verbose=True)\n', (9795, 9827), False, 'from openshift_tools.cloud.aws.ebs_util import EbsUtil\n'), ((9858, 9904), 'openshift_tools.cloud.aws.ebs_snapshotter.EbsSnapshotter', 'EbsSnapshotter', (['self.args.region'], {'verbose': '(True)'}), '(self.args.region, verbose=True)\n', (9872, 9904), False, 'from openshift_tools.cloud.aws.ebs_snapshotter import SUPPORTED_SCHEDULES, EbsSnapshotter\n'), ((1799, 1824), 'logging.getLogger', 'logging.getLogger', (['"""boto"""'], 
{}), "('boto')\n", (1816, 1824), False, 'import logging\n')] |
import _jpype
import jpype
import common
from jpype.types import *
class JChar2TestCase(common.JPypeTestCase):
    """Fault-injection and conversion tests for the JChar primitive type.

    Tests decorated with ``common.requireInstrumentation`` arm a fault via
    ``_jpype.fault(...)`` that fires on the next matching internal call, so
    the exact ordering of statements in each test body is significant.
    """

    def setUp(self):
        common.JPypeTestCase.setUp(self)

    def testCharRange(self):
        # JChar should round-trip both ASCII and non-ASCII code points
        self.assertEqual(ord(str(jpype.JChar(65))), 65)
        self.assertEqual(ord(str(jpype.JChar(512))), 512)

    @common.requireInstrumentation
    def testJPChar_new(self):
        _jpype.fault("PyJPChar_new")
        with self.assertRaisesRegex(SystemError, "fault"):
            JChar("a")
        _jpype.fault("PyJPModule_getContext")
        with self.assertRaisesRegex(SystemError, "fault"):
            JChar("a")
        # final construction with no armed fault must succeed
        JChar("a")

    @common.requireInstrumentation
    def testJPChar_str(self):
        jc = JChar("a")
        _jpype.fault("PyJPChar_str")
        with self.assertRaisesRegex(SystemError, "fault"):
            str(jc)

    @common.requireInstrumentation
    def testJCharGetJavaConversion(self):
        _jpype.fault("JPCharType::findJavaConversion")
        with self.assertRaisesRegex(SystemError, "fault"):
            JChar._canConvertToJava(object())

    @common.requireInstrumentation
    def testJPJavaFrameCharArray(self):
        ja = JArray(JChar)(5)
        _jpype.fault("JPJavaFrame::NewCharArray")
        with self.assertRaisesRegex(SystemError, "fault"):
            JArray(JChar)(1)
        _jpype.fault("JPJavaFrame::SetCharArrayRegion")
        with self.assertRaisesRegex(SystemError, "fault"):
            ja[0] = 0
        _jpype.fault("JPJavaFrame::GetCharArrayRegion")
        with self.assertRaisesRegex(SystemError, "fault"):
            print(ja[0])
        _jpype.fault("JPJavaFrame::GetCharArrayElements")
        # Special case, only BufferError is allowed from getBuffer
        with self.assertRaises(BufferError):
            memoryview(ja[0:3])
        _jpype.fault("JPJavaFrame::ReleaseCharArrayElements")
        with self.assertRaisesRegex(SystemError, "fault"):
            ja[0:3] = bytes([1, 2, 3])
        # Not sure why this one changed.
        _jpype.fault("JPJavaFrame::ReleaseCharArrayElements")
        with self.assertRaisesRegex(SystemError, "fault"):
            jpype.JObject(ja[::2], jpype.JObject)
        _jpype.fault("JPJavaFrame::ReleaseCharArrayElements")

        def f():
            # Special case no fault is allowed
            memoryview(ja[0:3])
        f()
        ja = JArray(JChar)(5)  # lgtm [py/similar-function]
        _jpype.fault("JPCharType::setArrayRange")
        with self.assertRaisesRegex(SystemError, "fault"):
            ja[1:3] = [0, 0]

    def testFromObject(self):
        # arbitrary Python objects must not convert into char slots
        ja = JArray(JChar)(5)
        with self.assertRaises(TypeError):
            ja[1] = object()
        jf = JClass("jpype.common.Fixture")
        with self.assertRaises(TypeError):
            jf.static_char_field = object()
        with self.assertRaises(TypeError):
            jf().char_field = object()

    def testCharArrayAsString(self):
        t = JClass("jpype.array.TestArray")()
        v = t.getCharArray()
        self.assertEqual(str(v), 'avcd')

    def testArrayConversionChar(self):
        t = JClass("jpype.array.TestArray")()
        v = t.getCharArray()
        self.assertEqual(str(v[:]), 'avcd')

    def testArrayEqualsChar(self):
        contents = "abc"
        array = jpype.JArray(jpype.JChar)(contents)
        array2 = jpype.JArray(jpype.JChar)(contents)
        # equality is identity-based between distinct Java arrays,
        # but a char[] compares equal to the matching Python string
        self.assertEqual(array, array)
        self.assertNotEqual(array, array2)
        self.assertEqual(array, "abc")

    def testArrayHash(self):
        ja = JArray(JByte)([1, 2, 3])
        self.assertIsInstance(hash(ja), int)

    def testNone(self):
        self.assertEqual(JChar._canConvertToJava(None), "none")
class JCharTestCase(common.JPypeTestCase):
    """Arithmetic, comparison, and conversion tests for the JChar primitive.

    Bug fixed: ``testSub`` was defined twice, so the len() check was silently
    shadowed by the subtraction test and never collected by the test runner.
    The len() check is now ``testLen``.
    """

    def setUp(self):
        common.JPypeTestCase.setUp(self)
        self.nc = JChar('B')

    def testStr(self):
        self.assertEqual(type(str(self.nc)), str)
        self.assertEqual(str(self.nc), 'B')

    def testRepr(self):
        self.assertEqual(type(repr(self.nc)), str)
        self.assertEqual(repr(self.nc), "'B'")

    def testOrd(self):
        self.assertEqual(ord(self.nc), 66)

    def testInt(self):
        self.assertEqual(int(self.nc), 66)

    def testFloat(self):
        self.assertEqual(float(self.nc), 66.0)

    def testLen(self):
        # previously a duplicated ``testSub``; renamed so it actually runs
        self.assertEqual(len(self.nc), 1)

    def testHash(self):
        self.assertEqual(hash(self.nc), hash('B'))

    def testAdd(self):
        self.assertEqual(self.nc + 1, 67)
        self.assertIsInstance(self.nc + 1, int)
        self.assertEqual(self.nc + 1.1, 67.1)
        self.assertIsInstance(self.nc + 1.1, float)

    def testSub(self):
        self.assertEqual(self.nc - 1, 65)
        self.assertIsInstance(self.nc - 1, int)
        self.assertEqual(self.nc - 1.1, 64.9)
        self.assertIsInstance(self.nc - 1.1, float)

    def testMult(self):
        self.assertEqual(self.nc * 2, 132)
        self.assertIsInstance(self.nc * 2, int)
        self.assertEqual(self.nc * 0.25, 16.5)
        self.assertIsInstance(self.nc * 2.0, float)

    def testRshift(self):
        self.assertEqual(self.nc >> 1, 33)
        self.assertIsInstance(self.nc >> 2, int)

    def testLshift(self):
        self.assertEqual(self.nc << 1, 132)
        self.assertIsInstance(self.nc << 2, int)

    def testAnd(self):
        self.assertEqual(self.nc & 244, 66 & 244)
        self.assertIsInstance(self.nc & 2, int)

    def testOr(self):
        self.assertEqual(self.nc | 40, 66 | 40)
        self.assertIsInstance(self.nc | 2, int)

    def testXor(self):
        self.assertEqual(self.nc ^ 1, 66 ^ 1)
        self.assertIsInstance(self.nc ^ 1, int)

    def testPass(self):
        fixture = jpype.JClass('jpype.common.Fixture')()
        self.assertEqual(type(fixture.callChar(self.nc)), JChar)
        self.assertEqual(type(fixture.callObject(self.nc)), jpype.java.lang.Character)

    def check(self, u, v0, v1, v2):
        # helper: full ordering of u against a below (v0), equal (v1),
        # and above (v2) value, exercised in both operand orders
        self.assertEqual(v1, u)
        self.assertEqual(u, v1)
        self.assertNotEqual(v0, u)
        self.assertNotEqual(u, v0)
        self.assertNotEqual(v2, u)
        self.assertNotEqual(u, v2)
        self.assertTrue(u > v0)
        self.assertFalse(u > v2)
        self.assertTrue(u < v2)
        self.assertFalse(u < v0)
        self.assertTrue(v0 < u)
        self.assertFalse(v2 < u)
        self.assertTrue(v2 > u)
        self.assertFalse(v0 > u)
        self.assertTrue(u >= v1)
        self.assertFalse(u >= v2)
        self.assertTrue(v1 <= u)
        self.assertFalse(v2 <= u)

    def testCompareInt(self):
        self.check(self.nc, 65, 66, 67)

    def testCompareFloat(self):
        self.check(self.nc, 65.0, 66.0, 67.0)

    def testCompareJInt(self):
        self.check(self.nc, JInt(65), JInt(66), JInt(67))

    def testCompareJFloat(self):
        self.check(self.nc, JFloat(65.0), JFloat(66.0), JFloat(67.0))
class JCharBoxedTestCase(common.JPypeTestCase):
    """Tests for the boxed java.lang.Character type.

    Bug fixed: ``testSub`` was defined twice, so the len() check was silently
    shadowed by the subtraction test and never collected by the test runner.
    The len() check is now ``testLen``.
    """

    def setUp(self):
        common.JPypeTestCase.setUp(self)
        self.nc = jpype.java.lang.Character('B')

    def testStr(self):
        self.assertEqual(type(str(self.nc)), str)
        self.assertEqual(str(self.nc), 'B')

    def testRepr(self):
        self.assertEqual(type(repr(self.nc)), str)
        self.assertEqual(repr(self.nc), "'B'")

    def testOrd(self):
        self.assertEqual(ord(self.nc), 66)

    def testInt(self):
        self.assertEqual(int(self.nc), 66)

    def testFloat(self):
        self.assertEqual(float(self.nc), 66.0)

    def testLen(self):
        # previously a duplicated ``testSub``; renamed so it actually runs
        self.assertEqual(len(self.nc), 1)

    def testHash(self):
        self.assertEqual(hash(self.nc), hash('B'))

    def testAdd(self):
        self.assertEqual(self.nc + 1, 67)
        self.assertIsInstance(self.nc + 1, int)
        self.assertEqual(self.nc + 1.1, 67.1)
        self.assertIsInstance(self.nc + 1.1, float)

    def testSub(self):
        self.assertEqual(self.nc - 1, 65)
        self.assertIsInstance(self.nc - 1, int)
        self.assertEqual(self.nc - 1.1, 64.9)
        self.assertIsInstance(self.nc - 1.1, float)

    def testMult(self):
        self.assertEqual(self.nc * 2, 132)
        self.assertIsInstance(self.nc * 2, int)
        self.assertEqual(self.nc * 0.25, 16.5)
        self.assertIsInstance(self.nc * 2.0, float)

    def testRshift(self):
        self.assertEqual(self.nc >> 1, 33)
        self.assertIsInstance(self.nc >> 2, int)

    def testLshift(self):
        self.assertEqual(self.nc << 1, 132)
        self.assertIsInstance(self.nc << 2, int)

    def testAnd(self):
        self.assertEqual(self.nc & 244, 66 & 244)
        self.assertIsInstance(self.nc & 2, int)

    def testOr(self):
        self.assertEqual(self.nc | 40, 66 | 40)
        self.assertIsInstance(self.nc | 2, int)

    def testXor(self):
        self.assertEqual(self.nc ^ 1, 66 ^ 1)
        self.assertIsInstance(self.nc ^ 1, int)

    def testFloorDiv(self):
        self.assertEqual(self.nc // 3, 66 // 3)
        self.assertEqual(3 // self.nc, 3 // 66)

    def testDivMod(self):
        self.assertEqual(divmod(self.nc, 3), divmod(66, 3))
        self.assertEqual(divmod(3, self.nc), divmod(3, 66))

    def testInv(self):
        self.assertEqual(~self.nc, ~66)

    def testPos(self):
        self.assertEqual(+self.nc, +66)

    def testPass(self):
        fixture = jpype.JClass('jpype.common.Fixture')()
        self.assertEqual(type(fixture.callObject(self.nc)), type(self.nc))

    def check(self, u, v0, v1, v2):
        # helper: full ordering of u against a below (v0), equal (v1),
        # and above (v2) value, exercised in both operand orders
        self.assertEqual(v1, u)
        self.assertEqual(u, v1)
        self.assertNotEqual(v0, u)
        self.assertNotEqual(u, v0)
        self.assertNotEqual(v2, u)
        self.assertNotEqual(u, v2)
        self.assertTrue(u > v0)
        self.assertFalse(u > v2)
        self.assertTrue(u < v2)
        self.assertFalse(u < v0)
        self.assertTrue(v0 < u)
        self.assertFalse(v2 < u)
        self.assertTrue(v2 > u)
        self.assertFalse(v0 > u)
        self.assertTrue(u >= v1)
        self.assertFalse(u >= v2)
        self.assertTrue(v1 <= u)
        self.assertFalse(v2 <= u)

    def testCompareInt(self):
        self.check(self.nc, 65, 66, 67)

    def testCompareFloat(self):
        self.check(self.nc, 65.0, 66.0, 67.0)

    def testCompareJInt(self):
        self.check(self.nc, JInt(65), JInt(66), JInt(67))

    def testCompareJFloat(self):
        self.check(self.nc, JFloat(65.0), JFloat(66.0), JFloat(67.0))
class JCharBoxedNullTestCase(common.JPypeTestCase):
    """Tests for a null boxed java.lang.Character: every numeric operation
    must raise TypeError and string conversions render as 'None'.

    Bug fixed: ``testSub`` was defined twice, so the len() check was silently
    shadowed by the subtraction test and never collected by the test runner.
    The len() check is now ``testLen``.
    """

    def setUp(self):
        common.JPypeTestCase.setUp(self)
        self.nc = jpype.JObject(None, jpype.java.lang.Character)

    def testStr(self):
        self.assertEqual(type(str(self.nc)), str)
        self.assertEqual(str(self.nc), 'None')

    def testRepr(self):
        self.assertEqual(type(repr(self.nc)), str)
        self.assertEqual(repr(self.nc), 'None')

    def testInt(self):
        with self.assertRaises(TypeError):
            int(self.nc)

    def testFloat(self):
        with self.assertRaises(TypeError):
            float(self.nc)

    def testLen(self):
        # previously a duplicated ``testSub``; renamed so it actually runs
        with self.assertRaises(TypeError):
            len(self.nc)

    def testHash(self):
        self.assertEqual(hash(self.nc), hash(None))

    def testAdd(self):
        with self.assertRaises(TypeError):
            self.nc + 1
        with self.assertRaises(TypeError):
            1 + self.nc

    def testSub(self):
        with self.assertRaises(TypeError):
            self.nc - 1
        with self.assertRaises(TypeError):
            1 - self.nc

    def testMult(self):
        with self.assertRaises(TypeError):
            self.nc * 1
        with self.assertRaises(TypeError):
            1 * self.nc

    def testRshift(self):
        with self.assertRaises(TypeError):
            self.nc >> 1
        with self.assertRaises(TypeError):
            1 >> self.nc

    def testLshift(self):
        with self.assertRaises(TypeError):
            self.nc << 1
        with self.assertRaises(TypeError):
            1 << self.nc

    def testAnd(self):
        with self.assertRaises(TypeError):
            self.nc & 1
        with self.assertRaises(TypeError):
            1 & self.nc

    def testOr(self):
        with self.assertRaises(TypeError):
            self.nc | 1
        with self.assertRaises(TypeError):
            1 | self.nc

    def testXor(self):
        with self.assertRaises(TypeError):
            self.nc ^ 1
        with self.assertRaises(TypeError):
            1 ^ self.nc

    def testFloorDiv(self):
        with self.assertRaises(TypeError):
            self.nc // 1
        with self.assertRaises(TypeError):
            1 // self.nc

    def testDivMod(self):
        with self.assertRaises(TypeError):
            divmod(self.nc, 1)
        with self.assertRaises(TypeError):
            divmod(1, self.nc)

    def testInv(self):
        with self.assertRaises(TypeError):
            ~self.nc

    def testPos(self):
        with self.assertRaises(TypeError):
            +self.nc

    def testPass(self):
        fixture = jpype.JClass('jpype.common.Fixture')()
        self.assertEqual(fixture.callObject(self.nc), None)
| [
"_jpype.fault",
"jpype.JObject",
"common.JPypeTestCase.setUp",
"jpype.JArray",
"jpype.JChar",
"jpype.java.lang.Character",
"jpype.JClass"
] | [((142, 174), 'common.JPypeTestCase.setUp', 'common.JPypeTestCase.setUp', (['self'], {}), '(self)\n', (168, 174), False, 'import common\n'), ((393, 421), '_jpype.fault', '_jpype.fault', (['"""PyJPChar_new"""'], {}), "('PyJPChar_new')\n", (405, 421), False, 'import _jpype\n'), ((512, 549), '_jpype.fault', '_jpype.fault', (['"""PyJPModule_getContext"""'], {}), "('PyJPModule_getContext')\n", (524, 549), False, 'import _jpype\n'), ((749, 777), '_jpype.fault', '_jpype.fault', (['"""PyJPChar_str"""'], {}), "('PyJPChar_str')\n", (761, 777), False, 'import _jpype\n'), ((943, 989), '_jpype.fault', '_jpype.fault', (['"""JPCharType::findJavaConversion"""'], {}), "('JPCharType::findJavaConversion')\n", (955, 989), False, 'import _jpype\n'), ((1209, 1250), '_jpype.fault', '_jpype.fault', (['"""JPJavaFrame::NewCharArray"""'], {}), "('JPJavaFrame::NewCharArray')\n", (1221, 1250), False, 'import _jpype\n'), ((1347, 1394), '_jpype.fault', '_jpype.fault', (['"""JPJavaFrame::SetCharArrayRegion"""'], {}), "('JPJavaFrame::SetCharArrayRegion')\n", (1359, 1394), False, 'import _jpype\n'), ((1484, 1531), '_jpype.fault', '_jpype.fault', (['"""JPJavaFrame::GetCharArrayRegion"""'], {}), "('JPJavaFrame::GetCharArrayRegion')\n", (1496, 1531), False, 'import _jpype\n'), ((1624, 1673), '_jpype.fault', '_jpype.fault', (['"""JPJavaFrame::GetCharArrayElements"""'], {}), "('JPJavaFrame::GetCharArrayElements')\n", (1636, 1673), False, 'import _jpype\n'), ((1826, 1879), '_jpype.fault', '_jpype.fault', (['"""JPJavaFrame::ReleaseCharArrayElements"""'], {}), "('JPJavaFrame::ReleaseCharArrayElements')\n", (1838, 1879), False, 'import _jpype\n'), ((2027, 2080), '_jpype.fault', '_jpype.fault', (['"""JPJavaFrame::ReleaseCharArrayElements"""'], {}), "('JPJavaFrame::ReleaseCharArrayElements')\n", (2039, 2080), False, 'import _jpype\n'), ((2198, 2251), '_jpype.fault', '_jpype.fault', (['"""JPJavaFrame::ReleaseCharArrayElements"""'], {}), "('JPJavaFrame::ReleaseCharArrayElements')\n", (2210, 2251), False, 
'import _jpype\n'), ((2429, 2470), '_jpype.fault', '_jpype.fault', (['"""JPCharType::setArrayRange"""'], {}), "('JPCharType::setArrayRange')\n", (2441, 2470), False, 'import _jpype\n'), ((3781, 3813), 'common.JPypeTestCase.setUp', 'common.JPypeTestCase.setUp', (['self'], {}), '(self)\n', (3807, 3813), False, 'import common\n'), ((6962, 6994), 'common.JPypeTestCase.setUp', 'common.JPypeTestCase.setUp', (['self'], {}), '(self)\n', (6988, 6994), False, 'import common\n'), ((7013, 7043), 'jpype.java.lang.Character', 'jpype.java.lang.Character', (['"""B"""'], {}), "('B')\n", (7038, 7043), False, 'import jpype\n'), ((10490, 10522), 'common.JPypeTestCase.setUp', 'common.JPypeTestCase.setUp', (['self'], {}), '(self)\n', (10516, 10522), False, 'import common\n'), ((10541, 10587), 'jpype.JObject', 'jpype.JObject', (['None', 'jpype.java.lang.Character'], {}), '(None, jpype.java.lang.Character)\n', (10554, 10587), False, 'import jpype\n'), ((2152, 2189), 'jpype.JObject', 'jpype.JObject', (['ja[::2]', 'jpype.JObject'], {}), '(ja[::2], jpype.JObject)\n', (2165, 2189), False, 'import jpype\n'), ((3295, 3320), 'jpype.JArray', 'jpype.JArray', (['jpype.JChar'], {}), '(jpype.JChar)\n', (3307, 3320), False, 'import jpype\n'), ((3348, 3373), 'jpype.JArray', 'jpype.JArray', (['jpype.JChar'], {}), '(jpype.JChar)\n', (3360, 3373), False, 'import jpype\n'), ((5713, 5749), 'jpype.JClass', 'jpype.JClass', (['"""jpype.common.Fixture"""'], {}), "('jpype.common.Fixture')\n", (5725, 5749), False, 'import jpype\n'), ((9314, 9350), 'jpype.JClass', 'jpype.JClass', (['"""jpype.common.Fixture"""'], {}), "('jpype.common.Fixture')\n", (9326, 9350), False, 'import jpype\n'), ((13023, 13059), 'jpype.JClass', 'jpype.JClass', (['"""jpype.common.Fixture"""'], {}), "('jpype.common.Fixture')\n", (13035, 13059), False, 'import jpype\n'), ((238, 253), 'jpype.JChar', 'jpype.JChar', (['(65)'], {}), '(65)\n', (249, 253), False, 'import jpype\n'), ((294, 310), 'jpype.JChar', 'jpype.JChar', (['(512)'], {}), 
'(512)\n', (305, 310), False, 'import jpype\n')] |
"""
Random utilities this app needs
"""
import os
import re
from buzz import Corpus as BuzzCorpus
from buzz import Collection
from django.conf import settings
from explore.models import Corpus
from .models import OCRUpdate, PDF
# from django.core.exceptions import ObjectDoesNotExist
# when doing OCR, re.findall will be run on it using this regex, which sort of
# approximates a word. note that this will mean that "the end" will be marked
# as blank, but that is a decent tradeoff for marking blank a lot of junk pages.
MEANINGFUL = r"[A-Za-z]{3,}"
THRESHOLD = 3
def markdown_to_buzz_input(markdown, slug):
    """
    Convert user-corrected markdown into buzz-parseable text.

    Headings (``# Title``) become ``<meta heading="true" depth="N">Title</meta>``
    and bulletpoints (``* item``) become ``<meta point="true">item</meta>``.
    All other lines pass through unchanged.

    Bug fixed: the bulletpoint template was missing its ``f`` prefix, so the
    literal string ``{line}`` was emitted instead of the bullet text.

    ``slug`` is currently unused but kept for interface compatibility.
    """
    fixed = []
    for line in markdown.splitlines():
        # handle headings
        # note that this doesn't put text inside respective headings as
        # sections; doing so would not work across pages anyway.
        if line.startswith("#"):
            # robustness: a bare "#" line has no title text to split off
            if " " in line:
                pref, head = line.split(" ", 1)
                depth = len(pref.strip())
                head = head.strip()
                line = f"<meta heading=\"true\" depth=\"{depth}\">{head}</meta>"
        # handle bulletpoints
        elif line.startswith("* "):
            line = line.lstrip("* ")
            line = f"<meta point=\"true\">{line}</meta>"
        # TODO: handle inline styling (***, **, *, `) — previously a no-op loop
        fixed.append(line)
    return "\n".join(fixed)
def store_buzz_raw(raw, slug, pdf_path):
    """
    Write the raw text for one PDF into the corpus txt directory
    (static/corpora/<slug>/txt/<pdf-name>.txt) and return that directory.
    """
    # todo: corpora dir?
    txt_dir = os.path.join("static", "corpora", slug, "txt")
    os.makedirs(txt_dir, exist_ok=True)
    txt_name = os.path.basename(pdf_path).replace(".pdf", ".txt")
    with open(os.path.join(txt_dir, txt_name), "w") as handle:
        handle.write(raw)
    return txt_dir
def dump_latest():
    """
    Get the latest OCR corrections and build a parseable corpus.
    Maybe even parse it?

    For each corpus slug, writes the newest OCRUpdate text of every PDF to
    the corpus txt directory, then parses the corpus with buzz.
    Returns the parsed corpus of the last slug processed.
    """
    # NOTE(review): values_list("slug") without flat=True yields 1-tuples,
    # so `slug` below is a tuple, not a string — confirm against the models.
    slugs = OCRUpdate.objects.values_list("slug")
    slugs = set(slugs)
    for slug in slugs:
        corp = Corpus.objects.get(slug=slug)
        lang = corp.language.name
        # get the associated pdfs
        pdfs = PDF.objects.filter(slug=slug)
        for pdf in pdfs:
            updates = OCRUpdate.objects.filter(pdf=pdf, slug=slug)
            plaintext = updates.latest("timestamp").text
            # all pages share one txt dir; corpus_path is the same each pass
            corpus_path = store_buzz_raw(plaintext, slug, pdf.path)
        print(f"Parsing ({lang}): {corpus_path}")
        # NOTE(review): this rebinds `corp` from the Django model instance
        # to a buzz Corpus before calling .save() — looks like the model
        # object was meant to be updated; confirm intent.
        corp = BuzzCorpus(corpus_path)
        parsed = corp.parse(language=lang, multiprocess=1)
        corp.parsed = True
        corp.path = parsed.path
        corp.save()
    # NOTE(review): `parsed` is unbound (NameError) when there are no slugs.
    return parsed
def _is_meaningful(plaintext, language):
    """
    Determine if an OCR page contains something worthwhile: at least
    THRESHOLD word-like tokens matching the MEANINGFUL regex.

    Pages in non-latin-alphabet languages are always treated as meaningful,
    since the word regex only matches latin letters.
    """
    # skip this check for non latin alphabet ... right now the parser doesn't
    # accept most non-latin languages, so it's mostly academic for now...
    # bugfix: original referenced undefined name `lang` (NameError)
    if language in {"zh", "ja", "fa", "iw", "ar"}:
        return True
    # bugfix: original swapped re.findall's arguments (pattern comes first)
    words = re.findall(MEANINGFUL, plaintext)
    return len(words) >= THRESHOLD
def _handle_page_numbers(text):
    """
    Strip a bare page number from the first or last non-empty line of a page
    and re-emit it as page-level metadata (``<meta page=N />``).

    Returns the text unchanged when the feature is disabled or no page
    number is found.

    Bugs fixed: the original returned None (implicitly) when no number was
    found, and it deleted the wrong line because it mixed indices of a
    stripped/filtered line list with indices into ``text.splitlines()``.
    """
    # if no handling, just return text
    if settings.COMPARE_HANDLE_PAGE_NUMBERS is False:
        return text
    raw_lines = text.splitlines()
    # indices (into raw_lines) of lines with visible content
    content_ix = [i for i, line in enumerate(raw_lines) if line.strip()]
    if not content_ix:
        return text
    # only the first and last non-empty lines are page-number candidates
    if len(content_ix) == 1:
        candidates = [content_ix[0]]
    else:
        candidates = [content_ix[0], content_ix[-1]]
    page_number = None
    ix_to_delete = set()
    for i in candidates:
        if raw_lines[i].strip().isnumeric():
            page_number = raw_lines[i].strip()
            ix_to_delete.add(i)
            break
    if page_number is None:
        return text
    form = f"<meta page={page_number} />\n"
    # remove the page-number line from the actual text
    cut = [x for i, x in enumerate(raw_lines) if i not in ix_to_delete]
    return "\n".join([form] + cut)
| [
"os.makedirs",
"os.path.join",
"buzz.Corpus",
"explore.models.Corpus.objects.get",
"os.path.basename",
"re.findall"
] | [((1771, 1817), 'os.path.join', 'os.path.join', (['"""static"""', '"""corpora"""', 'slug', '"""txt"""'], {}), "('static', 'corpora', slug, 'txt')\n", (1783, 1817), False, 'import os\n'), ((1822, 1854), 'os.makedirs', 'os.makedirs', (['base'], {'exist_ok': '(True)'}), '(base, exist_ok=True)\n', (1833, 1854), False, 'import os\n'), ((3211, 3244), 're.findall', 're.findall', (['plaintext', 'MEANINGFUL'], {}), '(plaintext, MEANINGFUL)\n', (3221, 3244), False, 'import re\n'), ((2253, 2282), 'explore.models.Corpus.objects.get', 'Corpus.objects.get', ([], {'slug': 'slug'}), '(slug=slug)\n', (2271, 2282), False, 'from explore.models import Corpus\n'), ((2678, 2701), 'buzz.Corpus', 'BuzzCorpus', (['corpus_path'], {}), '(corpus_path)\n', (2688, 2701), True, 'from buzz import Corpus as BuzzCorpus\n'), ((1870, 1896), 'os.path.basename', 'os.path.basename', (['pdf_path'], {}), '(pdf_path)\n', (1886, 1896), False, 'import os\n'), ((1935, 1963), 'os.path.join', 'os.path.join', (['base', 'filename'], {}), '(base, filename)\n', (1947, 1963), False, 'import os\n')] |
import subprocess
import time
import sys
import signal
from testutils import assert_raises
# True on every non-Windows platform
is_unix = not sys.platform.startswith("win")

if is_unix:
    def echo(text):
        """Return an argv list that prints *text*."""
        return ["echo", text]

    def sleep(secs):
        """Return an argv list that sleeps for *secs* seconds."""
        return ["sleep", str(secs)]
else:
    def echo(text):
        """Return an argv list that prints *text* via cmd.exe."""
        return ["cmd", "/C", "echo " + text]

    def sleep(secs):
        # TODO: make work in a non-unixy environment (something with timeout.exe?)
        return ["sleep", str(secs)]
# --- basic lifecycle: poll() reaps the child and sets returncode ---
p = subprocess.Popen(echo("test"))
time.sleep(0.1)
assert p.returncode is None
assert p.poll() == 0
assert p.returncode == 0

# --- wait() with a too-short timeout raises TimeoutExpired ---
p = subprocess.Popen(sleep(2))
assert p.poll() is None
with assert_raises(subprocess.TimeoutExpired):
    assert p.wait(1)
p.wait()
assert p.returncode == 0

# --- stdout can be captured through a pipe ---
p = subprocess.Popen(echo("test"), stdout=subprocess.PIPE)
p.wait()
assert p.stdout.read().strip() == b"test"

# --- terminate(): on unix the returncode is the negated signal number ---
p = subprocess.Popen(sleep(2))
p.terminate()
p.wait()
if is_unix:
    assert p.returncode == -signal.SIGTERM
else:
    assert p.returncode == 1

# --- kill(): same convention with SIGKILL ---
p = subprocess.Popen(sleep(2))
p.kill()
p.wait()
if is_unix:
    assert p.returncode == -signal.SIGKILL
else:
    assert p.returncode == 1

# --- communicate() gathers the full stdout ---
p = subprocess.Popen(echo("test"), stdout=subprocess.PIPE)
(stdout, stderr) = p.communicate()
assert stdout.strip() == b"test"

# --- communicate() also honors its timeout ---
p = subprocess.Popen(sleep(5), stdout=subprocess.PIPE)
with assert_raises(subprocess.TimeoutExpired):
    p.communicate(timeout=1)
| [
"testutils.assert_raises",
"sys.platform.startswith",
"time.sleep"
] | [((505, 520), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (515, 520), False, 'import time\n'), ((107, 137), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win"""'], {}), "('win')\n", (130, 137), False, 'import sys\n'), ((660, 700), 'testutils.assert_raises', 'assert_raises', (['subprocess.TimeoutExpired'], {}), '(subprocess.TimeoutExpired)\n', (673, 700), False, 'from testutils import assert_raises\n'), ((1331, 1371), 'testutils.assert_raises', 'assert_raises', (['subprocess.TimeoutExpired'], {}), '(subprocess.TimeoutExpired)\n', (1344, 1371), False, 'from testutils import assert_raises\n')] |
import fmtrack
import os
import matplotlib.colors as colors
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import pickle
import pyvista
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import (RBF, Matern, RationalQuadratic,
ExpSineSquared, DotProduct,
ConstantKernel, WhiteKernel)
from sklearn.neighbors import KernelDensity
from sklearn import preprocessing
##########################################################################################
# get filepath for matplotlib style
##########################################################################################
# absolute path to the matplotlib style sheet shipped next to the installed fmtrack package
stylepath = os.path.dirname(os.path.abspath(fmtrack.__file__)) + '/el_papers.mplstyle'
##########################################################################################
# import data
##########################################################################################
def import_cell_info(file_prefix_1, file_prefix_2, root_directory):
    """Load mesh, normals, center, and volume arrays for two cell
    configurations from <root_directory>/Gel_cell_coords/.

    Returns an 8-tuple:
    (mesh_1, normals_1, center_1, volume_1, mesh_2, normals_2, center_2, volume_2)
    """
    def _load(prefix, suffix):
        # files follow the pattern <prefix>_<suffix>.txt
        return np.loadtxt(root_directory + '/Gel_cell_coords/' + prefix + '_' + suffix + '.txt')

    loaded = []
    for prefix in (file_prefix_1, file_prefix_2):
        for suffix in ('cell_mesh', 'cell_normals', 'cell_center', 'cell_volume'):
            loaded.append(_load(prefix, suffix))
    return tuple(loaded)
def import_bead_disps(folder):
    """Load bead positions (X, Y, Z) and displacements (U, V, W) from the
    six whitespace-delimited text files in *folder*.

    Returns the six arrays in the order X, Y, Z, U, V, W.
    """
    names = ("X", "Y", "Z", "U", "V", "W")
    return tuple(np.loadtxt(folder + "/" + name + ".txt") for name in names)
##########################################################################################
# additional computations based on the data
##########################################################################################
# can be implemented as needed based on some rule for excluding outliers
def remove_outliers(X, Y, Z, U, V, W):
    """Placeholder for case-by-case outlier removal on bead displacements.

    Possible strategies (to be implemented per dataset):
    - maximum plausible displacement
    - z-score based displacement
    - more complex strategy for determining outliers (hot-spot analysis)

    Bug fixed: the original returned undefined names (X_new, ...), which
    raised a confusing NameError; now the unimplemented state is explicit.
    """
    raise NotImplementedError(
        "remove_outliers must be implemented on a case by case basis")
# compare bead displacement to it's neighbors
def color_point_neighbor_similarity(X, Y, Z, U, V, W, num_neigh):
    """
    Score each bead by how different its displacement is from those of its
    ``num_neigh`` nearest neighbors. The bead is its own nearest neighbor
    (distance 0) and contributes 0 to its own mean, matching the original
    behavior.

    X, Y, Z: bead positions (1D arrays, equal length)
    U, V, W: bead displacements (1D arrays, equal length)
    num_neigh: number of neighbors (including self) to average over

    Returns a list with the mean displacement-difference magnitude per bead.
    """
    num_pts = X.shape[0]
    # robustness: never request more neighbors than there are beads
    num_neigh = min(num_neigh, num_pts)
    neigh_score = []
    for kk in range(num_pts):
        x = X[kk]; y = Y[kk]; z = Z[kk]
        u = U[kk]; v = V[kk]; w = W[kk]
        # Euclidean distance from bead kk to every bead (including itself)
        dist_all = ((x - X)**2.0 + (y - Y)**2.0 + (z - Z)**2.0)**(1.0 / 2.0)
        nearest = np.argsort(dist_all)[:num_neigh]
        score_dist = np.zeros(num_neigh)
        for jj, idx in enumerate(nearest):
            # magnitude of the displacement difference to this neighbor
            score_dist[jj] = ((u - U[idx])**2.0 + (v - V[idx])**2.0
                              + (w - W[idx])**2.0)**(1.0 / 2.0)
        neigh_score.append(np.mean(score_dist))
    return neigh_score
# compare bead displacement direction to the initial cell configuration
def color_point_direction(X, Y, Z, U, V, W, cell_mesh, cell_normal):
    """
    For every bead, compute:
      - the dot product between its unit displacement and the normal of the
        closest (down-sampled) cell-surface point,
      - the distance to that closest surface point,
      - the displacement magnitude.

    The surface mesh is randomly down-sampled (at most 10000 points) for
    computational efficiency, so results may vary slightly between runs.
    Returns (dir_score, dist_from_cell, mag_list) as three lists.
    """
    num_pts = X.shape[0]
    # random down-sample of the surface mesh for computational efficiency
    samp = np.random.randint(cell_mesh.shape[0] - 1, size=np.min([num_pts, 10000]))
    surf_pts = cell_mesh[samp, :]
    surf_norms = cell_normal[samp, :]
    dir_score = []
    dist_from_cell = []
    mag_list = []
    for kk in range(num_pts):
        ux = U[kk]; uy = V[kk]; uz = W[kk]
        mag = (ux**2.0 + uy**2.0 + uz**2.0)**(1.0 / 2.0)
        # unit displacement direction
        dx = ux / mag; dy = uy / mag; dz = uz / mag
        gaps = ((X[kk] - surf_pts[:, 0])**2.0 + (Y[kk] - surf_pts[:, 1])**2.0
                + (Z[kk] - surf_pts[:, 2])**2.0)**(1.0 / 2.0)
        closest = np.argmin(gaps)
        score = (dx * surf_norms[closest, 0] + dy * surf_norms[closest, 1]
                 + dz * surf_norms[closest, 2])
        dir_score.append(score)
        dist_from_cell.append(gaps[closest])
        mag_list.append(mag)
    return dir_score, dist_from_cell, mag_list
# compute bead displacement to the domain edge
def compute_dist_from_edge(X, Y, Z, X_DIM, Y_DIM, Z_DIM):
    """Return, for every bead, its distance to the closest face of the
    rectangular domain [0, X_DIM] x [0, Y_DIM] x [0, Z_DIM], as a list.
    """
    edge_distances = []
    for x, y, z in zip(X, Y, Z):
        # per-axis distance to the nearer of the two opposing faces
        per_axis = [np.min([np.abs(coord), np.abs(dim - coord)])
                    for coord, dim in ((x, X_DIM), (y, Y_DIM), (z, Z_DIM))]
        edge_distances.append(np.min(per_axis))
    return edge_distances
# bin data to assist with plotting
def mean_bins(data1, data2):
    """Bin data2 values by their data1 key into four fixed bins
    ([0,20), [20,40), [40,60), [60,80)) and return (bin centers, bin means).

    Each bin is seeded with a 0 entry (matching the original behavior), so
    an empty bin yields a mean of 0 rather than NaN.
    """
    bin_centers = [10, 30, 50, 70]
    bin_uppers = [20, 40, 60, 80]
    order = np.argsort(data1)
    sorted_keys = np.sort(data1)
    sorted_vals = data2[order]
    means = []
    pos = 0
    for upper in bin_uppers:
        bucket = [0]  # seed value keeps empty bins at mean 0
        while pos < sorted_keys.shape[0] and sorted_keys[pos] < upper:
            bucket.append(sorted_vals[pos])
            pos += 1
        means.append(np.mean(bucket))
    return bin_centers, means
##########################################################################################
# plot raw data (position)
##########################################################################################
# --> y axis is bead displacement magnitude, x axis is distance from cell surface
def plot_surface_disp(axi, cell_mesh, dist_from_edge, dist_from_cell, mag_list):
    """Scatter bead displacement magnitude against distance to the cell
    surface, overlaying binned means (via mean_bins) in red.

    Beads within 5 um of the domain edge are excluded. ``cell_mesh`` is
    accepted for signature compatibility but not used here.
    """
    # keep only beads far enough from the domain boundary
    keep = np.asarray([idx for idx, edge in enumerate(dist_from_edge) if edge > 5])
    cell_dist = np.asarray(dist_from_cell)
    mags = np.asarray(mag_list)
    centers, means = mean_bins(cell_dist[keep], mags[keep])
    axi.plot(cell_dist[keep], mags[keep], 'k.', markersize=0.75)
    axi.plot(centers, means, 'ro', markersize=10)
    axi.set_ylim((0, 10))
    axi.set_xlabel('distance to cell surface')
    axi.set_ylabel(r'displacement magnitude $\mu m$')
# --> 3D plot of the cell, configuration number influences title and color
def plot_cell_3D(ax, cell_num, cell_mesh, cell_center, cell_vol, X_DIM, Y_DIM, Z_DIM):
    """Scatter a cell mesh on a 3D axis with fixed domain limits.

    cell_num selects the configuration: 1 -> light gray, 2 -> black.
    The title reports the configuration number and the cell volume.

    Bug fixed: any other cell_num previously left ``col`` undefined and
    crashed with a NameError at plot time; now raises a clear ValueError.
    ``cell_center`` is accepted for signature compatibility but not used.
    """
    colors_by_config = {1: (0.75, 0.75, 0.75), 2: (0, 0, 0)}
    if cell_num not in colors_by_config:
        raise ValueError("cell_num must be 1 or 2, got %r" % (cell_num,))
    col = colors_by_config[cell_num]
    ax.set_aspect('auto')
    ax.plot(cell_mesh[:, 0], cell_mesh[:, 1], cell_mesh[:, 2], '.', color=col)
    ax.set_xlim((-1, X_DIM))
    ax.set_ylim((-1, Y_DIM))
    ax.set_zlim((-1, Z_DIM))
    # single title format for both configs (output identical to the original)
    ax.set_title('cell config %d, %.1f $\mu m^3$' % (cell_num, cell_vol))
# --> plot of scores (type 1 is similarity to neighbors, type 2 is direction relative to cell)
def plot_scores_subplot(data,title,axi,color_type):
    """Plot a Gaussian-KDE density estimate of ``data`` on ``axi``, with the
    density curve in black and per-sample dots colored by score.

    color_type 1: scores mapped blue (min) -> red (max).
    color_type 2: signed scores — negative shades red/purple, positive cyan;
    assumes scores lie roughly in [-1, 1] so the tuples are valid colors
    (TODO confirm with callers).
    """
    num_pts = 250
    # evaluation grid spanning the observed score range
    X_plot = np.linspace(np.min(data),np.max(data),num_pts).reshape(-1,1)
    X = data.reshape(-1,1)
    kde = KernelDensity(kernel='gaussian', bandwidth=0.1).fit(X)
    log_dens = kde.score_samples(X_plot)
    axi.set_xlabel('score')
    axi.set_ylabel('probability density function')
    axi.set_title(title)
    ci_max = np.max(data); ci_min = np.min(data)
    axi.plot(X_plot[:,0],np.exp(log_dens),'k-',linewidth=0.5)
    for kk in range(0,num_pts):
        ci = X_plot[kk,0]
        if color_type == 1: #--> all positive numbers, blue is 0, red is high
            col = ((ci-ci_min)/(ci_max-ci_min), 0, 1.0 - (ci-ci_min)/(ci_max-ci_min))
        elif color_type == 2:
            if ci < 0.0:
                col = (np.abs(ci),0,0.5*np.abs(ci))
            else:
                col = (0, np.abs(ci), np.abs(ci))
        axi.plot(X_plot[kk, 0], np.exp(log_dens[kk]),'.',color=col)
    return
# --> helper function plots slice of cell
def plot_cell(cent, project_1, project_2, project_out, col, axi):
    """Scatter the slice of a projected cell mesh that lies within
    +/- 0.5 um of the slicing plane at out-of-plane coordinate ``cent``.

    project_1 / project_2 are the in-plane coordinates; project_out is the
    out-of-plane coordinate used to select points.
    """
    half_width = 0.5
    in_slab_1 = []
    in_slab_2 = []
    for a, b, out in zip(project_1, project_2, project_out):
        # strict inequalities on both sides, matching the original bounds
        if cent - half_width < out < cent + half_width:
            in_slab_1.append(a)
            in_slab_2.append(b)
    axi.plot(in_slab_1, in_slab_2, '.', color=col)
    return
# --> helper function plots slice of vectors
def plot_vectors(color_type, color_info, project_1, project_2, project_1d, project_2d, cent, project_out, axi):
    """Draw bead displacement arrows in a slice view.

    Only beads whose out-of-plane coordinate lies within +/- 10 um of
    ``cent`` are drawn. Arrow color comes from ``color_info``:
    color_type 1 maps values blue (min) -> red (max); color_type 2 treats
    values as signed (negative red/purple, positive cyan).
    """
    ci_min = np.min(color_info); ci_max = np.max(color_info)
    num_pts = project_1.shape[0]
    for kk in range(0,num_pts):
        # --> the vectors themselves
        scale = 1
        proj1_a = project_1[kk]; proj1_d = project_1d[kk]*scale
        proj2_a = project_2[kk]; proj2_d = project_2d[kk]*scale
        pout = project_out[kk];
        buffer = 10
        # --> color of the vectors
        ci = color_info[kk]
        if pout > cent - buffer and pout < cent + buffer:
            # --> colortype
            if color_type == 1: #--> all positive numbers, blue is 0, red is high
                col = ((ci-ci_min)/(ci_max-ci_min), 0, 1.0 - (ci-ci_min)/(ci_max-ci_min))
            elif color_type == 2:
                if ci < 0.0:
                    col = (np.abs(ci),0,0.5*np.abs(ci))
                else:
                    col = (0, np.abs(ci), np.abs(ci))
            # --> plot the vectors
            axi.arrow(proj1_a,proj2_a,proj1_d,proj2_d,color = col,linewidth=1.0,head_width=1.5)
    return
# --> plot a slice plot, each has beads and a cell
def plot_cell_vector_slice(color_type, color_info, X, Y, Z, U, V, W, cell_center_1,\
    cell_mesh_1, cell_center_2, cell_mesh_2, plane_type, axi, X_DIM, Y_DIM, Z_DIM):
    """Draw one 2D slice view: both cell configurations (gray = config 1,
    black = config 2), bead displacement arrows, and dotted crosshairs
    through the average cell center.

    plane_type selects the projection: 1 -> XZ, 2 -> YZ, 3 -> XY.
    The slicing plane passes through the average of the two cell centers.
    """
    num_beads = X.shape[0]
    # pack positions / displacements into (n, 3) arrays for easy projection
    XYZ = np.zeros((num_beads,3)); XYZ[:,0] = X; XYZ[:,1] = Y; XYZ[:,2] = Z
    UVW = np.zeros((num_beads,3)); UVW[:,0] = U; UVW[:,1] = V; UVW[:,2] = W
    cell_center_avg = 0.5*cell_center_1 + 0.5*cell_center_2
    # idx_1 / idx_2 are the in-plane axes; idx_out is the slicing axis
    if plane_type == 1: #XZ-plane
        idx_1 = 0
        idx_2 = 2
        idx_out = 1
    elif plane_type == 2: #YZ-plane
        idx_1 = 1
        idx_2 = 2
        idx_out = 0
    elif plane_type == 3: #XY-plane
        idx_1 = 0
        idx_2 = 1
        idx_out = 2
    cent = cell_center_avg[idx_out]
    project_1_cell_A = cell_mesh_1[:,idx_1]
    project_2_cell_A = cell_mesh_1[:,idx_2]
    project_out_cell_A = cell_mesh_1[:,idx_out]
    cell_color_A = (0.75,0.75,0.75)
    project_1_cell_B = cell_mesh_2[:,idx_1]
    project_2_cell_B = cell_mesh_2[:,idx_2]
    project_out_cell_B = cell_mesh_2[:,idx_out]
    cell_color_B = (0.0,0.0,0.0)
    project_1_bead = XYZ[:,idx_1]
    project_2_bead = XYZ[:,idx_2]
    project_1d_bead = UVW[:,idx_1]
    project_2d_bead = UVW[:,idx_2]
    project_out_bead = XYZ[:,idx_out]
    # call cell plot for cell 1
    plot_cell(cent,project_1_cell_A,project_2_cell_A,project_out_cell_A,cell_color_A,axi)
    # call cell plot for cell 2
    plot_cell(cent,project_1_cell_B,project_2_cell_B,project_out_cell_B,cell_color_B,axi)
    # call vector plot
    plot_vectors(color_type, color_info, project_1_bead, project_2_bead, project_1d_bead, project_2d_bead, cent, project_out_bead, axi)
    center = cell_center_avg
    # dotted crosshair lines through the cell center + axis limits/labels
    if plane_type == 1: #XZ-plane
        axi.plot([-1,X_DIM],[center[2],center[2]],'k:',linewidth=1.0)
        axi.plot([center[0],center[0]],[-1,Z_DIM],'k:',linewidth=1.0)
        axi.set_xlim((-1,X_DIM))
        axi.set_ylim((-1,Z_DIM))
        axi.set_xlabel(r'x-position $\mu m$')
        axi.set_ylabel(r'z-position $\mu m$')
    elif plane_type == 2: #YZ-plane
        axi.plot([-1,Y_DIM],[center[2],center[2]],'k:',linewidth=1.0)
        axi.plot([center[1],center[1]],[-1,Z_DIM],'k:',linewidth=1.0)
        axi.set_xlim((-1,Y_DIM))
        axi.set_ylim((-1,Z_DIM))
        axi.set_xlabel(r'y-position $\mu m$')
        axi.set_ylabel(r'z-position $\mu m$')
    elif plane_type == 3: #XY-plane
        axi.plot([-1,X_DIM],[center[1],center[1]],'k:',linewidth=1.0)
        axi.plot([center[0],center[0]],[-1,Y_DIM],'k:',linewidth=1.0)
        axi.set_xlim((-1,X_DIM))
        axi.set_ylim((-1,Y_DIM))
        axi.set_xlabel(r'x-position $\mu m$')
        axi.set_ylabel(r'y-position $\mu m$')
    return
def plot_vector_field(X,Y,Z,U,V,W,cell_init,cell_final,dir_score,should_show,should_save,foldername):
    """Render and/or save a 3D arrow-glyph plot of bead displacements around a cell.

    X..W are the bead positions and displacement components; dir_score colors
    each arrow. When should_show is truthy an interactive pyvista window opens;
    when should_save is truthy the meshes and glyphs are written as .vtk files
    into foldername.
    """
    # stack the per-component vectors into (n, 3) position / displacement arrays
    coords = np.vstack((X, Y, Z)).transpose()
    disps = np.vstack((U, V, W)).transpose()
    point_cloud = pyvista.PolyData(coords)
    point_cloud["dot(cell normal, displacement)"] = dir_score
    point_cloud['vectors'] = disps
    geom = pyvista.Arrow()
    # one arrow per bead, fixed length (scale=False), oriented by 'vectors'
    arrows = point_cloud.glyph(orient='vectors', scale=False, factor=5.0, geom=geom)
    mesh_init = pyvista.PolyData(cell_init)
    mesh_final = pyvista.PolyData(cell_final)
    if should_show:
        plotter = pyvista.Plotter()
        plotter.add_mesh(cell_final, color='maroon')
        cmap = plt.cm.get_cmap("viridis_r")
        plotter.add_mesh(arrows, cmap=cmap)
        # replace the default bar with one carrying the descriptive label
        plotter.remove_scalar_bar()
        plotter.add_scalar_bar('Dot(Cell Normal, Vector)', title_font_size=20, label_font_size=15, position_y=0.05)
        plotter.show_grid()
        plotter.show(title='Bead Deformation around Cell')
    if should_save:
        mesh_init.save(os.path.join(foldername, 'cell_init.vtk'))
        mesh_final.save(os.path.join(foldername, 'cell_final.vtk'))
        arrows.save(os.path.join(foldername, 'arrows.vtk'))
# --> plot a cell-vector row
def plot_cell_vector_slice_row(ax_list,color_type,color_info,X,Y,Z,U,V,W,cell_center_1,cell_mesh_1,cell_center_2,cell_mesh_2,X_DIM,Y_DIM,Z_DIM):
    """Draw the three slice plots (XZ, YZ, XY) on the three axes in ax_list."""
    # plane_type 1/2/3 selects the XZ/YZ/XY projection, one per axis
    for axi, plane_type in zip(ax_list, (1, 2, 3)):
        plot_cell_vector_slice(color_type, color_info, X, Y, Z, U, V, W, cell_center_1,
                               cell_mesh_1, cell_center_2, cell_mesh_2, plane_type, axi, X_DIM, Y_DIM, Z_DIM)
    return
# --> plot cells
def plot_only_cells(cell_mesh_1,cell_center_1,cell_vol_1,cell_mesh_2,cell_center_2,cell_vol_2,X_DIM,Y_DIM,Z_DIM,folder,figtype_list):
    """Plot the two cell configurations side by side in 3D; save one file per figure type."""
    fig = plt.figure()
    plt.style.use(stylepath)
    plt.rc('text', usetex=True)
    plt.rc('font', family='serif')
    fig.set_figheight(5)
    fig.set_figwidth(10)
    # one 3D panel per configuration (numbered 1 and 2)
    cells = [(cell_mesh_1, cell_center_1, cell_vol_1),
             (cell_mesh_2, cell_center_2, cell_vol_2)]
    for num, (mesh, center, vol) in enumerate(cells, start=1):
        axi = fig.add_subplot(1, 2, num, projection='3d')
        plot_cell_3D(axi, num, mesh, center, vol, X_DIM, Y_DIM, Z_DIM)
    plt.tight_layout()
    for end in figtype_list:
        plt.savefig(folder + '/Cell_plots_3D' + end)
    return
# --> plot scores
def plot_only_scores(neigh_score,dir_score,folder,figtype_list):
    """Plot the neighbor-distance and direction score histograms side by side."""
    fig = plt.figure()
    plt.style.use(stylepath)
    plt.rc('text', usetex=True)
    plt.rc('font', family='serif')
    fig.set_figheight(5)
    fig.set_figwidth(10)
    # (scores, color_type, panel title) for each of the two panels
    panels = [(neigh_score, 1, 'neighbor distance score'),
              (dir_score, 2, r'$n_{cell} \cdot n_{vector}$')]
    for pos, (raw, color_type, title) in enumerate(panels, start=1):
        axi = fig.add_subplot(1, 2, pos)
        plot_scores_subplot(np.asarray(raw), title, axi, color_type)
    plt.tight_layout()
    for end in figtype_list:
        plt.savefig(folder + '/Score_plots' + end)
    return
# --> plot slice
def plot_only_slice(dir_score,X,Y,Z,U,V,W,cell_center_1,cell_mesh_1,cell_center_2,cell_mesh_2,X_DIM,Y_DIM,Z_DIM,folder,figtype_list):
    """Plot the three bead-displacement slices (XZ, YZ, XY) colored by dir_score."""
    fig = plt.figure()
    plt.style.use(stylepath)
    plt.rc('text', usetex=True)
    plt.rc('font', family='serif')
    fig.set_figheight(5)
    fig.set_figwidth(15)
    # color_type 2 = color arrows by the direction score
    color_type = 2
    for plane_type in (1, 2, 3):
        axi = fig.add_subplot(1, 3, plane_type)
        plot_cell_vector_slice(color_type, dir_score, X, Y, Z, U, V, W, cell_center_1,
                               cell_mesh_1, cell_center_2, cell_mesh_2, plane_type, axi, X_DIM, Y_DIM, Z_DIM)
    plt.tight_layout()
    for end in figtype_list:
        plt.savefig(folder + '/Bead_disp_slice' + end)
    return
# --> plot distance
def plot_only_distance(cell_mesh,dist_from_edge,dist_from_cell,mag_list,folder,figtype_list):
    """Plot displacement magnitude versus distance from the cell surface / domain edge."""
    fig = plt.figure()
    plt.style.use(stylepath)
    plt.rc('text', usetex=True)
    plt.rc('font', family='serif')
    fig.set_figheight(5)
    fig.set_figwidth(5)
    axi = fig.add_subplot(1, 1, 1)
    plot_surface_disp(axi, cell_mesh, dist_from_edge, dist_from_cell, mag_list)
    plt.tight_layout()
    for end in figtype_list:
        plt.savefig(folder + '/Disp_wrt_dist' + end)
    return
# --> plot all
def plot_all(folder, root_directory, file_prefix_1, file_prefix_2, dir_score, neigh_score, dist_from_edge, dist_from_cell, mag_list,
             X, Y, Z, U, V, W, cell_center_1, cell_mesh_1, cell_vol_1, cell_center_2, cell_mesh_2, cell_vol_2, X_DIM, Y_DIM, Z_DIM, figtype_list):
    """Build the 2x4 summary figure and save it to the track folder and the
    shared Post_proc_summary folder, one file per figure type."""
    fig = plt.figure()
    plt.style.use(stylepath)
    plt.rc('text', usetex=True)
    plt.rc('font', family='serif')
    fig.set_figheight(10)
    fig.set_figwidth(20)
    # panel 1: direction-score histogram (color_type 2)
    color_type = 2
    plot_scores_subplot(np.asarray(dir_score), r'$n_{cell} \cdot n_{vector}$',
                        fig.add_subplot(2, 4, 1), color_type)
    # panels 2-4: the three displacement slices (XZ, YZ, XY)
    for plane_type in (1, 2, 3):
        axi = fig.add_subplot(2, 4, 1 + plane_type)
        plot_cell_vector_slice(color_type, dir_score, X, Y, Z, U, V, W, cell_center_1,
                               cell_mesh_1, cell_center_2, cell_mesh_2, plane_type, axi, X_DIM, Y_DIM, Z_DIM)
    # panel 5: neighbor-distance score histogram (color_type 1)
    plot_scores_subplot(np.asarray(neigh_score), 'neighbor distance score',
                        fig.add_subplot(2, 4, 5), 1)
    # panel 6: displacement magnitude vs. distance from cell / edge
    plot_surface_disp(fig.add_subplot(2, 4, 6), cell_mesh_1, dist_from_edge, dist_from_cell, mag_list)
    # panels 7-8: the two cell configurations in 3D
    plot_cell_3D(fig.add_subplot(2, 4, 7, projection='3d'), 1, cell_mesh_1, cell_center_1, cell_vol_1, X_DIM, Y_DIM, Z_DIM)
    plot_cell_3D(fig.add_subplot(2, 4, 8, projection='3d'), 2, cell_mesh_2, cell_center_2, cell_vol_2, X_DIM, Y_DIM, Z_DIM)
    plt.tight_layout()
    for end in figtype_list:
        plt.savefig(folder + '/Summary_plot' + end)
    for end in figtype_list:
        plt.savefig(root_directory + '/Post_proc_summary' + '/' + 'Summary_' + file_prefix_1 + '_to_' + file_prefix_2 + end)
    return
# call individual plots, plus call multiple subplots
def call_plot_main(plot_type,file_prefix_1,file_prefix_2,num_feat,X_DIM,Y_DIM,Z_DIM,figtype_list,use_corrected_cell,root_directory,should_plot):
    """Compute displacement scores for one cell track and create the requested plots.

    plot_type selects the output: 1 full summary, 2 cells only, 3 scores only,
    4 slices only, 5 magnitude-vs-distance only, 6 all of the above.
    should_plot additionally triggers the interactive 3D vector-field view
    (which also saves the .vtk files).
    """
    folder = root_directory + '/Track_' + file_prefix_1 + '_to_' + file_prefix_2
    X, Y, Z, U, V, W = import_bead_disps(folder)
    cell_mesh_1, cell_normal_1, cell_center_1, cell_vol_1, cell_mesh_2, cell_normal_2, cell_center_2, cell_vol_2 = import_cell_info(file_prefix_1,file_prefix_2,root_directory)
    # BUG FIX: load the corrected mesh AFTER import_cell_info(). Previously it was
    # loaded first and then silently overwritten by the uncorrected cell_mesh_2
    # returned from import_cell_info (plot_gp_model already used the correct order).
    if use_corrected_cell:
        cell_mesh_2 = np.loadtxt(folder + '/cell_mesh_2_corrected.txt')
    neigh_score = color_point_neighbor_similarity(X, Y, Z, U, V, W, num_feat)
    dir_score, dist_from_cell, mag_list = color_point_direction(X, Y, Z, U, V, W, cell_mesh_1, cell_normal_1)
    dist_from_edge = compute_dist_from_edge(X, Y, Z, X_DIM, Y_DIM, Z_DIM)
    # plot_type 6 creates every plot
    if plot_type == 1 or plot_type == 6:  # big summary, saved in two directories
        plot_all(folder, root_directory, file_prefix_1, file_prefix_2, dir_score, neigh_score, dist_from_edge, dist_from_cell, mag_list,
                 X, Y, Z, U, V, W, cell_center_1, cell_mesh_1, cell_vol_1, cell_center_2, cell_mesh_2, cell_vol_2, X_DIM, Y_DIM, Z_DIM, figtype_list)
    if plot_type == 2 or plot_type == 6:  # cells in both configurations
        plot_only_cells(cell_mesh_1, cell_center_1, cell_vol_1, cell_mesh_2, cell_center_2, cell_vol_2, X_DIM, Y_DIM, Z_DIM, folder, figtype_list)
    if plot_type == 3 or plot_type == 6:  # score histograms only
        plot_only_scores(neigh_score, dir_score, folder, figtype_list)
    if plot_type == 4 or plot_type == 6:  # slice plots only
        plot_only_slice(dir_score, X, Y, Z, U, V, W, cell_center_1, cell_mesh_1, cell_center_2, cell_mesh_2, X_DIM, Y_DIM, Z_DIM, folder, figtype_list)
    if plot_type == 5 or plot_type == 6:  # magnitude wrt distance from surface
        plot_only_distance(cell_mesh_1, dist_from_edge, dist_from_cell, mag_list, folder, figtype_list)
    if should_plot:
        plot_vector_field(X, Y, Z, U, V, W, cell_mesh_1, cell_mesh_2, dir_score, should_plot, True, folder)
    return
##########################################################################################
# displacement interpolation -- use GPR
##########################################################################################
# --> create GP model
def create_gp_model(X,Y,Z,QoI):
    """Fit a Gaussian-process regressor mapping standardized (x, y, z) positions to QoI.

    Returns the fitted GaussianProcessRegressor and the StandardScaler used to
    standardize the training positions (callers need it to transform queries).
    """
    # assemble the (n, 3) training positions from the coordinate vectors
    raw_pts = np.column_stack((X, Y, Z))
    scaler = preprocessing.StandardScaler().fit(raw_pts)
    train_pts = scaler.transform(raw_pts)
    gp = GaussianProcessRegressor(kernel=RationalQuadratic())
    gp.fit(train_pts, QoI)
    return gp , scaler
# --> create GP models
def create_GP_model(file_prefix_1,file_prefix_2,root_directory):
    """Fit per-component GP displacement models (U, V, W) and pickle them to the track folder.

    All three models are trained on the same bead positions, so the scaler
    returned by each create_gp_model call is equivalent; the last one is saved
    as the shared '/scaler.sav'.
    """
    folder = root_directory + '/Track_' + file_prefix_1 + '_to_' + file_prefix_2
    X, Y, Z, U, V, W = import_bead_disps(folder)
    gp_U, scaler = create_gp_model(X, Y, Z, U)
    gp_V, scaler = create_gp_model(X, Y, Z, V)
    gp_W, scaler = create_gp_model(X, Y, Z, W)
    # BUG FIX: use context managers so the pickle files are flushed and closed
    # deterministically; the previous bare open() calls leaked file handles.
    outputs = (('/gp_U.sav', gp_U), ('/gp_V.sav', gp_V), ('/gp_W.sav', gp_W), ('/scaler.sav', scaler))
    for suffix, obj in outputs:
        with open(folder + suffix, 'wb') as f:
            pickle.dump(obj, f)
    return
# --> interpolate GP model
def interpolate_gp_model(plane_case, center, gp, scaler, X_DIM, Y_DIM, Z_DIM ):
    """Evaluate a fitted GP model on a regular 100x100 grid in one coordinate plane.

    plane_case selects the slice: 1 holds x fixed (varies y, z), 2 holds y fixed
    (varies x, z), 3 holds z fixed (varies x, y). Each in-plane axis spans
    [-1, DIM]. Returns the two in-plane coordinate grids and the predicted
    values, each of shape (100, 100).

    Raises:
        ValueError: for any plane_case other than 1, 2 or 3 (previously this
        fell through and died with an UnboundLocalError).
    """
    grid_pts = 100
    x_axis = np.linspace(-1, X_DIM, grid_pts)
    y_axis = np.linspace(-1, Y_DIM, grid_pts)
    z_axis = np.linspace(-1, Z_DIM, grid_pts)
    if plane_case == 1:  # x plane
        # NOTE(review): center[1] (not center[0]) supplies the fixed x value,
        # preserved exactly from the original -- confirm against the data layout.
        A, B = np.meshgrid(y_axis, z_axis)
        X = center[1] * np.ones((grid_pts, grid_pts))
        Y, Z = A, B
    elif plane_case == 2:  # y plane
        # NOTE(review): center[0] supplies the fixed y value, as in the original.
        A, B = np.meshgrid(x_axis, z_axis)
        Y = center[0] * np.ones((grid_pts, grid_pts))
        X, Z = A, B
    elif plane_case == 3:  # z plane
        A, B = np.meshgrid(x_axis, y_axis)
        Z = center[2] * np.ones((grid_pts, grid_pts))
        X, Y = A, B
    else:
        raise ValueError('plane_case must be 1, 2 or 3, got %r' % (plane_case,))
    # Evaluate the model on all grid points in one vectorized call. The original
    # looped row by row (100 predict calls) and shadowed the builtin `input`;
    # row-major ravel/reshape reproduces the same (row, col) ordering.
    pts = np.column_stack((X.ravel(), Y.ravel(), Z.ravel()))
    pred = gp.predict(scaler.transform(pts))
    RES = np.asarray(pred).reshape(grid_pts, grid_pts)
    return A, B, RES
# --> create a single GP plot
def plot_gp_model_single_plot(axi,is_mag,data_1,data_2,result,title):
    """Pseudocolor one interpolated field on axi with a labelled colorbar.

    Signed displacement components use a symmetric [-5, 5] color range;
    magnitudes (is_mag) use [0, 10].
    """
    if is_mag:
        vmin, vmax = 0, 10
    else:
        vmin, vmax = -5, 5
    heat = axi.pcolor(data_1, data_2, result, cmap=plt.cm.coolwarm, vmin=vmin, vmax=vmax)
    cbar = plt.colorbar(heat, ax=axi)
    cbar.set_label(title, labelpad=-95, y=1.13, rotation=0)
    return
# --> plot GPR model, one row
def _gp_row_field(plane_case, center, gp_model, scaler, is_mag, X_DIM, Y_DIM, Z_DIM):
    # Helper: interpolate one panel's field. For magnitude rows gp_model is a
    # [gp_x, gp_y, gp_z] list whose predictions are combined as a Euclidean norm.
    if is_mag == False:
        return interpolate_gp_model(plane_case, center, gp_model, scaler, X_DIM, Y_DIM, Z_DIM)
    data_1, data_2, result_0 = interpolate_gp_model(plane_case, center, gp_model[0], scaler, X_DIM, Y_DIM, Z_DIM)
    _, _, result_1 = interpolate_gp_model(plane_case, center, gp_model[1], scaler, X_DIM, Y_DIM, Z_DIM)
    _, _, result_2 = interpolate_gp_model(plane_case, center, gp_model[2], scaler, X_DIM, Y_DIM, Z_DIM)
    result = (result_0**2.0 + result_1**2.0 + result_2**2.0)**(1.0/2.0)
    return data_1, data_2, result

def plot_gp_model_one_row(ax_list,is_mag,X_DIM,Y_DIM,Z_DIM,title,center,gp_model,scaler,cell_mesh_1,cell_mesh_2):
    """Plot one row of interpolated GP panels: XZ, YZ and XY slices through center.

    gp_model is a single fitted model when is_mag is False, or the list
    [gp_U, gp_V, gp_W] whose magnitude is plotted when is_mag is True.
    Both cell outlines and dashed crosshairs through center are overlaid.
    """
    # (plane_case, idx0, idx1, idx2, horiz label, vert label, horiz max, vert max)
    # BUG FIX 1: the second panel previously set `place_case = 1` (a typo), so it
    # silently reused plane_case = 2 and plotted XZ data under YZ labels.
    # BUG FIX 2: the third (XY) panel previously extended/clipped the y axis with
    # Z_DIM; Y_DIM matches the XY panel in plot_cell_vector_slice.
    panels = [
        (2, 0, 2, 1, 'x', 'z', X_DIM, Z_DIM),  # XZ slice
        (1, 1, 2, 0, 'y', 'z', Y_DIM, Z_DIM),  # YZ slice
        (3, 0, 1, 2, 'x', 'y', X_DIM, Y_DIM),  # XY slice
    ]
    for axi, (plane_case, idx0, idx1, idx2, h_lab, v_lab, h_max, v_max) in zip(ax_list, panels):
        data_1, data_2, result = _gp_row_field(plane_case, center, gp_model, scaler, is_mag, X_DIM, Y_DIM, Z_DIM)
        plot_gp_model_single_plot(axi, is_mag, data_1, data_2, result, title)
        # overlay both cell outlines projected into this plane
        plot_cell(center[idx2], cell_mesh_1[:,idx0], cell_mesh_1[:,idx1], cell_mesh_1[:,idx2], (0.75,0.75,0.75), axi)
        plot_cell(center[idx2], cell_mesh_2[:,idx0], cell_mesh_2[:,idx1], cell_mesh_2[:,idx2], (0,0,0), axi)
        # dashed crosshairs through the slice center
        axi.plot([-1, h_max], [center[idx1], center[idx1]], 'k:', linewidth=1.0)
        axi.plot([center[idx0], center[idx0]], [-1, v_max], 'k:', linewidth=1.0)
        axi.set_xlabel(h_lab + r'-position $\mu m$')
        axi.set_ylabel(v_lab + r'-position $\mu m$')
        axi.set_xlim((-1, h_max))
        axi.set_ylim((-1, v_max))
# --> plot GPR model
def plot_gp_model(file_prefix_1,file_prefix_2,X_DIM,Y_DIM,Z_DIM,figtype_list,use_corrected_cell, root_directory):
    """Build a 4x3 grid of interpolated GP displacement fields.

    Rows are the x, y and z displacement components plus their magnitude;
    columns are the XZ, YZ and XY slices through the midpoint of the two
    cell centers. Saved as '/Interpolate_plot' in the track folder, one
    file per extension in figtype_list.
    """
    cell_mesh_1, cell_normal_1, cell_center_1, cell_vol_1, cell_mesh_2, cell_normal_2, cell_center_2, cell_vol_2 = import_cell_info(file_prefix_1,file_prefix_2,root_directory)
    # slice all panels through the midpoint between the two cell centers
    center = 0.5*cell_center_1 + 0.5*cell_center_2
    folder = root_directory + '/Track_' + file_prefix_1 + '_to_' + file_prefix_2
    # load the corrected mesh after import_cell_info so it is not clobbered
    if use_corrected_cell:
        cell_mesh_2 = np.loadtxt(folder + '/cell_mesh_2_corrected.txt')
    # load the pickled per-component GP models and their shared position scaler
    # NOTE(review): these open() handles are never closed explicitly
    gp_U = pickle.load(open(folder + '/gp_U.sav', 'rb'))
    gp_V = pickle.load(open(folder + '/gp_V.sav', 'rb'))
    gp_W = pickle.load(open(folder + '/gp_W.sav', 'rb'))
    scaler = pickle.load(open(folder + '/scaler.sav','rb'))
    fig = plt.figure()
    plt.style.use(stylepath)
    plt.rc('text', usetex=True)
    plt.rc('font', family='serif')
    fig.set_figheight(20)
    fig.set_figwidth(15)
    # row 1: x-displacement component
    ax1 = fig.add_subplot(4, 3, 1); ax2 = fig.add_subplot(4, 3, 2);ax3 = fig.add_subplot(4, 3, 3)
    ax_list1 = [ax1,ax2,ax3]
    title = r'x-displacement $\mu m$'
    is_mag = False
    plot_gp_model_one_row(ax_list1,is_mag,X_DIM,Y_DIM,Z_DIM,title,center,gp_U,scaler,cell_mesh_1,cell_mesh_2)
    # row 2: y-displacement component
    ax4 = fig.add_subplot(4, 3, 4); ax5 = fig.add_subplot(4, 3, 5); ax6 = fig.add_subplot(4, 3, 6)
    ax_list2 = [ax4,ax5,ax6]
    title = r'y-displacement $\mu m$'
    is_mag = False
    plot_gp_model_one_row(ax_list2,is_mag,X_DIM,Y_DIM,Z_DIM,title,center,gp_V,scaler,cell_mesh_1,cell_mesh_2)
    # row 3: z-displacement component
    ax7 = fig.add_subplot(4, 3, 7); ax8 = fig.add_subplot(4, 3, 8); ax9 = fig.add_subplot(4, 3, 9)
    ax_list3 = [ax7,ax8,ax9]
    title = r'z-displacement $\mu m$'
    is_mag = False
    plot_gp_model_one_row(ax_list3,is_mag,X_DIM,Y_DIM,Z_DIM,title,center,gp_W,scaler,cell_mesh_1,cell_mesh_2)
    # row 4: displacement magnitude, combining all three component models
    ax10 = fig.add_subplot(4, 3, 10); ax11 = fig.add_subplot(4, 3, 11); ax12 = fig.add_subplot(4, 3, 12)
    ax_list4 = [ax10,ax11,ax12]
    title = r'mag-displacement $\mu m$'
    is_mag = True
    plot_gp_model_one_row(ax_list4,is_mag,X_DIM,Y_DIM,Z_DIM,title,center,[gp_U, gp_V, gp_W],scaler,cell_mesh_1,cell_mesh_2)
    plt.tight_layout()
    for end in figtype_list:
        fname = folder + '/Interpolate_plot' + end
plt.savefig(fname) | [
"numpy.argsort",
"pyvista.Arrow",
"sklearn.gaussian_process.GaussianProcessRegressor",
"numpy.mean",
"pyvista.PolyData",
"numpy.sort",
"numpy.asarray",
"matplotlib.pyplot.style.use",
"sklearn.neighbors.KernelDensity",
"numpy.max",
"numpy.exp",
"numpy.linspace",
"numpy.vstack",
"numpy.min",... | [((1131, 1218), 'numpy.loadtxt', 'np.loadtxt', (["(root_directory + '/Gel_cell_coords/' + file_prefix_1 + '_cell_mesh.txt')"], {}), "(root_directory + '/Gel_cell_coords/' + file_prefix_1 +\n '_cell_mesh.txt')\n", (1141, 1218), True, 'import numpy as np\n'), ((1230, 1320), 'numpy.loadtxt', 'np.loadtxt', (["(root_directory + '/Gel_cell_coords/' + file_prefix_1 + '_cell_normals.txt')"], {}), "(root_directory + '/Gel_cell_coords/' + file_prefix_1 +\n '_cell_normals.txt')\n", (1240, 1320), True, 'import numpy as np\n'), ((1332, 1421), 'numpy.loadtxt', 'np.loadtxt', (["(root_directory + '/Gel_cell_coords/' + file_prefix_1 + '_cell_center.txt')"], {}), "(root_directory + '/Gel_cell_coords/' + file_prefix_1 +\n '_cell_center.txt')\n", (1342, 1421), True, 'import numpy as np\n'), ((1430, 1519), 'numpy.loadtxt', 'np.loadtxt', (["(root_directory + '/Gel_cell_coords/' + file_prefix_1 + '_cell_volume.txt')"], {}), "(root_directory + '/Gel_cell_coords/' + file_prefix_1 +\n '_cell_volume.txt')\n", (1440, 1519), True, 'import numpy as np\n'), ((1529, 1616), 'numpy.loadtxt', 'np.loadtxt', (["(root_directory + '/Gel_cell_coords/' + file_prefix_2 + '_cell_mesh.txt')"], {}), "(root_directory + '/Gel_cell_coords/' + file_prefix_2 +\n '_cell_mesh.txt')\n", (1539, 1616), True, 'import numpy as np\n'), ((1628, 1718), 'numpy.loadtxt', 'np.loadtxt', (["(root_directory + '/Gel_cell_coords/' + file_prefix_2 + '_cell_normals.txt')"], {}), "(root_directory + '/Gel_cell_coords/' + file_prefix_2 +\n '_cell_normals.txt')\n", (1638, 1718), True, 'import numpy as np\n'), ((1730, 1819), 'numpy.loadtxt', 'np.loadtxt', (["(root_directory + '/Gel_cell_coords/' + file_prefix_2 + '_cell_center.txt')"], {}), "(root_directory + '/Gel_cell_coords/' + file_prefix_2 +\n '_cell_center.txt')\n", (1740, 1819), True, 'import numpy as np\n'), ((1828, 1917), 'numpy.loadtxt', 'np.loadtxt', (["(root_directory + '/Gel_cell_coords/' + file_prefix_2 + '_cell_volume.txt')"], {}), "(root_directory + 
'/Gel_cell_coords/' + file_prefix_2 +\n '_cell_volume.txt')\n", (1838, 1917), True, 'import numpy as np\n'), ((2067, 2096), 'numpy.loadtxt', 'np.loadtxt', (["(folder + '/X.txt')"], {}), "(folder + '/X.txt')\n", (2077, 2096), True, 'import numpy as np\n'), ((2102, 2131), 'numpy.loadtxt', 'np.loadtxt', (["(folder + '/Y.txt')"], {}), "(folder + '/Y.txt')\n", (2112, 2131), True, 'import numpy as np\n'), ((2137, 2166), 'numpy.loadtxt', 'np.loadtxt', (["(folder + '/Z.txt')"], {}), "(folder + '/Z.txt')\n", (2147, 2166), True, 'import numpy as np\n'), ((2172, 2201), 'numpy.loadtxt', 'np.loadtxt', (["(folder + '/U.txt')"], {}), "(folder + '/U.txt')\n", (2182, 2201), True, 'import numpy as np\n'), ((2207, 2236), 'numpy.loadtxt', 'np.loadtxt', (["(folder + '/V.txt')"], {}), "(folder + '/V.txt')\n", (2217, 2236), True, 'import numpy as np\n'), ((2242, 2271), 'numpy.loadtxt', 'np.loadtxt', (["(folder + '/W.txt')"], {}), "(folder + '/W.txt')\n", (2252, 2271), True, 'import numpy as np\n'), ((5180, 5197), 'numpy.argsort', 'np.argsort', (['data1'], {}), '(data1)\n', (5190, 5197), True, 'import numpy as np\n'), ((5207, 5221), 'numpy.sort', 'np.sort', (['data1'], {}), '(data1)\n', (5214, 5221), True, 'import numpy as np\n'), ((5992, 6008), 'numpy.asarray', 'np.asarray', (['keep'], {}), '(keep)\n', (6002, 6008), True, 'import numpy as np\n'), ((6027, 6053), 'numpy.asarray', 'np.asarray', (['dist_from_cell'], {}), '(dist_from_cell)\n', (6037, 6053), True, 'import numpy as np\n'), ((6066, 6086), 'numpy.asarray', 'np.asarray', (['mag_list'], {}), '(mag_list)\n', (6076, 6086), True, 'import numpy as np\n'), ((7463, 7475), 'numpy.max', 'np.max', (['data'], {}), '(data)\n', (7469, 7475), True, 'import numpy as np\n'), ((7486, 7498), 'numpy.min', 'np.min', (['data'], {}), '(data)\n', (7492, 7498), True, 'import numpy as np\n'), ((8561, 8579), 'numpy.min', 'np.min', (['color_info'], {}), '(color_info)\n', (8567, 8579), True, 'import numpy as np\n'), ((8590, 8608), 'numpy.max', 'np.max', 
(['color_info'], {}), '(color_info)\n', (8596, 8608), True, 'import numpy as np\n'), ((9657, 9681), 'numpy.zeros', 'np.zeros', (['(num_beads, 3)'], {}), '((num_beads, 3))\n', (9665, 9681), True, 'import numpy as np\n'), ((9731, 9755), 'numpy.zeros', 'np.zeros', (['(num_beads, 3)'], {}), '((num_beads, 3))\n', (9739, 9755), True, 'import numpy as np\n'), ((12100, 12121), 'pyvista.PolyData', 'pyvista.PolyData', (['XYZ'], {}), '(XYZ)\n', (12116, 12121), False, 'import pyvista\n'), ((12219, 12234), 'pyvista.Arrow', 'pyvista.Arrow', ([], {}), '()\n', (12232, 12234), False, 'import pyvista\n'), ((12330, 12357), 'pyvista.PolyData', 'pyvista.PolyData', (['cell_init'], {}), '(cell_init)\n', (12346, 12357), False, 'import pyvista\n'), ((12372, 12400), 'pyvista.PolyData', 'pyvista.PolyData', (['cell_final'], {}), '(cell_final)\n', (12388, 12400), False, 'import pyvista\n'), ((13904, 13916), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (13914, 13916), True, 'import matplotlib.pyplot as plt\n'), ((13918, 13942), 'matplotlib.pyplot.style.use', 'plt.style.use', (['stylepath'], {}), '(stylepath)\n', (13931, 13942), True, 'import matplotlib.pyplot as plt\n'), ((13944, 13971), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (13950, 13971), True, 'import matplotlib.pyplot as plt\n'), ((13973, 14003), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'family': '"""serif"""'}), "('font', family='serif')\n", (13979, 14003), True, 'import matplotlib.pyplot as plt\n'), ((14309, 14327), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (14325, 14327), True, 'import matplotlib.pyplot as plt\n'), ((14518, 14530), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (14528, 14530), True, 'import matplotlib.pyplot as plt\n'), ((14532, 14556), 'matplotlib.pyplot.style.use', 'plt.style.use', (['stylepath'], {}), '(stylepath)\n', (14545, 14556), True, 'import matplotlib.pyplot as plt\n'), ((14558, 
14585), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (14564, 14585), True, 'import matplotlib.pyplot as plt\n'), ((14587, 14617), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'family': '"""serif"""'}), "('font', family='serif')\n", (14593, 14617), True, 'import matplotlib.pyplot as plt\n'), ((14700, 14723), 'numpy.asarray', 'np.asarray', (['neigh_score'], {}), '(neigh_score)\n', (14710, 14723), True, 'import numpy as np\n'), ((14862, 14883), 'numpy.asarray', 'np.asarray', (['dir_score'], {}), '(dir_score)\n', (14872, 14883), True, 'import numpy as np\n'), ((14989, 15007), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (15005, 15007), True, 'import matplotlib.pyplot as plt\n'), ((15262, 15274), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (15272, 15274), True, 'import matplotlib.pyplot as plt\n'), ((15276, 15300), 'matplotlib.pyplot.style.use', 'plt.style.use', (['stylepath'], {}), '(stylepath)\n', (15289, 15300), True, 'import matplotlib.pyplot as plt\n'), ((15302, 15329), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (15308, 15329), True, 'import matplotlib.pyplot as plt\n'), ((15331, 15361), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'family': '"""serif"""'}), "('font', family='serif')\n", (15337, 15361), True, 'import matplotlib.pyplot as plt\n'), ((16074, 16092), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (16090, 16092), True, 'import matplotlib.pyplot as plt\n'), ((16314, 16326), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (16324, 16326), True, 'import matplotlib.pyplot as plt\n'), ((16328, 16352), 'matplotlib.pyplot.style.use', 'plt.style.use', (['stylepath'], {}), '(stylepath)\n', (16341, 16352), True, 'import matplotlib.pyplot as plt\n'), ((16354, 16381), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', 
usetex=True)\n", (16360, 16381), True, 'import matplotlib.pyplot as plt\n'), ((16383, 16413), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'family': '"""serif"""'}), "('font', family='serif')\n", (16389, 16413), True, 'import matplotlib.pyplot as plt\n'), ((16562, 16580), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (16578, 16580), True, 'import matplotlib.pyplot as plt\n'), ((16951, 16963), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (16961, 16963), True, 'import matplotlib.pyplot as plt\n'), ((16965, 16989), 'matplotlib.pyplot.style.use', 'plt.style.use', (['stylepath'], {}), '(stylepath)\n', (16978, 16989), True, 'import matplotlib.pyplot as plt\n'), ((16991, 17018), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (16997, 17018), True, 'import matplotlib.pyplot as plt\n'), ((17020, 17050), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'family': '"""serif"""'}), "('font', family='serif')\n", (17026, 17050), True, 'import matplotlib.pyplot as plt\n'), ((17134, 17155), 'numpy.asarray', 'np.asarray', (['dir_score'], {}), '(dir_score)\n', (17144, 17155), True, 'import numpy as np\n'), ((17922, 17945), 'numpy.asarray', 'np.asarray', (['neigh_score'], {}), '(neigh_score)\n', (17932, 17945), True, 'import numpy as np\n'), ((18411, 18429), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (18427, 18429), True, 'import matplotlib.pyplot as plt\n'), ((21057, 21079), 'numpy.zeros', 'np.zeros', (['(num_pts, 3)'], {}), '((num_pts, 3))\n', (21065, 21079), True, 'import numpy as np\n'), ((21275, 21294), 'sklearn.gaussian_process.kernels.RationalQuadratic', 'RationalQuadratic', ([], {}), '()\n', (21292, 21294), False, 'from sklearn.gaussian_process.kernels import RBF, Matern, RationalQuadratic, ExpSineSquared, DotProduct, ConstantKernel, WhiteKernel\n'), ((21301, 21340), 'sklearn.gaussian_process.GaussianProcessRegressor', 'GaussianProcessRegressor', 
([], {'kernel': 'kernel'}), '(kernel=kernel)\n', (21325, 21340), False, 'from sklearn.gaussian_process import GaussianProcessRegressor\n'), ((23552, 23577), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['CS1'], {'ax': 'axi'}), '(CS1, ax=axi)\n', (23564, 23577), True, 'import matplotlib.pyplot as plt\n'), ((27950, 27962), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (27960, 27962), True, 'import matplotlib.pyplot as plt\n'), ((27964, 27988), 'matplotlib.pyplot.style.use', 'plt.style.use', (['stylepath'], {}), '(stylepath)\n', (27977, 27988), True, 'import matplotlib.pyplot as plt\n'), ((27990, 28017), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (27996, 28017), True, 'import matplotlib.pyplot as plt\n'), ((28019, 28049), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'family': '"""serif"""'}), "('font', family='serif')\n", (28025, 28049), True, 'import matplotlib.pyplot as plt\n'), ((29253, 29271), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (29269, 29271), True, 'import matplotlib.pyplot as plt\n'), ((793, 826), 'os.path.abspath', 'os.path.abspath', (['fmtrack.__file__'], {}), '(fmtrack.__file__)\n', (808, 826), False, 'import os\n'), ((3247, 3267), 'numpy.argsort', 'np.argsort', (['dist_all'], {}), '(dist_all)\n', (3257, 3267), True, 'import numpy as np\n'), ((3283, 3302), 'numpy.zeros', 'np.zeros', (['num_neigh'], {}), '(num_neigh)\n', (3291, 3302), True, 'import numpy as np\n'), ((4336, 4355), 'numpy.argmin', 'np.argmin', (['dist_all'], {}), '(dist_all)\n', (4345, 4355), True, 'import numpy as np\n'), ((4953, 4985), 'numpy.min', 'np.min', (['[x_edge, y_edge, z_edge]'], {}), '([x_edge, y_edge, z_edge])\n', (4959, 4985), True, 'import numpy as np\n'), ((7522, 7538), 'numpy.exp', 'np.exp', (['log_dens'], {}), '(log_dens)\n', (7528, 7538), True, 'import numpy as np\n'), ((12431, 12448), 'pyvista.Plotter', 'pyvista.Plotter', ([], {}), '()\n', (12446, 12448), 
False, 'import pyvista\n'), ((12505, 12533), 'matplotlib.pyplot.cm.get_cmap', 'plt.cm.get_cmap', (['"""viridis_r"""'], {}), "('viridis_r')\n", (12520, 12533), True, 'import matplotlib.pyplot as plt\n'), ((14398, 14416), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fname'], {}), '(fname)\n', (14409, 14416), True, 'import matplotlib.pyplot as plt\n'), ((15076, 15094), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fname'], {}), '(fname)\n', (15087, 15094), True, 'import matplotlib.pyplot as plt\n'), ((16165, 16183), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fname'], {}), '(fname)\n', (16176, 16183), True, 'import matplotlib.pyplot as plt\n'), ((16651, 16669), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fname'], {}), '(fname)\n', (16662, 16669), True, 'import matplotlib.pyplot as plt\n'), ((18499, 18517), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fname'], {}), '(fname)\n', (18510, 18517), True, 'import matplotlib.pyplot as plt\n'), ((18660, 18678), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fname'], {}), '(fname)\n', (18671, 18678), True, 'import matplotlib.pyplot as plt\n'), ((19007, 19056), 'numpy.loadtxt', 'np.loadtxt', (["(folder + '/cell_mesh_2_corrected.txt')"], {}), "(folder + '/cell_mesh_2_corrected.txt')\n", (19017, 19056), True, 'import numpy as np\n'), ((22246, 22281), 'numpy.linspace', 'np.linspace', (['y_min', 'y_max', 'grid_pts'], {}), '(y_min, y_max, grid_pts)\n', (22257, 22281), True, 'import numpy as np\n'), ((22286, 22321), 'numpy.linspace', 'np.linspace', (['z_min', 'z_max', 'grid_pts'], {}), '(z_min, z_max, grid_pts)\n', (22297, 22321), True, 'import numpy as np\n'), ((22329, 22346), 'numpy.meshgrid', 'np.meshgrid', (['y', 'z'], {}), '(y, z)\n', (22340, 22346), True, 'import numpy as np\n'), ((22393, 22423), 'numpy.zeros', 'np.zeros', (['(grid_pts, grid_pts)'], {}), '((grid_pts, grid_pts))\n', (22401, 22423), True, 'import numpy as np\n'), ((23045, 23062), 'numpy.asarray', 'np.asarray', (['input'], {}), '(input)\n', (23055, 
23062), True, 'import numpy as np\n'), ((27669, 27718), 'numpy.loadtxt', 'np.loadtxt', (["(folder + '/cell_mesh_2_corrected.txt')"], {}), "(folder + '/cell_mesh_2_corrected.txt')\n", (27679, 27718), True, 'import numpy as np\n'), ((29345, 29363), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fname'], {}), '(fname)\n', (29356, 29363), True, 'import matplotlib.pyplot as plt\n'), ((3507, 3526), 'numpy.mean', 'np.mean', (['score_dist'], {}), '(score_dist)\n', (3514, 3526), True, 'import numpy as np\n'), ((3905, 3929), 'numpy.min', 'np.min', (['[num_pts, 10000]'], {}), '([num_pts, 10000])\n', (3911, 3929), True, 'import numpy as np\n'), ((5449, 5461), 'numpy.mean', 'np.mean', (['arr'], {}), '(arr)\n', (5456, 5461), True, 'import numpy as np\n'), ((7265, 7312), 'sklearn.neighbors.KernelDensity', 'KernelDensity', ([], {'kernel': '"""gaussian"""', 'bandwidth': '(0.1)'}), "(kernel='gaussian', bandwidth=0.1)\n", (7278, 7312), False, 'from sklearn.neighbors import KernelDensity\n'), ((7911, 7931), 'numpy.exp', 'np.exp', (['log_dens[kk]'], {}), '(log_dens[kk])\n', (7917, 7931), True, 'import numpy as np\n'), ((12015, 12035), 'numpy.vstack', 'np.vstack', (['(X, Y, Z)'], {}), '((X, Y, Z))\n', (12024, 12035), True, 'import numpy as np\n'), ((12053, 12073), 'numpy.vstack', 'np.vstack', (['(U, V, W)'], {}), '((U, V, W))\n', (12062, 12073), True, 'import numpy as np\n'), ((12822, 12863), 'os.path.join', 'os.path.join', (['foldername', '"""cell_init.vtk"""'], {}), "(foldername, 'cell_init.vtk')\n", (12834, 12863), False, 'import os\n'), ((12882, 12924), 'os.path.join', 'os.path.join', (['foldername', '"""cell_final.vtk"""'], {}), "(foldername, 'cell_final.vtk')\n", (12894, 12924), False, 'import os\n'), ((12939, 12977), 'os.path.join', 'os.path.join', (['foldername', '"""arrows.vtk"""'], {}), "(foldername, 'arrows.vtk')\n", (12951, 12977), False, 'import os\n'), ((21168, 21198), 'sklearn.preprocessing.StandardScaler', 'preprocessing.StandardScaler', ([], {}), '()\n', (21196, 21198), 
False, 'from sklearn import preprocessing\n'), ((22356, 22385), 'numpy.ones', 'np.ones', (['(grid_pts, grid_pts)'], {}), '((grid_pts, grid_pts))\n', (22363, 22385), True, 'import numpy as np\n'), ((22461, 22496), 'numpy.linspace', 'np.linspace', (['x_min', 'x_max', 'grid_pts'], {}), '(x_min, x_max, grid_pts)\n', (22472, 22496), True, 'import numpy as np\n'), ((22517, 22552), 'numpy.linspace', 'np.linspace', (['z_min', 'z_max', 'grid_pts'], {}), '(z_min, z_max, grid_pts)\n', (22528, 22552), True, 'import numpy as np\n'), ((22560, 22577), 'numpy.meshgrid', 'np.meshgrid', (['x', 'z'], {}), '(x, z)\n', (22571, 22577), True, 'import numpy as np\n'), ((22625, 22655), 'numpy.zeros', 'np.zeros', (['(grid_pts, grid_pts)'], {}), '((grid_pts, grid_pts))\n', (22633, 22655), True, 'import numpy as np\n'), ((4822, 4831), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (4828, 4831), True, 'import numpy as np\n'), ((4832, 4849), 'numpy.abs', 'np.abs', (['(X_DIM - x)'], {}), '(X_DIM - x)\n', (4838, 4849), True, 'import numpy as np\n'), ((4869, 4878), 'numpy.abs', 'np.abs', (['y'], {}), '(y)\n', (4875, 4878), True, 'import numpy as np\n'), ((4879, 4896), 'numpy.abs', 'np.abs', (['(Y_DIM - y)'], {}), '(Y_DIM - y)\n', (4885, 4896), True, 'import numpy as np\n'), ((4916, 4925), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (4922, 4925), True, 'import numpy as np\n'), ((4926, 4943), 'numpy.abs', 'np.abs', (['(Z_DIM - z)'], {}), '(Z_DIM - z)\n', (4932, 4943), True, 'import numpy as np\n'), ((7184, 7196), 'numpy.min', 'np.min', (['data'], {}), '(data)\n', (7190, 7196), True, 'import numpy as np\n'), ((7197, 7209), 'numpy.max', 'np.max', (['data'], {}), '(data)\n', (7203, 7209), True, 'import numpy as np\n'), ((22588, 22617), 'numpy.ones', 'np.ones', (['(grid_pts, grid_pts)'], {}), '((grid_pts, grid_pts))\n', (22595, 22617), True, 'import numpy as np\n'), ((22693, 22728), 'numpy.linspace', 'np.linspace', (['x_min', 'x_max', 'grid_pts'], {}), '(x_min, x_max, grid_pts)\n', (22704, 22728), True, 
'import numpy as np\n'), ((22733, 22768), 'numpy.linspace', 'np.linspace', (['y_min', 'y_max', 'grid_pts'], {}), '(y_min, y_max, grid_pts)\n', (22744, 22768), True, 'import numpy as np\n'), ((22792, 22809), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (22803, 22809), True, 'import numpy as np\n'), ((22857, 22887), 'numpy.zeros', 'np.zeros', (['(grid_pts, grid_pts)'], {}), '((grid_pts, grid_pts))\n', (22865, 22887), True, 'import numpy as np\n'), ((22820, 22849), 'numpy.ones', 'np.ones', (['(grid_pts, grid_pts)'], {}), '((grid_pts, grid_pts))\n', (22827, 22849), True, 'import numpy as np\n'), ((7809, 7819), 'numpy.abs', 'np.abs', (['ci'], {}), '(ci)\n', (7815, 7819), True, 'import numpy as np\n'), ((7861, 7871), 'numpy.abs', 'np.abs', (['ci'], {}), '(ci)\n', (7867, 7871), True, 'import numpy as np\n'), ((7873, 7883), 'numpy.abs', 'np.abs', (['ci'], {}), '(ci)\n', (7879, 7883), True, 'import numpy as np\n'), ((7826, 7836), 'numpy.abs', 'np.abs', (['ci'], {}), '(ci)\n', (7832, 7836), True, 'import numpy as np\n'), ((9205, 9215), 'numpy.abs', 'np.abs', (['ci'], {}), '(ci)\n', (9211, 9215), True, 'import numpy as np\n'), ((9259, 9269), 'numpy.abs', 'np.abs', (['ci'], {}), '(ci)\n', (9265, 9269), True, 'import numpy as np\n'), ((9271, 9281), 'numpy.abs', 'np.abs', (['ci'], {}), '(ci)\n', (9277, 9281), True, 'import numpy as np\n'), ((9222, 9232), 'numpy.abs', 'np.abs', (['ci'], {}), '(ci)\n', (9228, 9232), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from lxml import etree as ElementTree
from odoo.http import Controller, route, request
class Board(Controller):
@route('/board/add_to_dashboard', type='json', auth='user')
def add_to_dashboard(self, action_id, context_to_save, domain, view_mode, name=''):
# Retrieve the 'My Dashboard' action from its xmlid
action = request.env.ref('board.open_board_my_dash_action')
if action and action['res_model'] == 'board.board' and action['views'][0][1] == 'form' and action_id:
# Maybe should check the content instead of model board.board ?
view_id = action['views'][0][0]
board = request.env['board.board'].fields_view_get(view_id, 'form')
if board and 'arch' in board:
xml = ElementTree.fromstring(board['arch'])
column = xml.find('./board/column')
if column is not None:
new_action = ElementTree.Element('action', {
'name': str(action_id),
'string': name,
'view_mode': view_mode,
'context': str(context_to_save),
'domain': str(domain)
})
column.insert(0, new_action)
arch = ElementTree.tostring(xml, encoding='unicode')
request.env['ir.ui.view.custom'].create({
'user_id': request.session.uid,
'ref_id': view_id,
'arch': arch
})
return True
return False
| [
"lxml.etree.tostring",
"odoo.http.request.env.ref",
"lxml.etree.fromstring",
"odoo.http.route"
] | [((221, 279), 'odoo.http.route', 'route', (['"""/board/add_to_dashboard"""'], {'type': '"""json"""', 'auth': '"""user"""'}), "('/board/add_to_dashboard', type='json', auth='user')\n", (226, 279), False, 'from odoo.http import Controller, route, request\n'), ((445, 495), 'odoo.http.request.env.ref', 'request.env.ref', (['"""board.open_board_my_dash_action"""'], {}), "('board.open_board_my_dash_action')\n", (460, 495), False, 'from odoo.http import Controller, route, request\n'), ((871, 908), 'lxml.etree.fromstring', 'ElementTree.fromstring', (["board['arch']"], {}), "(board['arch'])\n", (893, 908), True, 'from lxml import etree as ElementTree\n'), ((1403, 1448), 'lxml.etree.tostring', 'ElementTree.tostring', (['xml'], {'encoding': '"""unicode"""'}), "(xml, encoding='unicode')\n", (1423, 1448), True, 'from lxml import etree as ElementTree\n')] |
import defectio
client = defectio.Client()
@client.event
async def on_ready():
print("We have logged in.")
@client.event
async def on_message(message: defectio.Message):
if message.author == client.user:
return
if message.content.startswith("$hello"):
await message.channel.send("Hello!")
client.run(
session_token="session_token",
user_id="user_id",
)
| [
"defectio.Client"
] | [((26, 43), 'defectio.Client', 'defectio.Client', ([], {}), '()\n', (41, 43), False, 'import defectio\n')] |